diff --git a/.github/actions/awx_devel_image/action.yml b/.github/actions/awx_devel_image/action.yml
index 354279f2c3..04ac1a2d1f 100644
--- a/.github/actions/awx_devel_image/action.yml
+++ b/.github/actions/awx_devel_image/action.yml
@@ -24,9 +24,31 @@ runs:
       run: |
         echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
 
+<<<<<<< HEAD
     - uses: ./.github/actions/setup-ssh-agent
       with:
        ssh-private-key: ${{ inputs.private-github-key }}
+=======
+    - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
+      id: generate_key
+      shell: bash
+      run: |
+        if [[ -z "${{ inputs.private-github-key }}" ]]; then
+          ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
+          echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+          cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+        else
+          echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+          echo "${{ inputs.private-github-key }}" >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+        fi
+
+    - name: Add private GitHub key to SSH agent
+      uses: webfactory/ssh-agent@v0.9.0
+      with:
+        ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
+>>>>>>> tower/test_stable-2.6
 
     - name: Pre-pull latest devel image to warm cache
       shell: bash
diff --git a/.github/actions/run_awx_devel/action.yml b/.github/actions/run_awx_devel/action.yml
index 723e247783..352bb95621 100644
--- a/.github/actions/run_awx_devel/action.yml
+++ b/.github/actions/run_awx_devel/action.yml
@@ -36,7 +36,7 @@ runs:
 
     - name: Upgrade ansible-core
      shell: bash
-      run: python3 -m pip install --upgrade ansible-core
+      run: python3 -m pip install --upgrade 'ansible-core<2.18.0'
 
     - name: Install system deps
       shell: bash
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 94f1e41aa0..687715c1aa 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -39,12 +39,16 @@ jobs:
             command: /start_tests.sh test_collection_all
             coverage-upload-name: "awx-collection"
           - name: api-schema
+<<<<<<< HEAD
             command: >-
              /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{
              github.event.pull_request.base.ref || github.ref_name }}
             coverage-upload-name: ""
+=======
+            command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
+>>>>>>> tower/test_stable-2.6
     steps:
       - uses: actions/checkout@v4
         with:
           show-progress: false
@@ -130,9 +134,15 @@
         with:
           show-progress: false
 
+<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
         with:
           python-version: '3.x'
+=======
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+>>>>>>> tower/test_stable-2.6
 
       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -143,11 +153,14 @@
 
      - name: Run live dev env tests
        run: docker exec tools_awx_1 /bin/bash -c "make live_test"
 
+<<<<<<< HEAD
      - uses: ./.github/actions/upload_awx_devel_logs
        if: always()
        with:
          log-filename: live-tests.log
+=======
+>>>>>>> tower/test_stable-2.6
 
  awx-operator:
    runs-on: ubuntu-latest
@@ -180,6 +193,26 @@
        run: |
          python3 -m pip install docker
 
+      - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
+        id: generate_key
+        shell: bash
+        run: |
+          if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
+            ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
+            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+            cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
+            echo "EOF" >> $GITHUB_OUTPUT
+          else
+            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+            echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
+            echo "EOF" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Add private GitHub key to SSH agent
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
+
       - name: Build AWX image
         working-directory: awx
         run: |
@@ -278,9 +311,15 @@
         with:
           show-progress: false
 
+<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
         with:
           python-version: '3.x'
+=======
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+>>>>>>> tower/test_stable-2.6
 
       - uses: ./.github/actions/run_awx_devel
         id: awx
@@ -356,12 +395,18 @@
           persist-credentials: false
           show-progress: false
 
+<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
         with:
           python-version: '3.x'
+=======
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.12'
+>>>>>>> tower/test_stable-2.6
 
       - name: Upgrade ansible-core
-        run: python3 -m pip install --upgrade ansible-core
+        run: python3 -m pip install --upgrade "ansible-core<2.19"
 
       - name: Download coverage artifacts
         uses: actions/download-artifact@v4
diff --git a/.github/workflows/devel_images.yml b/.github/workflows/devel_images.yml
index 069d2c8451..0f5a13c47b 100644
--- a/.github/workflows/devel_images.yml
+++ b/.github/workflows/devel_images.yml
@@ -10,6 +10,7 @@ on:
       - devel
       - release_*
       - feature_*
+      - stable-*
 jobs:
   push-development-images:
     runs-on: ubuntu-latest
@@ -69,9 +70,31 @@
           make ui
         if: matrix.build-targets.image-name == 'awx'
 
+<<<<<<< HEAD
      - uses: ./.github/actions/setup-ssh-agent
        with:
          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
+=======
+      - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
+        id: generate_key
+        shell: bash
+        run: |
+          if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
+            ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
+            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+            cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
+            echo "EOF" >> $GITHUB_OUTPUT
+          else
+            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+            echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
+            echo "EOF" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Add private GitHub key to SSH agent
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
+>>>>>>> tower/test_stable-2.6
 
       - name: Build and push AWX devel images
         run: |
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index ec6c9f4a4f..f075e4bf18 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -12,7 +12,11 @@ jobs:
         with:
           show-progress: false
 
+<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
+=======
+      - uses: actions/setup-python@v5
+>>>>>>> tower/test_stable-2.6
         with:
           python-version: '3.x'
 
diff --git a/.github/workflows/feature_branch_sync.yml b/.github/workflows/feature_branch_sync.yml
new file mode 100644
index 0000000000..87521f141c
--- /dev/null
+++ b/.github/workflows/feature_branch_sync.yml
@@ -0,0 +1,35 @@
+name: Rebase release_4.6-next and stable-2.6
+
+on:
+  push:
+    branches:
+      - release_4.6
+  workflow_dispatch:
+    # Allows manual triggering of the workflow from the GitHub UI
+
+jobs:
+  rebase:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+    steps:
+      - name: Checkout stable-2.6 branch
+        uses: actions/checkout@v4
+        with:
+          ref: stable-2.6
+          token: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Fetch release_4.6 branch for rebase
+        run: git fetch origin release_4.6:release_4.6
+
+      - name: Attempt Rebase release_4.6 into stable-2.6
+        id: rebase_attempt
+        run: |
+          git config user.name "GitHub Actions"
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git checkout stable-2.6
+          git rebase release_4.6
+
+      - name: Force Push Rebased stable-2.6 Branch
+        run: |
+          git push --force origin stable-2.6
diff --git a/.github/workflows/label_pr.yml b/.github/workflows/label_pr.yml
index 43f1e3a291..a3255c8d0b 100644
--- a/.github/workflows/label_pr.yml
+++ b/.github/workflows/label_pr.yml
@@ -33,7 +33,11 @@ jobs:
         with:
           show-progress: false
 
+<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
+=======
+      - uses: actions/setup-python@v5
+>>>>>>> tower/test_stable-2.6
         with:
           python-version: '3.x'
 
diff --git a/.github/workflows/upload_schema.yml b/.github/workflows/upload_schema.yml
index 73eef5d646..639c68034f 100644
--- a/.github/workflows/upload_schema.yml
+++ b/.github/workflows/upload_schema.yml
@@ -11,6 +11,7 @@ on:
       - devel
       - release_**
       - feature_**
+      - stable-**
 jobs:
   push:
     runs-on: ubuntu-latest
@@ -23,28 +24,76 @@
         with:
           show-progress: false
 
+<<<<<<< HEAD
      - uses: ./.github/actions/setup-python
+=======
+      - name: Set lower case owner name
+        shell: bash
+        run: echo "OWNER_LC=${OWNER,,}" >> $GITHUB_ENV
+        env:
+          OWNER: '${{ github.repository_owner }}'
+
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ env.py_version }}
+>>>>>>> tower/test_stable-2.6
 
       - name: Log in to registry
         run: |
           echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
 
+<<<<<<< HEAD
      - uses: ./.github/actions/setup-ssh-agent
        with:
          ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
 
      - name: Pre-pull image to warm build cache
+=======
+      - name: Pre-pull latest devel image to warm cache
+        shell: bash
+>>>>>>> tower/test_stable-2.6
         run: |
-          docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+          COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
+          docker pull -q `make print-DEVEL_IMAGE_NAME`
+        continue-on-error: true
+
+      - name: Generate placeholder SSH private key if SSH auth for private repos is not needed
+        id: generate_key
+        shell: bash
+        run: |
+          if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
+            ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
+            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+            cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
+            echo "EOF" >> $GITHUB_OUTPUT
+          else
+            echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
+            echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
+            echo "EOF" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Add private GitHub key to SSH agent
+        uses: webfactory/ssh-agent@v0.9.0
+        with:
+          ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
 
       - name: Build image
         run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+          COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
+          make docker-compose-build
 
       - name: Generate API Schema
         run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
+          COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
           docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
-            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
+            --workdir=/awx_devel `make print-DEVEL_IMAGE_NAME` /start_tests.sh genschema
 
       - name: Upload API Schema
         env:
diff --git a/.gitignore b/.gitignore
index 4ea996b190..1351050ef7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -122,6 +122,7 @@ reports
local/ *.mo requirements/vendor +requirements/requirements_git.credentials.txt .i18n_built .idea/* *credentials*.y*ml* diff --git a/MANIFEST.in b/MANIFEST.in index b304344c7f..07a75cd25c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -28,3 +28,4 @@ include COPYING include Makefile prune awx/public prune awx/projects +prune requirements/requirements_git.credentials.txt diff --git a/Makefile b/Makefile index 45f7fe2456..aa01487ebc 100644 --- a/Makefile +++ b/Makefile @@ -77,7 +77,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio # These should be upgraded in the AWX and Ansible venv before attempting # to install the actual requirements -VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==70.3.0 setuptools_scm[toml]==8.1.0 wheel==0.45.1 cython==3.0.11 +VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==80.9.0 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==3.1.3 NAME ?= awx @@ -378,7 +378,7 @@ test_collection: if [ "$(VENV_BASE)" ]; then \ . $(VENV_BASE)/awx/bin/activate; \ fi && \ - if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi + if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install "ansible-core<2.19"; fi ansible --version py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v @if [ "${GITHUB_ACTIONS}" = "true" ]; \ @@ -417,7 +417,7 @@ install_collection: build_collection test_collection_sanity: rm -rf awx_collection_build/ rm -rf $(COLLECTION_INSTALL) - if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi + if ! [ -x "$(shell command -v ansible-test)" ]; then pip install "ansible-core<2.19"; fi ansible --version COLLECTION_VERSION=1.0.0 $(MAKE) install_collection cd $(COLLECTION_INSTALL) && \ diff --git a/awx/api/generics.py b/awx/api/generics.py index 207799b27d..3a8815a306 100644 --- a/awx/api/generics.py +++ b/awx/api/generics.py @@ -162,9 +162,9 @@ def get_view_description(view, html=False): def get_default_schema(): if settings.DYNACONF.is_development_mode: - from awx.api.swagger import schema_view + from awx.api.swagger import AutoSchema - return schema_view + return AutoSchema() else: return views.APIView.schema @@ -844,7 +844,7 @@ class ResourceAccessList(ParentMixin, ListAPIView): if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED: ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True)) qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True) - auditor_role = RoleDefinition.objects.filter(name="Controller System Auditor").first() + auditor_role = RoleDefinition.objects.filter(name="Platform Auditor").first() if auditor_role: qs |= User.objects.filter(role_assignments__role_definition=auditor_role) return qs.distinct() diff --git a/awx/api/permissions.py b/awx/api/permissions.py index ff7a030c72..dcf6028579 100644 --- a/awx/api/permissions.py +++ b/awx/api/permissions.py @@ -234,6 +234,13 @@ class UserPermission(ModelAccessPermission): raise PermissionDenied() +class IsSystemAdmin(permissions.BasePermission): + def has_permission(self, request, view): + if not (request.user and request.user.is_authenticated): + return False + return request.user.is_superuser + + class IsSystemAdminOrAuditor(permissions.BasePermission): """ Allows write access only to system admin users. 
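
For context on the IsSystemAdmin permission class added above: it is a standard DRF permission that first rejects unauthenticated requests and then gates on request.user.is_superuser, with no auditor carve-out. A minimal sketch of how a view would consume it follows; the view name is hypothetical, while the real consumer in this changeset is InstanceInstallBundle in awx/api/views/instance_install_bundle.py, which switches from IsSystemAdminOrAuditor to IsSystemAdmin so that system auditors can no longer fetch install bundles.

    # Hypothetical example view, for illustration only; not part of this diff.
    from awx.api.generics import GenericAPIView, Response
    from awx.api.permissions import IsSystemAdmin

    class ExampleSuperuserOnlyView(GenericAPIView):
        # Superusers pass; anonymous users, auditors, and ordinary users are rejected.
        permission_classes = (IsSystemAdmin,)

        def get(self, request, *args, **kwargs):
            return Response({'detail': 'superusers only'})
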
diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index 919aefe992..659766cbb5 100644
--- a/awx/api/serializers.py
+++ b/awx/api/serializers.py
@@ -2839,7 +2839,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
             {
                 "role": {
                     "id": None,
-                    "name": _("Controller System Auditor"),
+                    "name": _("Platform Auditor"),
                     "description": _("Can view all aspects of the system"),
                     "user_capabilities": {"unattach": False},
                 },
@@ -5998,7 +5998,7 @@ class InstanceGroupSerializer(BaseSerializer):
         if self.instance and not self.instance.is_container_group:
             raise serializers.ValidationError(_('pod_spec_override is only valid for container groups'))
 
-        pod_spec_override_json = None
+        pod_spec_override_json = {}
         # detect if the value is yaml or json, if yaml convert to json
         try:
             # convert yaml to json
diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py
index 814ac86a0e..00b8311f43 100644
--- a/awx/api/views/__init__.py
+++ b/awx/api/views/__init__.py
@@ -55,7 +55,7 @@ from wsgiref.util import FileWrapper
 
 # django-ansible-base
 from ansible_base.lib.utils.requests import get_remote_hosts
-from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.rbac.models import RoleEvaluation
 from ansible_base.rbac import permission_registry
 
 # AWX
@@ -85,7 +85,6 @@ from awx.api.generics import (
 )
 from awx.api.views.labels import LabelSubListCreateAttachDetachView
 from awx.api.versioning import reverse
 from awx.main import models
-from awx.main.models.rbac import get_role_definition
 from awx.main.utils import (
     camelcase_to_underscore,
     extract_ansible_vars,
@@ -751,17 +750,9 @@ class TeamProjectsList(SubListAPIView):
     def get_queryset(self):
         team = self.get_parent_object()
         self.check_parent_access(team)
-        model_ct = permission_registry.content_type_model.objects.get_for_model(self.model)
-        parent_ct = permission_registry.content_type_model.objects.get_for_model(self.parent_model)
-
-        rd = get_role_definition(team.member_role)
-        role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
-        if role is None:
-            # Team has no permissions, therefore team has no projects
-            return self.model.objects.none()
-        else:
-            project_qs = self.model.accessible_objects(self.request.user, 'read_role')
-            return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))
+        my_qs = self.model.accessible_objects(self.request.user, 'read_role')
+        team_qs = models.Project.accessible_objects(team, 'read_role')
+        return my_qs & team_qs
 
 
 class TeamActivityStreamList(SubListAPIView):
@@ -876,13 +867,23 @@ class ProjectTeamsList(ListAPIView):
     serializer_class = serializers.TeamSerializer
 
     def get_queryset(self):
-        p = get_object_or_404(models.Project, pk=self.kwargs['pk'])
-        if not self.request.user.can_access(models.Project, 'read', p):
+        parent = get_object_or_404(models.Project, pk=self.kwargs['pk'])
+        if not self.request.user.can_access(models.Project, 'read', parent):
             raise PermissionDenied()
-        project_ct = ContentType.objects.get_for_model(models.Project)
+
+        project_ct = ContentType.objects.get_for_model(parent)
         team_ct = ContentType.objects.get_for_model(self.model)
-        all_roles = models.Role.objects.filter(Q(descendents__content_type=project_ct) & Q(descendents__object_id=p.pk), content_type=team_ct)
-        return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in all_roles])
+
+        roles_on_project = models.Role.objects.filter(
content_type=project_ct, + object_id=parent.pk, + ) + + team_member_parent_roles = models.Role.objects.filter(children__in=roles_on_project, role_field='member_role', content_type=team_ct).distinct() + + team_ids = team_member_parent_roles.values_list('object_id', flat=True) + my_qs = self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=team_ids) + return my_qs class ProjectSchedulesList(SubListCreateAPIView): diff --git a/awx/api/views/instance_install_bundle.py b/awx/api/views/instance_install_bundle.py index 6e4d802ed0..e6a0fb98c8 100644 --- a/awx/api/views/instance_install_bundle.py +++ b/awx/api/views/instance_install_bundle.py @@ -12,7 +12,7 @@ import re import asn1 from awx.api import serializers from awx.api.generics import GenericAPIView, Response -from awx.api.permissions import IsSystemAdminOrAuditor +from awx.api.permissions import IsSystemAdmin from awx.main import models from cryptography import x509 from cryptography.hazmat.primitives import hashes, serialization @@ -48,7 +48,7 @@ class InstanceInstallBundle(GenericAPIView): name = _('Install Bundle') model = models.Instance serializer_class = serializers.InstanceSerializer - permission_classes = (IsSystemAdminOrAuditor,) + permission_classes = (IsSystemAdmin,) def get(self, request, *args, **kwargs): instance_obj = self.get_object() diff --git a/awx/main/conf.py b/awx/main/conf.py index 0d2e457f14..50eed36666 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -1094,3 +1094,13 @@ register( category=('PolicyAsCode'), category_slug='policyascode', ) + + +def policy_as_code_validate(serializer, attrs): + opa_host = attrs.get('OPA_HOST', '') + if opa_host and (opa_host.startswith('http://') or opa_host.startswith('https://')): + raise serializers.ValidationError({'OPA_HOST': _("OPA_HOST should not include 'http://' or 'https://' prefixes. Please enter only the hostname.")}) + return attrs + + +register_validate('policyascode', policy_as_code_validate) diff --git a/awx/main/credential_plugins/aim.py b/awx/main/credential_plugins/aim.py new file mode 100644 index 0000000000..1e7af94e33 --- /dev/null +++ b/awx/main/credential_plugins/aim.py @@ -0,0 +1,140 @@ +from .plugin import CredentialPlugin, CertFiles, raise_for_status + +from urllib.parse import quote, urlencode, urljoin + +from django.utils.translation import gettext_lazy as _ +import requests as requests + +aim_inputs = { + 'fields': [ + { + 'id': 'url', + 'label': _('CyberArk CCP URL'), + 'type': 'string', + 'format': 'url', + }, + { + 'id': 'webservice_id', + 'label': _('Web Service ID'), + 'type': 'string', + 'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'), + }, + { + 'id': 'app_id', + 'label': _('Application ID'), + 'type': 'string', + 'secret': True, + }, + { + 'id': 'client_key', + 'label': _('Client Key'), + 'type': 'string', + 'secret': True, + 'multiline': True, + }, + { + 'id': 'client_cert', + 'label': _('Client Certificate'), + 'type': 'string', + 'secret': True, + 'multiline': True, + }, + { + 'id': 'verify', + 'label': _('Verify SSL Certificates'), + 'type': 'boolean', + 'default': True, + }, + ], + 'metadata': [ + { + 'id': 'object_query', + 'label': _('Object Query'), + 'type': 'string', + 'help_text': _('Lookup query for the object. 
Ex: Safe=TestSafe;Object=testAccountName123'), + }, + {'id': 'object_query_format', 'label': _('Object Query Format'), 'type': 'string', 'default': 'Exact', 'choices': ['Exact', 'Regexp']}, + { + 'id': 'object_property', + 'label': _('Object Property'), + 'type': 'string', + 'help_text': _('The property of the object to return. Available properties: Username, Password and Address.'), + }, + { + 'id': 'reason', + 'label': _('Reason'), + 'type': 'string', + 'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.'), + }, + ], + 'required': ['url', 'app_id', 'object_query'], +} + + +def aim_backend(**kwargs): + url = kwargs['url'] + client_cert = kwargs.get('client_cert', None) + client_key = kwargs.get('client_key', None) + verify = kwargs['verify'] + webservice_id = kwargs.get('webservice_id', '') + app_id = kwargs['app_id'] + object_query = kwargs['object_query'] + object_query_format = kwargs['object_query_format'] + object_property = kwargs.get('object_property', '') + reason = kwargs.get('reason', None) + if webservice_id == '': + webservice_id = 'AIMWebService' + + query_params = { + 'AppId': app_id, + 'Query': object_query, + 'QueryFormat': object_query_format, + } + if reason: + query_params['reason'] = reason + + request_qs = '?' + urlencode(query_params, quote_via=quote) + request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts'])) + + with CertFiles(client_cert, client_key) as cert: + res = requests.get( + request_url + request_qs, + timeout=30, + cert=cert, + verify=verify, + allow_redirects=False, + ) + sensitive_query_params = { + 'AppId': '****', + 'Query': '****', + 'QueryFormat': object_query_format, + } + if reason: + sensitive_query_params['reason'] = '****' + sensitive_request_qs = urlencode( + sensitive_query_params, + safe='*', + quote_via=quote, + ) + res.url = f'{request_url}?{sensitive_request_qs}' + + raise_for_status(res) + # CCP returns the property name capitalized, username is camel case + # so we need to handle that case + if object_property == '': + object_property = 'Content' + elif object_property.lower() == 'username': + object_property = 'UserName' + elif object_property.lower() == 'password': + object_property = 'Content' + elif object_property.lower() == 'address': + object_property = 'Address' + elif object_property not in res: + raise KeyError('Property {} not found in object, available properties: Username, Password and Address'.format(object_property)) + else: + object_property = object_property.capitalize() + + return res.json()[object_property] + + +aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend) diff --git a/awx/main/credential_plugins/azure_kv.py b/awx/main/credential_plugins/azure_kv.py new file mode 100644 index 0000000000..58efe4a554 --- /dev/null +++ b/awx/main/credential_plugins/azure_kv.py @@ -0,0 +1,114 @@ +from azure.keyvault.secrets import SecretClient +from azure.identity import ( + ClientSecretCredential, + CredentialUnavailableError, + ManagedIdentityCredential, +) +from azure.core.credentials import TokenCredential +from msrestazure import azure_cloud + +from .plugin import CredentialPlugin + +from django.utils.translation import gettext_lazy as _ + + +# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py +clouds = [vars(azure_cloud)[n] for n in dir(azure_cloud) if n.startswith("AZURE_") and n.endswith("_CLOUD")] +default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"] + + 
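+# The two lines above collect every AZURE_*_CLOUD constant that msrestazure
+# exposes (public, US Government, China, and German clouds), so the 'cloud_name'
+# choices below can offer each sovereign environment with the public cloud as
+# the default.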
+azure_keyvault_inputs = { + 'fields': [ + { + 'id': 'url', + 'label': _('Vault URL (DNS Name)'), + 'type': 'string', + 'format': 'url', + }, + {'id': 'client', 'label': _('Client ID'), 'type': 'string'}, + { + 'id': 'secret', + 'label': _('Client Secret'), + 'type': 'string', + 'secret': True, + }, + {'id': 'tenant', 'label': _('Tenant ID'), 'type': 'string'}, + { + 'id': 'cloud_name', + 'label': _('Cloud Environment'), + 'help_text': _('Specify which azure cloud environment to use.'), + 'choices': list(set([default_cloud.name] + [c.name for c in clouds])), + 'default': default_cloud.name, + }, + ], + 'metadata': [ + { + 'id': 'secret_field', + 'label': _('Secret Name'), + 'type': 'string', + 'help_text': _('The name of the secret to look up.'), + }, + { + 'id': 'secret_version', + 'label': _('Secret Version'), + 'type': 'string', + 'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'), + }, + ], + 'required': ['url', 'secret_field'], +} + + +def _initialize_credential( + tenant: str = '', + client: str = '', + secret: str = '', +) -> TokenCredential: + explicit_credentials_provided = all((tenant, client, secret)) + + if explicit_credentials_provided: + return ClientSecretCredential( + tenant_id=tenant, + client_id=client, + client_secret=secret, + ) + + return ManagedIdentityCredential() + + +def azure_keyvault_backend( + *, url: str, client: str = '', secret: str = '', tenant: str = '', secret_field: str, secret_version: str = '', **kwargs +) -> str | None: + """Get a credential and retrieve a secret from an Azure Key Vault. + + An empty string for an optional parameter counts as not provided. + + :param url: An Azure Key Vault URI. + :param client: The Client ID (optional). + :param secret: The Client Secret (optional). + :param tenant: The Tenant ID (optional). + :param secret_field: The name of the secret to retrieve from the + vault. + :param secret_version: The version of the secret to retrieve + (optional). + :returns: The secret from the Key Vault. + :raises RuntimeError: If the software is not being run on an Azure + VM. + """ + chosen_credential = _initialize_credential(tenant, client, secret) + keyvault = SecretClient(credential=chosen_credential, vault_url=url) + try: + keyvault_secret = keyvault.get_secret( + name=secret_field, + version=secret_version, + ) + except CredentialUnavailableError as secret_lookup_err: + raise RuntimeError( + 'You are not operating on an Azure VM, so the Managed Identity ' + 'feature is unavailable. Please provide the full Client ID, ' + 'Client Secret, and Tenant ID or run the software on an Azure VM.', + ) from secret_lookup_err + return keyvault_secret.value + + +azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend) diff --git a/awx/main/credential_plugins/github_app.py b/awx/main/credential_plugins/github_app.py new file mode 100644 index 0000000000..df302e0162 --- /dev/null +++ b/awx/main/credential_plugins/github_app.py @@ -0,0 +1,176 @@ +"""GitHub App Installation Access Token Credential Plugin. + +This module defines a credential plugin for making use of the +GitHub Apps mechanism, allowing authentication via GitHub App +installation-scoped access tokens. + +Functions: + +- :func:`extract_github_app_install_token`: Generates a GitHub App + Installation token. +- ``github_app_lookup``: Defines the credential plugin interface. 
+""" + +from github import Auth as Auth, Github +from github.Consts import DEFAULT_BASE_URL as PUBLIC_GH_API_URL +from github.GithubException import ( + BadAttributeException, + GithubException, + UnknownObjectException, +) + +from django.utils.translation import gettext_lazy as _ + +from .plugin import CredentialPlugin + +github_app_inputs = { + 'fields': [ + { + 'id': 'github_api_url', + 'label': _('GitHub API endpoint URL'), + 'type': 'string', + 'help_text': _( + 'Specify the GitHub API URL here. In the case of an Enterprise: ' + 'https://gh.your.org/api/v3 (self-hosted) ' + 'or https://api.SUBDOMAIN.ghe.com (cloud)', + ), + 'default': 'https://api.github.com', + }, + { + 'id': 'app_or_client_id', + 'label': _('GitHub App ID'), + 'type': 'string', + 'help_text': _( + 'The GitHub App ID created by the GitHub Admin. ' + 'Example App ID: 1121547 ' + 'found on https://github.com/settings/apps/ ' + 'required for creating a JWT token for authentication.', + ), + }, + { + 'id': 'install_id', + 'label': _('GitHub App Installation ID'), + 'type': 'string', + 'help_text': _( + 'The Installation ID from the GitHub App installation ' + 'generated by the GitHub Admin. ' + 'Example: 59980338 extracted from the installation link ' + 'https://github.com/settings/installations/59980338 ' + 'required for creating a limited GitHub app token.', + ), + }, + { + 'id': 'private_rsa_key', + 'label': _('RSA Private Key'), + 'type': 'string', + 'format': 'ssh_private_key', + 'secret': True, + 'multiline': True, + 'help_text': _( + 'Paste the contents of the PEM file that the GitHub Admin provided to you with the app and installation IDs.', + ), + }, + ], + 'metadata': [ + { + 'id': 'description', + 'label': _('Description (Optional)'), + 'type': 'string', + 'help_text': _('To be removed after UI is updated'), + }, + ], + 'required': ['app_or_client_id', 'install_id', 'private_rsa_key'], +} + +GH_CLIENT_ID_TRAILER_LENGTH = 16 +HEXADECIMAL_BASE = 16 + + +def _is_intish(app_id_candidate): + return isinstance(app_id_candidate, int) or app_id_candidate.isdigit() + + +def _is_client_id(client_id_candidate): + client_id_prefix = 'Iv1.' + if not client_id_candidate.startswith(client_id_prefix): + return False + + client_id_trailer = client_id_candidate[len(client_id_prefix) :] + + if len(client_id_trailer) != GH_CLIENT_ID_TRAILER_LENGTH: + return False + + try: + int(client_id_trailer, base=HEXADECIMAL_BASE) + except ValueError: + return False + + return True + + +def _is_app_or_client_id(app_or_client_id_candidate): + if _is_intish(app_or_client_id_candidate): + return True + return _is_client_id(app_or_client_id_candidate) + + +def _assert_ids_look_acceptable(app_or_client_id, install_id): + if not _is_app_or_client_id(app_or_client_id): + raise ValueError( + 'Expected GitHub App or Client ID to be an integer or a string ' + f'starting with `Iv1.` followed by 16 hexadecimal digits, ' + f'but got {app_or_client_id !r}', + ) + if isinstance(app_or_client_id, str) and _is_client_id(app_or_client_id): + raise ValueError( + 'Expected GitHub App ID must be an integer or a string ' + f'with an all-digit value, but got {app_or_client_id !r}. 
' + 'Client IDs are currently unsupported.', + ) + if not _is_intish(install_id): + raise ValueError( + 'Expected GitHub App Installation ID to be an integer' f' but got {install_id !r}', + ) + + +def extract_github_app_install_token(github_api_url, app_or_client_id, private_rsa_key, install_id, **_discarded_kwargs): + """Generate a GH App Installation access token.""" + _assert_ids_look_acceptable(app_or_client_id, install_id) + + auth = Auth.AppAuth( + app_id=str(app_or_client_id), + private_key=private_rsa_key, + ).get_installation_auth(installation_id=int(install_id)) + + Github( + auth=auth, + base_url=github_api_url if github_api_url else PUBLIC_GH_API_URL, + ) + + doc_url = 'See https://docs.github.com/rest/reference/apps#create-an-installation-access-token-for-an-app' + app_install_context = f'app_or_client_id: {app_or_client_id}, install_id: {install_id}' + + try: + return auth.token + except UnknownObjectException as github_install_not_found_exc: + raise ValueError( + f'Failed to retrieve a GitHub installation token from {github_api_url} using {app_install_context}. Is the app installed? {doc_url}.' + f'\n\n{github_install_not_found_exc}', + ) from github_install_not_found_exc + except GithubException as pygithub_catchall_exc: + raise RuntimeError( + f'An unexpected error happened while talking to GitHub API @ {github_api_url} ({app_install_context}). ' + f'Is the app or client ID correct? And the private RSA key? {doc_url}.' + f'\n\n{pygithub_catchall_exc}', + ) from pygithub_catchall_exc + except BadAttributeException as github_broken_exc: + raise RuntimeError( + f'Broken GitHub @ {github_api_url} with {app_install_context}. It is a bug, please report it to the developers.\n\n{github_broken_exc}', + ) from github_broken_exc + + +github_app_lookup_plugin = CredentialPlugin( + 'GitHub App Installation Access Token Lookup', + inputs=github_app_inputs, + backend=extract_github_app_install_token, +) diff --git a/awx/main/management/commands/create_preload_data.py b/awx/main/management/commands/create_preload_data.py index eb2ca87e80..68ed944cad 100644 --- a/awx/main/management/commands/create_preload_data.py +++ b/awx/main/management/commands/create_preload_data.py @@ -4,6 +4,7 @@ from django.core.management.base import BaseCommand from django.db import transaction from crum import impersonate +from ansible_base.resource_registry.signals.handlers import no_reverse_sync from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate from awx.main.signals import disable_computed_fields @@ -16,8 +17,9 @@ class Command(BaseCommand): def handle(self, *args, **kwargs): # Wrap the operation in an atomic block, so we do not on accident # create the organization but not create the project, etc. 
- with transaction.atomic(): - self._handle() + with no_reverse_sync(): + with transaction.atomic(): + self._handle() def _handle(self): changed = False diff --git a/awx/main/management/commands/import_auth_config_to_gateway.py b/awx/main/management/commands/import_auth_config_to_gateway.py new file mode 100644 index 0000000000..f89ebf3496 --- /dev/null +++ b/awx/main/management/commands/import_auth_config_to_gateway.py @@ -0,0 +1,247 @@ +import sys +import os + +from django.core.management.base import BaseCommand +from urllib.parse import urlparse, urlunparse +from awx.sso.utils.azure_ad_migrator import AzureADMigrator +from awx.sso.utils.github_migrator import GitHubMigrator +from awx.sso.utils.ldap_migrator import LDAPMigrator +from awx.sso.utils.oidc_migrator import OIDCMigrator +from awx.sso.utils.saml_migrator import SAMLMigrator +from awx.sso.utils.radius_migrator import RADIUSMigrator +from awx.sso.utils.settings_migrator import SettingsMigrator +from awx.sso.utils.tacacs_migrator import TACACSMigrator +from awx.sso.utils.google_oauth2_migrator import GoogleOAuth2Migrator +from awx.main.utils.gateway_client import GatewayClient, GatewayAPIError +from awx.main.utils.gateway_client_svc_token import GatewayClientSVCToken +from ansible_base.resource_registry.tasks.sync import create_api_client + + +class Command(BaseCommand): + help = 'Import existing auth provider configurations to AAP Gateway via API requests' + + def add_arguments(self, parser): + parser.add_argument('--basic-auth', action='store_true', help='Use HTTP Basic Authentication between Controller and Gateway') + parser.add_argument( + '--skip-all-authenticators', + action='store_true', + help='Skip importing all authenticators [GitHub, OIDC, SAML, Azure AD, LDAP, RADIUS, TACACS+, Google OAuth2]', + ) + parser.add_argument('--skip-oidc', action='store_true', help='Skip importing generic OIDC authenticators') + parser.add_argument('--skip-github', action='store_true', help='Skip importing GitHub authenticator') + parser.add_argument('--skip-ldap', action='store_true', help='Skip importing LDAP authenticators') + parser.add_argument('--skip-ad', action='store_true', help='Skip importing Azure AD authenticator') + parser.add_argument('--skip-saml', action='store_true', help='Skip importing SAML authenticator') + parser.add_argument('--skip-radius', action='store_true', help='Skip importing RADIUS authenticator') + parser.add_argument('--skip-tacacs', action='store_true', help='Skip importing TACACS+ authenticator') + parser.add_argument('--skip-google', action='store_true', help='Skip importing Google OAuth2 authenticator') + parser.add_argument('--skip-settings', action='store_true', help='Skip importing settings') + parser.add_argument( + '--force', + action='store_true', + help='Force migration even if configurations already exist. 
Does not apply to skipped authenticators nor skipped settings.', + ) + + def handle(self, *args, **options): + # Read Gateway connection parameters from environment variables + gateway_base_url = os.getenv('GATEWAY_BASE_URL') + gateway_user = os.getenv('GATEWAY_USER') + gateway_password = os.getenv('GATEWAY_PASSWORD') + gateway_skip_verify = os.getenv('GATEWAY_SKIP_VERIFY', '').lower() in ('true', '1', 'yes', 'on') + + skip_all_authenticators = options['skip_all_authenticators'] + skip_oidc = options['skip_oidc'] + skip_github = options['skip_github'] + skip_ldap = options['skip_ldap'] + skip_ad = options['skip_ad'] + skip_saml = options['skip_saml'] + skip_radius = options['skip_radius'] + skip_tacacs = options['skip_tacacs'] + skip_google = options['skip_google'] + skip_settings = options['skip_settings'] + force = options['force'] + basic_auth = options['basic_auth'] + + management_command_validation_errors = [] + + # If the management command isn't called with all parameters needed to talk to Gateway, consider + # it a dry-run and exit cleanly + if not gateway_base_url and basic_auth: + management_command_validation_errors.append('- GATEWAY_BASE_URL: Base URL of the AAP Gateway instance') + if (not gateway_user or not gateway_password) and basic_auth: + management_command_validation_errors.append('- GATEWAY_USER: Username for AAP Gateway authentication') + management_command_validation_errors.append('- GATEWAY_PASSWORD: Password for AAP Gateway authentication') + + if len(management_command_validation_errors) > 0: + self.stdout.write(self.style.WARNING('Missing required environment variables:')) + for validation_error in management_command_validation_errors: + self.stdout.write(self.style.WARNING(f"{validation_error}")) + self.stdout.write(self.style.WARNING('- GATEWAY_SKIP_VERIFY: Skip SSL certificate verification (optional)')) + sys.exit(0) + + resource_api_client = None + response = None + + if basic_auth: + self.stdout.write(self.style.SUCCESS('HTTP Basic Auth: true')) + self.stdout.write(self.style.SUCCESS(f'Gateway Base URL: {gateway_base_url}')) + self.stdout.write(self.style.SUCCESS(f'Gateway User: {gateway_user}')) + self.stdout.write(self.style.SUCCESS('Gateway Password: *******************')) + self.stdout.write(self.style.SUCCESS(f'Skip SSL Verification: {gateway_skip_verify}')) + + else: + resource_api_client = create_api_client() + resource_api_client.verify_https = not gateway_skip_verify + response = resource_api_client.get_service_metadata() + parsed_url = urlparse(resource_api_client.base_url) + resource_api_client.base_url = urlunparse((parsed_url.scheme, parsed_url.netloc, '/', '', '', '')) + + self.stdout.write(self.style.SUCCESS('Gateway Service Token: true')) + self.stdout.write(self.style.SUCCESS(f'Gateway Base URL: {resource_api_client.base_url}')) + self.stdout.write(self.style.SUCCESS(f'Gateway JWT User: {resource_api_client.jwt_user_id}')) + self.stdout.write(self.style.SUCCESS(f'Gateway JWT Expiration: {resource_api_client.jwt_expiration}')) + self.stdout.write(self.style.SUCCESS(f'Skip SSL Verification: {not resource_api_client.verify_https}')) + self.stdout.write(self.style.SUCCESS(f'Connection Validated: {response.status_code == 200}')) + + if response.status_code != 200: + self.stdout.write( + self.style.ERROR( + f'Gateway Service Token is unable to connect to Gateway via the base URL {resource_api_client.base_url}. 
Received HTTP response code {response.status_code}'
+                    )
+                )
+                sys.exit(1)
+
+        # Create Gateway client and run migrations
+        try:
+            self.stdout.write(self.style.SUCCESS('\n=== Connecting to Gateway ==='))
+            pre_gateway_client = None
+            if basic_auth:
+                self.stdout.write(self.style.SUCCESS('\n=== With Basic HTTP Auth ==='))
+                pre_gateway_client = GatewayClient(
+                    base_url=gateway_base_url, username=gateway_user, password=gateway_password, skip_verify=gateway_skip_verify, command=self
+                )
+
+            else:
+                self.stdout.write(self.style.SUCCESS('\n=== With Service Token ==='))
+                pre_gateway_client = GatewayClientSVCToken(resource_api_client=resource_api_client, command=self)
+
+            with pre_gateway_client as gateway_client:
+                self.stdout.write(self.style.SUCCESS('Successfully connected to Gateway'))
+
+                # Initialize migrators
+                migrators = []
+                if not skip_all_authenticators:
+                    if not skip_oidc:
+                        migrators.append(OIDCMigrator(gateway_client, self, force=force))
+
+                    if not skip_github:
+                        migrators.append(GitHubMigrator(gateway_client, self, force=force))
+
+                    if not skip_saml:
+                        migrators.append(SAMLMigrator(gateway_client, self, force=force))
+
+                    if not skip_ad:
+                        migrators.append(AzureADMigrator(gateway_client, self, force=force))
+
+                    if not skip_ldap:
+                        migrators.append(LDAPMigrator(gateway_client, self, force=force))
+
+                    if not skip_radius:
+                        migrators.append(RADIUSMigrator(gateway_client, self, force=force))
+
+                    if not skip_tacacs:
+                        migrators.append(TACACSMigrator(gateway_client, self, force=force))
+
+                    if not skip_google:
+                        migrators.append(GoogleOAuth2Migrator(gateway_client, self, force=force))
+
+                    if not migrators:
+                        self.stdout.write(self.style.WARNING('No authentication configurations found to migrate.'))
+
+                if not skip_settings:
+                    migrators.append(SettingsMigrator(gateway_client, self, force=force))
+                else:
+                    self.stdout.write(self.style.WARNING('Settings migration will not execute.'))
+
+                # Run migrations
+                total_results = {
+                    'created': 0,
+                    'updated': 0,
+                    'unchanged': 0,
+                    'failed': 0,
+                    'mappers_created': 0,
+                    'mappers_updated': 0,
+                    'mappers_failed': 0,
+                    'settings_created': 0,
+                    'settings_updated': 0,
+                    'settings_unchanged': 0,
+                    'settings_failed': 0,
+                }
+
+                if not migrators:
+                    self.stdout.write(self.style.WARNING('NO MIGRATIONS WILL EXECUTE.'))
+                    # Exit with success code since this is not an error condition
+                    sys.exit(0)
+                else:
+                    for migrator in migrators:
+                        self.stdout.write(self.style.SUCCESS(f'\n=== Migrating {migrator.get_authenticator_type()} Configurations ==='))
+                        result = migrator.migrate()
+                        self._print_export_summary(migrator.get_authenticator_type(), result)
+
+                        # Accumulate results - handle missing keys gracefully
+                        for key in total_results:
+                            total_results[key] += result.get(key, 0)
+
+                # Overall summary
+                self.stdout.write(self.style.SUCCESS('\n=== Migration Summary ==='))
+                self.stdout.write(f'Total authenticators created: {total_results["created"]}')
+                self.stdout.write(f'Total authenticators updated: {total_results["updated"]}')
+                self.stdout.write(f'Total authenticators unchanged: {total_results["unchanged"]}')
+                self.stdout.write(f'Total authenticators failed: {total_results["failed"]}')
+                self.stdout.write(f'Total mappers created: {total_results["mappers_created"]}')
+                self.stdout.write(f'Total mappers updated: {total_results["mappers_updated"]}')
+                self.stdout.write(f'Total mappers failed: {total_results["mappers_failed"]}')
+                self.stdout.write(f'Total settings created: {total_results["settings_created"]}')
+                self.stdout.write(f'Total settings updated: 
{total_results["settings_updated"]}') + self.stdout.write(f'Total settings unchanged: {total_results["settings_unchanged"]}') + self.stdout.write(f'Total settings failed: {total_results["settings_failed"]}') + + # Check for any failures and return appropriate status code + has_failures = total_results["failed"] > 0 or total_results["mappers_failed"] > 0 or total_results["settings_failed"] > 0 + + if has_failures: + self.stdout.write(self.style.ERROR('\nMigration completed with failures.')) + sys.exit(1) + else: + self.stdout.write(self.style.SUCCESS('\nMigration completed successfully.')) + sys.exit(0) + + except GatewayAPIError as e: + self.stdout.write(self.style.ERROR(f'Gateway API Error: {e.message}')) + if e.status_code: + self.stdout.write(self.style.ERROR(f'Status Code: {e.status_code}')) + if e.response_data: + self.stdout.write(self.style.ERROR(f'Response: {e.response_data}')) + sys.exit(1) + except Exception as e: + self.stdout.write(self.style.ERROR(f'Unexpected error during migration: {str(e)}')) + sys.exit(1) + + def _print_export_summary(self, config_type, result): + """Print a summary of the export results.""" + self.stdout.write(f'\n--- {config_type} Export Summary ---') + + if config_type in ['GitHub', 'OIDC', 'SAML', 'Azure AD', 'LDAP', 'RADIUS', 'TACACS+', 'Google OAuth2']: + self.stdout.write(f'Authenticators created: {result.get("created", 0)}') + self.stdout.write(f'Authenticators updated: {result.get("updated", 0)}') + self.stdout.write(f'Authenticators unchanged: {result.get("unchanged", 0)}') + self.stdout.write(f'Authenticators failed: {result.get("failed", 0)}') + self.stdout.write(f'Mappers created: {result.get("mappers_created", 0)}') + self.stdout.write(f'Mappers updated: {result.get("mappers_updated", 0)}') + self.stdout.write(f'Mappers failed: {result.get("mappers_failed", 0)}') + + if config_type == 'Settings': + self.stdout.write(f'Settings created: {result.get("settings_created", 0)}') + self.stdout.write(f'Settings updated: {result.get("settings_updated", 0)}') + self.stdout.write(f'Settings unchanged: {result.get("settings_unchanged", 0)}') + self.stdout.write(f'Settings failed: {result.get("settings_failed", 0)}') diff --git a/awx/main/migrations/0192_custom_roles.py b/awx/main/migrations/0192_custom_roles.py index c91823aa34..ba75694c7f 100644 --- a/awx/main/migrations/0192_custom_roles.py +++ b/awx/main/migrations/0192_custom_roles.py @@ -8,7 +8,7 @@ from awx.main.migrations._dab_rbac import migrate_to_new_rbac, create_permission class Migration(migrations.Migration): dependencies = [ ('main', '0191_add_django_permissions'), - ('dab_rbac', '__first__'), + ('dab_rbac', '0003_alter_dabpermission_codename_and_more'), ] operations = [ diff --git a/awx/main/migrations/0200_template_name_constraint.py b/awx/main/migrations/0200_template_name_constraint.py index 4d9a824ac3..5627a91728 100644 --- a/awx/main/migrations/0200_template_name_constraint.py +++ b/awx/main/migrations/0200_template_name_constraint.py @@ -26,6 +26,11 @@ def change_inventory_source_org_unique(apps, schema_editor): logger.info(f'Set database constraint rule for {r} inventory source objects') +def rename_wfjt(apps, schema_editor): + cls = apps.get_model('main', 'WorkflowJobTemplate') + _rename_duplicates(cls) + + class Migration(migrations.Migration): dependencies = [ @@ -40,6 +45,7 @@ class Migration(migrations.Migration): name='org_unique', field=models.BooleanField(blank=True, default=True, editable=False, help_text='Used internally to selectively enforce database constraint on 
name'), ), + migrations.RunPython(rename_wfjt, migrations.RunPython.noop), migrations.RunPython(change_inventory_source_org_unique, migrations.RunPython.noop), migrations.AddConstraint( model_name='unifiedjobtemplate', diff --git a/awx/main/migrations/0201_create_managed_creds.py b/awx/main/migrations/0201_create_managed_creds.py index c5beecdbe2..310eabfc4c 100644 --- a/awx/main/migrations/0201_create_managed_creds.py +++ b/awx/main/migrations/0201_create_managed_creds.py @@ -1,9 +1,20 @@ from django.db import migrations +# AWX +from awx.main.models import CredentialType +from awx.main.utils.common import set_current_apps + + +def setup_tower_managed_defaults(apps, schema_editor): + set_current_apps(apps) + CredentialType.setup_tower_managed_defaults(apps) + class Migration(migrations.Migration): dependencies = [ ('main', '0200_template_name_constraint'), ] - operations = [] + operations = [ + migrations.RunPython(setup_tower_managed_defaults), + ] diff --git a/awx/main/migrations/0202_convert_controller_role_definitions.py b/awx/main/migrations/0202_convert_controller_role_definitions.py new file mode 100644 index 0000000000..9a0c0b40fb --- /dev/null +++ b/awx/main/migrations/0202_convert_controller_role_definitions.py @@ -0,0 +1,102 @@ +# Generated by Django migration for converting Controller role definitions + +from ansible_base.rbac.migrations._utils import give_permissions +from django.db import migrations + + +def convert_controller_role_definitions(apps, schema_editor): + """ + Convert Controller role definitions to regular role definitions: + - Controller Organization Admin -> Organization Admin + - Controller Organization Member -> Organization Member + - Controller Team Admin -> Team Admin + - Controller Team Member -> Team Member + - Controller System Auditor -> Platform Auditor + + Then delete the old Controller role definitions. 
+ """ + RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition') + RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment') + RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment') + Permission = apps.get_model('dab_rbac', 'DABPermission') + + # Mapping of old Controller role names to new role names + role_mappings = { + 'Controller Organization Admin': 'Organization Admin', + 'Controller Organization Member': 'Organization Member', + 'Controller Team Admin': 'Team Admin', + 'Controller Team Member': 'Team Member', + } + + for old_name, new_name in role_mappings.items(): + # Find the old Controller role definition + old_role = RoleDefinition.objects.filter(name=old_name).first() + if not old_role: + continue # Skip if the old role doesn't exist + + # Find the new role definition + new_role = RoleDefinition.objects.get(name=new_name) + + # Collect all the assignments that need to be migrated + # Group by object (content_type + object_id) to batch the give_permissions calls + assignments_by_object = {} + + # Get user assignments + user_assignments = RoleUserAssignment.objects.filter(role_definition=old_role).select_related('object_role') + for assignment in user_assignments: + key = (assignment.object_role.content_type_id, assignment.object_role.object_id) + if key not in assignments_by_object: + assignments_by_object[key] = {'users': [], 'teams': []} + assignments_by_object[key]['users'].append(assignment.user) + + # Get team assignments + team_assignments = RoleTeamAssignment.objects.filter(role_definition=old_role).select_related('object_role') + for assignment in team_assignments: + key = (assignment.object_role.content_type_id, assignment.object_role.object_id) + if key not in assignments_by_object: + assignments_by_object[key] = {'users': [], 'teams': []} + assignments_by_object[key]['teams'].append(assignment.team.id) + + # Use give_permissions to create new assignments with the new role definition + for (content_type_id, object_id), data in assignments_by_object.items(): + if data['users'] or data['teams']: + give_permissions( + apps, + new_role, + users=data['users'], + teams=data['teams'], + object_id=object_id, + content_type_id=content_type_id, + ) + + # Delete the old role definition (this will cascade to delete old assignments and ObjectRoles) + old_role.delete() + + # Create or get Platform Auditor + auditor_rd, created = RoleDefinition.objects.get_or_create( + name='Platform Auditor', + defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True}, + ) + if created: + auditor_rd.permissions.add(*list(Permission.objects.filter(codename__startswith='view'))) + + old_rd = RoleDefinition.objects.filter(name='Controller System Auditor').first() + if old_rd: + for assignment in RoleUserAssignment.objects.filter(role_definition=old_rd): + RoleUserAssignment.objects.create( + user=assignment.user, + role_definition=auditor_rd, + ) + + # Delete the Controller System Auditor role + RoleDefinition.objects.filter(name='Controller System Auditor').delete() + + +class Migration(migrations.Migration): + dependencies = [ + ('main', '0201_create_managed_creds'), + ] + + operations = [ + migrations.RunPython(convert_controller_role_definitions), + ] diff --git a/awx/main/migrations/0203_remove_team_of_teams.py b/awx/main/migrations/0203_remove_team_of_teams.py new file mode 100644 index 0000000000..905f5a40c4 --- /dev/null +++ b/awx/main/migrations/0203_remove_team_of_teams.py @@ -0,0 +1,22 @@ +import logging + +from 
django.db import migrations + +from awx.main.migrations._dab_rbac import consolidate_indirect_user_roles + +logger = logging.getLogger('awx.main.migrations') + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0202_convert_controller_role_definitions'), + ] + # The DAB RBAC app makes substantial model changes which by change-ordering comes after this + # not including run_before might sometimes work but this enforces a more strict and stable order + # for both applying migrations forwards and backwards + run_before = [("dab_rbac", "0004_remote_permissions_additions")] + + operations = [ + migrations.RunPython(consolidate_indirect_user_roles, migrations.RunPython.noop), + ] diff --git a/awx/main/migrations/_OrgAdmin_to_use_ig.py b/awx/main/migrations/_OrgAdmin_to_use_ig.py index b0c41cdd4a..5af468b9ef 100644 --- a/awx/main/migrations/_OrgAdmin_to_use_ig.py +++ b/awx/main/migrations/_OrgAdmin_to_use_ig.py @@ -1,5 +1,6 @@ import logging + logger = logging.getLogger('awx.main.migrations') diff --git a/awx/main/migrations/_dab_rbac.py b/awx/main/migrations/_dab_rbac.py index 183da9fe13..4286be039b 100644 --- a/awx/main/migrations/_dab_rbac.py +++ b/awx/main/migrations/_dab_rbac.py @@ -1,5 +1,6 @@ import json import logging +from collections import defaultdict from django.apps import apps as global_apps from django.db.models import ForeignKey @@ -17,6 +18,7 @@ logger = logging.getLogger('awx.main.migrations._dab_rbac') def create_permissions_as_operation(apps, schema_editor): + logger.info('Running data migration create_permissions_as_operation') # NOTE: the DAB ContentType changes adjusted how they fire # before they would fire on every app config, like contenttypes create_dab_permissions(global_apps.get_app_config("main"), apps=apps) @@ -166,11 +168,15 @@ def migrate_to_new_rbac(apps, schema_editor): This method moves the assigned permissions from the old rbac.py models to the new RoleDefinition and ObjectRole models """ + logger.info('Running data migration migrate_to_new_rbac') Role = apps.get_model('main', 'Role') RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition') RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment') Permission = apps.get_model('dab_rbac', 'DABPermission') + if Permission.objects.count() == 0: + raise RuntimeError('Running migrate_to_new_rbac requires DABPermission objects created first') + # remove add premissions that are not valid for migrations from old versions for perm_str in ('add_organization', 'add_jobtemplate'): perm = Permission.objects.filter(codename=perm_str).first() @@ -250,11 +256,14 @@ def migrate_to_new_rbac(apps, schema_editor): # Create new replacement system auditor role new_system_auditor, created = RoleDefinition.objects.get_or_create( - name='Controller System Auditor', + name='Platform Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True}, ) new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view'))) + if created: + logger.info(f'Created RoleDefinition {new_system_auditor.name} pk={new_system_auditor.pk} with {new_system_auditor.permissions.count()} permissions') + # migrate is_system_auditor flag, because it is no longer handled by a system role old_system_auditor = Role.objects.filter(singleton_name='system_auditor').first() if old_system_auditor: @@ -283,8 +292,9 @@ def get_or_create_managed(name, description, ct, permissions, RoleDefinition): def setup_managed_role_definitions(apps, 
schema_editor): """ - Idepotent method to create or sync the managed role definitions + Idempotent method to create or sync the managed role definitions """ + logger.info('Running data migration setup_managed_role_definitions') to_create = { 'object_admin': '{cls.__name__} Admin', 'org_admin': 'Organization Admin', @@ -448,3 +458,115 @@ def setup_managed_role_definitions(apps, schema_editor): for role_definition in unexpected_role_definitions: logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}') role_definition.delete() + + +def get_team_to_team_relationships(apps, team_member_role): + """ + Find all team-to-team relationships where one team is a member of another. + Returns a dict mapping parent_team_id -> [child_team_id, ...] + """ + team_to_team_relationships = defaultdict(list) + + # Find all team assignments with the Team Member role + RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment') + team_assignments = RoleTeamAssignment.objects.filter(role_definition=team_member_role).select_related('team') + + for assignment in team_assignments: + parent_team_id = int(assignment.object_id) + child_team_id = assignment.team.id + team_to_team_relationships[parent_team_id].append(child_team_id) + + return team_to_team_relationships + + +def get_all_user_members_of_team(apps, team_member_role, team_id, team_to_team_map, visited=None): + """ + Recursively find all users who are members of a team, including through nested teams. + """ + if visited is None: + visited = set() + + if team_id in visited: + return set() # Avoid infinite recursion + + visited.add(team_id) + all_users = set() + + # Get direct user assignments to this team + RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment') + user_assignments = RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_id).select_related('user') + + for assignment in user_assignments: + all_users.add(assignment.user) + + # Get team-to-team assignments and recursively find their users + child_team_ids = team_to_team_map.get(team_id, []) + for child_team_id in child_team_ids: + nested_users = get_all_user_members_of_team(apps, team_member_role, child_team_id, team_to_team_map, visited.copy()) + all_users.update(nested_users) + + return all_users + + +def remove_team_to_team_assignment(apps, team_member_role, parent_team_id, child_team_id): + """ + Remove team-to-team memberships. + """ + Team = apps.get_model('main', 'Team') + RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment') + + parent_team = Team.objects.get(id=parent_team_id) + child_team = Team.objects.get(id=child_team_id) + + # Remove all team-to-team RoleTeamAssignments + RoleTeamAssignment.objects.filter(role_definition=team_member_role, object_id=parent_team_id, team=child_team).delete() + + # Check mirroring Team model for children under member_role + parent_team.member_role.children.filter(object_id=child_team_id).delete() + + +def consolidate_indirect_user_roles(apps, schema_editor): + """ + A user should have a member role for every team they were indirectly + a member of. ex. Team A is a member of Team B. All users in Team A + previously were only members of Team A. They should now be members of + Team A and Team B. 
+ """ + + # get models for membership on teams + RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition') + Team = apps.get_model('main', 'Team') + + team_member_role = RoleDefinition.objects.get(name='Team Member') + + team_to_team_map = get_team_to_team_relationships(apps, team_member_role) + + if not team_to_team_map: + return # No team-to-team relationships to consolidate + + # Get content type for Team - needed for give_permissions + try: + from django.contrib.contenttypes.models import ContentType + + team_content_type = ContentType.objects.get_for_model(Team) + except ImportError: + # Fallback if ContentType is not available + ContentType = apps.get_model('contenttypes', 'ContentType') + team_content_type = ContentType.objects.get_for_model(Team) + + # Get all users who should be direct members of a team + for parent_team_id, child_team_ids in team_to_team_map.items(): + all_users = get_all_user_members_of_team(apps, team_member_role, parent_team_id, team_to_team_map) + + # Create direct RoleUserAssignments for all users + if all_users: + give_permissions(apps=apps, rd=team_member_role, users=list(all_users), object_id=parent_team_id, content_type_id=team_content_type.id) + + # Mirror assignments to Team model + parent_team = Team.objects.get(id=parent_team_id) + for user in all_users: + parent_team.member_role.members.add(user.id) + + # Remove all team-to-team assignments for parent team + for child_team_id in child_team_ids: + remove_team_to_team_assignment(apps, team_member_role, parent_team_id, child_team_id) diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index e2c2f11ff2..95aa1a0396 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -200,7 +200,7 @@ User.add_to_class('created', created) def get_system_auditor_role(): rd, created = RoleDefinition.objects.get_or_create( - name='Controller System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'} + name='Platform Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'} ) if created: rd.permissions.add(*list(permission_registry.permission_qs.filter(codename__startswith='view'))) diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py new file mode 100644 index 0000000000..c282fed86f --- /dev/null +++ b/awx/main/models/credential/__init__.py @@ -0,0 +1,1421 @@ +# Copyright (c) 2015 Ansible, Inc. +# All Rights Reserved. 
+import functools +import inspect +import logging +import os +from pkg_resources import iter_entry_points +import re +import stat +import tempfile +from types import SimpleNamespace + +# Jinja2 +from jinja2 import sandbox + +# Django +from django.db import models +from django.utils.translation import gettext_lazy as _, gettext_noop +from django.core.exceptions import ValidationError +from django.conf import settings +from django.utils.encoding import force_str +from django.utils.functional import cached_property +from django.utils.timezone import now +from django.contrib.auth.models import User + +# DRF +from rest_framework.serializers import ValidationError as DRFValidationError + +# AWX +from awx.api.versioning import reverse +from awx.main.fields import ( + ImplicitRoleField, + CredentialInputField, + CredentialTypeInputField, + CredentialTypeInjectorField, + DynamicCredentialInputField, +) +from awx.main.utils import decrypt_field, classproperty, set_environ +from awx.main.utils.safe_yaml import safe_dump +from awx.main.utils.execution_environments import to_container_path +from awx.main.validators import validate_ssh_private_key +from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, PrimordialModel +from awx.main.models.mixins import ResourceMixin +from awx.main.models.rbac import ( + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, + ROLE_SINGLETON_SYSTEM_AUDITOR, +) +from awx.main.models import Team, Organization +from awx.main.utils import encrypt_field +from . import injectors as builtin_injectors + +# DAB +from ansible_base.resource_registry.tasks.sync import get_resource_server_client +from ansible_base.resource_registry.utils.settings import resource_server_defined + + +__all__ = ['Credential', 'CredentialType', 'CredentialInputSource', 'build_safe_env'] + +logger = logging.getLogger('awx.main.models.credential') +credential_plugins = dict((ep.name, ep.load()) for ep in iter_entry_points('awx.credential_plugins')) + +HIDDEN_PASSWORD = '**********' + + +def build_safe_env(env): + """ + Build environment dictionary, hiding potentially sensitive information + such as passwords or keys. 
+ """ + hidden_re = re.compile(r'API|TOKEN|KEY|SECRET|PASS', re.I) + urlpass_re = re.compile(r'^.*?://[^:]+:(.*?)@.*?$') + safe_env = dict(env) + for k, v in safe_env.items(): + if k == 'AWS_ACCESS_KEY_ID': + continue + elif k.startswith('ANSIBLE_') and not k.startswith('ANSIBLE_NET') and not k.startswith('ANSIBLE_GALAXY_SERVER'): + continue + elif hidden_re.search(k): + safe_env[k] = HIDDEN_PASSWORD + elif type(v) == str and urlpass_re.match(v): + safe_env[k] = urlpass_re.sub(HIDDEN_PASSWORD, v) + return safe_env + + +def check_resource_server_for_user_in_organization(user, organization, requesting_user): + if not resource_server_defined(): + return False + + if not requesting_user: + return False + + client = get_resource_server_client(settings.RESOURCE_SERVICE_PATH, jwt_user_id=str(requesting_user.resource.ansible_id), raise_if_bad_request=False) + # need to get the organization object_id in resource server, by querying with ansible_id + response = client._make_request(path=f'resources/?ansible_id={str(organization.resource.ansible_id)}', method='GET') + response_json = response.json() + if response.status_code != 200: + logger.error(f'Failed to get organization object_id in resource server: {response_json.get("detail", "")}') + return False + + if response_json.get('count', 0) == 0: + return False + org_id_in_resource_server = response_json['results'][0]['object_id'] + + client.base_url = client.base_url.replace('/api/gateway/v1/service-index/', '/api/gateway/v1/') + # find role assignments with: + # - roles Organization Member or Organization Admin + # - user ansible id + # - organization object id + + response = client._make_request( + path=f'role_user_assignments/?role_definition__name__in=Organization Member,Organization Admin&user__resource__ansible_id={str(user.resource.ansible_id)}&object_id={org_id_in_resource_server}', + method='GET', + ) + response_json = response.json() + if response.status_code != 200: + logger.error(f'Failed to get role user assignments in resource server: {response_json.get("detail", "")}') + return False + + if response_json.get('count', 0) > 0: + return True + + return False + + +class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin): + """ + A credential contains information about how to talk to a remote resource + Usually this is a SSH key location, and possibly an unlock password. + If used with sudo, a sudo password should be set if required. + """ + + class Meta: + app_label = 'main' + ordering = ('name',) + unique_together = ('organization', 'name', 'credential_type') + permissions = [('use_credential', 'Can use credential in a job or related resource')] + + PASSWORD_FIELDS = ['inputs'] + FIELDS_TO_PRESERVE_AT_COPY = ['input_sources'] + + credential_type = models.ForeignKey( + 'CredentialType', + related_name='credentials', + null=False, + on_delete=models.CASCADE, + help_text=_('Specify the type of credential you want to create. Refer to the documentation for details on each type.'), + ) + managed = models.BooleanField(default=False, editable=False) + organization = models.ForeignKey( + 'Organization', + null=True, + default=None, + blank=True, + on_delete=models.CASCADE, + related_name='credentials', + ) + inputs = CredentialInputField( + blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. 
Refer to the documentation for example syntax.') + ) + admin_role = ImplicitRoleField( + parent_role=[ + 'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, + 'organization.credential_admin_role', + ], + ) + use_role = ImplicitRoleField( + parent_role=[ + 'admin_role', + ] + ) + read_role = ImplicitRoleField( + parent_role=[ + 'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR, + 'organization.auditor_role', + 'use_role', + 'admin_role', + ] + ) + + @property + def kind(self): + return self.credential_type.namespace + + @property + def cloud(self): + return self.credential_type.kind == 'cloud' + + @property + def kubernetes(self): + return self.credential_type.kind == 'kubernetes' + + def get_absolute_url(self, request=None): + return reverse('api:credential_detail', kwargs={'pk': self.pk}, request=request) + + # + # TODO: the SSH-related properties below are largely used for validation + # and for determining passwords necessary for job/ad-hoc launch + # + # These are SSH-specific; should we move them elsewhere? + # + @property + def needs_ssh_password(self): + return self.credential_type.kind == 'ssh' and self.inputs.get('password') == 'ASK' + + @property + def has_encrypted_ssh_key_data(self): + try: + ssh_key_data = self.get_input('ssh_key_data') + except AttributeError: + return False + + try: + pem_objects = validate_ssh_private_key(ssh_key_data) + for pem_object in pem_objects: + if pem_object.get('key_enc', False): + return True + except ValidationError: + pass + return False + + @property + def needs_ssh_key_unlock(self): + if self.credential_type.kind == 'ssh' and self.inputs.get('ssh_key_unlock') in ('ASK', ''): + return self.has_encrypted_ssh_key_data + return False + + @property + def needs_become_password(self): + return self.credential_type.kind == 'ssh' and self.inputs.get('become_password') == 'ASK' + + @property + def needs_vault_password(self): + return self.credential_type.kind == 'vault' and self.inputs.get('vault_password') == 'ASK' + + @property + def passwords_needed(self): + needed = [] + for field in ('ssh_password', 'become_password', 'ssh_key_unlock'): + if getattr(self, 'needs_%s' % field): + needed.append(field) + if self.needs_vault_password: + if self.inputs.get('vault_id'): + needed.append('vault_password.{}'.format(self.inputs.get('vault_id'))) + else: + needed.append('vault_password') + return needed + + @cached_property + def dynamic_input_fields(self): + # if the credential is not yet saved we can't access the input_sources + if not self.id: + return [] + return [obj.input_field_name for obj in self.input_sources.all()] + + def _password_field_allows_ask(self, field): + return field in self.credential_type.askable_fields + + def save(self, *args, **kwargs): + self.PASSWORD_FIELDS = self.credential_type.secret_fields + + if self.pk: + cred_before = Credential.objects.get(pk=self.pk) + inputs_before = cred_before.inputs + # Look up the currently persisted value so that we can replace + # $encrypted$ with the actual DB-backed value + for field in self.PASSWORD_FIELDS: + if self.inputs.get(field) == '$encrypted$': + self.inputs[field] = inputs_before[field] + + super(Credential, self).save(*args, **kwargs) + + def mark_field_for_save(self, update_fields, field): + if 'inputs' not in update_fields: + update_fields.append('inputs') + + def encrypt_field(self, field, ask): + if field not in self.inputs: + return None + encrypted = encrypt_field(self, field, ask=ask) + if encrypted: + self.inputs[field] = encrypted + elif field in self.inputs: + del 
self.inputs[field] + + def display_inputs(self): + field_val = self.inputs.copy() + for k, v in field_val.items(): + if force_str(v).startswith('$encrypted$'): + field_val[k] = '$encrypted$' + return field_val + + def unique_hash(self, display=False): + """ + Credential exclusivity is not defined solely by the related + credential type (due to vault), so this produces a hash + that can be used to evaluate exclusivity + """ + if display: + type_alias = self.credential_type.name + else: + type_alias = self.credential_type_id + if self.credential_type.kind == 'vault' and self.has_input('vault_id'): + if display: + fmt_str = '{} (id={})' + else: + fmt_str = '{}_{}' + return fmt_str.format(type_alias, self.get_input('vault_id')) + return str(type_alias) + + @staticmethod + def unique_dict(cred_qs): + ret = {} + for cred in cred_qs: + ret[cred.unique_hash()] = cred + return ret + + def get_input(self, field_name, **kwargs): + """ + Get an injectable and decrypted value for an input field. + + Retrieves the value for a given credential input field name. Return + values for secret input fields are decrypted. If the credential doesn't + have an input value defined for the given field name, an AttributeError + is raised unless a default value is provided. + + :param field_name(str): The name of the input field. + :param default(optional[str]): A default return value to use. + """ + if self.credential_type.kind != 'external' and field_name in self.dynamic_input_fields: + return self._get_dynamic_input(field_name) + if field_name in self.credential_type.secret_fields: + try: + return decrypt_field(self, field_name) + except AttributeError: + for field in self.credential_type.inputs.get('fields', []): + if field['id'] == field_name and 'default' in field: + return field['default'] + if 'default' in kwargs: + return kwargs['default'] + raise AttributeError(field_name) + if field_name in self.inputs: + return self.inputs[field_name] + if 'default' in kwargs: + return kwargs['default'] + for field in self.credential_type.inputs.get('fields', []): + if field['id'] == field_name and 'default' in field: + return field['default'] + raise AttributeError(field_name) + + def has_input(self, field_name): + if field_name in self.dynamic_input_fields: + return True + return field_name in self.inputs and self.inputs[field_name] not in ('', None) + + def has_inputs(self, field_names=()): + for name in field_names: + if not self.has_input(name): + raise ValueError('{} is not an input field'.format(name)) + return True + + def _get_dynamic_input(self, field_name): + for input_source in self.input_sources.all(): + if input_source.input_field_name == field_name: + return input_source.get_input_value() + else: + raise ValueError('{} is not a dynamic input field'.format(field_name)) + + def validate_role_assignment(self, actor, role_definition, **kwargs): + if self.organization: + if isinstance(actor, User): + if actor.is_superuser: + return + if Organization.access_qs(actor, 'member').filter(id=self.organization.id).exists(): + return + + requesting_user = kwargs.get('requesting_user', None) + if check_resource_server_for_user_in_organization(actor, self.organization, requesting_user): + return + if isinstance(actor, Team): + if actor.organization == self.organization: + return + raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")}) + + +class CredentialType(CommonModelNameNotUnique): + """ + A reusable schema for a credential. 
+ + Used to define a named credential type with fields (e.g., an API key) and + output injectors (i.e., an environment variable that uses the API key). + """ + + class Meta: + app_label = 'main' + ordering = ('kind', 'name') + unique_together = (('name', 'kind'),) + + KIND_CHOICES = ( + ('ssh', _('Machine')), + ('vault', _('Vault')), + ('net', _('Network')), + ('scm', _('Source Control')), + ('cloud', _('Cloud')), + ('registry', _('Container Registry')), + ('token', _('Personal Access Token')), + ('insights', _('Insights')), + ('external', _('External')), + ('kubernetes', _('Kubernetes')), + ('galaxy', _('Galaxy/Automation Hub')), + ('cryptography', _('Cryptography')), + ) + + kind = models.CharField(max_length=32, choices=KIND_CHOICES) + managed = models.BooleanField(default=False, editable=False) + namespace = models.CharField(max_length=1024, null=True, default=None, editable=False) + inputs = CredentialTypeInputField( + blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. Refer to the documentation for example syntax.') + ) + injectors = CredentialTypeInjectorField( + blank=True, + default=dict, + help_text=_('Enter injectors using either JSON or YAML syntax. Refer to the documentation for example syntax.'), + ) + + @classmethod + def from_db(cls, db, field_names, values): + instance = super(CredentialType, cls).from_db(db, field_names, values) + if instance.managed and instance.namespace: + native = ManagedCredentialType.registry[instance.namespace] + instance.inputs = native.inputs + instance.injectors = native.injectors + return instance + + def get_absolute_url(self, request=None): + return reverse('api:credential_type_detail', kwargs={'pk': self.pk}, request=request) + + @property + def defined_fields(self): + return [field.get('id') for field in self.inputs.get('fields', [])] + + @property + def secret_fields(self): + return [field['id'] for field in self.inputs.get('fields', []) if field.get('secret', False) is True] + + @property + def askable_fields(self): + return [field['id'] for field in self.inputs.get('fields', []) if field.get('ask_at_runtime', False) is True] + + @property + def plugin(self): + if self.kind != 'external': + raise AttributeError('plugin') + [plugin] = [plugin for ns, plugin in credential_plugins.items() if ns == self.namespace] + return plugin + + def default_for_field(self, field_id): + for field in self.inputs.get('fields', []): + if field['id'] == field_id: + if 'choices' in field: + return field['choices'][0] + return {'string': '', 'boolean': False}[field['type']] + + @classproperty + def defaults(cls): + return dict((k, functools.partial(v.create)) for k, v in ManagedCredentialType.registry.items()) + + @classmethod + def setup_tower_managed_defaults(cls, apps=None): + if apps is not None: + ct_class = apps.get_model('main', 'CredentialType') + else: + ct_class = CredentialType + for default in ManagedCredentialType.registry.values(): + existing = ct_class.objects.filter(name=default.name, kind=default.kind).first() + if existing is not None: + existing.namespace = default.namespace + existing.inputs = {} + existing.injectors = {} + existing.save() + continue + logger.debug(_("adding %s credential type" % default.name)) + params = default.get_creation_params() + if 'managed' not in [f.name for f in ct_class._meta.get_fields()]: + params['managed_by_tower'] = params.pop('managed') + params['created'] = params['modified'] = now() # CreatedModifiedModel service + created = ct_class(**params) + created.inputs = 
created.injectors = {} + created.save() + + @classmethod + def load_plugin(cls, ns, plugin): + ManagedCredentialType(namespace=ns, name=plugin.name, kind='external', inputs=plugin.inputs) + + def inject_credential(self, credential, env, safe_env, args, private_data_dir, container_root=None): + """ + Inject credential data into the environment variables and arguments + passed to `ansible-playbook` + + :param credential: a :class:`awx.main.models.Credential` instance + :param env: a dictionary of environment variables used in + the `ansible-playbook` call. This method adds + additional environment variables based on + custom `env` injectors defined on this + CredentialType. + :param safe_env: a dictionary of environment variables stored + in the database for the job run + (`UnifiedJob.job_env`); secret values should + be stripped + :param args: a list of arguments passed to + `ansible-playbook` in the style of + `subprocess.call(args)`. This method appends + additional arguments based on custom + `extra_vars` injectors defined on this + CredentialType. + :param private_data_dir: a temporary directory to store files generated + by `file` injectors (like config files or key + files) + + :param container_root: root directory inside of container to mount the + private data directory to + """ + if not self.injectors: + if self.managed and credential.credential_type.namespace in dir(builtin_injectors): + injected_env = {} + getattr(builtin_injectors, credential.credential_type.namespace)(credential, injected_env, private_data_dir) + env.update(injected_env) + safe_env.update(build_safe_env(injected_env)) + return + + class TowerNamespace: + pass + + tower_namespace = TowerNamespace() + + # maintain a normal namespace for building the ansible-playbook arguments (env and args) + namespace = {'tower': tower_namespace} + + # maintain a sanitized namespace for building the DB-stored arguments (safe_env) + safe_namespace = {'tower': tower_namespace} + + # build a normal namespace with secret values decrypted (for + # ansible-playbook) and a safe namespace with secret values hidden (for + # DB storage) + injectable_fields = list(credential.inputs.keys()) + credential.dynamic_input_fields + for field_name in list(set(injectable_fields)): + value = credential.get_input(field_name) + + if type(value) is bool: + # boolean values can't be secret/encrypted/external + safe_namespace[field_name] = namespace[field_name] = value + continue + + if field_name in self.secret_fields: + safe_namespace[field_name] = '**********' + elif len(value): + safe_namespace[field_name] = value + if len(value): + namespace[field_name] = value + + for field in self.inputs.get('fields', []): + # default missing boolean fields to False + if field['type'] == 'boolean' and field['id'] not in credential.inputs.keys(): + namespace[field['id']] = safe_namespace[field['id']] = False + # make sure private keys end with a \n + if field.get('format') == 'ssh_private_key': + if field['id'] in namespace and not namespace[field['id']].endswith('\n'): + namespace[field['id']] += '\n' + + file_tmpls = self.injectors.get('file', {}) + # If any file templates are provided, render the files and update the + # special `tower` template namespace so the filename can be + # referenced in other injectors + + sandbox_env = sandbox.ImmutableSandboxedEnvironment() + + for file_label, file_tmpl in file_tmpls.items(): + data = sandbox_env.from_string(file_tmpl).render(**namespace) + _, path = tempfile.mkstemp(dir=os.path.join(private_data_dir, 'env')) + with 
open(path, 'w') as f: + f.write(data) + os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) + container_path = to_container_path(path, private_data_dir) + + # determine if filename indicates single file or many + if file_label.find('.') == -1: + tower_namespace.filename = container_path + else: + if not hasattr(tower_namespace, 'filename'): + tower_namespace.filename = TowerNamespace() + file_label = file_label.split('.')[1] + setattr(tower_namespace.filename, file_label, container_path) + + injector_field = self._meta.get_field('injectors') + for env_var, tmpl in self.injectors.get('env', {}).items(): + try: + injector_field.validate_env_var_allowed(env_var) + except ValidationError as e: + logger.error('Ignoring prohibited env var {}, reason: {}'.format(env_var, e)) + continue + env[env_var] = sandbox_env.from_string(tmpl).render(**namespace) + safe_env[env_var] = sandbox_env.from_string(tmpl).render(**safe_namespace) + + if 'INVENTORY_UPDATE_ID' not in env: + # awx-manage inventory_update does not support extra_vars via -e + def build_extra_vars(node): + if isinstance(node, dict): + return {build_extra_vars(k): build_extra_vars(v) for k, v in node.items()} + elif isinstance(node, list): + return [build_extra_vars(x) for x in node] + else: + return sandbox_env.from_string(node).render(**namespace) + + def build_extra_vars_file(vars, private_dir): + handle, path = tempfile.mkstemp(dir=os.path.join(private_dir, 'env')) + f = os.fdopen(handle, 'w') + f.write(safe_dump(vars)) + f.close() + os.chmod(path, stat.S_IRUSR) + return path + + extra_vars = build_extra_vars(self.injectors.get('extra_vars', {})) + if extra_vars: + path = build_extra_vars_file(extra_vars, private_data_dir) + container_path = to_container_path(path, private_data_dir, container_root=container_root) + args.extend(['-e', '@%s' % container_path]) + + +class ManagedCredentialType(SimpleNamespace): + registry = {} + + def __init__(self, namespace, **kwargs): + for k in ('inputs', 'injectors'): + if k not in kwargs: + kwargs[k] = {} + super(ManagedCredentialType, self).__init__(namespace=namespace, **kwargs) + if namespace in ManagedCredentialType.registry: + raise ValueError( + 'a ManagedCredentialType with namespace={} is already defined in {}'.format( + namespace, inspect.getsourcefile(ManagedCredentialType.registry[namespace].__class__) + ) + ) + ManagedCredentialType.registry[namespace] = self + + def get_creation_params(self): + return dict( + namespace=self.namespace, + kind=self.kind, + name=self.name, + managed=True, + inputs=self.inputs, + injectors=self.injectors, + ) + + def create(self): + return CredentialType(**self.get_creation_params()) + + +ManagedCredentialType( + namespace='ssh', + kind='ssh', + name=gettext_noop('Machine'), + inputs={ + 'fields': [ + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + { + 'id': 'ssh_public_key_data', + 'label': gettext_noop('Signed SSH Certificate'), + 'type': 'string', + 'multiline': True, + 'secret': True, + }, + {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + { + 'id': 'become_method', + 'label': gettext_noop('Privilege Escalation Method'), + 'type': 'string', + 'help_text': gettext_noop('Specify a method for 
"become" operations. This is equivalent to specifying the --become-method Ansible parameter.'), + }, + { + 'id': 'become_username', + 'label': gettext_noop('Privilege Escalation Username'), + 'type': 'string', + }, + {'id': 'become_password', 'label': gettext_noop('Privilege Escalation Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + ], + }, +) + +ManagedCredentialType( + namespace='scm', + kind='scm', + name=gettext_noop('Source Control'), + managed=True, + inputs={ + 'fields': [ + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + {'id': 'password', 'label': gettext_noop('Password'), 'type': 'string', 'secret': True}, + {'id': 'ssh_key_data', 'label': gettext_noop('SCM Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + {'id': 'ssh_key_unlock', 'label': gettext_noop('Private Key Passphrase'), 'type': 'string', 'secret': True}, + ], + }, +) + +ManagedCredentialType( + namespace='vault', + kind='vault', + name=gettext_noop('Vault'), + managed=True, + inputs={ + 'fields': [ + {'id': 'vault_password', 'label': gettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True}, + { + 'id': 'vault_id', + 'label': gettext_noop('Vault Identifier'), + 'type': 'string', + 'format': 'vault_id', + 'help_text': gettext_noop( + 'Specify an (optional) Vault ID. This is ' + 'equivalent to specifying the --vault-id ' + 'Ansible parameter for providing multiple Vault ' + 'passwords. Note: this feature only works in ' + 'Ansible 2.4+.' + ), + }, + ], + 'required': ['vault_password'], + }, +) + +ManagedCredentialType( + namespace='net', + kind='net', + name=gettext_noop('Network'), + managed=True, + inputs={ + 'fields': [ + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + { + 'id': 'password', + 'label': gettext_noop('Password'), + 'type': 'string', + 'secret': True, + }, + {'id': 'ssh_key_data', 'label': gettext_noop('SSH Private Key'), 'type': 'string', 'format': 'ssh_private_key', 'secret': True, 'multiline': True}, + { + 'id': 'ssh_key_unlock', + 'label': gettext_noop('Private Key Passphrase'), + 'type': 'string', + 'secret': True, + }, + { + 'id': 'authorize', + 'label': gettext_noop('Authorize'), + 'type': 'boolean', + }, + { + 'id': 'authorize_password', + 'label': gettext_noop('Authorize Password'), + 'type': 'string', + 'secret': True, + }, + ], + 'dependencies': { + 'authorize_password': ['authorize'], + }, + 'required': ['username'], + }, +) + +ManagedCredentialType( + namespace='aws', + kind='cloud', + name=gettext_noop('Amazon Web Services'), + managed=True, + inputs={ + 'fields': [ + {'id': 'username', 'label': gettext_noop('Access Key'), 'type': 'string'}, + { + 'id': 'password', + 'label': gettext_noop('Secret Key'), + 'type': 'string', + 'secret': True, + }, + { + 'id': 'security_token', + 'label': gettext_noop('STS Token'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop( + 'Security Token Service (STS) is a web service ' + 'that enables you to request temporary, ' + 'limited-privilege credentials for AWS Identity ' + 'and Access Management (IAM) users.' 
+ ), + }, + ], + 'required': ['username', 'password'], + }, +) + +ManagedCredentialType( + namespace='openstack', + kind='cloud', + name=gettext_noop('OpenStack'), + managed=True, + inputs={ + 'fields': [ + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + { + 'id': 'password', + 'label': gettext_noop('Password (API Key)'), + 'type': 'string', + 'secret': True, + }, + { + 'id': 'host', + 'label': gettext_noop('Host (Authentication URL)'), + 'type': 'string', + 'help_text': gettext_noop('The host to authenticate with. For example, https://openstack.business.com/v2.0/'), + }, + { + 'id': 'project', + 'label': gettext_noop('Project (Tenant Name)'), + 'type': 'string', + }, + { + 'id': 'project_domain_name', + 'label': gettext_noop('Project (Domain Name)'), + 'type': 'string', + }, + { + 'id': 'domain', + 'label': gettext_noop('Domain Name'), + 'type': 'string', + 'help_text': gettext_noop( + 'OpenStack domains define administrative boundaries. ' + 'It is only needed for Keystone v3 authentication ' + 'URLs. Refer to the documentation for ' + 'common scenarios.' + ), + }, + { + 'id': 'region', + 'label': gettext_noop('Region Name'), + 'type': 'string', + 'help_text': gettext_noop('For some cloud providers, like OVH, region must be specified'), + }, + { + 'id': 'verify_ssl', + 'label': gettext_noop('Verify SSL'), + 'type': 'boolean', + 'default': True, + }, + ], + 'required': ['username', 'password', 'host', 'project'], + }, +) + +ManagedCredentialType( + namespace='vmware', + kind='cloud', + name=gettext_noop('VMware vCenter'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'host', + 'label': gettext_noop('VCenter Host'), + 'type': 'string', + 'help_text': gettext_noop('Enter the hostname or IP address that corresponds to your VMware vCenter.'), + }, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + { + 'id': 'password', + 'label': gettext_noop('Password'), + 'type': 'string', + 'secret': True, + }, + ], + 'required': ['host', 'username', 'password'], + }, +) + +ManagedCredentialType( + namespace='satellite6', + kind='cloud', + name=gettext_noop('Red Hat Satellite 6'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'host', + 'label': gettext_noop('Satellite 6 URL'), + 'type': 'string', + 'help_text': gettext_noop('Enter the URL that corresponds to your Red Hat Satellite 6 server. For example, https://satellite.example.org'), + }, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + { + 'id': 'password', + 'label': gettext_noop('Password'), + 'type': 'string', + 'secret': True, + }, + ], + 'required': ['host', 'username', 'password'], + }, +) + +ManagedCredentialType( + namespace='gce', + kind='cloud', + name=gettext_noop('Google Compute Engine'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'username', + 'label': gettext_noop('Service Account Email Address'), + 'type': 'string', + 'help_text': gettext_noop('The email address assigned to the Google Compute Engine service account.'), + }, + { + 'id': 'project', + 'label': 'Project', + 'type': 'string', + 'help_text': gettext_noop( + 'The Project ID is the GCE assigned identification. ' + 'It is often constructed as three words or two words ' + 'followed by a three-digit number. 
Examples: project-id-000 ' + 'and another-project-id' + ), + }, + { + 'id': 'ssh_key_data', + 'label': gettext_noop('RSA Private Key'), + 'type': 'string', + 'format': 'ssh_private_key', + 'secret': True, + 'multiline': True, + 'help_text': gettext_noop('Paste the contents of the PEM file associated with the service account email.'), + }, + ], + 'required': ['username', 'ssh_key_data'], + }, +) + +ManagedCredentialType( + namespace='azure_rm', + kind='cloud', + name=gettext_noop('Microsoft Azure Resource Manager'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'subscription', + 'label': gettext_noop('Subscription ID'), + 'type': 'string', + 'help_text': gettext_noop('Subscription ID is an Azure construct, which is mapped to a username.'), + }, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + { + 'id': 'password', + 'label': gettext_noop('Password'), + 'type': 'string', + 'secret': True, + }, + {'id': 'client', 'label': gettext_noop('Client ID'), 'type': 'string'}, + { + 'id': 'secret', + 'label': gettext_noop('Client Secret'), + 'type': 'string', + 'secret': True, + }, + {'id': 'tenant', 'label': gettext_noop('Tenant ID'), 'type': 'string'}, + { + 'id': 'cloud_environment', + 'label': gettext_noop('Azure Cloud Environment'), + 'type': 'string', + 'help_text': gettext_noop('Environment variable AZURE_CLOUD_ENVIRONMENT when using Azure GovCloud or Azure stack.'), + }, + ], + 'required': ['subscription'], + }, +) + +ManagedCredentialType( + namespace='github_token', + kind='token', + name=gettext_noop('GitHub Personal Access Token'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'token', + 'label': gettext_noop('Token'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop('This token needs to come from your profile settings in GitHub'), + } + ], + 'required': ['token'], + }, +) + +ManagedCredentialType( + namespace='gitlab_token', + kind='token', + name=gettext_noop('GitLab Personal Access Token'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'token', + 'label': gettext_noop('Token'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop('This token needs to come from your profile settings in GitLab'), + } + ], + 'required': ['token'], + }, +) + +ManagedCredentialType( + namespace='bitbucket_dc_token', + kind='token', + name=gettext_noop('Bitbucket Data Center HTTP Access Token'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'token', + 'label': gettext_noop('Token'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop('This token needs to come from your user settings in Bitbucket'), + } + ], + 'required': ['token'], + }, +) + +insights = ManagedCredentialType( + namespace='insights', + kind='insights', + name=gettext_noop('Insights'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'username', + 'label': gettext_noop('Username'), + 'type': 'string', + 'help_text': gettext_noop( + 'Username is required for basic authentication.', + ), + }, + { + 'id': 'password', + 'label': gettext_noop('Password'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop( + 'Password is required for basic authentication', + ), + }, + { + 'id': 'client_id', + 'label': gettext_noop('Client ID'), + 'type': 'string', + 'help_text': gettext_noop( + 'Enter client ID to create a service account credential.', + ), + }, + { + 'id': 'client_secret', + 'label': gettext_noop('Client Secret'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop( + 'Enter client secret to create a service account 
credential.', + ), + }, + ], + 'required': [], + }, + injectors={ + 'extra_vars': { + 'scm_username': '{{username}}', + 'scm_password': '{{password}}', + 'client_id': '{{client_id}}', + 'client_secret': '{{client_secret}}', + 'authentication': '{% if client_id %}service_account{% else %}basic{% endif %}', + }, + 'env': { + 'INSIGHTS_USER': '{{username}}', + 'INSIGHTS_PASSWORD': '{{password}}', + 'INSIGHTS_CLIENT_ID': '{{client_id}}', + 'INSIGHTS_CLIENT_SECRET': '{{client_secret}}', + }, + }, +) + + +ManagedCredentialType( + namespace='rhv', + kind='cloud', + name=gettext_noop('Red Hat Virtualization'), + managed=True, + inputs={ + 'fields': [ + {'id': 'host', 'label': gettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': gettext_noop('The host to authenticate with.')}, + {'id': 'username', 'label': gettext_noop('Username'), 'type': 'string'}, + { + 'id': 'password', + 'label': gettext_noop('Password'), + 'type': 'string', + 'secret': True, + }, + { + 'id': 'ca_file', + 'label': gettext_noop('CA File'), + 'type': 'string', + 'help_text': gettext_noop('Absolute file path to the CA file to use (optional)'), + }, + ], + 'required': ['host', 'username', 'password'], + }, + injectors={ + # The duplication here is intentional; the ovirt4 inventory plugin + # writes a .ini file for authentication, while the ansible modules for + # ovirt4 use a separate authentication process that support + # environment variables; by injecting both, we support both + 'file': { + 'template': '\n'.join( + [ + '[ovirt]', + 'ovirt_url={{host}}', + 'ovirt_username={{username}}', + 'ovirt_password={{password}}', + '{% if ca_file %}ovirt_ca_file={{ca_file}}{% endif %}', + ] + ) + }, + 'env': {'OVIRT_INI_PATH': '{{tower.filename}}', 'OVIRT_URL': '{{host}}', 'OVIRT_USERNAME': '{{username}}', 'OVIRT_PASSWORD': '{{password}}'}, + }, +) + +ManagedCredentialType( + namespace='controller', + kind='cloud', + name=gettext_noop('Red Hat Ansible Automation Platform'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'host', + 'label': gettext_noop('Red Hat Ansible Automation Platform'), + 'type': 'string', + 'help_text': gettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'), + }, + { + 'id': 'username', + 'label': gettext_noop('Username'), + 'type': 'string', + 'help_text': gettext_noop( + 'Red Hat Ansible Automation Platform username id to authenticate as.This should not be set if an OAuth token is being used.' 
+ ), + }, + { + 'id': 'password', + 'label': gettext_noop('Password'), + 'type': 'string', + 'secret': True, + }, + { + 'id': 'oauth_token', + 'label': gettext_noop('OAuth Token'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop('An OAuth token to use to authenticate with.This should not be set if username/password are being used.'), + }, + {'id': 'verify_ssl', 'label': gettext_noop('Verify SSL'), 'type': 'boolean', 'secret': False}, + ], + 'required': ['host'], + }, + injectors={ + 'env': { + 'TOWER_HOST': '{{host}}', + 'TOWER_USERNAME': '{{username}}', + 'TOWER_PASSWORD': '{{password}}', + 'TOWER_VERIFY_SSL': '{{verify_ssl}}', + 'TOWER_OAUTH_TOKEN': '{{oauth_token}}', + 'CONTROLLER_HOST': '{{host}}', + 'CONTROLLER_USERNAME': '{{username}}', + 'CONTROLLER_PASSWORD': '{{password}}', + 'CONTROLLER_VERIFY_SSL': '{{verify_ssl}}', + 'CONTROLLER_OAUTH_TOKEN': '{{oauth_token}}', + } + }, +) + +ManagedCredentialType( + namespace='kubernetes_bearer_token', + kind='kubernetes', + name=gettext_noop('OpenShift or Kubernetes API Bearer Token'), + inputs={ + 'fields': [ + { + 'id': 'host', + 'label': gettext_noop('OpenShift or Kubernetes API Endpoint'), + 'type': 'string', + 'help_text': gettext_noop('The OpenShift or Kubernetes API Endpoint to authenticate with.'), + }, + { + 'id': 'bearer_token', + 'label': gettext_noop('API authentication bearer token'), + 'type': 'string', + 'secret': True, + }, + { + 'id': 'verify_ssl', + 'label': gettext_noop('Verify SSL'), + 'type': 'boolean', + 'default': True, + }, + { + 'id': 'ssl_ca_cert', + 'label': gettext_noop('Certificate Authority data'), + 'type': 'string', + 'secret': True, + 'multiline': True, + }, + ], + 'required': ['host', 'bearer_token'], + }, +) + +ManagedCredentialType( + namespace='registry', + kind='registry', + name=gettext_noop('Container Registry'), + inputs={ + 'fields': [ + { + 'id': 'host', + 'label': gettext_noop('Authentication URL'), + 'type': 'string', + 'help_text': gettext_noop('Authentication endpoint for the container registry.'), + 'default': 'quay.io', + }, + { + 'id': 'username', + 'label': gettext_noop('Username'), + 'type': 'string', + }, + { + 'id': 'password', + 'label': gettext_noop('Password or Token'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop('A password or token used to authenticate with'), + }, + { + 'id': 'verify_ssl', + 'label': gettext_noop('Verify SSL'), + 'type': 'boolean', + 'default': True, + }, + ], + 'required': ['host'], + }, +) + + +ManagedCredentialType( + namespace='galaxy_api_token', + kind='galaxy', + name=gettext_noop('Ansible Galaxy/Automation Hub API Token'), + inputs={ + 'fields': [ + { + 'id': 'url', + 'label': gettext_noop('Galaxy Server URL'), + 'type': 'string', + 'help_text': gettext_noop('The URL of the Galaxy instance to connect to.'), + }, + { + 'id': 'auth_url', + 'label': gettext_noop('Auth Server URL'), + 'type': 'string', + 'help_text': gettext_noop('The URL of a Keycloak server token_endpoint, if using SSO auth.'), + }, + { + 'id': 'token', + 'label': gettext_noop('API Token'), + 'type': 'string', + 'secret': True, + 'help_text': gettext_noop('A token to use for authentication against the Galaxy instance.'), + }, + ], + 'required': ['url'], + }, +) + +ManagedCredentialType( + namespace='gpg_public_key', + kind='cryptography', + name=gettext_noop('GPG Public Key'), + inputs={ + 'fields': [ + { + 'id': 'gpg_public_key', + 'label': gettext_noop('GPG Public Key'), + 'type': 'string', + 'secret': True, + 'multiline': True, + 'help_text': 
gettext_noop('GPG Public Key used to validate content signatures.'), + }, + ], + 'required': ['gpg_public_key'], + }, +) + +ManagedCredentialType( + namespace='terraform', + kind='cloud', + name=gettext_noop('Terraform backend configuration'), + managed=True, + inputs={ + 'fields': [ + { + 'id': 'configuration', + 'label': gettext_noop('Backend configuration'), + 'type': 'string', + 'secret': True, + 'multiline': True, + 'help_text': gettext_noop('Terraform backend config as Hashicorp configuration language.'), + }, + { + 'id': 'gce_credentials', + 'label': gettext_noop('Google Cloud Platform account credentials'), + 'type': 'string', + 'secret': True, + 'multiline': True, + 'help_text': gettext_noop('Google Cloud Platform account credentials in JSON format.'), + }, + ], + 'required': ['configuration'], + }, +) + + +class CredentialInputSource(PrimordialModel): + class Meta: + app_label = 'main' + unique_together = (('target_credential', 'input_field_name'),) + ordering = ( + 'target_credential', + 'source_credential', + 'input_field_name', + ) + + FIELDS_TO_PRESERVE_AT_COPY = ['source_credential', 'metadata', 'input_field_name'] + + target_credential = models.ForeignKey( + 'Credential', + related_name='input_sources', + on_delete=models.CASCADE, + null=True, + ) + source_credential = models.ForeignKey( + 'Credential', + related_name='target_input_sources', + on_delete=models.CASCADE, + null=True, + ) + input_field_name = models.CharField( + max_length=1024, + ) + metadata = DynamicCredentialInputField(blank=True, default=dict) + + def clean_target_credential(self): + if self.target_credential.credential_type.kind == 'external': + raise ValidationError(_('Target must be a non-external credential')) + return self.target_credential + + def clean_source_credential(self): + if self.source_credential.credential_type.kind != 'external': + raise ValidationError(_('Source must be an external credential')) + return self.source_credential + + def clean_input_field_name(self): + defined_fields = self.target_credential.credential_type.defined_fields + if self.input_field_name not in defined_fields: + raise ValidationError(_('Input field must be defined on target credential (options are {}).'.format(', '.join(sorted(defined_fields))))) + return self.input_field_name + + def get_input_value(self): + backend = self.source_credential.credential_type.plugin.backend + backend_kwargs = {} + for field_name, value in self.source_credential.inputs.items(): + if field_name in self.source_credential.credential_type.secret_fields: + backend_kwargs[field_name] = decrypt_field(self.source_credential, field_name) + else: + backend_kwargs[field_name] = value + + backend_kwargs.update(self.metadata) + + with set_environ(**settings.AWX_TASK_ENV): + return backend(**backend_kwargs) + + def get_absolute_url(self, request=None): + view_name = 'api:credential_input_source_detail' + return reverse(view_name, kwargs={'pk': self.pk}, request=request) + + +for ns, plugin in credential_plugins.items(): + CredentialType.load_plugin(ns, plugin) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index f389fa1331..00df2de7a0 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -1024,7 +1024,9 @@ class InventorySourceOptions(BaseModel): # If a credential was provided, it's important that it matches # the actual inventory source being used (Amazon requires Amazon # credentials; Rackspace requires Rackspace credentials; etc...) 
- if source.replace('ec2', 'aws') != cred.kind: + if source == 'vmware_esxi' and source.replace('vmware_esxi', 'vmware') != cred.kind: + return _('VMWARE inventory sources (such as %s) require credentials for the matching cloud service.') % source + if source == 'ec2' and source.replace('ec2', 'aws') != cred.kind: return _('Cloud-based inventory sources (such as %s) require credentials for the matching cloud service.') % source # Allow an EC2 source to omit the credential. If Tower is running on # an EC2 instance with an IAM Role assigned, boto will use credentials diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index fa79ffc406..a6597d7335 100644 --- a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -27,6 +27,7 @@ from django.conf import settings # Ansible_base app from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment, RoleTeamAssignment +from ansible_base.rbac.sync import maybe_reverse_sync_assignment, maybe_reverse_sync_unassignment from ansible_base.rbac import permission_registry from ansible_base.lib.utils.models import get_type_for_model @@ -560,24 +561,12 @@ def get_role_definition(role): f = obj._meta.get_field(role.role_field) action_name = f.name.rsplit("_", 1)[0] model_print = type(obj).__name__ + rd_name = f'{model_print} {action_name.title()} Compat' perm_list = get_role_codenames(role) defaults = { 'content_type': permission_registry.content_type_model.objects.get_by_natural_key(role.content_type.app_label, role.content_type.model), 'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility', } - # use Controller-specific role definitions for Team/Organization and member/admin - # instead of platform role definitions - # these should exist in the system already, so just do a lookup by role definition name - if model_print in ['Team', 'Organization'] and action_name in ['member', 'admin']: - rd_name = f'Controller {model_print} {action_name.title()}' - rd = RoleDefinition.objects.filter(name=rd_name).first() - if rd: - return rd - else: - return RoleDefinition.objects.create_from_permissions(permissions=perm_list, name=rd_name, managed=True, **defaults) - - else: - rd_name = f'{model_print} {action_name.title()} Compat' with impersonate(None): try: @@ -633,12 +622,14 @@ def get_role_from_object_role(object_role): return getattr(object_role.content_object, role_name) -def give_or_remove_permission(role, actor, giving=True): +def give_or_remove_permission(role, actor, giving=True, rd=None): obj = role.content_object if obj is None: return - rd = get_role_definition(role) - rd.give_or_remove_permission(actor, obj, giving=giving) + if not rd: + rd = get_role_definition(role) + assignment = rd.give_or_remove_permission(actor, obj, giving=giving) + return assignment class SyncEnabled(threading.local): @@ -690,7 +681,15 @@ def sync_members_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs) role = Role.objects.get(pk=user_or_role_id) else: user = get_user_model().objects.get(pk=user_or_role_id) - give_or_remove_permission(role, user, giving=is_giving) + rd = get_role_definition(role) + assignment = give_or_remove_permission(role, user, giving=is_giving, rd=rd) + + # sync to resource server + if rbac_sync_enabled.enabled: + if is_giving: + maybe_reverse_sync_assignment(assignment) + else: + maybe_reverse_sync_unassignment(rd, user, role.content_object) def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs): @@ -733,7 +732,90 @@ def 
sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs) from awx.main.models.organization import Team team = Team.objects.get(pk=parent_role.object_id) - give_or_remove_permission(child_role, team, giving=is_giving) + rd = get_role_definition(child_role) + assignment = give_or_remove_permission(child_role, team, giving=is_giving, rd=rd) + + # sync to resource server + if rbac_sync_enabled.enabled: + if is_giving: + maybe_reverse_sync_assignment(assignment) + else: + maybe_reverse_sync_unassignment(rd, team, child_role.content_object) + + +ROLE_DEFINITION_TO_ROLE_FIELD = { + 'Organization Member': 'member_role', + 'WorkflowJobTemplate Admin': 'admin_role', + 'Organization WorkflowJobTemplate Admin': 'workflow_admin_role', + 'WorkflowJobTemplate Execute': 'execute_role', + 'WorkflowJobTemplate Approve': 'approval_role', + 'InstanceGroup Admin': 'admin_role', + 'InstanceGroup Use': 'use_role', + 'Organization ExecutionEnvironment Admin': 'execution_environment_admin_role', + 'Project Admin': 'admin_role', + 'Organization Project Admin': 'project_admin_role', + 'Project Use': 'use_role', + 'Project Update': 'update_role', + 'JobTemplate Admin': 'admin_role', + 'Organization JobTemplate Admin': 'job_template_admin_role', + 'JobTemplate Execute': 'execute_role', + 'Inventory Admin': 'admin_role', + 'Organization Inventory Admin': 'inventory_admin_role', + 'Inventory Use': 'use_role', + 'Inventory Adhoc': 'adhoc_role', + 'Inventory Update': 'update_role', + 'Organization NotificationTemplate Admin': 'notification_admin_role', + 'Credential Admin': 'admin_role', + 'Organization Credential Admin': 'credential_admin_role', + 'Credential Use': 'use_role', + 'Team Admin': 'admin_role', + 'Team Member': 'member_role', + 'Organization Admin': 'admin_role', + 'Organization Audit': 'auditor_role', + 'Organization Execute': 'execute_role', + 'Organization Approval': 'approval_role', +} + + +def _sync_assignments_to_old_rbac(instance, delete=True): + from awx.main.signals import disable_activity_stream + + with disable_activity_stream(): + with disable_rbac_sync(): + field_name = ROLE_DEFINITION_TO_ROLE_FIELD.get(instance.role_definition.name) + if not field_name: + return + try: + role = getattr(instance.object_role.content_object, field_name) + # in the case RoleUserAssignment is being cascade deleted, then + # object_role might not exist. 
In which case the object is about to be removed + # anyways so just return + except ObjectDoesNotExist: + return + if isinstance(instance.actor, get_user_model()): + # user + if delete: + role.members.remove(instance.actor) + else: + role.members.add(instance.actor) + else: + # team + if delete: + instance.team.member_role.children.remove(role) + else: + instance.team.member_role.children.add(role) + + +@receiver(post_delete, sender=RoleUserAssignment) +@receiver(post_delete, sender=RoleTeamAssignment) +def sync_assignments_to_old_rbac_delete(instance, **kwargs): + _sync_assignments_to_old_rbac(instance, delete=True) + + +@receiver(post_save, sender=RoleUserAssignment) +@receiver(post_save, sender=RoleTeamAssignment) +def sync_user_assignments_to_old_rbac_create(instance, **kwargs): + _sync_assignments_to_old_rbac(instance, delete=False) ROLE_DEFINITION_TO_ROLE_FIELD = { diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 50dd2c8f3b..3a3ce545a5 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -1200,6 +1200,13 @@ class UnifiedJob( fd = StringIO(fd.getvalue().replace('\\r\\n', '\n')) return fd + def _fix_double_escapes(self, content): + """ + Collapse double-escaped sequences into single-escaped form. + """ + # Replace \\ followed by one of ' " \ n r t + return re.sub(r'\\([\'"\\nrt])', r'\1', content) + def _escape_ascii(self, content): # Remove ANSI escape sequences used to embed event data. content = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', content) @@ -1207,12 +1214,14 @@ class UnifiedJob( content = re.sub(r'\x1b[^m]*m', '', content) return content - def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False): + def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False, fix_escapes=False): content = self.result_stdout_raw_handle().read() if redact_sensitive: content = UriCleaner.remove_sensitive(content) if escape_ascii: content = self._escape_ascii(content) + if fix_escapes: + content = self._fix_double_escapes(content) return content @property @@ -1221,9 +1230,10 @@ class UnifiedJob( @property def result_stdout(self): - return self._result_stdout_raw(escape_ascii=True) + # Human-facing output should fix escapes + return self._result_stdout_raw(escape_ascii=True, fix_escapes=True) - def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False): + def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False, fix_escapes=False): return_buffer = StringIO() if end_line is not None: end_line = int(end_line) @@ -1246,14 +1256,18 @@ class UnifiedJob( return_buffer = UriCleaner.remove_sensitive(return_buffer) if escape_ascii: return_buffer = self._escape_ascii(return_buffer) + if fix_escapes: + return_buffer = self._fix_double_escapes(return_buffer) return return_buffer, start_actual, end_actual, absolute_end def result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=False): + # Raw should NOT fix escapes return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive) def result_stdout_limited(self, start_line=0, end_line=None, redact_sensitive=False): - return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive, escape_ascii=True) + # Human-facing should fix escapes + return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive, escape_ascii=True, fix_escapes=True) @property def workflow_job_id(self): diff --git 
a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py index 44693e0865..83327e99aa 100644 --- a/awx/main/notifications/irc_backend.py +++ b/awx/main/notifications/irc_backend.py @@ -5,8 +5,6 @@ import time import ssl import logging -import irc.client - from django.utils.encoding import smart_str from django.utils.translation import gettext_lazy as _ @@ -16,6 +14,19 @@ from awx.main.notifications.custom_notification_base import CustomNotificationBa logger = logging.getLogger('awx.main.notifications.irc_backend') +def _irc(): + """ + Prime the real jaraco namespace before importing irc.* so that + setuptools' vendored 'setuptools._vendor.jaraco' doesn't shadow + external 'jaraco.*' packages (e.g., jaraco.stream). + """ + import jaraco.stream # ensure the namespace package is established # noqa: F401 + import irc.client as irc_client + import irc.connection as irc_connection + + return irc_client, irc_connection + + class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase): init_parameters = { "server": {"label": "IRC Server Address", "type": "string"}, @@ -40,12 +51,15 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase): def open(self): if self.connection is not None: return False + + irc_client, irc_connection = _irc() + if self.use_ssl: - connection_factory = irc.connection.Factory(wrapper=ssl.wrap_socket) + connection_factory = irc_connection.Factory(wrapper=ssl.wrap_socket) else: - connection_factory = irc.connection.Factory() + connection_factory = irc_connection.Factory() try: - self.reactor = irc.client.Reactor() + self.reactor = irc_client.Reactor() self.connection = self.reactor.server().connect( self.server, self.port, @@ -53,7 +67,7 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase): password=self.password, connect_factory=connection_factory, ) - except irc.client.ServerConnectionError as e: + except irc_client.ServerConnectionError as e: logger.error(smart_str(_("Exception connecting to irc server: {}").format(e))) if not self.fail_silently: raise @@ -65,8 +79,9 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase): self.connection = None def on_connect(self, connection, event): + irc_client, _ = _irc() for c in self.channels: - if irc.client.is_channel(c): + if irc_client.is_channel(c): connection.join(c) else: for m in self.channels[c]: diff --git a/awx/main/tasks/host_indirect.py b/awx/main/tasks/host_indirect.py index e57b437c4a..632a04a687 100644 --- a/awx/main/tasks/host_indirect.py +++ b/awx/main/tasks/host_indirect.py @@ -12,7 +12,7 @@ from django.db import transaction # Django flags from flags.state import flag_enabled -from awx.main.dispatch.publish import task as task_awx +from awx.main.dispatch.publish import task from awx.main.dispatch import get_task_queuename from awx.main.models.indirect_managed_node_audit import IndirectManagedNodeAudit from awx.main.models.event_query import EventQuery @@ -159,7 +159,7 @@ def cleanup_old_indirect_host_entries() -> None: IndirectManagedNodeAudit.objects.filter(created__lt=limit).delete() -@task_awx(queue=get_task_queuename) +@task(queue=get_task_queuename) def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None: try: job = Job.objects.get(id=job_id) @@ -201,7 +201,7 @@ def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> Non logger.exception(f'Error processing indirect host data for job_id={job_id}') -@task_awx(queue=get_task_queuename) +@task(queue=get_task_queuename) def 
diff --git a/awx/main/tasks/host_indirect.py b/awx/main/tasks/host_indirect.py
index e57b437c4a..632a04a687 100644
--- a/awx/main/tasks/host_indirect.py
+++ b/awx/main/tasks/host_indirect.py
@@ -12,7 +12,7 @@ from django.db import transaction
 # Django flags
 from flags.state import flag_enabled
 
-from awx.main.dispatch.publish import task as task_awx
+from awx.main.dispatch.publish import task
 from awx.main.dispatch import get_task_queuename
 from awx.main.models.indirect_managed_node_audit import IndirectManagedNodeAudit
 from awx.main.models.event_query import EventQuery
@@ -159,7 +159,7 @@ def cleanup_old_indirect_host_entries() -> None:
     IndirectManagedNodeAudit.objects.filter(created__lt=limit).delete()
 
 
-@task_awx(queue=get_task_queuename)
+@task(queue=get_task_queuename)
 def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None:
     try:
         job = Job.objects.get(id=job_id)
@@ -201,7 +201,7 @@ def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> Non
         logger.exception(f'Error processing indirect host data for job_id={job_id}')
 
 
-@task_awx(queue=get_task_queuename)
+@task(queue=get_task_queuename)
 def cleanup_and_save_indirect_host_entries_fallback() -> None:
     if not flag_enabled("FEATURE_INDIRECT_NODE_COUNTING_ENABLED"):
         return
diff --git a/awx/main/tasks/jobs.py b/awx/main/tasks/jobs.py
index a30a639ed4..e59f30ad66 100644
--- a/awx/main/tasks/jobs.py
+++ b/awx/main/tasks/jobs.py
@@ -21,6 +21,8 @@ from django.db import transaction
 
 # Shared code for the AWX platform
 from awx_plugins.interfaces._temporary_private_container_api import CONTAINER_ROOT, get_incontainer_path
+from django.utils.translation import gettext_lazy as _
+from rest_framework.exceptions import PermissionDenied
 
 # Runner
 import ansible_runner
@@ -87,8 +89,6 @@ from awx.main.utils.common import (
 from awx.conf.license import get_license
 from awx.main.utils.handlers import SpecialInventoryHandler
 from awx.main.utils.update_model import update_model
-from rest_framework.exceptions import PermissionDenied
-from django.utils.translation import gettext_lazy as _
 
 # Django flags
 from flags.state import flag_enabled
diff --git a/awx/main/tasks/system.py b/awx/main/tasks/system.py
index d80bace63d..4164cd3b8c 100644
--- a/awx/main/tasks/system.py
+++ b/awx/main/tasks/system.py
@@ -13,6 +13,25 @@ from datetime import datetime
 from distutils.version import LooseVersion as Version
 from io import StringIO
 
+# Django
+from django.conf import settings
+from django.db import connection, transaction, DatabaseError, IntegrityError
+from django.db.models.fields.related import ForeignKey
+from django.utils.timezone import now, timedelta
+from django.utils.encoding import smart_str
+from django.contrib.auth.models import User
+from django.utils.translation import gettext_lazy as _
+from django.utils.translation import gettext_noop
+from django.core.cache import cache
+from django.core.exceptions import ObjectDoesNotExist
+from django.db.models.query import QuerySet
+
+# Django-CRUM
+from crum import impersonate
+
+# Django flags
+from flags.state import flag_enabled
+
 # Runner
 import ansible_runner.cleanup
 import psycopg
@@ -72,6 +91,13 @@ from awx.main.tasks.receptor import administrative_workunit_reaper, get_receptor
 from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
 from awx.main.utils.reload import stop_local_services
 from dispatcherd.publish import task
+from awx.main.tasks.receptor import get_receptor_ctl, worker_info, worker_cleanup, administrative_workunit_reaper, write_receptor_config
+from awx.main.consumers import emit_channel_notification
+from awx.main import analytics
+from awx.conf import settings_registry
+from awx.main.analytics.subsystem_metrics import DispatcherMetrics
+
+from rest_framework.exceptions import PermissionDenied
 
 logger = logging.getLogger('awx.main.tasks.system')
diff --git a/awx/main/tests/data/inventory/plugins/vmware_esxi/env.json b/awx/main/tests/data/inventory/plugins/vmware_esxi/env.json
new file mode 100644
index 0000000000..e2e8c5bd91
--- /dev/null
+++ b/awx/main/tests/data/inventory/plugins/vmware_esxi/env.json
@@ -0,0 +1,6 @@
+{
+    "VMWARE_HOST": "https://foo.invalid",
+    "VMWARE_PASSWORD": "fooo",
+    "VMWARE_USER": "fooo",
+    "VMWARE_VALIDATE_CERTS": "False"
+}
\ No newline at end of file
diff --git a/awx/main/tests/data/projects/host_query/meta/event_query.yml b/awx/main/tests/data/projects/host_query/meta/event_query.yml
new file mode 100644
index 0000000000..0c9e398c66
--- /dev/null
+++ b/awx/main/tests/data/projects/host_query/meta/event_query.yml
@@ -0,0 +1,4 @@
+---
+{
+  "demo.query.example": ""
+}
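The task files above register background jobs with AWX's dispatcher. As a rough sketch of how such a task is declared and submitted (`record_audit_entry` is a hypothetical name, and the celery-style `.delay()`/`.apply_async()` helpers are assumed from AWX's dispatch API rather than verified here):

from awx.main.dispatch import get_task_queuename
from awx.main.dispatch.publish import task


@task(queue=get_task_queuename)
def record_audit_entry(job_id: int) -> None:
    # The body runs inside a dispatcher worker process, not the caller.
    ...


# Hypothetical submission from web/API code:
record_audit_entry.delay(42)               # enqueue with positional args
record_audit_entry.apply_async(args=[42])  # equivalent explicit form
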
diff --git a/awx/main/tests/data/sleep_task.py b/awx/main/tests/data/sleep_task.py
index 59bc6254e2..f9ff58b69a 100644
--- a/awx/main/tests/data/sleep_task.py
+++ b/awx/main/tests/data/sleep_task.py
@@ -1,57 +1,17 @@
 import time
 import logging
 
-from dispatcherd.publish import task
-
-from django.db import connection
-
 from awx.main.dispatch import get_task_queuename
-from awx.main.dispatch.publish import task as old_task
-
-from ansible_base.lib.utils.db import advisory_lock
+from awx.main.dispatch.publish import task
 
 logger = logging.getLogger(__name__)
 
 
-@old_task(queue=get_task_queuename)
+@task(queue=get_task_queuename)
 def sleep_task(seconds=10, log=False):
     if log:
         logger.info('starting sleep_task')
     time.sleep(seconds)
     if log:
         logger.info('finished sleep_task')
-
-
-@task()
-def sleep_break_connection(seconds=0.2):
-    """
-    Interact with the database in an intentionally breaking way.
-    After this finishes, queries made by this connection are expected to error
-    with "the connection is closed"
-    This is obviously a problem for any task that comes afterwards.
-    So this is used to break things so that the fixes may be demonstrated.
-    """
-    with connection.cursor() as cursor:
-        cursor.execute(f"SET idle_session_timeout = '{seconds / 2}s';")
-
-    logger.info(f'sleeping for {seconds}s > {seconds / 2}s session timeout')
-    time.sleep(seconds)
-
-    for i in range(1, 3):
-        logger.info(f'\nRunning query number {i}')
-        try:
-            with connection.cursor() as cursor:
-                cursor.execute("SELECT 1;")
-                logger.info('  query worked, not expected')
-        except Exception as exc:
-            logger.info(f'  query errored as expected\ntype: {type(exc)}\nstr: {str(exc)}')
-
-    logger.info(f'Connection present: {bool(connection.connection)}, reports closed: {getattr(connection.connection, "closed", "not_found")}')
-
-
-@task()
-def advisory_lock_exception():
-    time.sleep(0.2)  # so it can fill up all the workers...
hacky for now - with advisory_lock('advisory_lock_exception', lock_session_timeout_milliseconds=20): - raise RuntimeError('this is an intentional error') diff --git a/awx/main/tests/functional/api/test_credential.py b/awx/main/tests/functional/api/test_credential.py index d3efb2133a..2ed8a6a88c 100644 --- a/awx/main/tests/functional/api/test_credential.py +++ b/awx/main/tests/functional/api/test_credential.py @@ -1224,6 +1224,30 @@ def test_custom_credential_type_create(get, post, organization, admin): assert decrypt_field(cred, 'api_token') == 'secret' +@pytest.mark.django_db +def test_galaxy_create_ok(post, organization, admin): + params = { + 'credential_type': 1, + 'name': 'Galaxy credential', + 'inputs': { + 'url': 'https://galaxy.ansible.com', + 'token': 'some_galaxy_token', + }, + } + galaxy = CredentialType.defaults['galaxy_api_token']() + galaxy.save() + params['user'] = admin.id + params['credential_type'] = galaxy.pk + response = post(reverse('api:credential_list'), params, admin) + assert response.status_code == 201 + + assert Credential.objects.count() == 1 + cred = Credential.objects.all()[:1].get() + assert cred.credential_type == galaxy + assert cred.inputs['url'] == 'https://galaxy.ansible.com' + assert decrypt_field(cred, 'token') == 'some_galaxy_token' + + # # misc xfail conditions # diff --git a/awx/main/tests/functional/api/test_instance.py b/awx/main/tests/functional/api/test_instance.py index a2918e968e..3afd3184c8 100644 --- a/awx/main/tests/functional/api/test_instance.py +++ b/awx/main/tests/functional/api/test_instance.py @@ -1,3 +1,5 @@ +from unittest import mock + import pytest from awx.api.versioning import reverse @@ -5,6 +7,9 @@ from awx.main.models.activity_stream import ActivityStream from awx.main.models.ha import Instance from django.test.utils import override_settings +from django.http import HttpResponse + +from rest_framework import status INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, node_type='execution', memory=36000000000, cpu_capacity=6, mem_capacity=42) @@ -87,3 +92,11 @@ def test_custom_hostname_regex(post, admin_user): "peers": [], } post(url=url, user=admin_user, data=data, expect=value[1]) + + +def test_instance_install_bundle(get, admin_user, system_auditor): + instance = Instance.objects.create(**INSTANCE_KWARGS) + url = reverse('api:instance_install_bundle', kwargs={'pk': instance.pk}) + with mock.patch('awx.api.views.instance_install_bundle.InstanceInstallBundle.get', return_value=HttpResponse({'test': 'data'}, status=status.HTTP_200_OK)): + get(url=url, user=admin_user, expect=200) + get(url=url, user=system_auditor, expect=403) diff --git a/awx/main/tests/functional/api/test_inventory.py b/awx/main/tests/functional/api/test_inventory.py index f09e2d511d..ed39d78f7d 100644 --- a/awx/main/tests/functional/api/test_inventory.py +++ b/awx/main/tests/functional/api/test_inventory.py @@ -521,6 +521,19 @@ class TestInventorySourceCredential: patch(url=inv_src.get_absolute_url(), data={'credential': aws_cred.pk}, expect=200, user=admin_user) assert list(inv_src.credentials.values_list('id', flat=True)) == [aws_cred.pk] + def test_vmware_cred_create_esxi_source(self, inventory, admin_user, organization, post, get): + """Test that a vmware esxi source can be added with a vmware credential""" + from awx.main.models.credential import Credential, CredentialType + + vmware = CredentialType.defaults['vmware']() + vmware.save() + vmware_cred = Credential.objects.create(credential_type=vmware, name="bar", organization=organization) + inv_src = 
InventorySource.objects.create(inventory=inventory, name='foobar', source='vmware_esxi') + r = post(url=reverse('api:inventory_source_credentials_list', kwargs={'pk': inv_src.pk}), data={'id': vmware_cred.pk}, expect=204, user=admin_user) + g = get(inv_src.get_absolute_url(), admin_user) + assert r.status_code == 204 + assert g.data['credential'] == vmware_cred.pk + @pytest.mark.django_db class TestControlledBySCM: diff --git a/awx/main/tests/functional/api/test_user.py b/awx/main/tests/functional/api/test_user.py index e3747fa82f..1efaa9b2b1 100644 --- a/awx/main/tests/functional/api/test_user.py +++ b/awx/main/tests/functional/api/test_user.py @@ -5,6 +5,7 @@ import pytest from django.contrib.sessions.middleware import SessionMiddleware from django.test.utils import override_settings + from awx.main.models import User from awx.api.versioning import reverse diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 6fbed69760..8c48ac3a23 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -1,3 +1,5 @@ +import logging + # Python import pytest from unittest import mock @@ -8,7 +10,7 @@ import importlib # Django from django.urls import resolve from django.http import Http404 -from django.apps import apps +from django.apps import apps as global_apps from django.core.handlers.exception import response_for_exception from django.contrib.auth.models import User from django.core.serializers.json import DjangoJSONEncoder @@ -47,6 +49,8 @@ from awx.main.models.ad_hoc_commands import AdHocCommand from awx.main.models.execution_environments import ExecutionEnvironment from awx.main.utils import is_testing +logger = logging.getLogger(__name__) + __SWAGGER_REQUESTS__ = {} @@ -54,8 +58,17 @@ __SWAGGER_REQUESTS__ = {} dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial') +def create_service_id(app_config, apps=global_apps, **kwargs): + try: + apps.get_model("dab_resource_registry", "ServiceID") + except LookupError: + logger.info('Looks like reverse migration, not creating resource registry ServiceID') + return + dab_rr_initial.create_service_id(apps, None) + + if is_testing(): - post_migrate.connect(lambda **kwargs: dab_rr_initial.create_service_id(apps, None)) + post_migrate.connect(create_service_id) @pytest.fixture(scope="session") @@ -126,7 +139,7 @@ def execution_environment(): @pytest.fixture def setup_managed_roles(): "Run the migration script to pre-create managed role definitions" - setup_managed_role_definitions(apps, None) + setup_managed_role_definitions(global_apps, None) @pytest.fixture diff --git a/awx/main/tests/functional/dab_rbac/test_consolidate_teams.py b/awx/main/tests/functional/dab_rbac/test_consolidate_teams.py new file mode 100644 index 0000000000..1e42059e56 --- /dev/null +++ b/awx/main/tests/functional/dab_rbac/test_consolidate_teams.py @@ -0,0 +1,147 @@ +import pytest + +from django.contrib.contenttypes.models import ContentType +from django.test import override_settings +from django.apps import apps + +from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment, RoleTeamAssignment +from ansible_base.rbac.migrations._utils import give_permissions + +from awx.main.models import User, Team +from awx.main.migrations._dab_rbac import consolidate_indirect_user_roles + + +@pytest.mark.django_db +@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True) +def test_consolidate_indirect_user_roles_with_nested_teams(setup_managed_roles, 
organization): + """ + Test the consolidate_indirect_user_roles function with a nested team hierarchy. + Setup: + - Users: A, B, C, D + - Teams: E, F, G + - Direct assignments: A→(E,F,G), B→E, C→F, D→G + - Team hierarchy: F→E (F is member of E), G→F (G is member of F) + Expected result after consolidation: + - Team E should have users: A, B, C, D (A directly, B directly, C through F, D through G→F) + - Team F should have users: A, C, D (A directly, C directly, D through G) + - Team G should have users: A, D (A directly, D directly) + """ + user_a = User.objects.create_user(username='user_a') + user_b = User.objects.create_user(username='user_b') + user_c = User.objects.create_user(username='user_c') + user_d = User.objects.create_user(username='user_d') + + team_e = Team.objects.create(name='Team E', organization=organization) + team_f = Team.objects.create(name='Team F', organization=organization) + team_g = Team.objects.create(name='Team G', organization=organization) + + # Get role definition and content type for give_permissions + team_member_role = RoleDefinition.objects.get(name='Team Member') + team_content_type = ContentType.objects.get_for_model(Team) + + # Assign users to teams + give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_e.id, content_type_id=team_content_type.id) + give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_f.id, content_type_id=team_content_type.id) + give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_g.id, content_type_id=team_content_type.id) + give_permissions(apps=apps, rd=team_member_role, users=[user_b], object_id=team_e.id, content_type_id=team_content_type.id) + give_permissions(apps=apps, rd=team_member_role, users=[user_c], object_id=team_f.id, content_type_id=team_content_type.id) + give_permissions(apps=apps, rd=team_member_role, users=[user_d], object_id=team_g.id, content_type_id=team_content_type.id) + + # Mirror user assignments in the old RBAC system because signals don't run in tests + team_e.member_role.members.add(user_a.id, user_b.id) + team_f.member_role.members.add(user_a.id, user_c.id) + team_g.member_role.members.add(user_a.id, user_d.id) + + # Setup team-to-team relationships + give_permissions(apps=apps, rd=team_member_role, teams=[team_f], object_id=team_e.id, content_type_id=team_content_type.id) + give_permissions(apps=apps, rd=team_member_role, teams=[team_g], object_id=team_f.id, content_type_id=team_content_type.id) + + # Verify initial direct assignments + team_e_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_e.id).values_list('user_id', flat=True)) + assert team_e_users_before == {user_a.id, user_b.id} + team_f_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_f.id).values_list('user_id', flat=True)) + assert team_f_users_before == {user_a.id, user_c.id} + team_g_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_g.id).values_list('user_id', flat=True)) + assert team_g_users_before == {user_a.id, user_d.id} + + # Verify team-to-team relationships exist + assert RoleTeamAssignment.objects.filter(role_definition=team_member_role, team=team_f, object_id=team_e.id).exists() + assert RoleTeamAssignment.objects.filter(role_definition=team_member_role, team=team_g, object_id=team_f.id).exists() + + # Run the consolidation function + consolidate_indirect_user_roles(apps, None) + + # Verify 
consolidation + team_e_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_e.id).values_list('user_id', flat=True)) + assert team_e_users_after == {user_a.id, user_b.id, user_c.id, user_d.id}, f"Team E should have users A, B, C, D but has {team_e_users_after}" + team_f_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_f.id).values_list('user_id', flat=True)) + assert team_f_users_after == {user_a.id, user_c.id, user_d.id}, f"Team F should have users A, C, D but has {team_f_users_after}" + team_g_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_g.id).values_list('user_id', flat=True)) + assert team_g_users_after == {user_a.id, user_d.id}, f"Team G should have users A, D but has {team_g_users_after}" + + # Verify team member changes are mirrored to the old RBAC system + assert team_e_users_after == set(team_e.member_role.members.all().values_list('id', flat=True)) + assert team_f_users_after == set(team_f.member_role.members.all().values_list('id', flat=True)) + assert team_g_users_after == set(team_g.member_role.members.all().values_list('id', flat=True)) + + # Verify team-to-team relationships are removed after consolidation + assert not RoleTeamAssignment.objects.filter( + role_definition=team_member_role, team=team_f, object_id=team_e.id + ).exists(), "Team-to-team relationship F→E should be removed" + assert not RoleTeamAssignment.objects.filter( + role_definition=team_member_role, team=team_g, object_id=team_f.id + ).exists(), "Team-to-team relationship G→F should be removed" + + +@pytest.mark.django_db +@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True) +def test_consolidate_indirect_user_roles_no_team_relationships(setup_managed_roles, organization): + """ + Test that the function handles the case where there are no team-to-team relationships. + It should return early without making any changes. + """ + # Create a user and team with direct assignment + user = User.objects.create_user(username='test_user') + team = Team.objects.create(name='Test Team', organization=organization) + + team_member_role = RoleDefinition.objects.get(name='Team Member') + team_content_type = ContentType.objects.get_for_model(Team) + give_permissions(apps=apps, rd=team_member_role, users=[user], object_id=team.id, content_type_id=team_content_type.id) + + # Compare count of assignments before and after consolidation + assignments_before = RoleUserAssignment.objects.filter(role_definition=team_member_role).count() + consolidate_indirect_user_roles(apps, None) + assignments_after = RoleUserAssignment.objects.filter(role_definition=team_member_role).count() + + assert assignments_before == assignments_after, "Number of assignments should not change when there are no team-to-team relationships" + + +@pytest.mark.django_db +@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True) +def test_consolidate_indirect_user_roles_circular_reference(setup_managed_roles, organization): + """ + Test that the function handles circular team references without infinite recursion. 
+ """ + team_a = Team.objects.create(name='Team A', organization=organization) + team_b = Team.objects.create(name='Team B', organization=organization) + + # Create a user assigned to team A + user = User.objects.create_user(username='test_user') + + team_member_role = RoleDefinition.objects.get(name='Team Member') + team_content_type = ContentType.objects.get_for_model(Team) + give_permissions(apps=apps, rd=team_member_role, users=[user], object_id=team_a.id, content_type_id=team_content_type.id) + + # Create circular team relationships: A → B → A + give_permissions(apps=apps, rd=team_member_role, teams=[team_b], object_id=team_a.id, content_type_id=team_content_type.id) + give_permissions(apps=apps, rd=team_member_role, teams=[team_a], object_id=team_b.id, content_type_id=team_content_type.id) + + # Run the consolidation function - should not raise an exception + consolidate_indirect_user_roles(apps, None) + + # Both teams should have the user assigned + team_a_users = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_a.id).values_list('user_id', flat=True)) + team_b_users = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_b.id).values_list('user_id', flat=True)) + + assert user.id in team_a_users, "User should be assigned to team A" + assert user.id in team_b_users, "User should be assigned to team B" diff --git a/awx/main/tests/functional/dab_rbac/test_dab_rbac_api.py b/awx/main/tests/functional/dab_rbac/test_dab_rbac_api.py index eea7f4c6e7..3b09272d8c 100644 --- a/awx/main/tests/functional/dab_rbac/test_dab_rbac_api.py +++ b/awx/main/tests/functional/dab_rbac/test_dab_rbac_api.py @@ -151,14 +151,6 @@ def test_assign_credential_to_user_of_another_org(setup_managed_roles, credentia post(url=url, data={"user": org_admin.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201) -@pytest.mark.django_db -def test_team_member_role_not_assignable(team, rando, post, admin_user, setup_managed_roles): - member_rd = RoleDefinition.objects.get(name='Organization Member') - url = django_reverse('roleuserassignment-list') - r = post(url, data={'object_id': team.id, 'role_definition': member_rd.id, 'user': rando.id}, user=admin_user, expect=400) - assert 'Not managed locally' in str(r.data) - - @pytest.mark.django_db def test_adding_user_to_org_member_role(setup_managed_roles, organization, admin, bob, post, get): ''' @@ -178,10 +170,17 @@ def test_adding_user_to_org_member_role(setup_managed_roles, organization, admin @pytest.mark.django_db @pytest.mark.parametrize('actor', ['user', 'team']) @pytest.mark.parametrize('role_name', ['Organization Admin', 'Organization Member', 'Team Admin', 'Team Member']) -def test_prevent_adding_actor_to_platform_roles(setup_managed_roles, role_name, actor, organization, team, admin, bob, post): +def test_adding_actor_to_platform_roles(setup_managed_roles, role_name, actor, organization, team, admin, bob, post): ''' - Prevent user or team from being added to platform-level roles + Allow user to be added to platform-level roles + Exceptions: + - Team cannot be added to Organization Member or Admin role + - Team cannot be added to Team Admin or Team Member role ''' + if actor == 'team': + expect = 400 + else: + expect = 201 rd = RoleDefinition.objects.get(name=role_name) endpoint = 'roleuserassignment-list' if actor == 'user' else 'roleteamassignment-list' url = django_reverse(endpoint) @@ -189,37 +188,9 @@ def 
test_prevent_adding_actor_to_platform_roles(setup_managed_roles, role_name, data = {'object_id': object_id, 'role_definition': rd.id} actor_id = bob.id if actor == 'user' else team.id data[actor] = actor_id - r = post(url, data=data, user=admin, expect=400) - assert 'Not managed locally' in str(r.data) - - -@pytest.mark.django_db -@pytest.mark.parametrize('role_name', ['Controller Team Admin', 'Controller Team Member']) -def test_adding_user_to_controller_team_roles(setup_managed_roles, role_name, team, admin, bob, post, get): - ''' - Allow user to be added to Controller Team Admin or Controller Team Member - ''' - url_detail = reverse('api:team_detail', kwargs={'pk': team.id}) - get(url_detail, user=bob, expect=403) - - rd = RoleDefinition.objects.get(name=role_name) - url = django_reverse('roleuserassignment-list') - post(url, data={'object_id': team.id, 'role_definition': rd.id, 'user': bob.id}, user=admin, expect=201) - - get(url_detail, user=bob, expect=200) - - -@pytest.mark.django_db -@pytest.mark.parametrize('role_name', ['Controller Organization Admin', 'Controller Organization Member']) -def test_adding_user_to_controller_organization_roles(setup_managed_roles, role_name, organization, admin, bob, post, get): - ''' - Allow user to be added to Controller Organization Admin or Controller Organization Member - ''' - url_detail = reverse('api:organization_detail', kwargs={'pk': organization.id}) - get(url_detail, user=bob, expect=403) - - rd = RoleDefinition.objects.get(name=role_name) - url = django_reverse('roleuserassignment-list') - post(url, data={'object_id': organization.id, 'role_definition': rd.id, 'user': bob.id}, user=admin, expect=201) - - get(url, user=bob, expect=200) + r = post(url, data=data, user=admin, expect=expect) + if expect == 400: + if 'Organization' in role_name: + assert 'Assigning organization member permission to teams is not allowed' in str(r.data) + if 'Team' in role_name: + assert 'Assigning team permissions to other teams is not allowed' in str(r.data) diff --git a/awx/main/tests/functional/dab_rbac/test_managed_roles.py b/awx/main/tests/functional/dab_rbac/test_managed_roles.py index 594428fdef..82fd661fa5 100644 --- a/awx/main/tests/functional/dab_rbac/test_managed_roles.py +++ b/awx/main/tests/functional/dab_rbac/test_managed_roles.py @@ -15,6 +15,14 @@ def test_roles_to_not_create(setup_managed_roles): raise Exception(f'Found RoleDefinitions that should not exist: {bad_names}') +@pytest.mark.django_db +def test_org_admin_role(setup_managed_roles): + rd = RoleDefinition.objects.get(name='Organization Admin') + codenames = list(rd.permissions.values_list('codename', flat=True)) + assert 'view_inventory' in codenames + assert 'change_inventory' in codenames + + @pytest.mark.django_db def test_project_update_role(setup_managed_roles): """Role to allow updating a project on the object-level should exist""" @@ -31,32 +39,18 @@ def test_org_child_add_permission(setup_managed_roles): assert not DABPermission.objects.filter(codename='add_jobtemplate').exists() -@pytest.mark.django_db -def test_controller_specific_roles_have_correct_permissions(setup_managed_roles): - ''' - Controller specific roles should have the same permissions as the platform roles - e.g. 
Controller Team Admin should have same permission set as Team Admin - ''' - for rd_name in ['Controller Team Admin', 'Controller Team Member', 'Controller Organization Member', 'Controller Organization Admin']: - rd = RoleDefinition.objects.get(name=rd_name) - rd_platform = RoleDefinition.objects.get(name=rd_name.split('Controller ')[1]) - assert set(rd.permissions.all()) == set(rd_platform.permissions.all()) - - @pytest.mark.django_db @pytest.mark.parametrize('resource_name', ['Team', 'Organization']) @pytest.mark.parametrize('action', ['Member', 'Admin']) -def test_legacy_RBAC_uses_controller_specific_roles(setup_managed_roles, resource_name, action, team, bob, organization): +def test_legacy_RBAC_uses_platform_roles(setup_managed_roles, resource_name, action, team, bob, organization): ''' - Assignment to legacy RBAC roles should use controller specific role definitions - e.g. Controller Team Admin, Controller Team Member, Controller Organization Member, Controller Organization Admin + Assignment to legacy RBAC roles should use platform role definitions + e.g. Team Admin, Team Member, Organization Member, Organization Admin ''' resource = team if resource_name == 'Team' else organization if action == 'Member': resource.member_role.members.add(bob) else: resource.admin_role.members.add(bob) - rd = RoleDefinition.objects.get(name=f'Controller {resource_name} {action}') - rd_platform = RoleDefinition.objects.get(name=f'{resource_name} {action}') + rd = RoleDefinition.objects.get(name=f'{resource_name} {action}') assert RoleUserAssignment.objects.filter(role_definition=rd, user=bob, object_id=resource.id).exists() - assert not RoleUserAssignment.objects.filter(role_definition=rd_platform, user=bob, object_id=resource.id).exists() diff --git a/awx/main/tests/functional/dab_rbac/test_translation_layer.py b/awx/main/tests/functional/dab_rbac/test_translation_layer.py index 2c92763b1e..dbbc4926e9 100644 --- a/awx/main/tests/functional/dab_rbac/test_translation_layer.py +++ b/awx/main/tests/functional/dab_rbac/test_translation_layer.py @@ -173,20 +173,6 @@ def test_creator_permission(rando, admin_user, inventory, setup_managed_roles): assert rando in inventory.admin_role.members.all() -@pytest.mark.django_db -def test_team_team_read_role(rando, team, admin_user, post, setup_managed_roles): - orgs = [Organization.objects.create(name=f'foo-{i}') for i in range(2)] - teams = [Team.objects.create(name=f'foo-{i}', organization=orgs[i]) for i in range(2)] - teams[1].member_role.members.add(rando) - - # give second team read permission to first team through the API for regression testing - url = reverse('api:role_teams_list', kwargs={'pk': teams[0].read_role.pk, 'version': 'v2'}) - post(url, {'id': teams[1].id}, user=admin_user) - - # user should be able to view the first team - assert rando in teams[0].read_role - - @pytest.mark.django_db def test_implicit_parents_no_assignments(organization): """Through the normal course of creating models, we should not be changing DAB RBAC permissions""" @@ -206,19 +192,19 @@ def test_user_auditor_rel(organization, rando, setup_managed_roles): @pytest.mark.django_db @pytest.mark.parametrize('resource_name', ['Organization', 'Team']) @pytest.mark.parametrize('role_name', ['Member', 'Admin']) -def test_mapping_from_controller_role_definitions_to_roles(organization, team, rando, role_name, resource_name, setup_managed_roles): +def test_mapping_from_role_definitions_to_roles(organization, team, rando, role_name, resource_name, setup_managed_roles): """ - ensure mappings 
for controller roles are correct + ensure mappings for platform roles are correct e.g. - Controller Organization Member > organization.member_role - Controller Organization Admin > organization.admin_role - Controller Team Member > team.member_role - Controller Team Admin > team.admin_role + Organization Member > organization.member_role + Organization Admin > organization.admin_role + Team Member > team.member_role + Team Admin > team.admin_role """ resource = organization if resource_name == 'Organization' else team old_role_name = f"{role_name.lower()}_role" getattr(resource, old_role_name).members.add(rando) assignment = RoleUserAssignment.objects.get(user=rando) - assert assignment.role_definition.name == f'Controller {resource_name} {role_name}' + assert assignment.role_definition.name == f'{resource_name} {role_name}' old_role = get_role_from_object_role(assignment.object_role) assert old_role.id == getattr(resource, old_role_name).id diff --git a/awx/main/tests/functional/dab_rbac/test_translation_layer_new_to_old.py b/awx/main/tests/functional/dab_rbac/test_translation_layer_new_to_old.py index 946c76179f..92efef8387 100644 --- a/awx/main/tests/functional/dab_rbac/test_translation_layer_new_to_old.py +++ b/awx/main/tests/functional/dab_rbac/test_translation_layer_new_to_old.py @@ -35,21 +35,21 @@ class TestNewToOld: def test_new_to_old_rbac_team_member_addition(self, admin, post, team, bob, setup_managed_roles): ''' - Assign user to Controller Team Member role definition, should be added to team.member_role.members + Assign user to Team Member role definition, should be added to team.member_role.members ''' - rd = RoleDefinition.objects.get(name='Controller Team Member') + rd = RoleDefinition.objects.get(name='Team Member') url = get_relative_url('roleuserassignment-list') post(url, user=admin, data={'role_definition': rd.id, 'user': bob.id, 'object_id': team.id}, expect=201) assert bob in team.member_role.members.all() - def test_new_to_old_rbac_team_member_removal(self, admin, delete, team, bob): + def test_new_to_old_rbac_team_member_removal(self, admin, delete, team, bob, setup_managed_roles): ''' - Remove user from Controller Team Member role definition, should be deleted from team.member_role.members + Remove user from Team Member role definition, should be deleted from team.member_role.members ''' team.member_role.members.add(bob) - rd = RoleDefinition.objects.get(name='Controller Team Member') + rd = RoleDefinition.objects.get(name='Team Member') user_assignment = RoleUserAssignment.objects.get(user=bob, role_definition=rd, object_id=team.id) url = get_relative_url('roleuserassignment-detail', kwargs={'pk': user_assignment.id}) diff --git a/awx/main/tests/functional/github_app_test.py b/awx/main/tests/functional/github_app_test.py new file mode 100644 index 0000000000..2341a03654 --- /dev/null +++ b/awx/main/tests/functional/github_app_test.py @@ -0,0 +1,344 @@ +"""Tests for GitHub App Installation access token extraction plugin.""" + +from typing import TypedDict + +import pytest +from pytest_mock import MockerFixture + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKey, + RSAPublicKey, + generate_private_key, +) +from cryptography.hazmat.primitives.serialization import ( + Encoding, + NoEncryption, + PrivateFormat, + PublicFormat, +) +from github.Auth import AppInstallationAuth +from github.Consts import DEFAULT_JWT_ALGORITHM +from github.GithubException import ( + BadAttributeException, 
+ GithubException, + UnknownObjectException, +) +from jwt import decode as decode_jwt + +from awx.main.credential_plugins import github_app + + +github_app_jwt_client_id_unsupported = pytest.mark.xfail( + raises=(AssertionError, ValueError), + reason='Client ID in JWT is not currently supported by ' 'PyGitHub and is disabled.\n\n' 'Ref: https://github.com/PyGithub/PyGithub/issues/3213', +) + + +RSA_PUBLIC_EXPONENT = 65_537 # noqa: WPS303 +MINIMUM_RSA_KEY_SIZE = 1024 # the lowest value chosen for performance in tests + + +@pytest.fixture(scope='module') +def rsa_private_key() -> RSAPrivateKey: + """Generate an RSA private key.""" + return generate_private_key( + public_exponent=RSA_PUBLIC_EXPONENT, + key_size=MINIMUM_RSA_KEY_SIZE, # would be 4096 or higher in production + backend=default_backend(), + ) + + +@pytest.fixture(scope='module') +def rsa_public_key(rsa_private_key: RSAPrivateKey) -> RSAPublicKey: + """Extract a public key out of the private one.""" + return rsa_private_key.public_key() + + +@pytest.fixture(scope='module') +def rsa_private_key_bytes(rsa_private_key: RSAPrivateKey) -> bytes: + r"""Generate an unencrypted PKCS#1 formatted RSA private key. + + Encoded as PEM-bytes. + + This is what the GitHub-downloaded PEM files contain. + + Ref: https://developer.github.com/apps/building-github-apps/\ + authenticating-with-github-apps/ + """ + return rsa_private_key.private_bytes( + encoding=Encoding.PEM, + format=PrivateFormat.TraditionalOpenSSL, # A.K.A. PKCS#1 + encryption_algorithm=NoEncryption(), + ) + + +@pytest.fixture(scope='module') +def rsa_private_key_str(rsa_private_key_bytes: bytes) -> str: + """Return private key as an instance of string.""" + return rsa_private_key_bytes.decode('utf-8') + + +@pytest.fixture(scope='module') +def rsa_public_key_bytes(rsa_public_key: RSAPublicKey) -> bytes: + """Return a PKCS#1 formatted RSA public key encoded as PEM.""" + return rsa_public_key.public_bytes( + encoding=Encoding.PEM, + format=PublicFormat.PKCS1, + ) + + +class AppInstallIds(TypedDict): + """Schema for augmented extractor function keyword args.""" + + app_or_client_id: str + install_id: str + + +@pytest.mark.parametrize( + ('extract_github_app_install_token_args', 'expected_error_msg'), + ( + pytest.param( + { + 'app_or_client_id': 'invalid', + 'install_id': '666', + }, + '^Expected GitHub App or Client ID to be an integer or a string ' r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got' " 'invalid'$", + id='gh-app-id-broken-text', + ), + pytest.param( + { + 'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbb', + 'install_id': '666', + }, + '^Expected GitHub App or Client ID to be an integer or a string ' + r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got' + " 'Iv1.bbbbbbbbbbbbbbb'$", + id='gh-app-id-client-id-not-enough-chars', + ), + pytest.param( + { + 'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbbx', + 'install_id': '666', + }, + '^Expected GitHub App or Client ID to be an integer or a string ' + r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got' + " 'Iv1.bbbbbbbbbbbbbbbx'$", + id='gh-app-id-client-id-broken-hex', + ), + pytest.param( + { + 'app_or_client_id': 'Iv1.bbbbbbbbbbbbbbbbb', + 'install_id': '666', + }, + '^Expected GitHub App or Client ID to be an integer or a string ' + r'starting with `Iv1\.` followed by 16 hexadecimal digits, but got' + " 'Iv1.bbbbbbbbbbbbbbbbb'$", + id='gh-app-id-client-id-too-many-chars', + ), + pytest.param( + { + 'app_or_client_id': 999, + 'install_id': 'invalid', + }, + '^Expected GitHub App 
Installation ID to be an integer ' "but got 'invalid'$", + id='gh-app-invalid-install-id-with-int-app-id', + ), + pytest.param( + { + 'app_or_client_id': '999', + 'install_id': 'invalid', + }, + '^Expected GitHub App Installation ID to be an integer ' "but got 'invalid'$", + id='gh-app-invalid-install-id-with-str-digit-app-id', + ), + pytest.param( + { + 'app_or_client_id': 'Iv1.cccccccccccccccc', + 'install_id': 'invalid', + }, + '^Expected GitHub App Installation ID to be an integer ' "but got 'invalid'$", + id='gh-app-invalid-install-id-with-client-id', + marks=github_app_jwt_client_id_unsupported, + ), + ), +) +def test_github_app_invalid_args( + extract_github_app_install_token_args: AppInstallIds, + expected_error_msg: str, +) -> None: + """Test that invalid arguments make token extractor bail early.""" + with pytest.raises(ValueError, match=expected_error_msg): + github_app.extract_github_app_install_token( + github_api_url='https://github.com', + private_rsa_key='key', + **extract_github_app_install_token_args, + ) + + +@pytest.mark.parametrize( + ( + 'github_exception', + 'transformed_exception', + 'error_msg', + ), + ( + ( + BadAttributeException( + '', + {}, + Exception(), + ), + RuntimeError, + ( + r'^Broken GitHub @ https://github\.com with ' + r'app_or_client_id: 123, install_id: 456\. It is a bug, ' + 'please report it to the ' + r"developers\.\n\n\('', \{\}, Exception\(\)\)$" + ), + ), + ( + GithubException(-1), + RuntimeError, + ( + '^An unexpected error happened while talking to GitHub API ' + r'@ https://github\.com ' + r'\(app_or_client_id: 123, install_id: 456\)\. ' + r'Is the app or client ID correct\? ' + r'And the private RSA key\? ' + r'See https://docs\.github\.com/rest/reference/apps' + r'#create-an-installation-access-token-for-an-app\.' + r'\n\n-1$' + ), + ), + ( + UnknownObjectException(-1), + ValueError, + ( + '^Failed to retrieve a GitHub installation token from ' + r'https://github\.com using ' + r'app_or_client_id: 123, install_id: 456\. ' + r'Is the app installed\? See ' + r'https://docs\.github\.com/rest/reference/apps' + r'#create-an-installation-access-token-for-an-app\.' 
+                r'\n\n-1$'
+            ),
+        ),
+    ),
+    ids=(
+        'github-broken',
+        'unexpected-error',
+        'no-install',
+    ),
+)
+def test_github_app_api_errors(
+    mocker: MockerFixture,
+    github_exception: Exception,
+    transformed_exception: type[Exception],
+    error_msg: str,
+) -> None:
+    """Test that GitHub API errors are mapped to friendly exceptions."""
+    application_id = 123
+    installation_id = 456
+
+    mocker.patch.object(
+        github_app.Auth.AppInstallationAuth,
+        'token',
+        new_callable=mocker.PropertyMock,
+        side_effect=github_exception,
+    )
+
+    with pytest.raises(transformed_exception, match=error_msg):
+        github_app.extract_github_app_install_token(
+            github_api_url='https://github.com',
+            app_or_client_id=application_id,
+            install_id=installation_id,
+            private_rsa_key='key',
+        )
+
+
+class _FakeAppInstallationAuth(AppInstallationAuth):
+    @property
+    def token(self: '_FakeAppInstallationAuth') -> str:
+        return 'token-sentinel'
+
+
+@pytest.mark.parametrize(
+    'application_id',
+    (
+        123,
+        '123',
+        pytest.param(
+            'Iv1.aaaaaaaaaaaaaaaa',
+            marks=github_app_jwt_client_id_unsupported,
+        ),
+    ),
+    ids=('app-id-int', 'app-id-str', 'client-id'),
+)
+@pytest.mark.parametrize(
+    'installation_id',
+    (456, '456'),
+    ids=('install-id-int', 'install-id-str'),
+)
+# pylint: disable-next=too-many-arguments,too-many-positional-arguments
+def test_github_app_github_authentication(  # noqa: WPS211
+    application_id: int | str,
+    installation_id: int | str,
+    mocker: MockerFixture,
+    monkeypatch: pytest.MonkeyPatch,
+    rsa_private_key_str: str,
+    rsa_public_key_bytes: bytes,
+) -> None:
+    """Test successful GitHub authentication."""
+    monkeypatch.setattr(
+        github_app.Auth,
+        'AppInstallationAuth',
+        _FakeAppInstallationAuth,
+    )
+
+    get_installation_auth_spy = mocker.spy(
+        github_app.Auth,
+        'AppInstallationAuth',
+    )
+    github_initializer_spy = mocker.spy(github_app, 'Github')
+
+    token = github_app.extract_github_app_install_token(
+        github_api_url='https://github.com',
+        app_or_client_id=application_id,
+        install_id=installation_id,
+        private_rsa_key=rsa_private_key_str,
+    )
+
+    observed_pygithub_obj = github_initializer_spy.spy_return
+    observed_gh_install_auth_obj = get_installation_auth_spy.spy_return
+    # pylint: disable-next=protected-access
+    signed_jwt = observed_gh_install_auth_obj._app_auth.token  # noqa: WPS437
+
+    assert token == 'token-sentinel'
+
+    assert observed_pygithub_obj.requester.base_url == 'https://github.com'
+
+    assert observed_gh_install_auth_obj.installation_id == int(installation_id)
+    assert isinstance(observed_gh_install_auth_obj, _FakeAppInstallationAuth)
+
+    # NOTE: The `decode_jwt()` call asserts that no
+    # NOTE: `jwt.exceptions.InvalidSignatureError()` exception gets raised
+    # NOTE: which would indicate incorrect RSA key or corrupted payload if
+    # NOTE: that was to happen. This verifies that JWT is signed with the
+    # NOTE: private RSA key we passed by using its public counterpart.
+ decode_jwt( + signed_jwt, + key=rsa_public_key_bytes, + algorithms=[DEFAULT_JWT_ALGORITHM], + options={ + 'require': ['exp', 'iat', 'iss'], + 'strict_aud': False, + 'verify_aud': True, + 'verify_exp': True, + 'verify_signature': True, + 'verify_nbf': True, + }, + audience=None, # GH App JWT don't set the audience claim + issuer=str(application_id), + leeway=0.001, # noqa: WPS432 + ) diff --git a/awx/main/tests/functional/test_credential_plugins.py b/awx/main/tests/functional/test_credential_plugins.py new file mode 100644 index 0000000000..ecfb77c3b8 --- /dev/null +++ b/awx/main/tests/functional/test_credential_plugins.py @@ -0,0 +1,217 @@ +import pytest +from unittest import mock +from awx.main.credential_plugins import hashivault, azure_kv + +from azure.keyvault.secrets import ( + KeyVaultSecret, + SecretClient, + SecretProperties, +) + + +def test_imported_azure_cloud_sdk_vars(): + from awx.main.credential_plugins import azure_kv + + assert len(azure_kv.clouds) > 0 + assert all([hasattr(c, 'name') for c in azure_kv.clouds]) + assert all([hasattr(c, 'suffixes') for c in azure_kv.clouds]) + assert all([hasattr(c.suffixes, 'keyvault_dns') for c in azure_kv.clouds]) + + +def test_hashivault_approle_auth(): + kwargs = { + 'role_id': 'the_role_id', + 'secret_id': 'the_secret_id', + } + expected_res = { + 'role_id': 'the_role_id', + 'secret_id': 'the_secret_id', + } + res = hashivault.approle_auth(**kwargs) + assert res == expected_res + + +def test_hashivault_kubernetes_auth(): + kwargs = { + 'kubernetes_role': 'the_kubernetes_role', + } + expected_res = { + 'role': 'the_kubernetes_role', + 'jwt': 'the_jwt', + } + with mock.patch('pathlib.Path') as path_mock: + mock.mock_open(path_mock.return_value.open, read_data='the_jwt') + res = hashivault.kubernetes_auth(**kwargs) + path_mock.assert_called_with('/var/run/secrets/kubernetes.io/serviceaccount/token') + assert res == expected_res + + +def test_hashivault_client_cert_auth_explicit_role(): + kwargs = { + 'client_cert_role': 'test-cert-1', + } + expected_res = { + 'name': 'test-cert-1', + } + res = hashivault.client_cert_auth(**kwargs) + assert res == expected_res + + +def test_hashivault_client_cert_auth_no_role(): + kwargs = {} + expected_res = { + 'name': None, + } + res = hashivault.client_cert_auth(**kwargs) + assert res == expected_res + + +def test_hashivault_userpass_auth(): + kwargs = {'username': 'the_username', 'password': 'the_password'} + expected_res = {'username': 'the_username', 'password': 'the_password'} + res = hashivault.userpass_auth(**kwargs) + assert res == expected_res + + +def test_hashivault_handle_auth_token(): + kwargs = { + 'token': 'the_token', + } + token = hashivault.handle_auth(**kwargs) + assert token == kwargs['token'] + + +def test_hashivault_handle_auth_approle(): + kwargs = { + 'role_id': 'the_role_id', + 'secret_id': 'the_secret_id', + } + with mock.patch.object(hashivault, 'method_auth') as method_mock: + method_mock.return_value = 'the_token' + token = hashivault.handle_auth(**kwargs) + method_mock.assert_called_with(**kwargs, auth_param=kwargs) + assert token == 'the_token' + + +def test_hashivault_handle_auth_kubernetes(): + kwargs = { + 'kubernetes_role': 'the_kubernetes_role', + } + with mock.patch.object(hashivault, 'method_auth') as method_mock: + with mock.patch('pathlib.Path') as path_mock: + mock.mock_open(path_mock.return_value.open, read_data='the_jwt') + method_mock.return_value = 'the_token' + token = hashivault.handle_auth(**kwargs) + method_mock.assert_called_with(**kwargs, 
auth_param={'role': 'the_kubernetes_role', 'jwt': 'the_jwt'}) + assert token == 'the_token' + + +def test_hashivault_handle_auth_client_cert(): + kwargs = { + 'client_cert_public': "foo", + 'client_cert_private': "bar", + 'client_cert_role': 'test-cert-1', + } + auth_params = { + 'name': 'test-cert-1', + } + with mock.patch.object(hashivault, 'method_auth') as method_mock: + method_mock.return_value = 'the_token' + token = hashivault.handle_auth(**kwargs) + method_mock.assert_called_with(**kwargs, auth_param=auth_params) + assert token == 'the_token' + + +def test_hashivault_handle_auth_not_enough_args(): + with pytest.raises(Exception): + hashivault.handle_auth() + + +class TestDelineaImports: + """ + These module have a try-except for ImportError which will allow using the older library + but we do not want the awx_devel image to have the older library, + so these tests are designed to fail if these wind up using the fallback import + """ + + def test_dsv_import(self): + from awx.main.credential_plugins.dsv import SecretsVault # noqa + + # assert this module as opposed to older thycotic.secrets.vault + assert SecretsVault.__module__ == 'delinea.secrets.vault' + + def test_tss_import(self): + from awx.main.credential_plugins.tss import DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret # noqa + + for cls in (DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret): + # assert this module as opposed to older thycotic.secrets.server + assert cls.__module__ == 'delinea.secrets.server' + + +class _FakeSecretClient(SecretClient): + def get_secret( + self: '_FakeSecretClient', + name: str, + version: str | None = None, + **kwargs: str, + ) -> KeyVaultSecret: + props = SecretProperties(None, None) + return KeyVaultSecret(properties=props, value='test-secret') + + +def test_azure_kv_invalid_env() -> None: + """Test running outside of Azure raises error.""" + error_msg = ( + 'You are not operating on an Azure VM, so the Managed Identity ' + 'feature is unavailable. Please provide the full Client ID, ' + 'Client Secret, and Tenant ID or run the software on an Azure VM.' 
+ ) + + with pytest.raises( + RuntimeError, + match=error_msg, + ): + azure_kv.azure_keyvault_backend( + url='https://test.vault.azure.net', + client='', + secret='client-secret', + tenant='tenant-id', + secret_field='secret', + secret_version='', + ) + + +@pytest.mark.parametrize( + ('client', 'secret', 'tenant'), + ( + pytest.param('', '', '', id='managed-identity'), + pytest.param( + 'client-id', + 'client-secret', + 'tenant-id', + id='client-secret-credential', + ), + ), +) +def test_azure_kv_valid_auth( + monkeypatch: pytest.MonkeyPatch, + client: str, + secret: str, + tenant: str, +) -> None: + """Test successful Azure authentication via Managed Identity and credentials.""" + monkeypatch.setattr( + azure_kv, + 'SecretClient', + _FakeSecretClient, + ) + + keyvault_secret = azure_kv.azure_keyvault_backend( + url='https://test.vault.azure.net', + client=client, + secret=secret, + tenant=tenant, + secret_field='secret', + secret_version='', + ) + assert keyvault_secret == 'test-secret' diff --git a/awx/main/tests/functional/test_fixture_factories.py b/awx/main/tests/functional/test_fixture_factories.py index 5792197177..30fa1247ae 100644 --- a/awx/main/tests/functional/test_fixture_factories.py +++ b/awx/main/tests/functional/test_fixture_factories.py @@ -50,13 +50,11 @@ def test_org_factory_roles(organization_factory): teams=['team1', 'team2'], users=['team1:foo', 'bar'], projects=['baz', 'bang'], - roles=['team2.member_role:foo', 'team1.admin_role:bar', 'team1.member_role:team2.admin_role', 'baz.admin_role:foo'], + roles=['team2.member_role:foo', 'team1.admin_role:bar', 'baz.admin_role:foo'], ) - - assert objects.users.bar in objects.teams.team2.admin_role + assert objects.users.bar in objects.teams.team1.admin_role assert objects.users.foo in objects.projects.baz.admin_role assert objects.users.foo in objects.teams.team1.member_role - assert objects.teams.team2.admin_role in objects.teams.team1.member_role.children.all() @pytest.mark.django_db diff --git a/awx/main/tests/functional/test_ha.py b/awx/main/tests/functional/test_ha.py new file mode 100644 index 0000000000..bf8c5309c7 --- /dev/null +++ b/awx/main/tests/functional/test_ha.py @@ -0,0 +1,45 @@ +import pytest + +# AWX +from awx.main.ha import is_ha_environment +from awx.main.models.ha import Instance +from awx.main.dispatch.pool import get_auto_max_workers + +# Django +from django.test.utils import override_settings + + +@pytest.mark.django_db +def test_multiple_instances(): + for i in range(2): + Instance.objects.create(hostname=f'foo{i}', node_type='hybrid') + assert is_ha_environment() + + +@pytest.mark.django_db +def test_db_localhost(): + Instance.objects.create(hostname='foo', node_type='hybrid') + Instance.objects.create(hostname='bar', node_type='execution') + assert is_ha_environment() is False + + +@pytest.mark.django_db +@pytest.mark.parametrize( + 'settings', + [ + dict(SYSTEM_TASK_ABS_MEM='16Gi', SYSTEM_TASK_ABS_CPU='24', SYSTEM_TASK_FORKS_MEM=400, SYSTEM_TASK_FORKS_CPU=4), + dict(SYSTEM_TASK_ABS_MEM='124Gi', SYSTEM_TASK_ABS_CPU='2', SYSTEM_TASK_FORKS_MEM=None, SYSTEM_TASK_FORKS_CPU=None), + ], + ids=['cpu_dominated', 'memory_dominated'], +) +def test_dispatcher_max_workers_reserve(settings, fake_redis): + """This tests that the dispatcher max_workers matches instance capacity + + Assumes capacity_adjustment is 1, + plus reserve worker count + """ + with override_settings(**settings): + i = Instance.objects.create(hostname='test-1', node_type='hybrid') + i.local_health_check() + + assert get_auto_max_workers() == 
i.capacity + 7, (i.cpu, i.memory, i.cpu_capacity, i.mem_capacity) diff --git a/awx/main/tests/functional/test_inventory_source_injectors.py b/awx/main/tests/functional/test_inventory_source_injectors.py index 71ce9af4c2..8f740db72f 100644 --- a/awx/main/tests/functional/test_inventory_source_injectors.py +++ b/awx/main/tests/functional/test_inventory_source_injectors.py @@ -49,7 +49,6 @@ def credential_kind(source): """Given the inventory source kind, return expected credential kind""" if source == 'openshift_virtualization': return 'kubernetes_bearer_token' - return source.replace('ec2', 'aws') diff --git a/awx/main/tests/functional/test_jt_rename_migration.py b/awx/main/tests/functional/test_jt_rename_migration.py new file mode 100644 index 0000000000..4d624c41be --- /dev/null +++ b/awx/main/tests/functional/test_jt_rename_migration.py @@ -0,0 +1,56 @@ +import pytest + +from awx.main.migrations._db_constraints import _rename_duplicates +from awx.main.models import JobTemplate + + +@pytest.mark.django_db +def test_rename_job_template_duplicates(organization, project): + ids = [] + for i in range(5): + jt = JobTemplate.objects.create(name=f'jt-{i}', organization=organization, project=project) + ids.append(jt.id) # saved in order of creation + + # Hack to first allow duplicate names of JT to test migration + JobTemplate.objects.filter(id__in=ids).update(org_unique=False) + + # Set all JTs to the same name + JobTemplate.objects.filter(id__in=ids).update(name='same_name_for_test') + + _rename_duplicates(JobTemplate) + + first_jt = JobTemplate.objects.get(id=ids[0]) + assert first_jt.name == 'same_name_for_test' + + for i, pk in enumerate(ids): + if i == 0: + continue + jt = JobTemplate.objects.get(id=pk) + # Name should be set based on creation order + assert jt.name == f'same_name_for_test_dup{i}' + + +@pytest.mark.django_db +def test_rename_job_template_name_too_long(organization, project): + ids = [] + for i in range(3): + jt = JobTemplate.objects.create(name=f'jt-{i}', organization=organization, project=project) + ids.append(jt.id) # saved in order of creation + + JobTemplate.objects.filter(id__in=ids).update(org_unique=False) + + chars = 512 + # Set all JTs to the same reaaaaaaly long name + JobTemplate.objects.filter(id__in=ids).update(name='A' * chars) + + _rename_duplicates(JobTemplate) + + first_jt = JobTemplate.objects.get(id=ids[0]) + assert first_jt.name == 'A' * chars + + for i, pk in enumerate(ids): + if i == 0: + continue + jt = JobTemplate.objects.get(id=pk) + assert jt.name.endswith(f'dup{i}') + assert len(jt.name) <= 512 diff --git a/awx/main/tests/functional/test_migrations.py b/awx/main/tests/functional/test_migrations.py index f406dec796..a3c188efd1 100644 --- a/awx/main/tests/functional/test_migrations.py +++ b/awx/main/tests/functional/test_migrations.py @@ -70,15 +70,18 @@ class TestMigrationSmoke: user = User.objects.create(username='random-user') org.read_role.members.add(user) org.member_role.members.add(user) + team = Team.objects.create(name='arbitrary-team', organization=org, created=now(), modified=now()) team.member_role.members.add(user) + new_state = migrator.apply_tested_migration( ('main', '0192_custom_roles'), ) RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment') assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists() - assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Controller Organization Member', object_id=org.id).exists() - assert 
RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Controller Team Member', object_id=team.id).exists() + assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Organization Member', object_id=org.id).exists() + assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Team Member', object_id=team.id).exists() + # Regression testing for bug that comes from current vs past models mismatch RoleDefinition = new_state.apps.get_model('dab_rbac', 'RoleDefinition') assert not RoleDefinition.objects.filter(name='Organization Organization Admin').exists() @@ -91,22 +94,39 @@ class TestMigrationSmoke: ) DABPermission = new_state.apps.get_model('dab_rbac', 'DABPermission') assert not DABPermission.objects.filter(codename='view_executionenvironment').exists() + # Test create a Project with a duplicate name Organization = new_state.apps.get_model('main', 'Organization') Project = new_state.apps.get_model('main', 'Project') + WorkflowJobTemplate = new_state.apps.get_model('main', 'WorkflowJobTemplate') org = Organization.objects.create(name='duplicate-obj-organization', created=now(), modified=now()) proj_ids = [] for i in range(3): proj = Project.objects.create(name='duplicate-project-name', organization=org, created=now(), modified=now()) proj_ids.append(proj.id) + + # Test create WorkflowJobTemplate with duplicate names + wfjt_ids = [] + for i in range(3): + wfjt = WorkflowJobTemplate.objects.create(name='duplicate-workflow-name', organization=org, created=now(), modified=now()) + wfjt_ids.append(wfjt.id) + # The uniqueness rules will not apply to InventorySource Inventory = new_state.apps.get_model('main', 'Inventory') InventorySource = new_state.apps.get_model('main', 'InventorySource') inv = Inventory.objects.create(name='migration-test-inv', organization=org, created=now(), modified=now()) InventorySource.objects.create(name='migration-test-src', source='file', inventory=inv, organization=org, created=now(), modified=now()) + + # Apply migration 0200 which should rename duplicates new_state = migrator.apply_tested_migration( ('main', '0200_template_name_constraint'), ) + + # Get the models from the new state for verification + Project = new_state.apps.get_model('main', 'Project') + WorkflowJobTemplate = new_state.apps.get_model('main', 'WorkflowJobTemplate') + InventorySource = new_state.apps.get_model('main', 'InventorySource') + for i, proj_id in enumerate(proj_ids): proj = Project.objects.get(id=proj_id) if i == 0: @@ -114,61 +134,37 @@ class TestMigrationSmoke: else: assert proj.name != 'duplicate-project-name' assert proj.name.startswith('duplicate-project-name') + + # Verify WorkflowJobTemplate duplicates are renamed + for i, wfjt_id in enumerate(wfjt_ids): + wfjt = WorkflowJobTemplate.objects.get(id=wfjt_id) + if i == 0: + assert wfjt.name == 'duplicate-workflow-name' + else: + assert wfjt.name != 'duplicate-workflow-name' + assert wfjt.name.startswith('duplicate-workflow-name') + # The inventory source had this field set to avoid the constrains - InventorySource = new_state.apps.get_model('main', 'InventorySource') inv_src = InventorySource.objects.get(name='migration-test-src') assert inv_src.org_unique is False - Project = new_state.apps.get_model('main', 'Project') for proj in Project.objects.all(): assert proj.org_unique is True + # Piggyback test for the new credential types + validate_exists = ['GitHub App Installation Access Token Lookup', 'Terraform backend configuration'] + CredentialType = 
new_state.apps.get_model('main', 'CredentialType') + # simulate an upgrade by deleting existing types with these names + for expected_name in validate_exists: + ct = CredentialType.objects.filter(name=expected_name).first() + if ct: + ct.delete() -@pytest.mark.django_db -class TestGithubAppBug: - """ - Tests that `awx-manage createsuperuser` runs successfully after - the `github_app` CredentialType kind is updated to `github_app_lookup` - via the migration. - """ - - def test_after_github_app_kind_migration(self, migrator): - """ - Verifies that `createsuperuser` does not raise a KeyError - after the 0202_squashed_deletions migration (which includes - the `update_github_app_kind` logic) is applied. - """ - # 1. Apply migrations up to the point *before* the 0202_squashed_deletions migration. - # This simulates the state where the problematic CredentialType might exist. - # We use 0201_create_managed_creds as the direct predecessor. - old_state = migrator.apply_tested_migration(('main', '0201_create_managed_creds')) - - # Get the CredentialType model from the historical state. - CredentialType = old_state.apps.get_model('main', 'CredentialType') - - # Create a CredentialType with the old, problematic 'kind' value - CredentialType.objects.create( - name='Legacy GitHub App Credential', - kind='github_app', # The old, problematic 'kind' value - namespace='github_app', # The namespace that causes the KeyError in the registry lookup - managed=True, - created=timezone.now(), - modified=timezone.now(), + new_state = migrator.apply_tested_migration( + ('main', '0201_create_managed_creds'), ) - # Apply the migration that includes the fix (0202_squashed_deletions). - new_state = migrator.apply_tested_migration(('main', '0202_squashed_deletions')) - - # Verify that the CredentialType with the old 'kind' no longer exists - # and the 'kind' has been updated to the new value. - CredentialType = new_state.apps.get_model('main', 'CredentialType') # Get CredentialType model from the new state - - # Assertion 1: The CredentialType with the old 'github_app' kind should no longer exist. - assert not CredentialType.objects.filter( - kind='github_app' - ).exists(), "CredentialType with old 'github_app' kind should no longer exist after migration." - - # Assertion 2: The CredentialType should now exist with the new 'github_app_lookup' kind - # and retain its original name. - assert CredentialType.objects.filter( - kind='github_app_lookup', name='Legacy GitHub App Credential' - ).exists(), "CredentialType should be updated to 'github_app_lookup' and retain its name." 
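+        # NOTE (annotation, not part of the upstream patch): models pulled from a
+        # migration state are historical models bound to that state, so
+        # CredentialType has to be re-fetched from the state returned by
+        # apply_tested_migration(), e.g.:
+        #     CredentialType = new_state.apps.get_model('main', 'CredentialType')
+        # Reusing the class obtained from an earlier state would query against
+        # stale model definitions.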
+ CredentialType = new_state.apps.get_model('main', 'CredentialType') + for expected_name in validate_exists: + assert CredentialType.objects.filter( + name=expected_name + ).exists(), f'Could not find {expected_name} credential type name, all names: {list(CredentialType.objects.values_list("name", flat=True))}' diff --git a/awx/main/tests/functional/test_projects.py b/awx/main/tests/functional/test_projects.py index 1824c888f1..7d389d1e16 100644 --- a/awx/main/tests/functional/test_projects.py +++ b/awx/main/tests/functional/test_projects.py @@ -334,6 +334,69 @@ def test_team_project_list(get, team_project_list): ) +@pytest.mark.django_db +def test_project_teams_list_multiple_roles_distinct(get, organization_factory): + # test projects with multiple roles on the same team + objects = organization_factory( + 'org1', + superusers=['admin'], + teams=['teamA'], + projects=['proj1'], + roles=[ + 'teamA.member_role:proj1.admin_role', + 'teamA.member_role:proj1.use_role', + 'teamA.member_role:proj1.update_role', + 'teamA.member_role:proj1.read_role', + ], + ) + admin = objects.superusers.admin + proj1 = objects.projects.proj1 + + res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data + names = [t['name'] for t in res['results']] + assert names == ['teamA'] + + +@pytest.mark.django_db +def test_project_teams_list_multiple_teams(get, organization_factory): + # test projects with multiple teams + objs = organization_factory( + 'org1', + superusers=['admin'], + teams=['teamA', 'teamB', 'teamC', 'teamD'], + projects=['proj1'], + roles=[ + 'teamA.member_role:proj1.admin_role', + 'teamB.member_role:proj1.update_role', + 'teamC.member_role:proj1.use_role', + 'teamD.member_role:proj1.read_role', + ], + ) + admin = objs.superusers.admin + proj1 = objs.projects.proj1 + + res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data + names = sorted([t['name'] for t in res['results']]) + assert names == ['teamA', 'teamB', 'teamC', 'teamD'] + + +@pytest.mark.django_db +def test_project_teams_list_no_direct_assignments(get, organization_factory): + # test projects with no direct team assignments + objects = organization_factory( + 'org1', + superusers=['admin'], + teams=['teamA'], + projects=['proj1'], + roles=[], + ) + admin = objects.superusers.admin + proj1 = objects.projects.proj1 + + res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data + assert res['count'] == 0 + + @pytest.mark.parametrize("u,expected_status_code", [('rando', 403), ('org_member', 403), ('org_admin', 201), ('admin', 201)]) @pytest.mark.django_db def test_create_project(post, organization, org_admin, org_member, admin, rando, u, expected_status_code): diff --git a/awx/main/tests/functional/test_tasks.py b/awx/main/tests/functional/test_tasks.py new file mode 100644 index 0000000000..8e5305807f --- /dev/null +++ b/awx/main/tests/functional/test_tasks.py @@ -0,0 +1,96 @@ +import pytest +import os +import tempfile +import shutil + +from awx.main.tasks.jobs import RunJob +from awx.main.tasks.system import CleanupImagesAndFiles, execution_node_health_check +from awx.main.models import Instance, Job + + +@pytest.fixture +def scm_revision_file(tmpdir_factory): + # Returns path to temporary testing revision file + revision_file = tmpdir_factory.mktemp('revisions').join('revision.txt') + with open(str(revision_file), 'w') as f: + f.write('1234567890123456789012345678901234567890') + return os.path.join(revision_file.dirname, 'revision.txt') + + +@pytest.mark.django_db 
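+# Parametrize over the two AWX node types that never run execution work. Note the
+# tuple below: a bare string such as 'control. hybrid' would be iterated character
+# by character by pytest, generating one test case per character rather than one
+# per node type.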
+@pytest.mark.parametrize('node_type', ('control', 'hybrid'))
+def test_no_worker_info_on_AWX_nodes(node_type):
+    hostname = 'us-south-3-compute.invalid'
+    Instance.objects.create(hostname=hostname, node_type=node_type)
+    assert execution_node_health_check(hostname) is None
+
+
+@pytest.fixture
+def job_folder_factory(request):
+    def _rf(job_id='1234'):
+        # mimic a job's private_data_dir on disk
+        pdd_path = tempfile.mkdtemp(prefix=f'awx_{job_id}_')
+
+        def test_folder_cleanup():
+            if os.path.exists(pdd_path):
+                shutil.rmtree(pdd_path)
+
+        # ensure stray folders never outlive the test
+        request.addfinalizer(test_folder_cleanup)
+
+        return pdd_path
+
+    return _rf
+
+
+@pytest.fixture
+def mock_job_folder(job_folder_factory):
+    return job_folder_factory()
+
+
+@pytest.mark.django_db
+def test_folder_cleanup_stale_file(mock_job_folder, mock_me):
+    CleanupImagesAndFiles.run()
+    assert os.path.exists(mock_job_folder)  # grace period should protect folder from deletion
+
+    CleanupImagesAndFiles.run(grace_period=0)
+    assert not os.path.exists(mock_job_folder)  # should be deleted
+
+
+@pytest.mark.django_db
+def test_folder_cleanup_running_job(mock_job_folder, me_inst):
+    job = Job.objects.create(id=1234, controller_node=me_inst.hostname, status='running')
+    CleanupImagesAndFiles.run(grace_period=0)
+    assert os.path.exists(mock_job_folder)  # running job should prevent folder from getting deleted
+
+    job.status = 'failed'
+    job.save(update_fields=['status'])
+    CleanupImagesAndFiles.run(grace_period=0)
+    assert not os.path.exists(mock_job_folder)  # job is finished and no grace period, should delete
+
+
+@pytest.mark.django_db
+def test_folder_cleanup_multiple_running_jobs(job_folder_factory, me_inst):
+    jobs = []
+    dirs = []
+    num_jobs = 3
+
+    for i in range(num_jobs):
+        job = Job.objects.create(controller_node=me_inst.hostname, status='running')
+        dirs.append(job_folder_factory(job.id))
+        jobs.append(job)
+
+    CleanupImagesAndFiles.run(grace_period=0)
+
+    # all folders survive while their jobs are still running
+    assert [os.path.exists(d) for d in dirs] == [True for i in range(num_jobs)]
+
+
+@pytest.mark.django_db
+def test_does_not_run_reaped_job(mocker, mock_me):
+    job = Job.objects.create(status='failed', job_explanation='This job has been reaped.')
+    mock_run = mocker.patch('awx.main.tasks.jobs.ansible_runner.interface.run')
+    try:
+        RunJob().run(job.id)
+    except Exception:
+        pass
+    job.refresh_from_db()
+    assert job.status == 'failed'
+    mock_run.assert_not_called()
diff --git a/awx/main/tests/live/tests/conftest.py b/awx/main/tests/live/tests/conftest.py
index 6c932d7b86..5842aa94c0 100644
--- a/awx/main/tests/live/tests/conftest.py
+++ b/awx/main/tests/live/tests/conftest.py
@@ -3,7 +3,6 @@ import time
 import os
 import shutil
 import tempfile
-import logging
 
 import pytest
 
@@ -14,15 +13,11 @@ from awx.api.versioning import reverse
 # These tests are invoked from the awx/main/tests/live/ subfolder
 # so any fixtures from higher-up conftest files must be explicitly included
 from awx.main.tests.functional.conftest import *  # noqa
-from awx.main.tests.conftest import load_all_credentials  # noqa: F401; pylint: disable=unused-import
 
 from awx.main.tests import data
 
 from awx.main.models import Project, JobTemplate, Organization, Inventory
 
-logger = logging.getLogger(__name__)
-
-
 PROJ_DATA = os.path.join(os.path.dirname(data.__file__), 'projects')
 
 
@@ -138,29 +133,30 @@ def podman_image_generator():
 
 
 @pytest.fixture
-def project_factory(post, default_org, admin):
-    def _rf(scm_url=None, local_path=None):
-        proj_kwargs = {}
+def run_job_from_playbook(default_org, demo_inv, post, admin):
+    def _rf(test_name, playbook, local_path=None, scm_url=None,
jt_params=None): + project_name = f'{test_name} project' + jt_name = f'{test_name} JT: {playbook}' + + old_proj = Project.objects.filter(name=project_name).first() + if old_proj: + old_proj.delete() + + old_jt = JobTemplate.objects.filter(name=jt_name).first() + if old_jt: + old_jt.delete() + + proj_kwargs = {'name': project_name, 'organization': default_org.id} if local_path: # manual path - project_name = f'Manual roject {local_path}' proj_kwargs['scm_type'] = '' proj_kwargs['local_path'] = local_path elif scm_url: - project_name = f'Project {scm_url}' proj_kwargs['scm_type'] = 'git' proj_kwargs['scm_url'] = scm_url else: raise RuntimeError('Need to provide scm_url or local_path') - proj_kwargs['name'] = project_name - proj_kwargs['organization'] = default_org.id - - old_proj = Project.objects.filter(name=project_name).first() - if old_proj: - logger.info(f'Deleting existing project {project_name}') - old_proj.delete() - result = post( reverse('api:project_list'), proj_kwargs, @@ -168,23 +164,6 @@ def project_factory(post, default_org, admin): expect=201, ) proj = Project.objects.get(id=result.data['id']) - return proj - - return _rf - - -@pytest.fixture -def run_job_from_playbook(demo_inv, post, admin, project_factory): - def _rf(test_name, playbook, local_path=None, scm_url=None, jt_params=None, proj=None, wait=True): - jt_name = f'{test_name} JT: {playbook}' - - if not proj: - proj = project_factory(scm_url=scm_url, local_path=local_path) - - old_jt = JobTemplate.objects.filter(name=jt_name).first() - if old_jt: - logger.info(f'Deleting existing JT {jt_name}') - old_jt.delete() if proj.current_job: wait_for_job(proj.current_job) @@ -206,9 +185,7 @@ def run_job_from_playbook(demo_inv, post, admin, project_factory): job = jt.create_unified_job() job.signal_start() - if wait: - wait_for_job(job) - assert job.status == 'successful' - return {'job': job, 'job_template': jt, 'project': proj} + wait_for_job(job) + assert job.status == 'successful' return _rf diff --git a/awx/main/tests/live/tests/test_ansible_facts.py b/awx/main/tests/live/tests/test_ansible_facts.py index e03620ee8b..f6db48345e 100644 --- a/awx/main/tests/live/tests/test_ansible_facts.py +++ b/awx/main/tests/live/tests/test_ansible_facts.py @@ -1,20 +1,14 @@ import pytest -from awx.main.tests.live.tests.conftest import wait_for_events, wait_for_job +from awx.main.tests.live.tests.conftest import wait_for_events from awx.main.models import Job, Inventory -@pytest.fixture -def facts_project(live_tmp_folder, project_factory): - return project_factory(scm_url=f'file://{live_tmp_folder}/facts') - - def assert_facts_populated(name): job = Job.objects.filter(name__icontains=name).order_by('-created').first() assert job is not None wait_for_events(job) - wait_for_job(job) inventory = job.inventory assert inventory.hosts.count() > 0 # sanity @@ -23,24 +17,24 @@ def assert_facts_populated(name): @pytest.fixture -def general_facts_test(facts_project, run_job_from_playbook): +def general_facts_test(live_tmp_folder, run_job_from_playbook): def _rf(slug, jt_params): jt_params['use_fact_cache'] = True - standard_kwargs = dict(jt_params=jt_params) + standard_kwargs = dict(scm_url=f'file://{live_tmp_folder}/facts', jt_params=jt_params) # GATHER FACTS name = f'test_gather_ansible_facts_{slug}' - run_job_from_playbook(name, 'gather.yml', proj=facts_project, **standard_kwargs) + run_job_from_playbook(name, 'gather.yml', **standard_kwargs) assert_facts_populated(name) # KEEP FACTS name = f'test_clear_ansible_facts_{slug}' - 
run_job_from_playbook(name, 'no_op.yml', proj=facts_project, **standard_kwargs) + run_job_from_playbook(name, 'no_op.yml', **standard_kwargs) assert_facts_populated(name) # CLEAR FACTS name = f'test_clear_ansible_facts_{slug}' - run_job_from_playbook(name, 'clear.yml', proj=facts_project, **standard_kwargs) + run_job_from_playbook(name, 'clear.yml', **standard_kwargs) job = Job.objects.filter(name__icontains=name).order_by('-created').first() assert job is not None diff --git a/awx/main/tests/unit/commands/test_import_auth_config_to_gateway.py b/awx/main/tests/unit/commands/test_import_auth_config_to_gateway.py new file mode 100644 index 0000000000..cfb7e64ac5 --- /dev/null +++ b/awx/main/tests/unit/commands/test_import_auth_config_to_gateway.py @@ -0,0 +1,581 @@ +import os +import pytest +from unittest.mock import patch, Mock, call, DEFAULT +from io import StringIO +from unittest import TestCase + +from awx.main.management.commands.import_auth_config_to_gateway import Command +from awx.main.utils.gateway_client import GatewayAPIError + + +class TestImportAuthConfigToGatewayCommand(TestCase): + def setUp(self): + self.command = Command() + + def options_basic_auth_full_send(self): + return { + 'basic_auth': True, + 'skip_all_authenticators': False, + 'skip_oidc': False, + 'skip_github': False, + 'skip_ldap': False, + 'skip_ad': False, + 'skip_saml': False, + 'skip_radius': False, + 'skip_tacacs': False, + 'skip_google': False, + 'skip_settings': False, + 'force': False, + } + + def options_basic_auth_skip_all_individual(self): + return { + 'basic_auth': True, + 'skip_all_authenticators': False, + 'skip_oidc': True, + 'skip_github': True, + 'skip_ldap': True, + 'skip_ad': True, + 'skip_saml': True, + 'skip_radius': True, + 'skip_tacacs': True, + 'skip_google': True, + 'skip_settings': True, + 'force': False, + } + + def options_svc_token_full_send(self): + options = self.options_basic_auth_full_send() + options['basic_auth'] = False + return options + + def options_svc_token_skip_all(self): + options = self.options_basic_auth_skip_all_individual() + options['basic_auth'] = False + return options + + def create_mock_migrator( + self, + mock_migrator_class, + authenticator_type="TestAuth", + created=0, + updated=0, + unchanged=0, + failed=0, + mappers_created=0, + mappers_updated=0, + mappers_failed=0, + settings_created=0, + settings_updated=0, + settings_unchanged=0, + settings_failed=0, + ): + """Helper method to create a mock migrator with specified return values.""" + mock_migrator = Mock() + mock_migrator.get_authenticator_type.return_value = authenticator_type + mock_migrator.migrate.return_value = { + 'created': created, + 'updated': updated, + 'unchanged': unchanged, + 'failed': failed, + 'mappers_created': mappers_created, + 'mappers_updated': mappers_updated, + 'mappers_failed': mappers_failed, + } + mock_migrator_class.return_value = mock_migrator + return mock_migrator + + def test_add_arguments(self): + """Test that all expected arguments are properly added to the parser.""" + parser = Mock() + self.command.add_arguments(parser) + + expected_calls = [ + call('--basic-auth', action='store_true', help='Use HTTP Basic Authentication between Controller and Gateway'), + call( + '--skip-all-authenticators', + action='store_true', + help='Skip importing all authenticators [GitHub, OIDC, SAML, Azure AD, LDAP, RADIUS, TACACS+, Google OAuth2]', + ), + call('--skip-oidc', action='store_true', help='Skip importing generic OIDC authenticators'), + call('--skip-github', action='store_true', 
help='Skip importing GitHub authenticator'), + call('--skip-ldap', action='store_true', help='Skip importing LDAP authenticators'), + call('--skip-ad', action='store_true', help='Skip importing Azure AD authenticator'), + call('--skip-saml', action='store_true', help='Skip importing SAML authenticator'), + call('--skip-radius', action='store_true', help='Skip importing RADIUS authenticator'), + call('--skip-tacacs', action='store_true', help='Skip importing TACACS+ authenticator'), + call('--skip-google', action='store_true', help='Skip importing Google OAuth2 authenticator'), + call('--skip-settings', action='store_true', help='Skip importing settings'), + call( + '--force', + action='store_true', + help='Force migration even if configurations already exist. Does not apply to skipped authenticators nor skipped settings.', + ), + ] + + parser.add_argument.assert_has_calls(expected_calls, any_order=True) + + @patch.dict(os.environ, {}, clear=True) + @patch('sys.stdout', new_callable=StringIO) + def test_handle_missing_env_vars_basic_auth(self, mock_stdout): + """Test that missing environment variables cause clean exit when using basic auth.""" + + with patch.object(self.command, 'stdout', mock_stdout): + with pytest.raises(SystemExit) as exc_info: + self.command.handle(**self.options_basic_auth_full_send()) + # Should exit with code 0 for successful early validation + assert exc_info.value.code == 0 + + output = mock_stdout.getvalue() + self.assertIn('Missing required environment variables:', output) + self.assertIn('GATEWAY_BASE_URL', output) + self.assertIn('GATEWAY_USER', output) + self.assertIn('GATEWAY_PASSWORD', output) + + @patch.dict( + os.environ, + {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass', 'GATEWAY_SKIP_VERIFY': 'true'}, + ) + @patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator') + @patch.multiple( + 'awx.main.management.commands.import_auth_config_to_gateway', + GitHubMigrator=DEFAULT, + OIDCMigrator=DEFAULT, + SAMLMigrator=DEFAULT, + AzureADMigrator=DEFAULT, + LDAPMigrator=DEFAULT, + RADIUSMigrator=DEFAULT, + TACACSMigrator=DEFAULT, + GoogleOAuth2Migrator=DEFAULT, + ) + @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') + @patch('sys.stdout', new_callable=StringIO) + def test_handle_basic_auth_success(self, mock_stdout, mock_gateway_client, mock_settings_migrator, **mock_migrators): + """Test successful execution with basic auth.""" + # Mock gateway client context manager + mock_client_instance = Mock() + mock_gateway_client.return_value.__enter__.return_value = mock_client_instance + mock_gateway_client.return_value.__exit__.return_value = None + + for mock_migrator_class in mock_migrators.values(): + self.create_mock_migrator(mock_migrator_class, created=1, mappers_created=2) + + self.create_mock_migrator(mock_settings_migrator, settings_created=1, settings_updated=0, settings_unchanged=2, settings_failed=0) + + with patch.object(self.command, 'stdout', mock_stdout): + with pytest.raises(SystemExit) as exc_info: + self.command.handle(**self.options_basic_auth_full_send()) + # Should exit with code 0 for success + assert exc_info.value.code == 0 + + # Verify gateway client was created with correct parameters + mock_gateway_client.assert_called_once_with( + base_url='https://gateway.example.com', username='testuser', password='testpass', skip_verify=True, command=self.command + ) + + # Verify all migrators were created + for mock_migrator in 
mock_migrators.values(): + mock_migrator.assert_called_once_with(mock_client_instance, self.command, force=False) + + mock_settings_migrator.assert_called_once_with(mock_client_instance, self.command, force=False) + + # Verify output contains success messages + output = mock_stdout.getvalue() + + self.assertIn('HTTP Basic Auth: true', output) + self.assertIn('Successfully connected to Gateway', output) + self.assertIn('Migration Summary', output) + self.assertIn('authenticators', output) + self.assertIn('mappers', output) + self.assertIn('settings', output) + + @patch.dict(os.environ, {'GATEWAY_SKIP_VERIFY': 'false'}, clear=True) # Ensure verify_https=True + @patch('awx.main.management.commands.import_auth_config_to_gateway.create_api_client') + @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClientSVCToken') + @patch('awx.main.management.commands.import_auth_config_to_gateway.urlparse') + @patch('awx.main.management.commands.import_auth_config_to_gateway.urlunparse') + @patch('sys.stdout', new_callable=StringIO) + def test_handle_service_token_success(self, mock_stdout, mock_urlunparse, mock_urlparse, mock_gateway_client_svc, mock_create_api_client): + """Test successful execution with service token.""" + # Mock resource API client + mock_resource_client = Mock() + mock_resource_client.base_url = 'https://gateway.example.com/api/v1' + mock_resource_client.jwt_user_id = 'test-user' + mock_resource_client.jwt_expiration = '2024-12-31' + mock_resource_client.verify_https = True + mock_response = Mock() + mock_response.status_code = 200 + mock_resource_client.get_service_metadata.return_value = mock_response + mock_create_api_client.return_value = mock_resource_client + + # Mock URL parsing + mock_parsed = Mock() + mock_parsed.scheme = 'https' + mock_parsed.netloc = 'gateway.example.com' + mock_urlparse.return_value = mock_parsed + mock_urlunparse.return_value = 'https://gateway.example.com/' + + # Mock gateway client context manager + mock_client_instance = Mock() + mock_gateway_client_svc.return_value.__enter__.return_value = mock_client_instance + mock_gateway_client_svc.return_value.__exit__.return_value = None + + with patch.object(self.command, 'stdout', mock_stdout): + with patch('sys.exit'): + self.command.handle(**self.options_svc_token_skip_all()) + # Should call sys.exit(0) for success, but may not due to test setup + # Just verify the command completed without raising an exception + + # Verify resource API client was created and configured + mock_create_api_client.assert_called_once() + self.assertTrue(mock_resource_client.verify_https) # Should be True when GATEWAY_SKIP_VERIFY='false' + mock_resource_client.get_service_metadata.assert_called_once() + + # Verify service token client was created + mock_gateway_client_svc.assert_called_once_with(resource_api_client=mock_resource_client, command=self.command) + + # Verify output contains service token messages + output = mock_stdout.getvalue() + self.assertIn('Gateway Service Token: true', output) + self.assertIn('Connection Validated: True', output) + self.assertIn('No authentication configurations found to migrate.', output) + + @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'}) + @patch.multiple( + 'awx.main.management.commands.import_auth_config_to_gateway', + GitHubMigrator=DEFAULT, + OIDCMigrator=DEFAULT, + SAMLMigrator=DEFAULT, + AzureADMigrator=DEFAULT, + LDAPMigrator=DEFAULT, + RADIUSMigrator=DEFAULT, + 
TACACSMigrator=DEFAULT, + GoogleOAuth2Migrator=DEFAULT, + SettingsMigrator=DEFAULT, + ) + @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') + @patch('sys.stdout', new_callable=StringIO) + def test_skip_flags_prevent_authenticator_individual_and_settings_migration(self, mock_stdout, mock_gateway_client, **mock_migrators): + """Test that skip flags prevent corresponding migrators from being created.""" + + # Mock gateway client context manager + mock_client_instance = Mock() + mock_gateway_client.return_value.__enter__.return_value = mock_client_instance + mock_gateway_client.return_value.__exit__.return_value = None + + with patch.object(self.command, 'stdout', mock_stdout): + with patch('sys.exit'): + self.command.handle(**self.options_basic_auth_skip_all_individual()) + # Should call sys.exit(0) for success, but may not due to test setup + # Just verify the command completed without raising an exception + + # Verify no migrators were created + for mock_migrator in mock_migrators.values(): + mock_migrator.assert_not_called() + + # Verify warning message about no configurations + output = mock_stdout.getvalue() + self.assertIn('No authentication configurations found to migrate.', output) + self.assertIn('Settings migration will not execute.', output) + self.assertIn('NO MIGRATIONS WILL EXECUTE.', output) + + @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'}) + @patch.multiple( + 'awx.main.management.commands.import_auth_config_to_gateway', + GitHubMigrator=DEFAULT, + OIDCMigrator=DEFAULT, + SAMLMigrator=DEFAULT, + AzureADMigrator=DEFAULT, + LDAPMigrator=DEFAULT, + RADIUSMigrator=DEFAULT, + TACACSMigrator=DEFAULT, + GoogleOAuth2Migrator=DEFAULT, + ) + @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') + @patch('sys.stdout', new_callable=StringIO) + def test_skip_flags_prevent_authenticator_migration(self, mock_stdout, mock_gateway_client, **mock_migrators): + """Test that skip flags prevent corresponding migrators from being created.""" + + # Mock gateway client context manager + mock_client_instance = Mock() + mock_gateway_client.return_value.__enter__.return_value = mock_client_instance + mock_gateway_client.return_value.__exit__.return_value = None + + options = self.options_basic_auth_full_send() + options['skip_all_authenticators'] = True + + with patch.object(self.command, 'stdout', mock_stdout): + with pytest.raises(SystemExit) as exc_info: + self.command.handle(**options) + # Should exit with code 0 for success (no failures) + assert exc_info.value.code == 0 + + # Verify no migrators were created + for mock_migrator in mock_migrators.values(): + mock_migrator.assert_not_called() + + # Verify warning message about no configurations + output = mock_stdout.getvalue() + self.assertIn('No authentication configurations found to migrate.', output) + self.assertNotIn('Settings migration will not execute.', output) + self.assertNotIn('NO MIGRATIONS WILL EXECUTE.', output) + + @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'}) + @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') + @patch('sys.stdout', new_callable=StringIO) + def test_handle_gateway_api_error(self, mock_stdout, mock_gateway_client): + """Test handling of GatewayAPIError exceptions.""" + # Mock gateway client to raise GatewayAPIError + 
mock_gateway_client.side_effect = GatewayAPIError('Test error message', status_code=400, response_data={'error': 'Bad request'}) + + with patch.object(self.command, 'stdout', mock_stdout): + with pytest.raises(SystemExit) as exc_info: + self.command.handle(**self.options_basic_auth_full_send()) + # Should exit with code 1 for errors + assert exc_info.value.code == 1 + + # Verify error message output + output = mock_stdout.getvalue() + self.assertIn('Gateway API Error: Test error message', output) + self.assertIn('Status Code: 400', output) + self.assertIn("Response: {'error': 'Bad request'}", output) + + @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'}) + @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') + @patch('sys.stdout', new_callable=StringIO) + def test_handle_unexpected_error(self, mock_stdout, mock_gateway_client): + """Test handling of unexpected exceptions.""" + # Mock gateway client to raise unexpected error + mock_gateway_client.side_effect = ValueError('Unexpected error') + + with patch.object(self.command, 'stdout', mock_stdout): + with pytest.raises(SystemExit) as exc_info: + self.command.handle(**self.options_basic_auth_full_send()) + # Should exit with code 1 for errors + assert exc_info.value.code == 1 + + # Verify error message output + output = mock_stdout.getvalue() + self.assertIn('Unexpected error during migration: Unexpected error', output) + + @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'}) + @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') + @patch('awx.main.management.commands.import_auth_config_to_gateway.GitHubMigrator') + @patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator') + @patch('sys.stdout', new_callable=StringIO) + def test_force_flag_passed_to_migrators(self, mock_stdout, mock_github, mock_settings_migrator, mock_gateway_client): + """Test that force flag is properly passed to migrators.""" + # Mock gateway client context manager + mock_client_instance = Mock() + mock_gateway_client.return_value.__enter__.return_value = mock_client_instance + mock_gateway_client.return_value.__exit__.return_value = None + + # Mock migrator + self.create_mock_migrator(mock_github, authenticator_type="GitHub", created=0, mappers_created=2) + self.create_mock_migrator( + mock_settings_migrator, authenticator_type="Settings", settings_created=0, settings_updated=2, settings_unchanged=0, settings_failed=0 + ) + + options = self.options_basic_auth_skip_all_individual() + options['force'] = True + options['skip_github'] = False + options['skip_settings'] = False + + with patch.object(self.command, 'stdout', mock_stdout): + with pytest.raises(SystemExit) as exc_info: + self.command.handle(**options) + # Should exit with code 0 for success + assert exc_info.value.code == 0 + + # Verify migrator was created with force=True + mock_github.assert_called_once_with(mock_client_instance, self.command, force=True) + + # Verify settings migrator was created with force=True + mock_settings_migrator.assert_called_once_with(mock_client_instance, self.command, force=True) + + @patch('sys.stdout', new_callable=StringIO) + def test_print_export_summary(self, mock_stdout): + """Test the _print_export_summary method.""" + result = { + 'created': 2, + 'updated': 1, + 'unchanged': 3, + 'failed': 0, + 'mappers_created': 5, + 
'mappers_updated': 2, + 'mappers_failed': 1, + } + + with patch.object(self.command, 'stdout', mock_stdout): + self.command._print_export_summary('SAML', result) + + output = mock_stdout.getvalue() + self.assertIn('--- SAML Export Summary ---', output) + self.assertIn('Authenticators created: 2', output) + self.assertIn('Authenticators updated: 1', output) + self.assertIn('Authenticators unchanged: 3', output) + self.assertIn('Authenticators failed: 0', output) + self.assertIn('Mappers created: 5', output) + self.assertIn('Mappers updated: 2', output) + self.assertIn('Mappers failed: 1', output) + + @patch('sys.stdout', new_callable=StringIO) + def test_print_export_summary_settings(self, mock_stdout): + """Test the _print_export_summary method.""" + result = { + 'settings_created': 2, + 'settings_updated': 1, + 'settings_unchanged': 3, + 'settings_failed': 0, + } + + with patch.object(self.command, 'stdout', mock_stdout): + self.command._print_export_summary('Settings', result) + + output = mock_stdout.getvalue() + self.assertIn('--- Settings Export Summary ---', output) + self.assertIn('Settings created: 2', output) + self.assertIn('Settings updated: 1', output) + self.assertIn('Settings unchanged: 3', output) + self.assertIn('Settings failed: 0', output) + + @patch('sys.stdout', new_callable=StringIO) + def test_print_export_summary_missing_keys(self, mock_stdout): + """Test _print_export_summary handles missing keys gracefully.""" + result = { + 'created': 1, + 'updated': 2, + # Missing other keys + } + + with patch.object(self.command, 'stdout', mock_stdout): + self.command._print_export_summary('LDAP', result) + + output = mock_stdout.getvalue() + self.assertIn('--- LDAP Export Summary ---', output) + self.assertIn('Authenticators created: 1', output) + self.assertIn('Authenticators updated: 2', output) + self.assertIn('Authenticators unchanged: 0', output) # Default value + self.assertIn('Mappers created: 0', output) # Default value + + @patch.dict(os.environ, {'GATEWAY_BASE_URL': 'https://gateway.example.com', 'GATEWAY_USER': 'testuser', 'GATEWAY_PASSWORD': 'testpass'}) + @patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') + @patch('awx.main.management.commands.import_auth_config_to_gateway.GitHubMigrator') + @patch('awx.main.management.commands.import_auth_config_to_gateway.OIDCMigrator') + @patch('sys.stdout', new_callable=StringIO) + def test_total_results_accumulation(self, mock_stdout, mock_oidc, mock_github, mock_gateway_client): + """Test that results from multiple migrators are properly accumulated.""" + # Mock gateway client context manager + mock_client_instance = Mock() + mock_gateway_client.return_value.__enter__.return_value = mock_client_instance + mock_gateway_client.return_value.__exit__.return_value = None + + # Mock migrators with different results + self.create_mock_migrator(mock_github, authenticator_type="GitHub", created=1, mappers_created=2) + self.create_mock_migrator(mock_oidc, authenticator_type="OIDC", created=0, updated=1, unchanged=1, mappers_created=1, mappers_updated=1) + + options = self.options_basic_auth_skip_all_individual() + options['skip_oidc'] = False + options['skip_github'] = False + + with patch.object(self.command, 'stdout', mock_stdout): + with pytest.raises(SystemExit) as exc_info: + self.command.handle(**options) + # Should exit with code 0 for success + assert exc_info.value.code == 0 + + # Verify total results are accumulated correctly + output = mock_stdout.getvalue() + self.assertIn('Total 
authenticators created: 1', output) # 1 + 0 + self.assertIn('Total authenticators updated: 1', output) # 0 + 1 + self.assertIn('Total authenticators unchanged: 1', output) # 0 + 1 + self.assertIn('Total authenticators failed: 0', output) # 0 + 0 + self.assertIn('Total mappers created: 3', output) # 2 + 1 + self.assertIn('Total mappers updated: 1', output) # 0 + 1 + self.assertIn('Total mappers failed: 0', output) # 0 + 0 + + @patch('sys.stdout', new_callable=StringIO) + def test_environment_variable_parsing(self, mock_stdout): + """Test that environment variables are parsed correctly.""" + test_cases = [ + ('true', True), + ('1', True), + ('yes', True), + ('on', True), + ('TRUE', True), + ('false', False), + ('0', False), + ('no', False), + ('off', False), + ('', False), + ('random', False), + ] + + for env_value, expected in test_cases: + with patch.dict( + os.environ, + { + 'GATEWAY_BASE_URL': 'https://gateway.example.com', + 'GATEWAY_USER': 'testuser', + 'GATEWAY_PASSWORD': 'testpass', + 'GATEWAY_SKIP_VERIFY': env_value, + }, + ): + with patch('awx.main.management.commands.import_auth_config_to_gateway.GatewayClient') as mock_gateway_client: + # Mock gateway client context manager + mock_client_instance = Mock() + mock_gateway_client.return_value.__enter__.return_value = mock_client_instance + mock_gateway_client.return_value.__exit__.return_value = None + + with patch.object(self.command, 'stdout', mock_stdout): + with patch('sys.exit'): + self.command.handle(**self.options_basic_auth_skip_all_individual()) + + # Verify gateway client was called with correct skip_verify value + mock_gateway_client.assert_called_once_with( + base_url='https://gateway.example.com', username='testuser', password='testpass', skip_verify=expected, command=self.command + ) + + # Reset for next iteration + mock_gateway_client.reset_mock() + mock_stdout.seek(0) + mock_stdout.truncate(0) + + @patch.dict(os.environ, {'GATEWAY_SKIP_VERIFY': 'false'}) + @patch('awx.main.management.commands.import_auth_config_to_gateway.create_api_client') + @patch('awx.main.management.commands.import_auth_config_to_gateway.urlparse') + @patch('awx.main.management.commands.import_auth_config_to_gateway.urlunparse') + @patch('awx.main.management.commands.import_auth_config_to_gateway.SettingsMigrator') + @patch('sys.stdout', new_callable=StringIO) + def test_service_token_connection_validation_failure(self, mock_stdout, mock_settings_migrator, mock_urlunparse, mock_urlparse, mock_create_api_client): + """Test that non-200 response from get_service_metadata causes error exit.""" + # Mock resource API client with failing response + mock_resource_client = Mock() + mock_resource_client.base_url = 'https://gateway.example.com/api/v1' + mock_resource_client.jwt_user_id = 'test-user' + mock_resource_client.jwt_expiration = '2024-12-31' + mock_resource_client.verify_https = True + mock_response = Mock() + mock_response.status_code = 401 # Simulate unauthenticated error + mock_resource_client.get_service_metadata.return_value = mock_response + mock_create_api_client.return_value = mock_resource_client + + # Mock URL parsing (needed for the service token flow) + mock_parsed = Mock() + mock_parsed.scheme = 'https' + mock_parsed.netloc = 'gateway.example.com' + mock_urlparse.return_value = mock_parsed + mock_urlunparse.return_value = 'https://gateway.example.com/' + + with patch.object(self.command, 'stdout', mock_stdout): + with pytest.raises(SystemExit) as exc_info: + self.command.handle(**self.options_svc_token_skip_all()) + # Should exit 
with code 1 for connection failure + assert exc_info.value.code == 1 + + # Verify error message is displayed + output = mock_stdout.getvalue() + self.assertIn( + 'Gateway Service Token is unable to connect to Gateway via the base URL https://gateway.example.com/. Recieved HTTP response code 401', output + ) + self.assertIn('Connection Validated: False', output) diff --git a/awx/main/tests/unit/models/test_jobs.py b/awx/main/tests/unit/models/test_jobs.py index f22255de31..ff1887f34e 100644 --- a/awx/main/tests/unit/models/test_jobs.py +++ b/awx/main/tests/unit/models/test_jobs.py @@ -125,9 +125,6 @@ def test_finish_job_fact_cache_clear(hosts, mocker, ref_time, tmpdir): for host in (hosts[0], hosts[2], hosts[3]): assert host.ansible_facts == {"a": 1, "b": 2} assert host.ansible_facts_modified == ref_time - - # Verify facts were cleared for host with deleted cache file - assert hosts[1].ansible_facts == {} assert hosts[1].ansible_facts_modified > ref_time # Current implementation skips the call entirely if hosts_to_update == [] diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 4ddbd5e5ce..a2c527cb04 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -871,6 +871,314 @@ class TestJobCredentials(TestJobExecution): assert f.read() == self.EXAMPLE_PRIVATE_KEY assert safe_env['ANSIBLE_NET_PASSWORD'] == HIDDEN_PASSWORD + def test_terraform_cloud_credentials(self, job, private_data_dir, mock_me): + terraform = CredentialType.defaults['terraform']() + hcl_config = ''' + backend "s3" { + bucket = "s3_sample_bucket" + key = "/tf_state/" + region = "us-east-1" + } + ''' + credential = Credential(pk=1, credential_type=terraform, inputs={'configuration': hcl_config}) + credential.inputs['configuration'] = encrypt_field(credential, 'configuration') + job.credentials.add(credential) + + env = {} + safe_env = {} + credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir) + + local_path = to_host_path(env['TF_BACKEND_CONFIG_FILE'], private_data_dir) + config = open(local_path, 'r').read() + assert config == hcl_config + + def test_terraform_gcs_backend_credentials(self, job, private_data_dir, mock_me): + terraform = CredentialType.defaults['terraform']() + hcl_config = ''' + backend "gcs" { + bucket = "gce_storage" + } + ''' + gce_backend_credentials = ''' + { + "type": "service_account", + "project_id": "sample", + "private_key_id": "eeeeeeeeeeeeeeeeeeeeeeeeeee", + "private_key": "-----BEGIN PRIVATE KEY-----\naaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n-----END PRIVATE KEY-----\n", + "client_email": "sample@sample.iam.gserviceaccount.com", + "client_id": "0123456789", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/cloud-content-robot%40sample.iam.gserviceaccount.com", + } + ''' + credential = Credential(pk=1, credential_type=terraform, inputs={'configuration': hcl_config, 'gce_credentials': gce_backend_credentials}) + credential.inputs['configuration'] = encrypt_field(credential, 'configuration') + credential.inputs['gce_credentials'] = encrypt_field(credential, 'gce_credentials') + job.credentials.add(credential) + + env = {} + safe_env = {} + credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir) + + local_path = 
to_host_path(env['TF_BACKEND_CONFIG_FILE'], private_data_dir) + config = open(local_path, 'r').read() + assert config == hcl_config + + credentials_path = to_host_path(env['GOOGLE_BACKEND_CREDENTIALS'], private_data_dir) + credentials = open(credentials_path, 'r').read() + assert credentials == gce_backend_credentials + + def test_custom_environment_injectors_with_jinja_syntax_error(self, private_data_dir, mock_me): + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]}, + injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'}) + + with pytest.raises(jinja2.exceptions.UndefinedError): + credential.credential_type.inject_credential(credential, {}, {}, [], private_data_dir) + + def test_custom_environment_injectors(self, private_data_dir, mock_me): + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]}, + injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token}}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'}) + + env = {} + credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir) + + assert env['MY_CLOUD_API_TOKEN'] == 'ABC123' + + def test_custom_environment_injectors_with_boolean_env_var(self, private_data_dir, mock_me): + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]}, + injectors={'env': {'TURBO_BUTTON': '{{turbo_button}}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True}) + + env = {} + credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir) + + assert env['TURBO_BUTTON'] == str(True) + + def test_custom_environment_injectors_with_reserved_env_var(self, private_data_dir, job, mock_me): + task = jobs.RunJob() + task.instance = job + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]}, + injectors={'env': {'JOB_ID': 'reserved'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'}) + job.credentials.add(credential) + + env = task.build_env(job, private_data_dir) + + assert env['JOB_ID'] == str(job.pk) + + def test_custom_environment_injectors_with_secret_field(self, private_data_dir, mock_me): + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]}, + injectors={'env': {'MY_CLOUD_PRIVATE_VAR': '{{password}}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'password': 'SUPER-SECRET-123'}) + credential.inputs['password'] = encrypt_field(credential, 'password') + + env = {} + safe_env = {} + credential.credential_type.inject_credential(credential, env, safe_env, [], private_data_dir) + + assert env['MY_CLOUD_PRIVATE_VAR'] == 'SUPER-SECRET-123' + assert 'SUPER-SECRET-123' not in safe_env.values() + assert safe_env['MY_CLOUD_PRIVATE_VAR'] == HIDDEN_PASSWORD + + def test_custom_environment_injectors_with_extra_vars(self, private_data_dir, job, mock_me): + task = jobs.RunJob() + some_cloud 
= CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]}, + injectors={'extra_vars': {'api_token': '{{api_token}}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'}) + job.credentials.add(credential) + + args = task.build_args(job, private_data_dir, {}) + credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir) + extra_vars = parse_extra_vars(args, private_data_dir) + + assert extra_vars["api_token"] == "ABC123" + assert hasattr(extra_vars["api_token"], '__UNSAFE__') + + def test_custom_environment_injectors_with_boolean_extra_vars(self, job, private_data_dir, mock_me): + task = jobs.RunJob() + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]}, + injectors={'extra_vars': {'turbo_button': '{{turbo_button}}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True}) + job.credentials.add(credential) + + args = task.build_args(job, private_data_dir, {}) + credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir) + extra_vars = parse_extra_vars(args, private_data_dir) + + assert extra_vars["turbo_button"] == "True" + + def test_custom_environment_injectors_with_nested_extra_vars(self, private_data_dir, job, mock_me): + task = jobs.RunJob() + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'host', 'label': 'Host', 'type': 'string'}]}, + injectors={'extra_vars': {'auth': {'host': '{{host}}'}}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'host': 'example.com'}) + job.credentials.add(credential) + + args = task.build_args(job, private_data_dir, {}) + credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir) + extra_vars = parse_extra_vars(args, private_data_dir) + + assert extra_vars["auth"]["host"] == "example.com" + + def test_custom_environment_injectors_with_templated_extra_vars_key(self, private_data_dir, job, mock_me): + task = jobs.RunJob() + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'environment', 'label': 'Environment', 'type': 'string'}, {'id': 'host', 'label': 'Host', 'type': 'string'}]}, + injectors={'extra_vars': {'{{environment}}_auth': {'host': '{{host}}'}}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'environment': 'test', 'host': 'example.com'}) + job.credentials.add(credential) + + args = task.build_args(job, private_data_dir, {}) + credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir) + extra_vars = parse_extra_vars(args, private_data_dir) + + assert extra_vars["test_auth"]["host"] == "example.com" + + def test_custom_environment_injectors_with_complicated_boolean_template(self, job, private_data_dir, mock_me): + task = jobs.RunJob() + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]}, + injectors={'extra_vars': {'turbo_button': '{% if turbo_button %}FAST!{% else %}SLOW!{% endif %}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'turbo_button': True}) + job.credentials.add(credential) + + args = task.build_args(job, 
private_data_dir, {}) + credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir) + extra_vars = parse_extra_vars(args, private_data_dir) + + assert extra_vars["turbo_button"] == "FAST!" + + def test_custom_environment_injectors_with_secret_extra_vars(self, job, private_data_dir, mock_me): + """ + extra_vars that contain secret field values should be censored in the DB + """ + task = jobs.RunJob() + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]}, + injectors={'extra_vars': {'password': '{{password}}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'password': 'SUPER-SECRET-123'}) + credential.inputs['password'] = encrypt_field(credential, 'password') + job.credentials.add(credential) + + args = task.build_args(job, private_data_dir, {}) + credential.credential_type.inject_credential(credential, {}, {}, args, private_data_dir) + + extra_vars = parse_extra_vars(args, private_data_dir) + assert extra_vars["password"] == "SUPER-SECRET-123" + + def test_custom_environment_injectors_with_file(self, private_data_dir, mock_me): + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]}, + injectors={'file': {'template': '[mycloud]\n{{api_token}}'}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}}, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'api_token': 'ABC123'}) + + env = {} + credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir) + + path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir) + assert open(path, 'r').read() == '[mycloud]\nABC123' + + def test_custom_environment_injectors_with_unicode_content(self, private_data_dir, mock_me): + value = 'Iñtërnâtiônàlizætiøn' + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': []}, + injectors={'file': {'template': value}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}}, + ) + credential = Credential( + pk=1, + credential_type=some_cloud, + ) + + env = {} + credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir) + + path = to_host_path(env['MY_CLOUD_INI_FILE'], private_data_dir) + assert open(path, 'r').read() == value + + def test_custom_environment_injectors_with_files(self, private_data_dir, mock_me): + some_cloud = CredentialType( + kind='cloud', + name='SomeCloud', + managed=False, + inputs={'fields': [{'id': 'cert', 'label': 'Certificate', 'type': 'string'}, {'id': 'key', 'label': 'Key', 'type': 'string'}]}, + injectors={ + 'file': {'template.cert': '[mycert]\n{{cert}}', 'template.key': '[mykey]\n{{key}}'}, + 'env': {'MY_CERT_INI_FILE': '{{tower.filename.cert}}', 'MY_KEY_INI_FILE': '{{tower.filename.key}}'}, + }, + ) + credential = Credential(pk=1, credential_type=some_cloud, inputs={'cert': 'CERT123', 'key': 'KEY123'}) + + env = {} + credential.credential_type.inject_credential(credential, env, {}, [], private_data_dir) + + cert_path = to_host_path(env['MY_CERT_INI_FILE'], private_data_dir) + key_path = to_host_path(env['MY_KEY_INI_FILE'], private_data_dir) + assert open(cert_path, 'r').read() == '[mycert]\nCERT123' + assert open(key_path, 'r').read() == '[mykey]\nKEY123' + def test_multi_cloud(self, private_data_dir, mock_me): gce = CredentialType.defaults['gce']() gce_credential = 
Credential(pk=1, credential_type=gce, inputs={'username': 'bob', 'project': 'some-project', 'ssh_key_data': self.EXAMPLE_PRIVATE_KEY}) diff --git a/awx/main/tests/unit/utils/test_auth_migration.py b/awx/main/tests/unit/utils/test_auth_migration.py new file mode 100644 index 0000000000..e00bdb839b --- /dev/null +++ b/awx/main/tests/unit/utils/test_auth_migration.py @@ -0,0 +1,1137 @@ +""" +Unit tests for auth migration utilities. +""" + +import pytest +import re +from awx.main.utils.gateway_mapping import ( + org_map_to_gateway_format, + team_map_to_gateway_format, + role_map_to_gateway_format, + process_sso_user_list, + process_ldap_user_list, +) + + +def get_org_mappers(org_map, start_order=1, auth_type='sso'): + """Helper function to get just the mappers from org_map_to_gateway_format.""" + result, _ = org_map_to_gateway_format(org_map, start_order, auth_type=auth_type) + return result + + +def get_team_mappers(team_map, start_order=1, auth_type='sso'): + """Helper function to get just the mappers from team_map_to_gateway_format.""" + result, _ = team_map_to_gateway_format(team_map, start_order, auth_type=auth_type) + return result + + +def get_role_mappers(role_map, start_order=1): + """Helper function to get just the mappers from role_map_to_gateway_format.""" + result, _ = role_map_to_gateway_format(role_map, start_order) + return result + + +class TestProcessSSOUserList: + """Tests for the process_sso_user_list function (consolidated version).""" + + def test_false_boolean(self): + """Test that False creates 'Never Allow' trigger.""" + result = process_sso_user_list(False) + + assert result["name"] == "Never Allow" + assert result["trigger"] == {"never": {}} + + def test_true_boolean(self): + """Test that True creates 'Always Allow' trigger.""" + result = process_sso_user_list(True) + + assert result["name"] == "Always Allow" + assert result["trigger"] == {"always": {}} + + def test_false_string_list(self): + """Test that ['false'] creates 'Never Allow' trigger.""" + result = process_sso_user_list(["false"]) + + assert result["name"] == "Never Allow" + assert result["trigger"] == {"never": {}} + + def test_true_string_list(self): + """Test that ['true'] creates 'Always Allow' trigger.""" + result = process_sso_user_list(["true"]) + + assert result["name"] == "Always Allow" + assert result["trigger"] == {"always": {}} + + def test_string_user_list(self): + """Test that regular string users are processed correctly.""" + result = process_sso_user_list(["testuser"]) + + assert result["name"] == "U:1" + assert result["trigger"]["attributes"]["username"]["equals"] == "testuser" + + def test_email_user_list(self): + """Test that email addresses are processed correctly.""" + result = process_sso_user_list(["test@example.com"]) + + assert result["name"] == "E:1" + assert result["trigger"]["attributes"]["email"]["equals"] == "test@example.com" + + def test_mixed_string_list(self): + """Test that mixed list with 'true', 'false', and regular users works correctly.""" + result = process_sso_user_list(["true", "testuser", "false"]) + + # Should consolidate all usernames and show count + assert result["name"] == "U:3" + assert result["trigger"]["attributes"]["username"]["in"] == ["true", "testuser", "false"] + + def test_custom_email_username_attrs(self): + """Test that custom email and username attributes work correctly.""" + result = process_sso_user_list(["test@example.com"], email_attr='custom_email', username_attr='custom_username') + + assert 
result["trigger"]["attributes"]["custom_email"]["equals"] == "test@example.com" + + def test_regex_pattern(self): + """Test that regex patterns create both username and email matches.""" + pattern = re.compile(r"^admin.*@example\.com$") + result = process_sso_user_list([pattern]) + + assert result["name"] == "UP:1 EP:1" + assert result["trigger"]["attributes"]["username"]["matches"] == "/^admin.*@example\\.com$/" + assert result["trigger"]["attributes"]["email"]["matches"] == "/^admin.*@example\\.com$/" + + def test_multiple_emails(self): + """Test that multiple emails use count-based names.""" + emails = [f"user{i}@example.com" for i in range(10)] + result = process_sso_user_list(emails) + + assert result["name"] == "E:10" + assert result["trigger"]["attributes"]["email"]["in"] == emails + + def test_multiple_usernames(self): + """Test that multiple usernames use count-based names.""" + usernames = [f"user{i}" for i in range(8)] + result = process_sso_user_list(usernames) + + assert result["name"] == "U:8" + assert result["trigger"]["attributes"]["username"]["in"] == usernames + + def test_mixed_emails_and_usernames(self): + """Test mixed emails and usernames use count-based names.""" + emails = ["user1@example.com", "user2@example.com"] + usernames = ["admin1", "admin2", "admin3"] + users = emails + usernames + result = process_sso_user_list(users) + + assert result["name"] == "E:2 U:3" + assert result["trigger"]["attributes"]["email"]["in"] == emails + assert result["trigger"]["attributes"]["username"]["in"] == usernames + + def test_multiple_regex_patterns(self): + """Test that multiple regex patterns use count-based names.""" + patterns = [re.compile(f"pattern{i}") for i in range(5)] + result = process_sso_user_list(patterns) + + assert result["name"] == "UP:5 EP:5" + + def test_empty_list(self): + """Test that empty list creates default trigger.""" + result = process_sso_user_list([]) + assert result["name"] == "Mixed Rules" + assert result["trigger"]["attributes"]["join_condition"] == "or" + + +class TestProcessLdapUserList: + """Tests for the process_ldap_user_list function.""" + + def test_none_input(self): + """Test that None creates no triggers (empty list).""" + result = process_ldap_user_list(None) + assert len(result) == 0 + + def test_none_in_list(self): + """Test that [None] creates no triggers (empty list).""" + result = process_ldap_user_list([None]) + assert len(result) == 0 + + def test_true_boolean(self): + """Test that True creates 'Always Allow' trigger.""" + result = process_ldap_user_list(True) + assert len(result) == 1 + assert result[0]["name"] == "Always Allow" + assert result[0]["trigger"] == {"always": {}} + + def test_true_boolean_in_list(self): + """Test that [True] creates 'Always Allow' trigger.""" + result = process_ldap_user_list([True]) + assert len(result) == 1 + assert result[0]["name"] == "Always Allow" + assert result[0]["trigger"] == {"always": {}} + + def test_false_boolean(self): + """Test that False creates 'Never Allow' trigger.""" + result = process_ldap_user_list(False) + assert len(result) == 1 + assert result[0]["name"] == "Never Allow" + assert result[0]["trigger"] == {"never": {}} + + def test_false_boolean_in_list(self): + """Test that [False] creates 'Never Allow' trigger.""" + result = process_ldap_user_list([False]) + assert len(result) == 1 + assert result[0]["name"] == "Never Allow" + assert result[0]["trigger"] == {"never": {}} + + def test_single_string_group(self): + """Test that a single string creates group match trigger.""" + 
result = process_ldap_user_list("admin_group") + assert len(result) == 1 + assert result[0]["name"] == "Match User Groups" + assert result[0]["trigger"]["groups"]["has_or"] == ["admin_group"] + + def test_single_string_group_in_list(self): + """Test that a single string in list creates group match trigger.""" + result = process_ldap_user_list(["admin_group"]) + assert len(result) == 1 + assert result[0]["name"] == "Match User Groups" + assert result[0]["trigger"]["groups"]["has_or"] == ["admin_group"] + + def test_multiple_groups(self): + """Test that multiple groups create single trigger with all groups.""" + result = process_ldap_user_list(["group1", "group2", "group3"]) + assert len(result) == 1 + assert result[0]["name"] == "Match User Groups" + assert result[0]["trigger"]["groups"]["has_or"] == ["group1", "group2", "group3"] + + def test_mixed_types_with_none(self): + """Test that mixed types including None are handled correctly.""" + result = process_ldap_user_list(["group1", None, "group2"]) + assert len(result) == 1 + assert result[0]["name"] == "Match User Groups" + assert result[0]["trigger"]["groups"]["has_or"] == ["group1", None, "group2"] + + def test_mixed_types_with_boolean_string(self): + """Test that boolean values mixed with strings are handled correctly.""" + result = process_ldap_user_list(["group1", False, "group2"]) + assert len(result) == 1 + assert result[0]["name"] == "Match User Groups" + assert result[0]["trigger"]["groups"]["has_or"] == ["group1", False, "group2"] + + def test_empty_list(self): + """Test that empty list creates no triggers.""" + result = process_ldap_user_list([]) + assert len(result) == 0 + + def test_numeric_values(self): + """Test that numeric values are handled correctly.""" + result = process_ldap_user_list([123, "group1"]) + assert len(result) == 1 + assert result[0]["name"] == "Match User Groups" + assert result[0]["trigger"]["groups"]["has_or"] == [123, "group1"] + + +class TestOrgMapToGatewayFormat: + + def test_none_input(self): + """Test that None input returns empty list.""" + result, next_order = org_map_to_gateway_format(None) + assert result == [] + assert next_order == 1 # Default start_order + + def test_empty_dict(self): + """Test that empty dict returns empty list.""" + result, next_order = org_map_to_gateway_format({}) + assert result == [] + assert next_order == 1 + + def test_order_increments_correctly(self): + """Test that order values increment correctly.""" + org_map = {"myorg": {"admins": True, "users": True}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 2 + assert result[0]["order"] == 1 + assert result[1]["order"] == 2 + + def test_org_with_admin_true(self): + """Test organization with admin access set to True.""" + org_map = {"myorg": {"admins": True}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - Admins Always Allow" + assert mapping["map_type"] == "organization" + assert mapping["organization"] == "myorg" + assert mapping["team"] is None + assert mapping["role"] == "Organization Admin" + assert mapping["revoke"] is False + assert mapping["order"] == 1 + assert mapping["triggers"] == {"always": {}} + + def test_org_with_admin_false(self): + """Test organization with admin access set to False.""" + org_map = {"myorg": {"admins": False}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - Admins Never Allow" + 
assert mapping["triggers"] == {"never": {}} + assert mapping["role"] == "Organization Admin" + + def test_org_with_admin_false_string(self): + """Test organization with admin access set to ['false'].""" + org_map = {"myorg": {"admins": ["false"]}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - Admins Never Allow" + assert mapping["triggers"] == {"never": {}} + assert mapping["role"] == "Organization Admin" + + def test_org_with_users_true_string(self): + """Test organization with users access set to ['true'].""" + org_map = {"myorg": {"users": ["true"]}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - Users Always Allow" + assert mapping["triggers"] == {"always": {}} + assert mapping["role"] == "Organization Member" + + def test_org_with_users_true(self): + """Test organization with users access set to True.""" + org_map = {"myorg": {"users": True}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - Users Always Allow" + assert mapping["triggers"] == {"always": {}} + assert mapping["role"] == "Organization Member" + + def test_org_with_admin_string(self): + """Test organization with admin access set to a specific group.""" + org_map = {"myorg": {"admins": "admin-username"}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - Admins U:1" + assert mapping["triggers"] == {"attributes": {"join_condition": "or", "username": {"equals": "admin-username"}}} + assert mapping["role"] == "Organization Admin" + + def test_org_with_admin_list(self): + """Test organization with admin access set to multiple groups.""" + org_map = {"myorg": {"admins": ["admin-username1", "admin-username2"]}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - Admins U:2" + assert mapping["triggers"]["attributes"]["username"]["in"] == ["admin-username1", "admin-username2"] + assert mapping["order"] == 1 + + def test_org_with_email_detection(self): + """Test that email addresses are correctly identified and handled.""" + org_map = {"myorg": {"users": ["user@example.com", "admin@test.org", "not-an-email"]}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + mapping = result[0] + + # Should consolidate emails and usernames in one mapper + assert mapping["name"] == "myorg - Users E:2 U:1" + + # Should have both email and username attributes + assert mapping["triggers"]["attributes"]["email"]["in"] == ["user@example.com", "admin@test.org"] + assert mapping["triggers"]["attributes"]["username"]["equals"] == "not-an-email" + assert mapping["triggers"]["attributes"]["join_condition"] == "or" + + def test_org_with_remove_flags(self): + """Test organization with remove flags.""" + org_map = {"myorg": {"admins": True, "users": ["user-group"], "remove_admins": True, "remove_users": True}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 2 + assert result[0]["revoke"] is True # admin mapping should have revoke=True + assert result[1]["revoke"] is True # user mapping should have revoke=True + + def test_org_with_custom_email_username_attrs(self): + """Test org mapping with custom email and username attributes.""" + org_map = {"myorg": {"admins": 
["test@example.com"]}} + + result, _ = org_map_to_gateway_format(org_map, email_attr='custom_email', username_attr='custom_username') + + assert len(result) == 1 + mapping = result[0] + assert mapping["triggers"]["attributes"]["custom_email"]["equals"] == "test@example.com" + + def test_org_with_regex_pattern_objects(self): + """Test org mapping with actual re.Pattern objects.""" + regex_str = "^admin.*@example\\.com$" + + org_map = {"myorg": {"users": [re.compile(regex_str)]}} + + result, _ = org_map_to_gateway_format(org_map) + + # Should create 1 consolidated mapping with both username and email matches + assert len(result) == 1, f"Expected 1 item but got: {result}" + + mapping = result[0] + assert mapping["name"] == "myorg - Users UP:1 EP:1" + assert mapping["triggers"]["attributes"]["username"]["matches"] == f"/{regex_str}/" + assert mapping["triggers"]["attributes"]["email"]["matches"] == f"/{regex_str}/" + + def test_org_with_none_values_skipped(self): + """Test that entries with None values are skipped.""" + org_map = {"myorg": {"admins": None, "users": True}} + + result, _ = org_map_to_gateway_format(org_map) + + assert len(result) == 1 + assert result[0]["role"] == "Organization Member" # Only users mapping should be present + + def test_org_with_start_order_parameter(self): + """Test that start_order parameter works correctly.""" + org_map = {"org1": {"admins": True}, "org2": {"users": ["username1", "username2"]}} + + result, next_order = org_map_to_gateway_format(org_map, start_order=10) + + # Should have 2 mappings total (1 for org1, 1 for org2) + assert len(result) == 2 + assert result[0]["order"] == 10 + assert result[1]["order"] == 11 + assert next_order == 12 + + def test_org_comprehensive_field_validation(self): + """Test comprehensive validation of all fields in org mappings.""" + org_map = {"test-org": {"admins": ["test-admin"], "remove_admins": False}} + + result, next_order = org_map_to_gateway_format(org_map, start_order=5) + + assert len(result) == 1 + mapping = result[0] + + # Validate all required fields and their types + assert isinstance(mapping["name"], str) + assert mapping["name"] == "test-org - Admins U:1" + + assert mapping["map_type"] == "organization" + assert mapping["order"] == 5 + assert mapping["authenticator"] == -1 + + assert isinstance(mapping["triggers"], dict) + assert "attributes" in mapping["triggers"] + + assert mapping["organization"] == "test-org" + assert mapping["team"] is None + assert mapping["role"] == "Organization Admin" + assert mapping["revoke"] is False + + # Validate next_order is incremented correctly + assert next_order == 6 + + def test_org_next_order_calculation(self): + """Test that next_order is calculated correctly in various scenarios.""" + # Test with no orgs + result, next_order = org_map_to_gateway_format({}) + assert next_order == 1 + + # Test with orgs that have no admins/users (should be skipped) + org_map = {"skipped": {"admins": None, "users": None}} + result, next_order = org_map_to_gateway_format(org_map) + assert len(result) == 0 + assert next_order == 1 + + # Test with single org + org_map = {"single": {"admins": True}} + result, next_order = org_map_to_gateway_format(org_map) + assert len(result) == 1 + assert next_order == 2 + + # Test with multiple mappings from single org - now consolidated into one + org_map = {"multi": {"users": ["user1", "user2"]}} + result, next_order = org_map_to_gateway_format(org_map) + assert len(result) == 1 + assert next_order == 2 + + def test_org_with_auth_type_sso(self): + 
"""Test org mapping with auth_type='sso' (default behavior).""" + org_map = {"myorg": {"users": ["testuser"]}} + + result, _ = org_map_to_gateway_format(org_map, auth_type='sso') + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - Users U:1" + assert mapping["triggers"]["attributes"]["username"]["equals"] == "testuser" + + def test_org_with_auth_type_ldap(self): + """Test org mapping with auth_type='ldap'.""" + org_map = {"myorg": {"users": ["admin_group"]}} + + result, _ = org_map_to_gateway_format(org_map, auth_type='ldap') + + assert len(result) == 1 + mapping = result[0] + assert "Match User Groups" in mapping["name"] + assert mapping["triggers"]["groups"]["has_or"] == ["admin_group"] + + def test_org_with_auth_type_ldap_boolean(self): + """Test org mapping with auth_type='ldap' and boolean values.""" + org_map = {"myorg": {"users": True, "admins": False}} + + result, _ = org_map_to_gateway_format(org_map, auth_type='ldap') + + assert len(result) == 2 + user_mapping = next(m for m in result if "Users" in m["name"]) + admin_mapping = next(m for m in result if "Admins" in m["name"]) + + assert "Always Allow" in user_mapping["name"] + assert user_mapping["triggers"]["always"] == {} + + assert "Never Allow" in admin_mapping["name"] + assert admin_mapping["triggers"]["never"] == {} + + +class TestTeamMapToGatewayFormat: + """Tests for team_map_to_gateway_format function.""" + + def test_none_input(self): + """Test that None input returns empty list.""" + result, next_order = team_map_to_gateway_format(None) + assert result == [] + assert next_order == 1 # Default start_order + + def test_empty_dict(self): + """Test that empty dict returns empty list.""" + result, next_order = team_map_to_gateway_format({}) + assert result == [] + assert next_order == 1 + + def test_order_increments_correctly(self): + """Test that order values increment correctly for teams.""" + team_map = {"team1": {"organization": "myorg", "users": True}, "team2": {"organization": "myorg", "users": True}} + + result, _ = team_map_to_gateway_format(team_map) + + assert len(result) == 2 + assert result[0]["order"] == 1 + assert result[1]["order"] == 2 + + def test_team_with_email_detection(self): + """Test that email addresses are correctly identified and handled.""" + team_map = {"email-team": {"organization": "myorg", "users": ["user@example.com", "admin@test.org", "not-an-email"]}} + + result, _ = team_map_to_gateway_format(team_map) + + # Should have 1 consolidated mapping + assert len(result) == 1 + mapping = result[0] + + # Should consolidate emails and usernames in one mapper + assert mapping["name"] == "myorg - email-team E:2 U:1" + + # Should have both email and username attributes + assert mapping["triggers"]["attributes"]["email"]["in"] == ["user@example.com", "admin@test.org"] + assert mapping["triggers"]["attributes"]["username"]["equals"] == "not-an-email" + assert mapping["triggers"]["attributes"]["join_condition"] == "or" + + def test_team_with_custom_email_username_attrs(self): + """Test team mapping with custom email and username attributes.""" + team_map = {"custom-team": {"organization": "myorg", "users": ["test@example.com"]}} + + result, _ = team_map_to_gateway_format(team_map, email_attr='custom_email', username_attr='custom_username') + + assert len(result) == 1 + mapping = result[0] + assert mapping["triggers"]["attributes"]["custom_email"]["equals"] == "test@example.com" + assert mapping["name"] == "myorg - custom-team E:1" + + def 
test_team_with_regex_pattern_objects(self): + """Test team mapping with actual re.Pattern objects.""" + regex_str = "^admin.*@example\\.com$" + + team_map = {"regex-team": {"organization": "myorg", "users": [re.compile(regex_str)]}} + + result, _ = team_map_to_gateway_format(team_map) + + # Should create 1 consolidated mapping with both username and email matches + assert len(result) == 1, f"Expected 1 item but got: {result}" + + mapping = result[0] + assert mapping["name"] == "myorg - regex-team UP:1 EP:1" + assert mapping["triggers"]["attributes"]["username"]["matches"] == f"/{regex_str}/" + assert mapping["triggers"]["attributes"]["email"]["matches"] == f"/{regex_str}/" + + def test_team_with_non_string_objects(self): + """Test team mapping with non-string objects that get converted.""" + + class CustomObject: + def __str__(self): + return "custom_object_string" + + custom_obj = CustomObject() + team_map = {"object-team": {"organization": "myorg", "users": [custom_obj, 12345]}} + + result, _ = team_map_to_gateway_format(team_map) + + # Should create 1 consolidated mapping with both username and email attributes + assert len(result) == 1 + + mapping = result[0] + # Both objects should be treated as usernames and emails + assert mapping["triggers"]["attributes"]["username"]["in"] == ["custom_object_string", "12345"] + assert mapping["triggers"]["attributes"]["email"]["in"] == ["custom_object_string", "12345"] + + def test_team_with_mixed_data_types(self): + """Test team mapping with mixed data types in users list.""" + regex_str = 'test.*' + + team_map = {"mixed-team": {"organization": "myorg", "users": ["string_user", "email@test.com", re.compile(regex_str), 999, True]}} + + result, _ = team_map_to_gateway_format(team_map) + + # Should create 1 consolidated mapping with all types handled + assert len(result) == 1 + + mapping = result[0] + # All types should be consolidated into one mapper name + assert mapping["name"] == "myorg - mixed-team E:3 U:3 UP:1 EP:1" + + # Verify trigger structure contains all the data types + triggers = mapping["triggers"]["attributes"] + assert "email" in triggers + assert "username" in triggers + + def test_team_with_start_order_parameter(self): + """Test that start_order parameter works correctly.""" + team_map = {"team1": {"organization": "org1", "users": True}, "team2": {"organization": "org2", "users": ["username1", "username2"]}} + + result, next_order = team_map_to_gateway_format(team_map, start_order=10) + + # First mapping should start at order 10 + assert result[0]["order"] == 10 + # Should increment properly + orders = [mapping["order"] for mapping in result] + assert orders == sorted(orders) # Should be in ascending order + assert min(orders) == 10 + # next_order should be one more than the last used order + assert next_order == max(orders) + 1 + + def test_team_with_empty_strings(self): + """Test team mapping with empty strings.""" + team_map = { + "": {"organization": "myorg", "users": [""]}, # Empty team name and user + "normal-team": {"organization": "", "users": True}, # Empty organization + } + + result, _ = team_map_to_gateway_format(team_map) + + # Should handle empty strings gracefully + assert len(result) == 2 + + # Check empty team name mapping + empty_team_mapping = [m for m in result if m["team"] == ""][0] + assert " - " in empty_team_mapping["name"] and "U:1" in empty_team_mapping["name"] + assert empty_team_mapping["team"] == "" + + # Check empty organization mapping + empty_org_mapping = [m for m in result if m["organization"] == 
""][0] + assert empty_org_mapping["organization"] == "" + assert "Always Allow" in empty_org_mapping["name"] + + def test_team_with_special_characters(self): + """Test team mapping with special characters in names.""" + team_map = { + "team-with-special!@#$%^&*()_+chars": {"organization": "org with spaces & symbols!", "users": ["user@domain.com", "user-with-special!chars"]} + } + + result, _ = team_map_to_gateway_format(team_map) + + assert len(result) == 1 + + # Verify special characters are preserved in names + mapping = result[0] + assert "team-with-special!@#$%^&*()_+chars" in mapping["name"] + assert "org with spaces & symbols!" in mapping["name"] + assert mapping["team"] == "team-with-special!@#$%^&*()_+chars" + assert mapping["organization"] == "org with spaces & symbols!" + + def test_team_with_unicode_characters(self): + """Test team mapping with unicode characters.""" + team_map = { + "チーム": { # Japanese for "team" + "organization": "組織", # Japanese for "organization" + "users": ["ユーザー@example.com", "用户"], # Mixed Japanese/Chinese + } + } + + result, _ = team_map_to_gateway_format(team_map) + + assert len(result) == 1 + + # Verify unicode characters are handled correctly + mapping = result[0] + assert "チーム" in mapping["name"] + assert "組織" in mapping["name"] + assert mapping["team"] == "チーム" + assert mapping["organization"] == "組織" + + def test_team_next_order_calculation(self): + """Test that next_order is calculated correctly in various scenarios.""" + # Test with no teams + result, next_order = team_map_to_gateway_format({}) + assert next_order == 1 + + # Test with teams that have no users (should be skipped) + team_map = {"skipped": {"organization": "org", "users": None}} + result, next_order = team_map_to_gateway_format(team_map) + assert len(result) == 0 + assert next_order == 1 + + # Test with single team + team_map = {"single": {"organization": "org", "users": True}} + result, next_order = team_map_to_gateway_format(team_map) + assert len(result) == 1 + assert next_order == 2 + + # Test with multiple mappings from single team - now consolidated into one + team_map = {"multi": {"organization": "org", "users": ["user1", "user2"]}} + result, next_order = team_map_to_gateway_format(team_map) + assert len(result) == 1 + assert next_order == 2 + + def test_team_large_dataset_performance(self): + """Test team mapping with a large number of teams and users.""" + # Create a large team map + team_map = {} + for i in range(100): + team_map[f"team_{i}"] = { + "organization": f"org_{i % 10}", # 10 different orgs + "users": [f"user_{j}@org_{i % 10}.com" for j in range(5)], # 5 users per team + } + + result, next_order = team_map_to_gateway_format(team_map) + + # Should create 100 mappings (1 per team, with consolidated users) + assert len(result) == 100 + + # Verify orders are sequential + orders = [mapping["order"] for mapping in result] + assert orders == list(range(1, 101)) + assert next_order == 101 + + # Verify all teams are represented + teams = {mapping["team"] for mapping in result} + assert len(teams) == 100 + + def test_team_mapping_field_validation(self): + """Test that all required fields are present and have correct types.""" + team_map = {"validation-team": {"organization": "test-org", "users": ["test@example.com"], "remove": True}} + + result, _ = team_map_to_gateway_format(team_map) + + for mapping in result: + # Check required fields exist + required_fields = ["name", "map_type", "order", "authenticator", "triggers", "organization", "team", "role", "revoke"] + for 
+    def test_team_mapping_field_validation(self):
+        """Test that all required fields are present and have correct types."""
+        team_map = {"validation-team": {"organization": "test-org", "users": ["test@example.com"], "remove": True}}
+
+        result, _ = team_map_to_gateway_format(team_map)
+
+        for mapping in result:
+            # Check required fields exist
+            required_fields = ["name", "map_type", "order", "authenticator", "triggers", "organization", "team", "role", "revoke"]
+            for field in required_fields:
+                assert field in mapping, f"Missing required field: {field}"
+
+            # Check field types
+            assert isinstance(mapping["name"], str)
+            assert isinstance(mapping["map_type"], str)
+            assert isinstance(mapping["order"], int)
+            assert isinstance(mapping["authenticator"], int)
+            assert isinstance(mapping["triggers"], dict)
+            assert isinstance(mapping["organization"], str)
+            assert isinstance(mapping["team"], str)
+            assert isinstance(mapping["role"], str)
+            assert isinstance(mapping["revoke"], bool)
+
+            # Check specific values
+            assert mapping["map_type"] == "team"
+            assert mapping["authenticator"] == -1
+            assert mapping["role"] == "Team Member"
+            assert mapping["revoke"] is True  # Because remove was set to True
+
+    def test_team_trigger_structure_validation(self):
+        """Test that trigger structures are correctly formatted."""
+        team_map = {"trigger-test": {"organization": "org", "users": ["test@example.com", "username"]}}
+
+        result, _ = team_map_to_gateway_format(team_map)
+
+        for mapping in result:
+            triggers = mapping["triggers"]
+
+            if "always" in triggers:
+                assert triggers["always"] == {}
+            elif "never" in triggers:
+                assert triggers["never"] == {}
+            elif "attributes" in triggers:
+                attrs = triggers["attributes"]
+                assert "join_condition" in attrs
+                assert attrs["join_condition"] == "or"  # Implementation uses 'or'
+
+                # Should have either username or email attribute
+                assert ("username" in attrs) or ("email" in attrs)
+
+                # The attribute should have either "equals", "matches", or "has_or"
+                for attr_name in ["username", "email"]:
+                    if attr_name in attrs:
+                        attr_value = attrs[attr_name]
+                        assert ("equals" in attr_value) or ("matches" in attr_value) or ("has_or" in attr_value)
+
+    def test_team_boolean_false_trigger(self):
+        """Test that False users value creates never trigger correctly."""
+        team_map = {"never-team": {"organization": "org", "users": False}}
+
+        result, _ = team_map_to_gateway_format(team_map)
+
+        assert len(result) == 1
+        mapping = result[0]
+        assert mapping["triggers"] == {"never": {}}
+        assert "Never Allow" in mapping["name"]
+
+    def test_team_boolean_true_trigger(self):
+        """Test that True users value creates always trigger correctly."""
+        team_map = {"always-team": {"organization": "org", "users": True}}
+
+        result, _ = team_map_to_gateway_format(team_map)
+
+        assert len(result) == 1
+        mapping = result[0]
+        assert mapping["triggers"] == {"always": {}}
+        assert "Always Allow" in mapping["name"]
+
+    def test_team_string_false_trigger(self):
+        """Test that ['false'] users value creates never trigger correctly."""
+        team_map = {"never-team": {"organization": "org", "users": ["false"]}}
+
+        result, _ = team_map_to_gateway_format(team_map)
+
+        assert len(result) == 1
+        mapping = result[0]
+        assert mapping["triggers"] == {"never": {}}
+        assert "Never Allow" in mapping["name"]
+
+    def test_team_string_true_trigger(self):
+        """Test that ['true'] users value creates always trigger correctly."""
+        team_map = {"always-team": {"organization": "org", "users": ["true"]}}
+
+        result, _ = team_map_to_gateway_format(team_map)
+
+        assert len(result) == 1
+        mapping = result[0]
+        assert mapping["triggers"] == {"always": {}}
+        assert "Always Allow" in mapping["name"]
+
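+    # The three trigger shapes asserted above, side by side (an informal
+    # summary of expected output, not taken from the implementation):
+    #
+    #     {"always": {}}                                 -- unconditional allow
+    #     {"never": {}}                                  -- unconditional deny
+    #     {"attributes": {"join_condition": "or", ...}}  -- attribute matching
+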
mapping["triggers"]: + assert mapping["triggers"]["attributes"]["join_condition"] == "or" + + def test_team_with_default_organization_fallback(self): + """Test that teams without organization get 'Unknown' as default.""" + team_map = {"orphan-team": {"users": ["user1"]}} + + result, _ = team_map_to_gateway_format(team_map) + + assert len(result) == 1 + assert result[0]["organization"] == "Unknown" + assert "Unknown - orphan-team" in result[0]["name"] + + def test_team_with_regex_string_patterns(self): + """Test team mapping with regex patterns as strings (not compiled patterns).""" + team_map = {"regex-team": {"organization": "myorg", "users": ["/^admin.*@example\\.com$/"]}} + + result, _ = team_map_to_gateway_format(team_map) + + # String patterns should be treated as regular strings, not regex + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "myorg - regex-team U:1" + assert mapping["triggers"]["attributes"]["username"]["equals"] == "/^admin.*@example\\.com$/" + + def test_team_comprehensive_field_validation(self): + """Test comprehensive validation of all fields in team mappings.""" + team_map = {"comprehensive-team": {"organization": "test-org", "users": ["test-user"], "remove": False}} + + result, next_order = team_map_to_gateway_format(team_map, start_order=5) + + assert len(result) == 1 + mapping = result[0] + + # Validate all required fields and their types + assert isinstance(mapping["name"], str) + assert mapping["name"] == "test-org - comprehensive-team U:1" + + assert mapping["map_type"] == "team" + assert mapping["order"] == 5 + assert mapping["authenticator"] == -1 + + assert isinstance(mapping["triggers"], dict) + assert "attributes" in mapping["triggers"] + + assert mapping["organization"] == "test-org" + assert mapping["team"] == "comprehensive-team" + assert mapping["role"] == "Team Member" + assert mapping["revoke"] == False + + # Validate next_order is incremented correctly + assert next_order == 6 + + def test_team_with_none_and_remove_flag(self): + """Test that teams with None users are skipped even with remove flag.""" + team_map = { + "skipped-team": {"organization": "org", "users": None, "remove": True}, + "valid-team": {"organization": "org", "users": True, "remove": True}, + } + + result, _ = team_map_to_gateway_format(team_map) + + # Should only have one result (the valid team) + assert len(result) == 1 + assert result[0]["team"] == "valid-team" + assert result[0]["revoke"] == True + + def test_team_error_handling_edge_cases(self): + """Test various edge cases for error handling.""" + # Test with completely empty team config + team_map = {"empty-team": {}} + + try: + _, _ = team_map_to_gateway_format(team_map) + # Should not crash, but might skip the team due to missing 'users' key + except KeyError: + # This is expected if 'users' key is required + pass + + def test_team_ordering_with_mixed_types(self): + """Test that ordering works correctly with mixed user types.""" + team_map = { + "team1": {"organization": "org1", "users": True}, # 1 mapping + "team2": {"organization": "org2", "users": ["user1", "user2"]}, # 1 mapping (consolidated) + "team3": {"organization": "org3", "users": False}, # 1 mapping + } + + result, next_order = team_map_to_gateway_format(team_map, start_order=10) + + # Should have 3 total mappings (consolidated behavior) + assert len(result) == 3 + + # Orders should be sequential starting from 10 + orders = [mapping["order"] for mapping in result] + assert orders == [10, 11, 12] + assert next_order == 13 + + # Verify 
teams are represented correctly + teams = [mapping["team"] for mapping in result] + assert "team1" in teams + assert "team2" in teams + assert "team3" in teams + assert teams.count("team2") == 1 # team2 should appear once (consolidated) + + def test_team_with_auth_type_sso(self): + """Test team mapping with auth_type='sso' (default behavior).""" + team_map = {"testteam": {"organization": "testorg", "users": ["testuser"]}} + + result, _ = team_map_to_gateway_format(team_map, auth_type='sso') + + assert len(result) == 1 + mapping = result[0] + assert mapping["name"] == "testorg - testteam U:1" + assert mapping["triggers"]["attributes"]["username"]["equals"] == "testuser" + + def test_team_with_auth_type_ldap(self): + """Test team mapping with auth_type='ldap'.""" + team_map = {"testteam": {"organization": "testorg", "users": ["admin_group"]}} + + result, _ = team_map_to_gateway_format(team_map, auth_type='ldap') + + assert len(result) == 1 + mapping = result[0] + assert "Match User Groups" in mapping["name"] + assert mapping["triggers"]["groups"]["has_or"] == ["admin_group"] + + def test_team_with_auth_type_ldap_boolean(self): + """Test team mapping with auth_type='ldap' and boolean values.""" + team_map_true = {"testteam": {"organization": "testorg", "users": True}} + team_map_false = {"testteam": {"organization": "testorg", "users": False}} + + result_true, _ = team_map_to_gateway_format(team_map_true, auth_type='ldap') + result_false, _ = team_map_to_gateway_format(team_map_false, auth_type='ldap') + + assert len(result_true) == 1 + assert "Always Allow" in result_true[0]["name"] + assert result_true[0]["triggers"]["always"] == {} + + assert len(result_false) == 1 + assert "Never Allow" in result_false[0]["name"] + assert result_false[0]["triggers"]["never"] == {} + + +# Parametrized tests for edge cases +@pytest.mark.parametrize( + "org_map,expected_length", + [ + (None, 0), + ({}, 0), + ({"org1": {}}, 0), # Organization with no admin/user mappings + ({"org1": {"admins": True}}, 1), + ({"org1": {"users": True}}, 1), + ({"org1": {"admins": True, "users": True}}, 2), + ({"org1": {"admins": True}, "org2": {"users": True}}, 2), + ], +) +def test_org_map_result_lengths(org_map, expected_length): + """Test that org_map_to_gateway_format returns expected number of mappings.""" + result, _ = org_map_to_gateway_format(org_map) + assert len(result) == expected_length + + +# Test for Gateway format compliance +@pytest.mark.parametrize( + "org_map", + [ + {"org1": {"admins": True}}, + {"org1": {"users": ["username1"]}}, + {"org1": {"admins": False}}, + ], +) +def test_gateway_format_compliance(org_map): + """Test that all results comply with Gateway mapping format.""" + result, _ = org_map_to_gateway_format(org_map) + + for mapping in result: + # Required fields per Gateway spec + assert "name" in mapping + assert "authenticator" in mapping + assert "map_type" in mapping + assert "organization" in mapping + assert "team" in mapping + assert "triggers" in mapping + assert "role" in mapping + assert "revoke" in mapping + assert "order" in mapping + + # Field types + assert isinstance(mapping["name"], str) + assert isinstance(mapping["authenticator"], int) + assert mapping["map_type"] == "organization" # For org mappings + assert isinstance(mapping["organization"], str) + assert mapping["team"] is None # For org mappings, team should be None + assert isinstance(mapping["triggers"], dict) + assert isinstance(mapping["role"], str) + assert isinstance(mapping["revoke"], bool) + assert 
isinstance(mapping["order"], int) + + +# Parametrized tests for team mappings +@pytest.mark.parametrize( + "team_map,expected_length", + [ + (None, 0), + ({}, 0), + ({"team1": {"organization": "org1", "users": None}}, 0), # Team with None users should be skipped + ({"team1": {"organization": "org1", "users": True}}, 1), + ({"team1": {"organization": "org1", "users": ["username1"]}}, 1), + ({"team1": {"organization": "org1", "users": True}, "team2": {"organization": "org2", "users": False}}, 2), + ], +) +def test_team_map_result_lengths(team_map, expected_length): + """Test that team_map_to_gateway_format returns expected number of mappings.""" + result, _ = team_map_to_gateway_format(team_map) + assert len(result) == expected_length + + +# Test for Gateway format compliance for teams +@pytest.mark.parametrize( + "team_map", + [ + {"team1": {"organization": "org1", "users": True}}, + {"team1": {"organization": "org1", "users": ["username1"]}}, + {"team1": {"organization": "org1", "users": False}}, + ], +) +def test_team_gateway_format_compliance(team_map): + """Test that all team results comply with Gateway mapping format.""" + result, _ = team_map_to_gateway_format(team_map) + + for mapping in result: + # Required fields per Gateway spec + assert "name" in mapping + assert "authenticator" in mapping + assert "map_type" in mapping + assert "organization" in mapping + assert "team" in mapping + assert "triggers" in mapping + assert "role" in mapping + assert "revoke" in mapping + assert "order" in mapping + + # Field types + assert isinstance(mapping["name"], str) + assert isinstance(mapping["authenticator"], int) + assert mapping["map_type"] == "team" # For team mappings + assert isinstance(mapping["organization"], str) + assert isinstance(mapping["team"], str) + assert isinstance(mapping["triggers"], dict) + assert isinstance(mapping["role"], str) + assert isinstance(mapping["revoke"], bool) + assert isinstance(mapping["order"], int) + + +class TestAAP51531SpecificCase: + """Test case specifically for JIRA AAP-51531 requirements.""" + + def test_ldap_networking_org_mapping_aap_51531(self): + """Test the specific LDAP organization mapping case for JIRA AAP-51531.""" + # This case is added for JIRA AAP-51531 + org_map = {"Networking": {"admins": "cn=networkadmins,ou=groups,dc=example,dc=com", "users": True, "remove_admins": True, "remove_users": True}} + + result = get_org_mappers(org_map, auth_type='ldap') + + # Should create 2 mappers: one for admins, one for users + assert len(result) == 2 + + # Find admin and user mappers + admin_mapper = next((m for m in result if m['role'] == 'Organization Admin'), None) + user_mapper = next((m for m in result if m['role'] == 'Organization Member'), None) + + assert admin_mapper is not None + assert user_mapper is not None + + # Verify admin mapper details + assert admin_mapper['organization'] == 'Networking' + assert admin_mapper['revoke'] is True # remove_admins: true + assert 'Match User Groups' in admin_mapper['name'] + assert admin_mapper['triggers']['groups']['has_or'] == ['cn=networkadmins,ou=groups,dc=example,dc=com'] + + # Verify user mapper details + assert user_mapper['organization'] == 'Networking' + assert user_mapper['revoke'] is True # remove_users: true + assert 'Always Allow' in user_mapper['name'] + assert user_mapper['triggers']['always'] == {} + + # Verify both mappers have correct map_type + assert admin_mapper['map_type'] == 'organization' + assert user_mapper['map_type'] == 'organization' diff --git 
a/awx/main/tests/unit/utils/test_base_migrator.py b/awx/main/tests/unit/utils/test_base_migrator.py new file mode 100644 index 0000000000..9319483156 --- /dev/null +++ b/awx/main/tests/unit/utils/test_base_migrator.py @@ -0,0 +1,1243 @@ +""" +Unit tests for base authenticator migrator functionality. +""" + +import pytest +from unittest.mock import Mock, patch +from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator + + +class TestBaseAuthenticatorMigrator: + """Tests for BaseAuthenticatorMigrator class.""" + + def setup_method(self): + """Set up test fixtures.""" + self.gateway_client = Mock() + self.command = Mock() + self.migrator = BaseAuthenticatorMigrator(self.gateway_client, self.command) + + def test_generate_authenticator_slug(self): + """Test slug generation is deterministic.""" + slug1 = self.migrator._generate_authenticator_slug('github', 'github-org') + slug2 = self.migrator._generate_authenticator_slug('github', 'github-org') + + assert slug1 == slug2 + assert slug1 == 'aap-github-github-org' + + def test_generate_authenticator_slug_different_inputs(self): + """Test that different inputs generate different slugs.""" + slug1 = self.migrator._generate_authenticator_slug('github', 'github-org') + slug2 = self.migrator._generate_authenticator_slug('github', 'github-team') + slug3 = self.migrator._generate_authenticator_slug('ldap', 'ldap') + + assert slug1 != slug2 + assert slug1 != slug3 + assert slug2 != slug3 + assert slug1 == 'aap-github-github-org' + assert slug2 == 'aap-github-github-team' + assert slug3 == 'aap-ldap-ldap' + + def test_generate_authenticator_slug_ldap_variants(self): + """Test LDAP authenticator slug generation for all supported variants.""" + # Test all LDAP authenticator naming variants + ldap_base = self.migrator._generate_authenticator_slug('ldap', 'ldap') + ldap1 = self.migrator._generate_authenticator_slug('ldap', 'ldap1') + ldap2 = self.migrator._generate_authenticator_slug('ldap', 'ldap2') + ldap3 = self.migrator._generate_authenticator_slug('ldap', 'ldap3') + ldap4 = self.migrator._generate_authenticator_slug('ldap', 'ldap4') + ldap5 = self.migrator._generate_authenticator_slug('ldap', 'ldap5') + + # Verify correct slug format + assert ldap_base == 'aap-ldap-ldap' + assert ldap1 == 'aap-ldap-ldap1' + assert ldap2 == 'aap-ldap-ldap2' + assert ldap3 == 'aap-ldap-ldap3' + assert ldap4 == 'aap-ldap-ldap4' + assert ldap5 == 'aap-ldap-ldap5' + + # Verify all slugs are unique + all_slugs = [ldap_base, ldap1, ldap2, ldap3, ldap4, ldap5] + assert len(all_slugs) == len(set(all_slugs)) + + def test_generate_authenticator_slug_github_variants(self): + """Test GitHub authenticator slug generation for all supported variants.""" + # Test all GitHub authenticator naming variants + github_base = self.migrator._generate_authenticator_slug('github', 'github') + github_org = self.migrator._generate_authenticator_slug('github', 'github-org') + github_team = self.migrator._generate_authenticator_slug('github', 'github-team') + github_enterprise_org = self.migrator._generate_authenticator_slug('github', 'github-enterprise-org') + github_enterprise_team = self.migrator._generate_authenticator_slug('github', 'github-enterprise-team') + + # Verify correct slug format + assert github_base == 'aap-github-github' + assert github_org == 'aap-github-github-org' + assert github_team == 'aap-github-github-team' + assert github_enterprise_org == 'aap-github-github-enterprise-org' + assert github_enterprise_team == 'aap-github-github-enterprise-team' + + # Verify all slugs 
are unique + all_slugs = [github_base, github_org, github_team, github_enterprise_org, github_enterprise_team] + assert len(all_slugs) == len(set(all_slugs)) + + def test_get_mapper_ignore_keys_default(self): + """Test default mapper ignore keys.""" + ignore_keys = self.migrator._get_mapper_ignore_keys() + + expected_keys = ['id', 'authenticator', 'created', 'modified', 'summary_fields', 'modified_by', 'created_by', 'related', 'url'] + assert ignore_keys == expected_keys + + +class TestAuthenticatorConfigComparison: + """Tests for authenticator configuration comparison methods.""" + + def setup_method(self): + """Set up test fixtures.""" + self.gateway_client = Mock() + self.command = Mock() + self.migrator = BaseAuthenticatorMigrator(self.gateway_client, self.command) + + def test_authenticator_configs_match_identical(self): + """Test that identical configurations match.""" + existing_auth = { + 'name': 'GitHub Auth', + 'type': 'ansible_base.authentication.authenticator_plugins.github', + 'enabled': True, + 'create_objects': True, + 'remove_users': False, + 'configuration': {'KEY': 'client123', 'SECRET': 'secret456'}, + } + + new_config = existing_auth.copy() + new_config['configuration'] = existing_auth['configuration'].copy() + + assert self.migrator._authenticator_configs_match(existing_auth, new_config) == (True, []) + + def test_authenticator_configs_match_with_ignore_keys(self): + """Test that configurations match when ignoring specified keys.""" + existing_auth = { + 'name': 'GitHub Auth', + 'type': 'ansible_base.authentication.authenticator_plugins.github', + 'enabled': True, + 'create_objects': True, + 'remove_users': False, + 'configuration': {'KEY': 'client123', 'SECRET': 'secret456', 'CALLBACK_URL': 'https://gateway.example.com/callback'}, + } + + new_config = { + 'name': 'GitHub Auth', + 'type': 'ansible_base.authentication.authenticator_plugins.github', + 'enabled': True, + 'create_objects': True, + 'remove_users': False, + 'configuration': {'KEY': 'client123', 'SECRET': 'secret456'}, + } + + # Should not match without ignore keys + assert self.migrator._authenticator_configs_match(existing_auth, new_config) == ( + False, + [' CALLBACK_URL: existing="https://gateway.example.com/callback" vs new='], + ) + + # Should match when ignoring CALLBACK_URL + ignore_keys = ['CALLBACK_URL'] + assert self.migrator._authenticator_configs_match(existing_auth, new_config, ignore_keys) == (True, []) + + def test_authenticator_configs_different_basic_fields(self): + """Test that configurations don't match when basic fields differ.""" + existing_auth = { + 'name': 'GitHub Auth', + 'type': 'ansible_base.authentication.authenticator_plugins.github', + 'enabled': True, + 'create_objects': True, + 'remove_users': False, + 'configuration': {'KEY': 'client123', 'SECRET': 'secret456'}, + } + + # Test different name + new_config = existing_auth.copy() + new_config['name'] = 'Different GitHub Auth' + match, differences = self.migrator._authenticator_configs_match(existing_auth, new_config) + assert match is False + assert len(differences) == 1 + assert 'name:' in differences[0] + + # Test different type + new_config = existing_auth.copy() + new_config['type'] = 'ansible_base.authentication.authenticator_plugins.ldap' + match, differences = self.migrator._authenticator_configs_match(existing_auth, new_config) + assert match is False + assert len(differences) == 1 + assert 'type:' in differences[0] + + # Test different enabled + new_config = existing_auth.copy() + new_config['enabled'] = False + match, 
differences = self.migrator._authenticator_configs_match(existing_auth, new_config)
+        assert match is False
+        assert len(differences) == 1
+        assert 'enabled:' in differences[0]
+
+    def test_authenticator_configs_different_configuration(self):
+        """Test that configurations don't match when configuration section differs."""
+        existing_auth = {
+            'name': 'GitHub Auth',
+            'type': 'ansible_base.authentication.authenticator_plugins.github',
+            'enabled': True,
+            'create_objects': True,
+            'remove_users': False,
+            'configuration': {'KEY': 'client123', 'SECRET': 'secret456', 'SCOPE': 'read:org'},
+        }
+
+        # Test different KEY
+        new_config = existing_auth.copy()
+        new_config['configuration'] = {'KEY': 'client789', 'SECRET': 'secret456', 'SCOPE': 'read:org'}
+        match, differences = self.migrator._authenticator_configs_match(existing_auth, new_config)
+        assert match is False
+        assert len(differences) == 1
+        assert 'KEY:' in differences[0]
+        assert 'existing="client123"' in differences[0]
+        assert 'new="client789"' in differences[0]
+
+        # Test missing key in new config
+        new_config = existing_auth.copy()
+        new_config['configuration'] = {'KEY': 'client123', 'SECRET': 'secret456'}
+        match, differences = self.migrator._authenticator_configs_match(existing_auth, new_config)
+        assert match is False
+        assert len(differences) == 1
+        assert 'SCOPE:' in differences[0]
+        assert 'vs new=' in differences[0]
+
+        # Test extra key in new config
+        new_config = existing_auth.copy()
+        new_config['configuration'] = {'KEY': 'client123', 'SECRET': 'secret456', 'SCOPE': 'read:org', 'EXTRA_KEY': 'extra_value'}
+        match, differences = self.migrator._authenticator_configs_match(existing_auth, new_config)
+        assert match is False
+        assert len(differences) == 1
+        assert 'EXTRA_KEY:' in differences[0]
+        assert 'existing=' in differences[0]
+
+    def test_authenticator_configs_differences_details(self):
+        """Test that difference tracking provides detailed information."""
+        existing_auth = {
+            'name': 'GitHub Auth',
+            'type': 'ansible_base.authentication.authenticator_plugins.github',
+            'enabled': True,
+            'create_objects': True,
+            'remove_users': False,
+            'configuration': {'KEY': 'client123', 'SECRET': 'secret456', 'SCOPE': 'read:org', 'CALLBACK_URL': 'https://gateway.example.com/callback'},
+        }
+
+        # Test multiple differences with ignore keys
+        new_config = {
+            'name': 'GitHub Auth',
+            'type': 'ansible_base.authentication.authenticator_plugins.github',
+            'enabled': True,
+            'create_objects': True,
+            'remove_users': False,
+            'configuration': {
+                'KEY': 'client456',  # Different value
+                'SECRET': 'newsecret',  # Different value
+                'SCOPE': 'read:org',  # Same value
+                # CALLBACK_URL missing (but ignored)
+                'NEW_FIELD': 'new_value',  # Extra field
+            },
+        }
+
+        ignore_keys = ['CALLBACK_URL']
+        match, differences = self.migrator._authenticator_configs_match(existing_auth, new_config, ignore_keys)
+
+        assert match is False
+        assert len(differences) == 2  # KEY, NEW_FIELD (SECRET shows up only if --force is used)
+
+        # Check that all expected differences are captured
+        difference_text = ' '.join(differences)
+        assert 'KEY:' in difference_text
+        # assert 'SECRET:' in difference_text  # SECRET shows up only if --force is used
+        assert 'NEW_FIELD:' in difference_text
+        assert 'CALLBACK_URL' not in difference_text  # Should be ignored
+
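+# Contract exercised above, summarized informally (inferred from the tests,
+# not from the implementation): _authenticator_configs_match returns a
+# (matched, differences) tuple, e.g.
+#
+#     (True, [])
+#     (False, ['KEY: existing="client123" vs new="client789"'])
+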
+class TestMapperComparison:
+    """Tests for mapper comparison methods."""
+
+    def setup_method(self):
+        """Set up test fixtures."""
+        self.gateway_client = Mock()
+        self.command = Mock()
+        self.migrator = BaseAuthenticatorMigrator(self.gateway_client, self.command)
+
+    def test_mappers_match_structurally_identical(self):
+        """Test that identical mappers match structurally."""
+        mapper1 = {'name': 'myorg - engineering', 'organization': 'myorg', 'team': 'engineering', 'map_type': 'team', 'role': 'Team Member'}
+
+        mapper2 = mapper1.copy()
+
+        assert self.migrator._mappers_match_structurally(mapper1, mapper2) is True
+
+    def test_mappers_match_structurally_different_fields(self):
+        """Test that mappers with the same name match structurally even when other fields differ."""
+        base_mapper = {'name': 'myorg - engineering', 'organization': 'myorg', 'team': 'engineering', 'map_type': 'team', 'role': 'Team Member'}
+
+        # Test different organization but same name - should still match
+        mapper2 = base_mapper.copy()
+        mapper2['organization'] = 'otherorg'
+        assert self.migrator._mappers_match_structurally(base_mapper, mapper2) is True
+
+        # Test different team but same name - should still match
+        mapper2 = base_mapper.copy()
+        mapper2['team'] = 'qa'
+        assert self.migrator._mappers_match_structurally(base_mapper, mapper2) is True
+
+        # Test different map_type but same name - should still match
+        mapper2 = base_mapper.copy()
+        mapper2['map_type'] = 'organization'
+        assert self.migrator._mappers_match_structurally(base_mapper, mapper2) is True
+
+        # Test different role but same name - should still match
+        mapper2 = base_mapper.copy()
+        mapper2['role'] = 'Organization Admin'
+        assert self.migrator._mappers_match_structurally(base_mapper, mapper2) is True
+
+        # Test different name - should not match
+        mapper2 = base_mapper.copy()
+        mapper2['name'] = 'otherorg - qa'
+        assert self.migrator._mappers_match_structurally(base_mapper, mapper2) is False
+
+    def test_mapper_configs_match_identical(self):
+        """Test that identical mapper configurations match."""
+        mapper1 = {
+            'name': 'myorg - engineering',
+            'organization': 'myorg',
+            'team': 'engineering',
+            'map_type': 'team',
+            'role': 'Team Member',
+            'order': 1,
+            'triggers': {'groups': {'has_or': ['engineers']}},
+            'revoke': False,
+        }
+
+        mapper2 = mapper1.copy()
+
+        assert self.migrator._mapper_configs_match(mapper1, mapper2) is True
+
+    def test_mapper_configs_match_with_ignore_keys(self):
+        """Test that mapper configurations match when ignoring specified keys."""
+        existing_mapper = {
+            'id': 123,
+            'authenticator': 456,
+            'name': 'myorg - engineering',
+            'organization': 'myorg',
+            'team': 'engineering',
+            'map_type': 'team',
+            'role': 'Team Member',
+            'order': 1,
+            'triggers': {'groups': {'has_or': ['engineers']}},
+            'revoke': False,
+            'created': '2023-01-01T00:00:00Z',
+            'modified': '2023-01-01T00:00:00Z',
+        }
+
+        new_mapper = {
+            'name': 'myorg - engineering',
+            'organization': 'myorg',
+            'team': 'engineering',
+            'map_type': 'team',
+            'role': 'Team Member',
+            'order': 1,
+            'triggers': {'groups': {'has_or': ['engineers']}},
+            'revoke': False,
+        }
+
+        # Should not match without ignore keys
+        assert self.migrator._mapper_configs_match(existing_mapper, new_mapper) is False
+
+        # Should match when ignoring auto-generated fields
+        ignore_keys = ['id', 'authenticator', 'created', 'modified']
+        assert self.migrator._mapper_configs_match(existing_mapper, new_mapper, ignore_keys) is True
+
+    def test_mapper_configs_different_values(self):
+        """Test that mapper configurations don't match when values differ."""
+        mapper1 = {
+            'name': 'myorg - engineering',
+            'organization': 'myorg',
+            'team': 'engineering',
+            'map_type': 'team',
+            'role': 'Team Member',
+            'order': 1,
+            'triggers': 
{'groups': {'has_or': ['engineers']}}, + 'revoke': False, + } + + # Test different name + mapper2 = mapper1.copy() + mapper2['name'] = 'myorg - qa' + assert self.migrator._mapper_configs_match(mapper1, mapper2) is False + + # Test different order + mapper2 = mapper1.copy() + mapper2['order'] = 2 + assert self.migrator._mapper_configs_match(mapper1, mapper2) is False + + # Test different triggers + mapper2 = mapper1.copy() + mapper2['triggers'] = {'groups': {'has_or': ['qa-team']}} + assert self.migrator._mapper_configs_match(mapper1, mapper2) is False + + # Test different revoke + mapper2 = mapper1.copy() + mapper2['revoke'] = True + assert self.migrator._mapper_configs_match(mapper1, mapper2) is False + + +class TestCompareMapperLists: + """Tests for _compare_mapper_lists method.""" + + def setup_method(self): + """Set up test fixtures.""" + self.gateway_client = Mock() + self.command = Mock() + self.migrator = BaseAuthenticatorMigrator(self.gateway_client, self.command) + + def test_compare_mapper_lists_empty(self): + """Test comparing empty mapper lists.""" + existing_mappers = [] + new_mappers = [] + + mappers_to_update, mappers_to_create = self.migrator._compare_mapper_lists(existing_mappers, new_mappers) + + assert mappers_to_update == [] + assert mappers_to_create == [] + + def test_compare_mapper_lists_all_new(self): + """Test when all new mappers need to be created.""" + existing_mappers = [] + new_mappers = [ + { + 'name': 'myorg - engineering', + 'organization': 'myorg', + 'team': 'engineering', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['engineers']}}, + 'revoke': False, + }, + { + 'name': 'myorg - qa', + 'organization': 'myorg', + 'team': 'qa', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 2, + 'triggers': {'groups': {'has_or': ['qa-team']}}, + 'revoke': False, + }, + ] + + mappers_to_update, mappers_to_create = self.migrator._compare_mapper_lists(existing_mappers, new_mappers) + + assert mappers_to_update == [] + assert mappers_to_create == new_mappers + + def test_compare_mapper_lists_all_existing_match(self): + """Test when all existing mappers match exactly.""" + existing_mappers = [ + { + 'id': 123, + 'authenticator': 456, + 'name': 'myorg - engineering', + 'organization': 'myorg', + 'team': 'engineering', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['engineers']}}, + 'revoke': False, + 'created': '2023-01-01T00:00:00Z', + 'modified': '2023-01-01T00:00:00Z', + } + ] + + new_mappers = [ + { + 'name': 'myorg - engineering', + 'organization': 'myorg', + 'team': 'engineering', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['engineers']}}, + 'revoke': False, + } + ] + + ignore_keys = ['id', 'authenticator', 'created', 'modified'] + mappers_to_update, mappers_to_create = self.migrator._compare_mapper_lists(existing_mappers, new_mappers, ignore_keys) + + assert mappers_to_update == [] + assert mappers_to_create == [] + + def test_compare_mapper_lists_needs_update(self): + """Test when existing mappers need updates.""" + existing_mappers = [ + { + 'id': 123, + 'authenticator': 456, + 'name': 'myorg - engineering', + 'organization': 'myorg', + 'team': 'engineering', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['old-engineers']}}, + 'revoke': False, + 'created': '2023-01-01T00:00:00Z', + 'modified': '2023-01-01T00:00:00Z', + } + ] + + new_mappers = [ + { + 
'name': 'myorg - engineering', + 'organization': 'myorg', + 'team': 'engineering', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['new-engineers']}}, + 'revoke': False, + } + ] + + ignore_keys = ['id', 'authenticator', 'created', 'modified'] + mappers_to_update, mappers_to_create = self.migrator._compare_mapper_lists(existing_mappers, new_mappers, ignore_keys) + + assert len(mappers_to_update) == 1 + assert mappers_to_update[0] == (existing_mappers[0], new_mappers[0]) + assert mappers_to_create == [] + + def test_compare_mapper_lists_mixed_operations(self): + """Test mix of updates and creates.""" + existing_mappers = [ + { + 'id': 123, + 'authenticator': 456, + 'name': 'myorg - engineering', + 'organization': 'myorg', + 'team': 'engineering', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['old-engineers']}}, + 'revoke': False, + 'created': '2023-01-01T00:00:00Z', + 'modified': '2023-01-01T00:00:00Z', + } + ] + + new_mappers = [ + { + 'name': 'myorg - engineering', + 'organization': 'myorg', + 'team': 'engineering', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['new-engineers']}}, + 'revoke': False, + }, + { + 'name': 'myorg - qa', + 'organization': 'myorg', + 'team': 'qa', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 2, + 'triggers': {'groups': {'has_or': ['qa-team']}}, + 'revoke': False, + }, + ] + + ignore_keys = ['id', 'authenticator', 'created', 'modified'] + mappers_to_update, mappers_to_create = self.migrator._compare_mapper_lists(existing_mappers, new_mappers, ignore_keys) + + assert len(mappers_to_update) == 1 + assert mappers_to_update[0] == (existing_mappers[0], new_mappers[0]) + assert len(mappers_to_create) == 1 + assert mappers_to_create[0] == new_mappers[1] + + def test_compare_mapper_lists_no_structural_match(self): + """Test when existing and new mappers don't match structurally.""" + existing_mappers = [ + { + 'id': 123, + 'authenticator': 456, + 'name': 'myorg - engineering', + 'organization': 'myorg', + 'team': 'engineering', + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['engineers']}}, + 'revoke': False, + } + ] + + new_mappers = [ + { + 'name': 'otherorg - qa', + 'organization': 'otherorg', # Different organization + 'team': 'qa', # Different team + 'map_type': 'team', + 'role': 'Team Member', + 'order': 1, + 'triggers': {'groups': {'has_or': ['qa-team']}}, + 'revoke': False, + } + ] + + mappers_to_update, mappers_to_create = self.migrator._compare_mapper_lists(existing_mappers, new_mappers) + + assert mappers_to_update == [] + assert mappers_to_create == new_mappers + + +# Parametrized tests for edge cases +@pytest.mark.parametrize( + "existing_auth,new_config,ignore_keys,expected_match,expected_differences_count", + [ + # Test with None values + ({'name': 'Test', 'configuration': {'KEY': None}}, {'name': 'Test', 'configuration': {'KEY': None}}, [], True, 0), + # Test with empty configuration + ({'name': 'Test', 'configuration': {}}, {'name': 'Test', 'configuration': {}}, [], True, 0), + # Test missing configuration section + ({'name': 'Test'}, {'name': 'Test'}, [], True, 0), + # Test with ignore keys matching + ( + {'name': 'Test', 'configuration': {'KEY': 'value', 'IGNORE_ME': 'old'}}, + {'name': 'Test', 'configuration': {'KEY': 'value', 'IGNORE_ME': 'new'}}, + ['IGNORE_ME'], + True, + 0, + ), + # Test with differences that are not ignored + ( + 
{'name': 'Test', 'configuration': {'KEY': 'value1'}}, + {'name': 'Test', 'configuration': {'KEY': 'value2'}}, + [], + False, + 1, + ), + ], +) +def test_authenticator_configs_match_edge_cases(existing_auth, new_config, ignore_keys, expected_match, expected_differences_count): + """Test edge cases for authenticator configuration matching.""" + gateway_client = Mock() + command = Mock() + migrator = BaseAuthenticatorMigrator(gateway_client, command) + + match, differences = migrator._authenticator_configs_match(existing_auth, new_config, ignore_keys) + assert match == expected_match + assert len(differences) == expected_differences_count + + +@pytest.mark.parametrize( + "mapper1,mapper2,expected", + [ + # Test with same name + ( + {'name': 'myorg - Organization Admins', 'organization': 'myorg', 'team': None, 'map_type': 'organization', 'role': 'Organization Admin'}, + {'name': 'myorg - Organization Admins', 'organization': 'myorg', 'team': None, 'map_type': 'organization', 'role': 'Organization Admin'}, + True, + ), + # Test with same name but different other fields + ( + {'name': 'myorg - eng', 'organization': 'myorg', 'team': 'eng', 'map_type': 'team', 'role': 'Team Member', 'id': 123}, + {'name': 'myorg - eng', 'organization': 'otherorg', 'team': 'qa', 'map_type': 'organization', 'role': 'Organization Admin', 'id': 456}, + True, + ), + # Test with different names + ( + {'name': 'myorg - eng', 'organization': 'myorg', 'team': 'eng', 'map_type': 'team', 'role': 'Team Member'}, + {'name': 'myorg - qa', 'organization': 'myorg', 'team': 'qa', 'map_type': 'team', 'role': 'Team Member'}, + False, + ), + # Test with missing name + ( + {'organization': 'myorg', 'team': 'eng', 'map_type': 'team', 'role': 'Team Member'}, + {'name': 'myorg - eng', 'organization': 'myorg', 'team': 'eng', 'map_type': 'team', 'role': 'Team Member'}, + False, + ), + # Test with both missing name + ( + {'organization': 'myorg', 'team': 'eng', 'map_type': 'team', 'role': 'Team Member'}, + {'organization': 'myorg', 'team': 'eng', 'map_type': 'team', 'role': 'Team Member'}, + True, + ), + ], +) +def test_mappers_match_structurally_edge_cases(mapper1, mapper2, expected): + """Test edge cases for mapper structural matching based on name.""" + gateway_client = Mock() + command = Mock() + migrator = BaseAuthenticatorMigrator(gateway_client, command) + + result = migrator._mappers_match_structurally(mapper1, mapper2) + assert result == expected + + +class TestSocialAuthMapFunctions: + """Test cases for social auth map functions.""" + + def setup_method(self): + """Set up test fixtures.""" + self.gateway_client = Mock() + self.command_obj = Mock() + self.migrator = BaseAuthenticatorMigrator(self.gateway_client, self.command_obj) + + @patch('awx.sso.utils.base_migrator.settings') + def test_get_social_org_map_with_authenticator_specific_setting(self, mock_settings): + """Test get_social_org_map returns authenticator-specific setting when available.""" + # Set up mock settings + authenticator_map = {'org1': ['team1', 'team2']} + global_map = {'global_org': ['global_team']} + + mock_settings.SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP = authenticator_map + mock_settings.SOCIAL_AUTH_ORGANIZATION_MAP = global_map + + # Mock getattr to return the specific setting + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: { + 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP': authenticator_map, + 'SOCIAL_AUTH_ORGANIZATION_MAP': global_map, + }.get(name, default) + + result = 
self.migrator.get_social_org_map('SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP') + + assert result == authenticator_map + # Verify it was called with the authenticator-specific setting first + mock_getattr.assert_any_call(mock_settings, 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP', None) + + @patch('awx.sso.utils.base_migrator.settings') + def test_get_social_org_map_fallback_to_global(self, mock_settings): + """Test get_social_org_map falls back to global setting when authenticator-specific is empty.""" + # Set up mock settings + global_map = {'global_org': ['global_team']} + + # Mock getattr to return None for authenticator-specific, global for fallback + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: { + 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP': None, + 'SOCIAL_AUTH_ORGANIZATION_MAP': global_map, + }.get(name, default) + + result = self.migrator.get_social_org_map('SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP') + + assert result == global_map + # Verify both calls were made + mock_getattr.assert_any_call(mock_settings, 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP', None) + mock_getattr.assert_any_call(mock_settings, 'SOCIAL_AUTH_ORGANIZATION_MAP', {}) + + @patch('awx.sso.utils.base_migrator.settings') + def test_get_social_org_map_empty_dict_fallback(self, mock_settings): + """Test get_social_org_map returns empty dict when neither setting exists.""" + # Mock getattr to return None for both settings + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: {'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP': None, 'SOCIAL_AUTH_ORGANIZATION_MAP': {}}.get( + name, default + ) + + result = self.migrator.get_social_org_map('SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP') + + assert result == {} + + @patch('awx.sso.utils.base_migrator.settings') + def test_get_social_team_map_with_authenticator_specific_setting(self, mock_settings): + """Test get_social_team_map returns authenticator-specific setting when available.""" + # Set up mock settings + authenticator_map = {'team1': {'organization': 'org1'}} + global_map = {'global_team': {'organization': 'global_org'}} + + # Mock getattr to return the specific setting + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: { + 'SOCIAL_AUTH_GITHUB_TEAM_MAP': authenticator_map, + 'SOCIAL_AUTH_TEAM_MAP': global_map, + }.get(name, default) + + result = self.migrator.get_social_team_map('SOCIAL_AUTH_GITHUB_TEAM_MAP') + + assert result == authenticator_map + # Verify it was called with the authenticator-specific setting first + mock_getattr.assert_any_call(mock_settings, 'SOCIAL_AUTH_GITHUB_TEAM_MAP', None) + + @patch('awx.sso.utils.base_migrator.settings') + def test_get_social_team_map_fallback_to_global(self, mock_settings): + """Test get_social_team_map falls back to global setting when authenticator-specific is empty.""" + # Set up mock settings + global_map = {'global_team': {'organization': 'global_org'}} + + # Mock getattr to return None for authenticator-specific, global for fallback + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: {'SOCIAL_AUTH_GITHUB_TEAM_MAP': None, 'SOCIAL_AUTH_TEAM_MAP': global_map}.get( + name, default + ) + + result = self.migrator.get_social_team_map('SOCIAL_AUTH_GITHUB_TEAM_MAP') + + assert result == global_map + # Verify both calls were made + 
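+            # The lookup order is part of the contract being verified: the
+            # authenticator-specific key is read first (default None), and only
+            # then the global key (default {}).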
mock_getattr.assert_any_call(mock_settings, 'SOCIAL_AUTH_GITHUB_TEAM_MAP', None) + mock_getattr.assert_any_call(mock_settings, 'SOCIAL_AUTH_TEAM_MAP', {}) + + @patch('awx.sso.utils.base_migrator.settings') + def test_get_social_team_map_empty_dict_fallback(self, mock_settings): + """Test get_social_team_map returns empty dict when neither setting exists.""" + # Mock getattr to return None for both settings + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: {'SOCIAL_AUTH_GITHUB_TEAM_MAP': None, 'SOCIAL_AUTH_TEAM_MAP': {}}.get(name, default) + + result = self.migrator.get_social_team_map('SOCIAL_AUTH_GITHUB_TEAM_MAP') + + assert result == {} + + @patch('awx.sso.utils.base_migrator.settings') + def test_get_social_org_map_with_empty_string_fallback(self, mock_settings): + """Test get_social_org_map falls back to global when authenticator-specific is empty string.""" + # Set up mock settings + global_map = {'global_org': ['global_team']} + + # Mock getattr to return empty string for authenticator-specific + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: { + 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP': '', + 'SOCIAL_AUTH_ORGANIZATION_MAP': global_map, + }.get(name, default) + + result = self.migrator.get_social_org_map('SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP') + + assert result == global_map + + @patch('awx.sso.utils.base_migrator.settings') + def test_get_social_team_map_with_empty_dict_fallback(self, mock_settings): + """Test get_social_team_map falls back to global when authenticator-specific is empty dict.""" + # Set up mock settings + global_map = {'global_team': {'organization': 'global_org'}} + + # Mock getattr to return empty dict for authenticator-specific + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: {'SOCIAL_AUTH_GITHUB_TEAM_MAP': {}, 'SOCIAL_AUTH_TEAM_MAP': global_map}.get( + name, default + ) + + result = self.migrator.get_social_team_map('SOCIAL_AUTH_GITHUB_TEAM_MAP') + + # Empty dict is falsy, so it should fall back to global + assert result == global_map + + def test_get_social_org_map_different_authenticators(self): + """Test get_social_org_map works with different authenticator setting names.""" + test_cases = [ + 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP', + 'SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP', + 'SOCIAL_AUTH_SAML_ORGANIZATION_MAP', + 'SOCIAL_AUTH_OIDC_ORGANIZATION_MAP', + ] + + for setting_name in test_cases: + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: { + setting_name: {'test_org': ['test_team']}, + 'SOCIAL_AUTH_ORGANIZATION_MAP': {'fallback_org': ['fallback_team']}, + }.get(name, default) + + result = self.migrator.get_social_org_map(setting_name) + + assert result == {'test_org': ['test_team']} + + def test_get_social_team_map_different_authenticators(self): + """Test get_social_team_map works with different authenticator setting names.""" + test_cases = ['SOCIAL_AUTH_GITHUB_TEAM_MAP', 'SOCIAL_AUTH_AZUREAD_OAUTH2_TEAM_MAP', 'SOCIAL_AUTH_SAML_TEAM_MAP', 'SOCIAL_AUTH_OIDC_TEAM_MAP'] + + for setting_name in test_cases: + with patch('awx.sso.utils.base_migrator.getattr') as mock_getattr: + mock_getattr.side_effect = lambda obj, name, default=None: { + setting_name: {'test_team': {'organization': 'test_org'}}, + 'SOCIAL_AUTH_TEAM_MAP': {'fallback_team': 
{'organization': 'fallback_org'}}, + }.get(name, default) + + result = self.migrator.get_social_team_map(setting_name) + + assert result == {'test_team': {'organization': 'test_org'}} + + +class TestHandleLoginOverride: + """Tests for handle_login_override method.""" + + def setup_method(self): + """Set up test fixtures.""" + self.gateway_client = Mock() + self.command = Mock() + self.migrator = BaseAuthenticatorMigrator(self.gateway_client, self.command) + + # Reset the class-level variables before each test + BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator = False + BaseAuthenticatorMigrator.login_redirect_override_new_url = None + + def test_handle_login_override_no_login_redirect_override(self): + """Test that method returns early when no login_redirect_override is provided.""" + config = {} + valid_login_urls = ['/sso/login/github'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should not call any gateway client methods + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_empty_login_redirect_override(self): + """Test that method returns early when login_redirect_override is empty.""" + config = {'login_redirect_override': ''} + valid_login_urls = ['/sso/login/github'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should not call any gateway client methods + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_no_url_match(self): + """Test that method returns early when login_redirect_override doesn't match valid URLs.""" + config = {'login_redirect_override': 'https://localhost:3000/sso/login/saml'} + valid_login_urls = ['/sso/login/github', '/sso/login/azuread-oauth2'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should not call any gateway client methods + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_no_gateway_authenticator(self): + """Test that method returns early when gateway_authenticator is missing.""" + config = {'login_redirect_override': 'https://localhost:3000/sso/login/github'} + valid_login_urls = ['/sso/login/github'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should not call any gateway client methods + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_empty_gateway_authenticator(self): + """Test that method returns early when gateway_authenticator is empty.""" + config = {'login_redirect_override': 'https://localhost:3000/sso/login/github', 'gateway_authenticator': {}} + valid_login_urls = ['/sso/login/github'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should not call any gateway client methods + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert 
BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_no_sso_login_url(self): + """Test that method returns early when sso_login_url is missing.""" + config = {'login_redirect_override': 'https://localhost:3000/sso/login/github', 'gateway_authenticator': {'id': 123}} + valid_login_urls = ['/sso/login/github'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should not call any gateway client methods + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_empty_sso_login_url(self): + """Test that method returns early when sso_login_url is empty.""" + config = {'login_redirect_override': 'https://localhost:3000/sso/login/github', 'gateway_authenticator': {'id': 123, 'sso_login_url': ''}} + valid_login_urls = ['/sso/login/github'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should not call any gateway client methods + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_successful_update(self): + """Test successful LOGIN_REDIRECT_OVERRIDE update.""" + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/github', + 'gateway_authenticator': {'id': 123, 'sso_login_url': '/sso/auth/login/123/'}, + } + valid_login_urls = ['/sso/login/github'] + + # Mock gateway client methods + self.gateway_client.get_base_url.return_value = 'https://gateway.example.com' + self.migrator.handle_login_override(config, valid_login_urls) + + # Verify gateway client methods were called correctly + self.gateway_client.get_base_url.assert_called_once() + # update_gateway_setting should NOT be called - URL is stored in class variable instead + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url == 'https://gateway.example.com/sso/auth/login/123/' + + def test_handle_login_override_multiple_valid_urls_first_matches(self): + """Test that first matching URL in valid_login_urls is used.""" + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/github-org', + 'gateway_authenticator': {'id': 123, 'sso_login_url': '/sso/auth/login/123/'}, + } + valid_login_urls = ['/sso/login/github-org', '/sso/login/github-team', '/sso/login/github'] + + # Mock gateway client methods + self.gateway_client.get_base_url.return_value = 'https://gateway.example.com' + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should still work since first URL matches + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url == 'https://gateway.example.com/sso/auth/login/123/' + + def test_handle_login_override_multiple_valid_urls_last_matches(self): + """Test that last matching URL in valid_login_urls is used.""" + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/github', + 'gateway_authenticator': {'id': 123, 'sso_login_url': '/sso/auth/login/123/'}, + } + valid_login_urls = 
['/sso/login/github-org', '/sso/login/github-team', '/sso/login/github'] + + # Mock gateway client methods + self.gateway_client.get_base_url.return_value = 'https://gateway.example.com' + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should work since last URL matches + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url == 'https://gateway.example.com/sso/auth/login/123/' + + def test_handle_login_override_partial_url_match(self): + """Test that partial URL matching works (using 'in' operator).""" + config = { + 'login_redirect_override': 'https://controller.example.com/sso/login/azuread-oauth2/?next=%2Fdashboard', + 'gateway_authenticator': {'id': 456, 'sso_login_url': '/auth/login/azuread/456/'}, + } + valid_login_urls = ['/sso/login/azuread-oauth2'] + + # Mock gateway client methods + self.gateway_client.get_base_url.return_value = 'https://gateway.example.com:8080' + self.migrator.handle_login_override(config, valid_login_urls) + + # Should work since valid URL is contained in login_redirect_override + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url == 'https://gateway.example.com:8080/auth/login/azuread/456/?next=%2Fdashboard' + + def test_handle_login_override_saml_with_parameters(self): + """Test LOGIN_REDIRECT_OVERRIDE with SAML IDP parameters.""" + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/saml/?idp=mycompany', + 'gateway_authenticator': {'id': 789, 'sso_login_url': '/auth/login/saml/789/'}, + } + valid_login_urls = ['/sso/login/saml/?idp=mycompany'] + + # Mock gateway client methods + self.gateway_client.get_base_url.return_value = 'https://gateway.local' + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should work with SAML parameter URLs + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url == 'https://gateway.local/auth/login/saml/789/?idp=mycompany' + + def test_handle_login_override_github_with_trailing_slash(self): + """Test LOGIN_REDIRECT_OVERRIDE with trailing slash.""" + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/github-enterprise/', + 'gateway_authenticator': {'id': 999, 'sso_login_url': '/auth/login/github/999/'}, + } + valid_login_urls = ['/sso/login/github-enterprise', '/sso/login/github-enterprise/'] + + # Mock gateway client methods + self.gateway_client.get_base_url.return_value = 'https://gateway.internal' + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should work with trailing slash URLs + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url == 'https://gateway.internal/auth/login/github/999/' + + def test_handle_login_override_empty_valid_urls_list(self): + """Test that method returns early when valid_login_urls is empty.""" + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/github', + 'gateway_authenticator': {'id': 123, 'sso_login_url': '/sso/auth/login/123/'}, + } 
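+        # With nothing to match against, the override should be ignored
+        # entirely; the assertions below verify the early return.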
+ valid_login_urls = [] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should not call any gateway client methods + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_already_handled_raises_error(self): + """Test that calling handle_login_override when already handled raises RuntimeError.""" + # Set flag to True initially + BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator = True + + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/github', + 'gateway_authenticator': {'id': 123, 'sso_login_url': '/sso/auth/login/123/'}, + } + valid_login_urls = ['/sso/login/github'] + + # Should raise RuntimeError when trying to call again + with pytest.raises(RuntimeError, match="LOGIN_REDIRECT_OVERRIDE has already been handled by another migrator"): + self.migrator.handle_login_override(config, valid_login_urls) + + def test_handle_login_override_writes_output_message(self): + """Test that method writes output message when updating.""" + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/google-oauth2', + 'gateway_authenticator': {'id': 555, 'sso_login_url': '/auth/login/google/555/'}, + } + valid_login_urls = ['/sso/login/google-oauth2'] + + # Mock gateway client methods + self.gateway_client.get_base_url.return_value = 'https://gateway.test' + + # Mock _write_output method + with patch.object(self.migrator, '_write_output') as mock_write_output: + self.migrator.handle_login_override(config, valid_login_urls) + + # Verify output message was written + mock_write_output.assert_called_once_with('LOGIN_REDIRECT_OVERRIDE will be updated to: https://gateway.test/auth/login/google/555/') + # Verify class variables were set correctly + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url == 'https://gateway.test/auth/login/google/555/' + + @pytest.mark.parametrize( + "login_redirect_override,valid_urls,expected_match", + [ + # Test Azure AD variations + ('https://localhost:3000/sso/login/azuread-oauth2', ['/sso/login/azuread-oauth2'], True), + ('https://localhost:3000/sso/login/azuread-oauth2/', ['/sso/login/azuread-oauth2'], True), + ('https://controller.example.com/sso/login/azuread-oauth2?next=/home', ['/sso/login/azuread-oauth2'], True), + # Test Google OAuth2 variations + ('https://localhost:3000/sso/login/google-oauth2', ['/sso/login/google-oauth2'], True), + ('https://localhost:3000/sso/login/google-oauth2/', ['/sso/login/google-oauth2'], True), + # Test GitHub variations + ('https://localhost:3000/sso/login/github', ['/sso/login/github'], True), + ('https://localhost:3000/sso/login/github-org', ['/sso/login/github-org'], True), + ('https://localhost:3000/sso/login/github-team', ['/sso/login/github-team'], True), + ('https://localhost:3000/sso/login/github-enterprise', ['/sso/login/github-enterprise'], True), + # Test SAML variations + ('https://localhost:3000/sso/login/saml/?idp=company', ['/sso/login/saml/?idp=company'], True), + ('https://localhost:3000/sso/login/saml/?idp=test-org', ['/sso/login/saml/?idp=test-org'], True), + # Test non-matching cases + ('https://localhost:3000/sso/login/ldap', ['/sso/login/github'], False), + ('https://localhost:3000/sso/login/azuread-oauth2', ['/sso/login/google-oauth2'], False), + 
('https://localhost:3000/sso/login/saml/?idp=wrong', ['/sso/login/saml/?idp=company'], False), + # Test multiple valid URLs + ('https://localhost:3000/sso/login/github-org', ['/sso/login/github', '/sso/login/github-org'], True), + ('https://localhost:3000/sso/login/github', ['/sso/login/github-org', '/sso/login/github'], True), + # Test improved URL parsing scenarios - better boundary detection + ('https://localhost:3000/sso/login/github-enterprise', ['/sso/login/github'], False), # Should NOT match due to better parsing + ('https://localhost:3000/sso/login/saml/?idp=company&next=/home', ['/sso/login/saml/?idp=company'], True), + ('https://localhost:3000/sso/login/saml/?idp=company', ['/sso/login/saml/?idp=different'], False), + ('https://controller.example.com:8080/sso/login/azuread-oauth2/?next=/dashboard', ['/sso/login/azuread-oauth2'], True), + ('http://localhost/sso/login/github?state=abc123', ['/sso/login/github'], True), + # Test boundary detection edge cases + ('https://localhost:3000/sso/login/github/', ['/sso/login/github'], True), # Trailing slash should match + ('https://localhost:3000/sso/login/github#section', ['/sso/login/github'], True), # Fragment should match + ], + ) + def test_handle_login_override_url_matching_variations(self, login_redirect_override, valid_urls, expected_match): + """Test various URL matching scenarios parametrically.""" + config = {'login_redirect_override': login_redirect_override, 'gateway_authenticator': {'id': 123, 'sso_login_url': '/auth/login/test/123/'}} + + # Mock gateway client methods + self.gateway_client.get_base_url.return_value = 'https://gateway.test' + + self.migrator.handle_login_override(config, valid_urls) + + if expected_match: + # Should call get_base_url when URL matches but NOT update_gateway_setting + self.gateway_client.get_base_url.assert_called_once() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url is not None + else: + # Should not call gateway methods when URL doesn't match + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + assert BaseAuthenticatorMigrator.login_redirect_override_new_url is None + + def test_handle_login_override_improved_url_parsing(self): + """Test that improved URL parsing with proper path boundary detection prevents false positive matches.""" + # This test demonstrates the improvement over simple string matching + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/github-enterprise', + 'gateway_authenticator': {'id': 123, 'sso_login_url': '/auth/login/test/123/'}, + } + + # With the old simple string matching, this would incorrectly match + # because '/sso/login/github' is contained in '/sso/login/github-enterprise' + # But with proper URL parsing, it should NOT match + valid_login_urls = ['/sso/login/github'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should NOT match due to improved parsing + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False + + def test_handle_login_override_query_parameter_handling(self): + """Test that query parameters are properly handled in URL matching.""" + 
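+        # A rough sketch of the behavior under test (an assumption about the
+        # approach, not the exact implementation): both URLs are split into a
+        # path part and a query part, e.g. via urllib.parse.urlsplit, so that
+        # 'idp=mycompany' can match even when the override carries extra
+        # parameters such as 'next=%2Fdashboard'.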
config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/saml/?idp=mycompany&next=%2Fdashboard', + 'gateway_authenticator': {'id': 456, 'sso_login_url': '/auth/login/saml/456/?idp=IdP'}, + } + + # Should match the SAML URL with the correct IDP parameter (boundary-aware matching) + valid_login_urls = ['/sso/login/saml/?idp=mycompany'] + + self.gateway_client.get_base_url.return_value = 'https://gateway.test' + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should match because the query parameter is properly contained with boundaries + self.gateway_client.get_base_url.assert_called_once() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is True + assert BaseAuthenticatorMigrator.login_redirect_override_new_url == 'https://gateway.test/auth/login/saml/456/?idp=IdP&next=%2Fdashboard' + + def test_handle_login_override_different_query_parameters(self): + """Test that different query parameters don't match.""" + config = { + 'login_redirect_override': 'https://localhost:3000/sso/login/saml/?idp=company-a', + 'gateway_authenticator': {'id': 456, 'sso_login_url': '/auth/login/saml/456/'}, + } + + # Should NOT match SAML URL with different IDP parameter + valid_login_urls = ['/sso/login/saml/?idp=company-b'] + + self.migrator.handle_login_override(config, valid_login_urls) + + # Should NOT match because the query parameters are different + self.gateway_client.get_base_url.assert_not_called() + self.gateway_client.update_gateway_setting.assert_not_called() + assert BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator is False diff --git a/awx/main/tests/unit/utils/test_github_migrator.py b/awx/main/tests/unit/utils/test_github_migrator.py new file mode 100644 index 0000000000..8c585091fc --- /dev/null +++ b/awx/main/tests/unit/utils/test_github_migrator.py @@ -0,0 +1,124 @@ +""" +Unit tests for GitHub authenticator migrator functionality. +""" + +from unittest.mock import Mock, patch +from awx.sso.utils.github_migrator import GitHubMigrator + + +class TestGitHubMigrator: + """Tests for GitHubMigrator class.""" + + def setup_method(self): + """Set up test fixtures.""" + self.gateway_client = Mock() + self.command = Mock() + self.migrator = GitHubMigrator(self.gateway_client, self.command) + + def test_create_gateway_authenticator_returns_boolean_causes_crash(self): + """ + Test that verifies create_gateway_authenticator returns proper dictionary + structure instead of boolean when credentials are missing. + + This test verifies the fix for the bug. 
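+
+        Before the fix, create_gateway_authenticator returned a bare boolean
+        here, which made migrate() crash when it treated the return value as
+        a result dictionary.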
+ """ + # Mock the get_controller_config to return a GitHub config with missing credentials + github_config_missing_creds = { + 'category': 'github', + 'settings': {'SOCIAL_AUTH_GITHUB_KEY': '', 'SOCIAL_AUTH_GITHUB_SECRET': 'test-secret'}, # Missing key + 'org_mappers': [], + 'team_mappers': [], + 'login_redirect_override': None, + } + + with patch.object(self.migrator, 'get_controller_config', return_value=[github_config_missing_creds]): + with patch.object(self.migrator, '_write_output'): # Mock output to avoid noise + # This should NOT crash now that the bug is fixed + result = self.migrator.migrate() + + # Verify the migration ran successfully without crashing + assert 'created' in result + assert 'failed' in result + # Should have failed=1 since the config has success=False (missing credentials) + assert result['failed'] == 1 + + def test_create_gateway_authenticator_returns_boolean_with_unknown_category(self): + """ + Test that verifies create_gateway_authenticator returns proper dictionary + structure instead of boolean when category is unknown. + + This test verifies the fix for the bug. + """ + # Mock the get_controller_config to return a GitHub config with unknown category + github_config_unknown_category = { + 'category': 'unknown-category', + 'settings': {'SOCIAL_AUTH_UNKNOWN_KEY': 'test-key', 'SOCIAL_AUTH_UNKNOWN_SECRET': 'test-secret'}, + 'org_mappers': [], + 'team_mappers': [], + 'login_redirect_override': None, + } + + with patch.object(self.migrator, 'get_controller_config', return_value=[github_config_unknown_category]): + with patch.object(self.migrator, '_write_output'): # Mock output to avoid noise + # This should NOT crash now that the bug is fixed + result = self.migrator.migrate() + + # Verify the migration ran successfully without crashing + assert 'created' in result + assert 'failed' in result + # Should have failed=1 since the config has success=False (unknown category) + assert result['failed'] == 1 + + def test_create_gateway_authenticator_direct_boolean_return_missing_creds(self): + """ + Test that directly calls create_gateway_authenticator and verifies it returns + proper dictionary structure instead of boolean for missing credentials. + """ + # Config with missing key (empty string) + config_missing_key = { + 'category': 'github', + 'settings': {'SOCIAL_AUTH_GITHUB_KEY': '', 'SOCIAL_AUTH_GITHUB_SECRET': 'test-secret'}, # Missing key + 'org_mappers': [], + 'team_mappers': [], + 'login_redirect_override': None, + } + + with patch.object(self.migrator, '_write_output'): # Mock output to avoid noise + result = self.migrator.create_gateway_authenticator(config_missing_key) + + # Now the method should return a proper dictionary structure + assert isinstance(result, dict), f"Expected dict, got {type(result)} with value: {result}" + assert 'success' in result, f"Expected 'success' key in result: {result}" + assert 'action' in result, f"Expected 'action' key in result: {result}" + assert 'error' in result, f"Expected 'error' key in result: {result}" + # Verify the expected values + assert result['success'] is False + assert result['action'] == 'skipped' + assert 'Missing OAuth2 credentials' in result['error'] + + def test_create_gateway_authenticator_direct_boolean_return_unknown_category(self): + """ + Test that directly calls create_gateway_authenticator and verifies it returns + proper dictionary structure instead of boolean for unknown category. 
+ """ + # Config with unknown category + config_unknown_category = { + 'category': 'unknown-category', + 'settings': {'SOCIAL_AUTH_UNKNOWN_KEY': 'test-key', 'SOCIAL_AUTH_UNKNOWN_SECRET': 'test-secret'}, + 'org_mappers': [], + 'team_mappers': [], + 'login_redirect_override': None, + } + + with patch.object(self.migrator, '_write_output'): # Mock output to avoid noise + result = self.migrator.create_gateway_authenticator(config_unknown_category) + + # Now the method should return a proper dictionary structure + assert isinstance(result, dict), f"Expected dict, got {type(result)} with value: {result}" + assert 'success' in result, f"Expected 'success' key in result: {result}" + assert 'action' in result, f"Expected 'action' key in result: {result}" + assert 'error' in result, f"Expected 'error' key in result: {result}" + # Verify the expected values + assert result['success'] is False + assert result['action'] == 'skipped' + assert 'Unknown category unknown-category' in result['error'] diff --git a/awx/main/tests/unit/utils/test_ldap_migrator.py b/awx/main/tests/unit/utils/test_ldap_migrator.py new file mode 100644 index 0000000000..975b82d2bc --- /dev/null +++ b/awx/main/tests/unit/utils/test_ldap_migrator.py @@ -0,0 +1,1024 @@ +""" +Unit tests for LDAP authenticator migrator. +""" + +import ldap +from unittest.mock import Mock, patch +from awx.sso.utils.ldap_migrator import LDAPMigrator + + +class TestLDAPMigrator: + """Tests for LDAPMigrator class.""" + + def setup_method(self): + """Set up test fixtures.""" + self.gateway_client = Mock() + self.command = Mock() + self.migrator = LDAPMigrator(self.gateway_client, self.command) + + def test_get_authenticator_type(self): + """Test that get_authenticator_type returns 'LDAP'.""" + assert self.migrator.get_authenticator_type() == "LDAP" + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_get_controller_config_no_server_uri(self, mock_settings): + """Test that LDAP configs without SERVER_URI are skipped.""" + # Mock settings to return None for SERVER_URI + mock_settings.AUTH_LDAP_SERVER_URI = None + mock_settings.AUTH_LDAP_1_SERVER_URI = None + + # Mock all other required attributes to avoid AttributeError + for i in [None, 1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" if i is not None else "AUTH_LDAP_" + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + assert result == [] + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_get_controller_config_with_server_uri(self, mock_settings): + """Test that LDAP config with SERVER_URI is processed.""" + # Mock basic LDAP configuration + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_BIND_DN = "cn=admin,dc=example,dc=com" + mock_settings.AUTH_LDAP_BIND_PASSWORD = "password" + mock_settings.AUTH_LDAP_START_TLS = False + mock_settings.AUTH_LDAP_CONNECTION_OPTIONS = {} + mock_settings.AUTH_LDAP_USER_SEARCH = None + mock_settings.AUTH_LDAP_USER_DN_TEMPLATE = None + mock_settings.AUTH_LDAP_USER_ATTR_MAP = {} + mock_settings.AUTH_LDAP_GROUP_SEARCH = None + mock_settings.AUTH_LDAP_GROUP_TYPE = None + mock_settings.AUTH_LDAP_GROUP_TYPE_PARAMS = {} + mock_settings.AUTH_LDAP_REQUIRE_GROUP = None + 
mock_settings.AUTH_LDAP_DENY_GROUP = None + mock_settings.AUTH_LDAP_USER_FLAGS_BY_GROUP = {} + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = {} + mock_settings.AUTH_LDAP_TEAM_MAP = {} + + # Mock all other instances to return None for SERVER_URI + for i in [1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" + setattr(mock_settings, f"{prefix}SERVER_URI", None) + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + + assert len(result) == 1 + config = result[0] + assert config['category'] == 'ldap' + assert config['settings']['SERVER_URI'] == ['ldap://ldap.example.com'] + assert config['settings']['BIND_DN'] == "cn=admin,dc=example,dc=com" + assert 'org_mappers' in config + assert 'team_mappers' in config + assert 'role_mappers' in config + assert 'allow_mappers' in config + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_get_controller_config_multiple_instances(self, mock_settings): + """Test processing multiple LDAP instances.""" + # Mock two LDAP instances + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap1.example.com" + mock_settings.AUTH_LDAP_1_SERVER_URI = "ldap://ldap2.example.com" + + # Mock all required attributes for both instances + for prefix in ["AUTH_LDAP_", "AUTH_LDAP_1_"]: + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"{prefix}{key}", None) + + # Mock remaining instances to return None + for i in [2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" + setattr(mock_settings, f"{prefix}SERVER_URI", None) + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + + assert len(result) == 2 + assert result[0]['category'] == 'ldap' + assert result[1]['category'] == 'ldap' + assert result[0]['settings']['SERVER_URI'] == ['ldap://ldap1.example.com'] + assert result[1]['settings']['SERVER_URI'] == ['ldap://ldap2.example.com'] + + def test_get_ldap_instance_config_basic(self): + """Test _get_ldap_instance_config with basic settings.""" + with patch('awx.sso.utils.ldap_migrator.settings') as mock_settings: + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_BIND_DN = "cn=admin,dc=example,dc=com" + mock_settings.AUTH_LDAP_BIND_PASSWORD = "password" + mock_settings.AUTH_LDAP_START_TLS = True + + # Mock all other settings to None + for key in [ + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"AUTH_LDAP_{key}", None) + + result = self.migrator._get_ldap_instance_config("AUTH_LDAP_") + + 
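+            # Note the normalization below: the controller's single SERVER_URI
+            # string comes back wrapped in a list, which is the shape the
+            # gateway configuration expects.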
assert result['SERVER_URI'] == ['ldap://ldap.example.com'] + assert result['BIND_DN'] == "cn=admin,dc=example,dc=com" + assert result['BIND_PASSWORD'] == "password" + assert result['START_TLS'] is True + + def test_get_ldap_instance_config_server_uri_list(self): + """Test SERVER_URI conversion from comma-separated string to list.""" + with patch('awx.sso.utils.ldap_migrator.settings') as mock_settings: + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap1.example.com, ldap://ldap2.example.com" + + # Mock all other settings to None + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"AUTH_LDAP_{key}", None) + + result = self.migrator._get_ldap_instance_config("AUTH_LDAP_") + + assert result['SERVER_URI'] == ['ldap://ldap1.example.com', 'ldap://ldap2.example.com'] + + def test_get_ldap_instance_config_user_search(self): + """Test USER_SEARCH conversion from LDAPSearch object.""" + with patch('awx.sso.utils.ldap_migrator.settings') as mock_settings: + # Mock LDAPSearch object + mock_search = Mock() + mock_search.base_dn = "ou=users,dc=example,dc=com" + mock_search.filterstr = "(uid=%(user)s)" + mock_search.scope = ldap.SCOPE_SUBTREE + + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_USER_SEARCH = mock_search + + # Mock all other settings to None + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"AUTH_LDAP_{key}", None) + + result = self.migrator._get_ldap_instance_config("AUTH_LDAP_") + + assert result['USER_SEARCH'] == ["ou=users,dc=example,dc=com", "SCOPE_SUBTREE", "(uid=%(user)s)"] + + def test_get_ldap_instance_config_group_type(self): + """Test GROUP_TYPE conversion from class to string.""" + with patch('awx.sso.utils.ldap_migrator.settings') as mock_settings: + # Mock group type class with proper __name__ attribute + mock_group_type = Mock() + # Use type() to create a proper class name + mock_group_type.__name__ = "PosixGroupType" + type(mock_group_type).__name__ = "PosixGroupType" + + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_GROUP_TYPE = mock_group_type + + # Mock all other settings to None + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"AUTH_LDAP_{key}", None) + + result = self.migrator._get_ldap_instance_config("AUTH_LDAP_") + + assert result['GROUP_TYPE'] == "PosixGroupType" + + def test_build_ldap_configuration(self): + """Test _build_ldap_configuration method.""" + settings = { + 'SERVER_URI': ['ldap://ldap.example.com'], + 'BIND_DN': 'cn=admin,dc=example,dc=com', + 'BIND_PASSWORD': 'password', + 'START_TLS': True, + 'USER_SEARCH': ['ou=users,dc=example,dc=com', 'SCOPE_SUBTREE', '(uid=%(user)s)'], + 'USER_ATTR_MAP': {'first_name': 'givenName', 'last_name': 'sn', 'email': 'mail'}, + 'GROUP_SEARCH': 
['ou=groups,dc=example,dc=com', 'SCOPE_SUBTREE', '(objectClass=posixGroup)'], + 'GROUP_TYPE': 'PosixGroupType', + 'GROUP_TYPE_PARAMS': {'name_attr': 'cn'}, + 'USER_DN_TEMPLATE': 'uid=%(user)s,ou=users,dc=example,dc=com', + 'CONNECTION_OPTIONS': {ldap.OPT_REFERRALS: 0}, + } + + result = self.migrator._build_ldap_configuration(settings) + + assert result['SERVER_URI'] == ['ldap://ldap.example.com'] + assert result['BIND_DN'] == 'cn=admin,dc=example,dc=com' + assert result['BIND_PASSWORD'] == 'password' + assert result['START_TLS'] is True + assert result['USER_SEARCH'] == ['ou=users,dc=example,dc=com', 'SCOPE_SUBTREE', '(uid=%(user)s)'] + assert result['USER_ATTR_MAP'] == {'first_name': 'givenName', 'last_name': 'sn', 'email': 'mail'} + assert result['GROUP_SEARCH'] == ['ou=groups,dc=example,dc=com', 'SCOPE_SUBTREE', '(objectClass=posixGroup)'] + assert result['GROUP_TYPE'] == 'PosixGroupType' + assert result['GROUP_TYPE_PARAMS'] == {'name_attr': 'cn'} + assert result['USER_DN_TEMPLATE'] == 'uid=%(user)s,ou=users,dc=example,dc=com' + assert 'CONNECTION_OPTIONS' in result + + def test_build_ldap_configuration_minimal(self): + """Test _build_ldap_configuration with minimal settings.""" + settings = {'SERVER_URI': ['ldap://ldap.example.com']} + + result = self.migrator._build_ldap_configuration(settings) + + assert result == {'SERVER_URI': ['ldap://ldap.example.com']} + + def test_convert_ldap_connection_options(self): + """Test _convert_ldap_connection_options method.""" + connection_options = { + ldap.OPT_REFERRALS: 0, + ldap.OPT_PROTOCOL_VERSION: 3, + ldap.OPT_NETWORK_TIMEOUT: 30, + ldap.OPT_X_TLS_REQUIRE_CERT: ldap.OPT_X_TLS_NEVER, + } + + result = self.migrator._convert_ldap_connection_options(connection_options) + + assert result['OPT_REFERRALS'] == 0 + assert result['OPT_PROTOCOL_VERSION'] == 3 + assert result['OPT_NETWORK_TIMEOUT'] == 30 + assert result['OPT_X_TLS_REQUIRE_CERT'] == ldap.OPT_X_TLS_NEVER + + def test_convert_ldap_connection_options_unknown_option(self): + """Test _convert_ldap_connection_options with unknown option.""" + connection_options = {999999: 'unknown_value', ldap.OPT_REFERRALS: 0} # Unknown LDAP option + + result = self.migrator._convert_ldap_connection_options(connection_options) + + # Unknown option should be ignored + assert 'OPT_REFERRALS' in result + assert len(result) == 1 + + def test_ldap_group_allow_to_gateway_format_none(self): + """Test _ldap_group_allow_to_gateway_format with None group.""" + result = [] + output_result, next_order = self.migrator._ldap_group_allow_to_gateway_format(result, None, deny=False, start_order=1) + + assert output_result == [] + assert next_order == 1 + + def test_ldap_group_allow_to_gateway_format_require_group(self): + """Test _ldap_group_allow_to_gateway_format for require group.""" + result = [] + ldap_group = "cn=allowed_users,dc=example,dc=com" + + output_result, next_order = self.migrator._ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=1) + + expected = [ + { + "name": "LDAP-RequireGroup", + "authenticator": -1, + "map_type": "allow", + "revoke": False, + "triggers": {"groups": {"has_and": ["cn=allowed_users,dc=example,dc=com"]}}, + "order": 1, + } + ] + + assert output_result == expected + assert next_order == 2 + + def test_ldap_group_allow_to_gateway_format_deny_group(self): + """Test _ldap_group_allow_to_gateway_format for deny group.""" + result = [] + ldap_group = "cn=blocked_users,dc=example,dc=com" + + output_result, next_order = 
self.migrator._ldap_group_allow_to_gateway_format(result, ldap_group, deny=True, start_order=5) + + expected = [ + { + "name": "LDAP-DenyGroup", + "authenticator": -1, + "map_type": "allow", + "revoke": True, + "triggers": {"groups": {"has_or": ["cn=blocked_users,dc=example,dc=com"]}}, + "order": 5, + } + ] + + assert output_result == expected + assert next_order == 6 + + def test_create_gateway_authenticator(self): + """Test create_gateway_authenticator method.""" + config = { + 'category': 'ldap', + 'settings': {'SERVER_URI': ['ldap://ldap.example.com'], 'BIND_DN': 'cn=admin,dc=example,dc=com', 'BIND_PASSWORD': 'password'}, + 'org_mappers': [], + 'team_mappers': [], + 'role_mappers': [], + 'allow_mappers': [], + } + + with patch.object(self.migrator, 'submit_authenticator') as mock_submit: + mock_submit.return_value = {'id': 123, 'name': 'ldap'} + + result = self.migrator.create_gateway_authenticator(config) + + # Verify submit_authenticator was called + mock_submit.assert_called_once() + call_args = mock_submit.call_args + gateway_config = call_args[0][0] + + assert gateway_config['name'] == 'ldap' + assert gateway_config['type'] == 'ansible_base.authentication.authenticator_plugins.ldap' + assert gateway_config['create_objects'] is True + assert gateway_config['remove_users'] is False + assert gateway_config['enabled'] is True + assert 'configuration' in gateway_config + + assert result == {'id': 123, 'name': 'ldap'} + + def test_create_gateway_authenticator_slug_generation(self): + """Test that create_gateway_authenticator generates correct slug.""" + config = { + 'category': 'ldap', + 'settings': {'SERVER_URI': ['ldap://ldap.example.com']}, + 'org_mappers': [], + 'team_mappers': [], + 'role_mappers': [], + 'allow_mappers': [], + } + + with patch.object(self.migrator, 'submit_authenticator') as mock_submit: + with patch.object(self.migrator, '_generate_authenticator_slug', return_value='aap-ldap-ldap') as mock_slug: + mock_submit.return_value = {'id': 123, 'name': 'ldap'} + + self.migrator.create_gateway_authenticator(config) + + mock_slug.assert_called_once_with('ldap', 'ldap') + call_args = mock_submit.call_args + gateway_config = call_args[0][0] + assert gateway_config['slug'] == 'aap-ldap-ldap' + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_get_controller_config_with_mappings(self, mock_settings): + """Test get_controller_config with organization and team mappings.""" + # Mock LDAP configuration with mappings + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = {"TestOrg": {"users": ["admin_group"], "admins": ["super_admin_group"]}} + mock_settings.AUTH_LDAP_TEAM_MAP = {"TestTeam": {"organization": "TestOrg", "users": ["team_group"]}} + mock_settings.AUTH_LDAP_USER_FLAGS_BY_GROUP = {"is_superuser": ["super_group"]} + mock_settings.AUTH_LDAP_REQUIRE_GROUP = "cn=allowed,dc=example,dc=com" + mock_settings.AUTH_LDAP_DENY_GROUP = "cn=blocked,dc=example,dc=com" + + # Mock all other settings to None + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + ]: + setattr(mock_settings, f"AUTH_LDAP_{key}", None) + + # Mock all other instances to return None + for i in [1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" + setattr(mock_settings, f"{prefix}SERVER_URI", None) + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 
'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + + assert len(result) == 1 + config = result[0] + + # Check that mappers were generated + assert len(config['org_mappers']) > 0 + assert len(config['team_mappers']) > 0 + assert len(config['role_mappers']) > 0 + assert len(config['allow_mappers']) > 0 # Should have deny and require group mappers + + # Verify allow mappers contain deny and require groups + allow_mapper_names = [mapper['name'] for mapper in config['allow_mappers']] + assert 'LDAP-DenyGroup' in allow_mapper_names + assert 'LDAP-RequireGroup' in allow_mapper_names + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_get_controller_config_with_specific_org_mapping(self, mock_settings): + """Test get_controller_config with specific organization mapping including remove flags.""" + # Mock LDAP configuration with the exact mapping from the user request + # This case is added for AAP-51531 + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = { + "Networking": {"admins": "cn=networkadmins,ou=groups,dc=example,dc=com", "users": True, "remove_admins": True, "remove_users": True} + } + mock_settings.AUTH_LDAP_TEAM_MAP = {} + mock_settings.AUTH_LDAP_USER_FLAGS_BY_GROUP = {} + mock_settings.AUTH_LDAP_REQUIRE_GROUP = None + mock_settings.AUTH_LDAP_DENY_GROUP = None + + # Mock all other settings to None + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + ]: + setattr(mock_settings, f"AUTH_LDAP_{key}", None) + + # Mock all other instances to return None + for i in [1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" + setattr(mock_settings, f"{prefix}SERVER_URI", None) + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + + assert len(result) == 1 + config = result[0] + + # Should have 2 organization mappers: 1 for admins, 1 for users + assert len(config['org_mappers']) == 2 + + # Find the admin and user mappers + admin_mapper = next((m for m in config['org_mappers'] if 'Admins' in m['name']), None) + user_mapper = next((m for m in config['org_mappers'] if 'Users' in m['name']), None) + + assert admin_mapper is not None + assert user_mapper is not None + + # Verify admin mapper details + assert admin_mapper['organization'] == 'Networking' + assert admin_mapper['role'] == 'Organization Admin' + assert admin_mapper['revoke'] is True # remove_admins: true + assert 'Match User Groups' in admin_mapper['name'] + assert admin_mapper['triggers']['groups']['has_or'] == ['cn=networkadmins,ou=groups,dc=example,dc=com'] + + # Verify user mapper details + assert user_mapper['organization'] == 'Networking' + assert user_mapper['role'] == 'Organization Member' + assert user_mapper['revoke'] is True # remove_users: true + assert 'Always Allow' in user_mapper['name'] + assert user_mapper['triggers']['always'] == {} 
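+        # The trigger shapes mirror the source mapping: a DN string becomes a
+        # groups/has_or trigger, while users=True becomes the unconditional
+        # 'always' trigger.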
+ + # Verify ordering (admin mapper should come before user mapper) + admin_order = admin_mapper['order'] + user_order = user_mapper['order'] + assert admin_order < user_order + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_get_controller_config_with_complex_org_mapping(self, mock_settings): + """Test get_controller_config with complex organization mapping scenarios.""" + # Mock LDAP configuration with various mapping types + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = { + # This case is added for AAP-51531 + "Networking": {"admins": "cn=networkadmins,ou=groups,dc=example,dc=com", "users": True, "remove_admins": True, "remove_users": True}, + "Development": { + "admins": ["cn=devadmins,ou=groups,dc=example,dc=com", "cn=leaddevs,ou=groups,dc=example,dc=com"], + "users": ["cn=developers,ou=groups,dc=example,dc=com"], + "remove_admins": False, + "remove_users": False, + }, + "QA": {"users": False, "remove_users": False}, # Never allow + } + mock_settings.AUTH_LDAP_TEAM_MAP = {} + mock_settings.AUTH_LDAP_USER_FLAGS_BY_GROUP = {} + mock_settings.AUTH_LDAP_REQUIRE_GROUP = None + mock_settings.AUTH_LDAP_DENY_GROUP = None + + # Mock all other settings to None + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + ]: + setattr(mock_settings, f"AUTH_LDAP_{key}", None) + + # Mock all other instances to return None + for i in [1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" + setattr(mock_settings, f"{prefix}SERVER_URI", None) + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + 'USER_FLAGS_BY_GROUP', + 'ORGANIZATION_MAP', + 'TEAM_MAP', + ]: + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + + assert len(result) == 1 + config = result[0] + + # Should have 5 organization mappers total: + # Networking: 2 (admins + users) + # Development: 2 (admins list creates 1 + users list creates 1) + # QA: 1 (users False creates 1) + assert len(config['org_mappers']) == 5 + + # Verify Networking mappers + networking_mappers = [m for m in config['org_mappers'] if m['organization'] == 'Networking'] + assert len(networking_mappers) == 2 + + # Verify Development mappers (should have 2: 1 admin group + 1 user group) + development_mappers = [m for m in config['org_mappers'] if m['organization'] == 'Development'] + assert len(development_mappers) == 2 + + # Verify QA mappers (should have 1: users = False creates Never Allow) + qa_mappers = [m for m in config['org_mappers'] if m['organization'] == 'QA'] + assert len(qa_mappers) == 1 + qa_user_mapper = qa_mappers[0] + assert 'Never Allow' in qa_user_mapper['name'] + assert qa_user_mapper['triggers']['never'] == {} + assert qa_user_mapper['revoke'] is False + + def test_ldap_organization_mapping_with_remove_flags_integration(self): + """Integration test for the specific organization mapping with remove flags.""" + # Test the exact scenario from the user's request using the gateway mapping functions directly + from awx.main.utils.gateway_mapping import org_map_to_gateway_format + + # This case is added for AAP-51531 + org_map = {"Networking": {"admins": "cn=networkadmins,ou=groups,dc=example,dc=com", "users": True, 
"remove_admins": True, "remove_users": True}} + + result, next_order = org_map_to_gateway_format(org_map, start_order=1, auth_type='ldap') + + assert len(result) == 2 + + # Find admin and user mappers + admin_mapper = next((m for m in result if m['role'] == 'Organization Admin'), None) + user_mapper = next((m for m in result if m['role'] == 'Organization Member'), None) + + assert admin_mapper is not None + assert user_mapper is not None + + # Verify admin mapper + assert admin_mapper['organization'] == 'Networking' + assert admin_mapper['revoke'] is True + assert admin_mapper['triggers']['groups']['has_or'] == ['cn=networkadmins,ou=groups,dc=example,dc=com'] + assert 'Match User Groups' in admin_mapper['name'] + + # Verify user mapper + assert user_mapper['organization'] == 'Networking' + assert user_mapper['revoke'] is True + assert user_mapper['triggers']['always'] == {} + assert 'Always Allow' in user_mapper['name'] + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_ldap_mixed_boolean_and_group_mappings(self, mock_settings): + """Test organization mapping with mixed boolean and group assignments.""" + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = { + "MixedOrg": { + "admins": True, # All users are admins + "users": ["cn=engineers,ou=groups,dc=example,dc=com", "cn=qa,ou=groups,dc=example,dc=com"], # Specific groups are users + "remove_admins": False, + "remove_users": True, + } + } + mock_settings.AUTH_LDAP_TEAM_MAP = {} + + # Mock all other settings to None + for i in [None, 1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" if i is not None else "AUTH_LDAP_" + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'USER_FLAGS_BY_GROUP', + 'REQUIRE_GROUP', + 'DENY_GROUP', + ]: + if i is None and key in ['SERVER_URI', 'ORGANIZATION_MAP', 'TEAM_MAP']: + continue + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + assert len(result) == 1 + config = result[0] + + # Should have 2 mappers: admin (True) and users (groups) + assert len(config['org_mappers']) == 2 + + # Find admin mapper (should have 'always' trigger) + admin_mapper = next(mapper for mapper in config['org_mappers'] if 'Admins' in mapper['name']) + assert admin_mapper['triggers']['always'] == {} + + # Find user mapper (should have groups trigger) + user_mapper = next(mapper for mapper in config['org_mappers'] if 'Users' in mapper['name']) + assert user_mapper['triggers']['groups']['has_or'] == ["cn=engineers,ou=groups,dc=example,dc=com", "cn=qa,ou=groups,dc=example,dc=com"] + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_ldap_user_flags_multiple_types(self, mock_settings): + """Test LDAP user flags with multiple flag types simultaneously.""" + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = {} + mock_settings.AUTH_LDAP_TEAM_MAP = {} + mock_settings.AUTH_LDAP_USER_FLAGS_BY_GROUP = { + "is_superuser": ["cn=superusers,ou=groups,dc=example,dc=com", "cn=admins,ou=groups,dc=example,dc=com"], + "is_system_auditor": "cn=auditors,ou=groups,dc=example,dc=com", + } + + # Mock all other settings to None + for i in [None, 1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" if i is not None else "AUTH_LDAP_" + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 
'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'REQUIRE_GROUP', + 'DENY_GROUP', + ]: + if i is None and key in ['SERVER_URI', 'ORGANIZATION_MAP', 'TEAM_MAP', 'USER_FLAGS_BY_GROUP']: + continue + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + assert len(result) == 1 + config = result[0] + + # Should have role mappers for both flag types + assert len(config['role_mappers']) == 2 + + role_mapper_names = [mapper['name'] for mapper in config['role_mappers']] + assert "is_superuser - role" in role_mapper_names + assert "is_system_auditor - role" in role_mapper_names + + # Verify superuser mapper has multiple groups + superuser_mapper = next(mapper for mapper in config['role_mappers'] if mapper['name'] == "is_superuser - role") + assert superuser_mapper['triggers']['groups']['has_or'] == ["cn=superusers,ou=groups,dc=example,dc=com", "cn=admins,ou=groups,dc=example,dc=com"] + + # Verify auditor mapper has single group + auditor_mapper = next(mapper for mapper in config['role_mappers'] if mapper['name'] == "is_system_auditor - role") + assert auditor_mapper['triggers']['groups']['has_or'] == ["cn=auditors,ou=groups,dc=example,dc=com"] + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_ldap_team_mapping_nonexistent_organization(self, mock_settings): + """Test team mapping that references a non-existent organization.""" + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = {} # No organizations defined + mock_settings.AUTH_LDAP_TEAM_MAP = { + "OrphanTeam": {"organization": "NonExistentOrg", "users": "cn=teamusers,ou=groups,dc=example,dc=com", "remove": True} + } + + # Mock all other settings to None + for i in [None, 1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" if i is not None else "AUTH_LDAP_" + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'USER_FLAGS_BY_GROUP', + 'REQUIRE_GROUP', + 'DENY_GROUP', + ]: + if i is None and key in ['SERVER_URI', 'ORGANIZATION_MAP', 'TEAM_MAP']: + continue + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + assert len(result) == 1 + config = result[0] + + # Should still create team mapper + assert len(config['team_mappers']) == 1 + team_mapper = config['team_mappers'][0] + assert "OrphanTeam" in team_mapper['name'] + assert team_mapper['organization'] == "NonExistentOrg" + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_ldap_organization_with_special_characters(self, mock_settings): + """Test organization mapping with special characters in organization names.""" + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = { + "Org-With-Dashes": {"users": True, "admins": False}, + "Org With Spaces": {"users": "cn=users,dc=example,dc=com", "admins": None}, + "Org_With_Underscores": {"users": ["cn=group1,dc=example,dc=com"], "admins": True}, + } + mock_settings.AUTH_LDAP_TEAM_MAP = {} + + # Mock all other settings to None + for i in [None, 1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" if i is not None else "AUTH_LDAP_" + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'USER_FLAGS_BY_GROUP', + 
'REQUIRE_GROUP', + 'DENY_GROUP', + ]: + if i is None and key in ['SERVER_URI', 'ORGANIZATION_MAP', 'TEAM_MAP']: + continue + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + assert len(result) == 1 + config = result[0] + + # Should create mappers for all organizations with special characters + assert len(config['org_mappers']) == 5 # 3 orgs: 2 mappers for Org-With-Dashes, 1 for Org With Spaces, 2 for Org_With_Underscores + + org_mapper_names = [mapper['name'] for mapper in config['org_mappers']] + assert "Org-With-Dashes - Users Always Allow" in org_mapper_names + assert "Org With Spaces - Users Match User Groups" in org_mapper_names + assert "Org_With_Underscores - Admins Always Allow" in org_mapper_names + assert "Org_With_Underscores - Users Match User Groups" in org_mapper_names + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_ldap_empty_organization_mapping(self, mock_settings): + """Test LDAP config with empty organization mapping.""" + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = {} # Empty mapping + mock_settings.AUTH_LDAP_TEAM_MAP = {} + + # Mock all other settings to None + for i in [None, 1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" if i is not None else "AUTH_LDAP_" + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + 'USER_FLAGS_BY_GROUP', + 'REQUIRE_GROUP', + 'DENY_GROUP', + ]: + if i is None and key in ['SERVER_URI', 'ORGANIZATION_MAP', 'TEAM_MAP']: + continue + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + assert len(result) == 1 + config = result[0] + + # Should have no organization mappers + assert len(config['org_mappers']) == 0 + assert len(config['team_mappers']) == 0 + + @patch('awx.sso.utils.ldap_migrator.settings') + def test_ldap_networking_org_mapping_aap_51531_dedicated(self, mock_settings): + """Dedicated test for the specific LDAP organization mapping case for JIRA AAP-51531.""" + # This case is added for JIRA AAP-51531 + mock_settings.AUTH_LDAP_SERVER_URI = "ldap://ldap.example.com" + mock_settings.AUTH_LDAP_ORGANIZATION_MAP = { + "Networking": {"admins": "cn=networkadmins,ou=groups,dc=example,dc=com", "users": True, "remove_admins": True, "remove_users": True} + } + mock_settings.AUTH_LDAP_TEAM_MAP = {} + mock_settings.AUTH_LDAP_USER_FLAGS_BY_GROUP = {} + mock_settings.AUTH_LDAP_REQUIRE_GROUP = None + mock_settings.AUTH_LDAP_DENY_GROUP = None + + # Mock all other settings to None + for i in [None, 1, 2, 3, 4, 5]: + prefix = f"AUTH_LDAP_{i}_" if i is not None else "AUTH_LDAP_" + for key in [ + 'BIND_DN', + 'BIND_PASSWORD', + 'START_TLS', + 'CONNECTION_OPTIONS', + 'USER_SEARCH', + 'USER_DN_TEMPLATE', + 'USER_ATTR_MAP', + 'GROUP_SEARCH', + 'GROUP_TYPE', + 'GROUP_TYPE_PARAMS', + ]: + if i is None and key in ['SERVER_URI', 'ORGANIZATION_MAP', 'TEAM_MAP', 'USER_FLAGS_BY_GROUP', 'REQUIRE_GROUP', 'DENY_GROUP']: + continue + setattr(mock_settings, f"{prefix}{key}", None) + + result = self.migrator.get_controller_config() + assert len(result) == 1 + config = result[0] + + # Should create exactly 2 organization mappers for the Networking org + assert len(config['org_mappers']) == 2 + assert config['category'] == 'ldap' + + # Find admin and user mappers + admin_mapper = next((m for m in config['org_mappers'] if 'Admins' in m['name']), None) + 
user_mapper = next((m for m in config['org_mappers'] if 'Users' in m['name']), None) + + assert admin_mapper is not None + assert user_mapper is not None + + # Verify admin mapper details for JIRA AAP-51531 + assert admin_mapper['organization'] == 'Networking' + assert admin_mapper['revoke'] is True # remove_admins: true + assert 'Match User Groups' in admin_mapper['name'] + assert admin_mapper['triggers']['groups']['has_or'] == ['cn=networkadmins,ou=groups,dc=example,dc=com'] + + # Verify user mapper details for JIRA AAP-51531 + assert user_mapper['organization'] == 'Networking' + assert user_mapper['revoke'] is True # remove_users: true + assert 'Always Allow' in user_mapper['name'] + assert user_mapper['triggers']['always'] == {} + + # Verify both mappers have correct properties + assert admin_mapper['map_type'] == 'organization' + assert user_mapper['map_type'] == 'organization' + assert admin_mapper['authenticator'] == -1 + assert user_mapper['authenticator'] == -1 diff --git a/awx/main/tests/unit/utils/test_role_mapping.py b/awx/main/tests/unit/utils/test_role_mapping.py new file mode 100644 index 0000000000..72c0de84bb --- /dev/null +++ b/awx/main/tests/unit/utils/test_role_mapping.py @@ -0,0 +1,614 @@ +""" +Unit tests for role mapping utilities. +""" + +import pytest +from awx.main.utils.gateway_mapping import role_map_to_gateway_format +from awx.sso.utils.ldap_migrator import LDAPMigrator + + +def get_role_mappers(role_map, start_order=1): + """Helper function to get just the mappers from role_map_to_gateway_format.""" + result, _ = role_map_to_gateway_format(role_map, start_order) + return result + + +def ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=1): + """Helper function to test LDAP group allow mapping via LDAPMigrator.""" + migrator = LDAPMigrator() + return migrator._ldap_group_allow_to_gateway_format(result, ldap_group, deny, start_order) + + +class TestRoleMapToGatewayFormat: + """Tests for role_map_to_gateway_format function.""" + + def test_none_input(self): + """Test that None input returns empty list.""" + result, next_order = role_map_to_gateway_format(None) + assert result == [] + assert next_order == 1 # Default start_order + + def test_empty_dict(self): + """Test that empty dict returns empty list.""" + result, next_order = role_map_to_gateway_format({}) + assert result == [] + assert next_order == 1 + + def test_is_superuser_single_group(self): + """Test is_superuser with single group.""" + role_map = {"is_superuser": "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"} + + result, _ = role_map_to_gateway_format(role_map) + + expected = [ + { + "name": "is_superuser - role", + "authenticator": -1, + "revoke": True, + "map_type": "is_superuser", + "team": None, + "organization": None, + "triggers": { + "groups": { + "has_or": ["cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"], + } + }, + "order": 1, + } + ] + + assert result == expected + + def test_is_superuser_multiple_groups(self): + """Test is_superuser with multiple groups.""" + role_map = {"is_superuser": ["cn=super_users,dc=example,dc=com", "cn=admins,dc=example,dc=com"]} + + result, _ = role_map_to_gateway_format(role_map) + + expected = [ + { + "name": "is_superuser - role", + "authenticator": -1, + "revoke": True, + "map_type": "is_superuser", + "team": None, + "organization": None, + "triggers": { + "groups": { + "has_or": ["cn=super_users,dc=example,dc=com", "cn=admins,dc=example,dc=com"], + } + }, + "order": 1, + } + ] + + assert result == 
expected + + def test_is_system_auditor_single_group(self): + """Test is_system_auditor with single group.""" + role_map = {"is_system_auditor": "cn=auditors,dc=example,dc=com"} + + result, _ = role_map_to_gateway_format(role_map) + + expected = [ + { + "name": "is_system_auditor - role", + "authenticator": -1, + "revoke": True, + "map_type": "role", + "role": "Platform Auditor", + "team": None, + "organization": None, + "triggers": { + "groups": { + "has_or": ["cn=auditors,dc=example,dc=com"], + } + }, + "order": 1, + } + ] + + assert result == expected + + def test_is_system_auditor_multiple_groups(self): + """Test is_system_auditor with multiple groups.""" + role_map = {"is_system_auditor": ["cn=auditors,dc=example,dc=com", "cn=viewers,dc=example,dc=com"]} + + result, _ = role_map_to_gateway_format(role_map) + + expected = [ + { + "name": "is_system_auditor - role", + "authenticator": -1, + "revoke": True, + "map_type": "role", + "role": "Platform Auditor", + "team": None, + "organization": None, + "triggers": { + "groups": { + "has_or": ["cn=auditors,dc=example,dc=com", "cn=viewers,dc=example,dc=com"], + } + }, + "order": 1, + } + ] + + assert result == expected + + def test_multiple_roles(self): + """Test multiple role mappings.""" + role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"} + + result, _ = role_map_to_gateway_format(role_map) + + expected = [ + { + "name": "is_superuser - role", + "authenticator": -1, + "revoke": True, + "map_type": "is_superuser", + "team": None, + "organization": None, + "triggers": { + "groups": { + "has_or": ["cn=super_users,dc=example,dc=com"], + } + }, + "order": 1, + }, + { + "name": "is_system_auditor - role", + "authenticator": -1, + "revoke": True, + "map_type": "role", + "role": "Platform Auditor", + "team": None, + "organization": None, + "triggers": { + "groups": { + "has_or": ["cn=auditors,dc=example,dc=com"], + } + }, + "order": 2, + }, + ] + + assert result == expected + + def test_unsupported_role_flag(self): + """Test that unsupported role flags are ignored.""" + role_map = { + "is_superuser": "cn=super_users,dc=example,dc=com", + "is_staff": "cn=staff,dc=example,dc=com", # Unsupported flag + "is_system_auditor": "cn=auditors,dc=example,dc=com", + } + + result, _ = role_map_to_gateway_format(role_map) + + # Should only have 2 mappers (is_superuser and is_system_auditor) + assert len(result) == 2 + assert result[0]["map_type"] == "is_superuser" + assert result[1]["map_type"] == "role" + assert result[1]["role"] == "Platform Auditor" + + def test_order_increments_correctly(self): + """Test that order values increment correctly.""" + role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"} + + result, _ = role_map_to_gateway_format(role_map) + + assert len(result) == 2 + assert result[0]["order"] == 1 + assert result[1]["order"] == 2 + + def test_start_order_parameter(self): + """Test that start_order parameter is respected.""" + role_map = {"is_superuser": "cn=super_users,dc=example,dc=com"} + + result, next_order = role_map_to_gateway_format(role_map, start_order=5) + + assert result[0]["order"] == 5 + assert next_order == 6 + + def test_string_to_list_conversion(self): + """Test that string groups are converted to lists.""" + role_map = {"is_superuser": "single-group"} + + result, _ = role_map_to_gateway_format(role_map) + + # Should convert string to list for has_or + assert 
result[0]["triggers"]["groups"]["has_or"] == ["single-group"] + + def test_triggers_format_validation(self): + """Test that trigger formats match Gateway specification.""" + role_map = {"is_superuser": ["group1", "group2"]} + + result, _ = role_map_to_gateway_format(role_map) + + # Validate that triggers follow Gateway format + triggers = result[0]["triggers"] + assert "groups" in triggers + assert "has_or" in triggers["groups"] + assert isinstance(triggers["groups"]["has_or"], list) + assert triggers["groups"]["has_or"] == ["group1", "group2"] + + def test_ldap_dn_format(self): + """Test with realistic LDAP DN format.""" + role_map = { + "is_superuser": "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com", + "is_system_auditor": "cn=awx_auditors,OU=administration groups,DC=contoso,DC=com", + } + + result, _ = role_map_to_gateway_format(role_map) + + assert len(result) == 2 + assert result[0]["triggers"]["groups"]["has_or"] == ["cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"] + assert result[1]["triggers"]["groups"]["has_or"] == ["cn=awx_auditors,OU=administration groups,DC=contoso,DC=com"] + + def test_gateway_format_compliance(self): + """Test that all results comply with Gateway role mapping format.""" + role_map = {"is_superuser": "cn=super_users,dc=example,dc=com", "is_system_auditor": "cn=auditors,dc=example,dc=com"} + + result, _ = role_map_to_gateway_format(role_map) + + for mapping in result: + # Required fields per Gateway spec + assert "name" in mapping + assert "authenticator" in mapping + assert "map_type" in mapping + assert "organization" in mapping + assert "team" in mapping + assert "triggers" in mapping + assert "order" in mapping + assert "revoke" in mapping + + # Field types + assert isinstance(mapping["name"], str) + assert isinstance(mapping["authenticator"], int) + assert mapping["map_type"] in ["is_superuser", "role"] + assert mapping["organization"] is None + assert mapping["team"] is None + assert isinstance(mapping["triggers"], dict) + assert isinstance(mapping["order"], int) + assert isinstance(mapping["revoke"], bool) + + # Specific field validations based on map_type + if mapping["map_type"] == "is_superuser": + assert "role" not in mapping + elif mapping["map_type"] == "role": + assert "role" in mapping + assert isinstance(mapping["role"], str) + assert mapping["role"] == "Platform Auditor" + + +# Parametrized tests for role mappings +@pytest.mark.parametrize( + "role_map,expected_length", + [ + (None, 0), + ({}, 0), + ({"is_superuser": "group1"}, 1), + ({"is_system_auditor": "group1"}, 1), + ({"is_superuser": "group1", "is_system_auditor": "group2"}, 2), + ({"is_staff": "group1"}, 0), # Unsupported flag + ({"is_superuser": "group1", "is_staff": "group2", "is_system_auditor": "group3"}, 2), # Mixed supported/unsupported + ], +) +def test_role_map_result_lengths(role_map, expected_length): + """Test that role_map_to_gateway_format returns expected number of mappings.""" + result, _ = role_map_to_gateway_format(role_map) + assert len(result) == expected_length + + +# Edge case tests +def test_empty_groups_handling(): + """Test handling of empty group lists.""" + role_map = {"is_superuser": []} + + result, _ = role_map_to_gateway_format(role_map) + + assert len(result) == 1 + assert result[0]["triggers"]["groups"]["has_or"] == [] + + +def test_mixed_group_types(): + """Test handling of mixed group types (string and list).""" + role_map = {"is_superuser": "single-group", "is_system_auditor": ["group1", "group2"]} + + result, _ = 
role_map_to_gateway_format(role_map) + + assert len(result) == 2 + assert result[0]["triggers"]["groups"]["has_or"] == ["single-group"] + assert result[1]["triggers"]["groups"]["has_or"] == ["group1", "group2"] + + +def test_realistic_ldap_user_flags_by_group(): + """Test with realistic LDAP USER_FLAGS_BY_GROUP data.""" + role_map = {"is_superuser": "cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"} + + result, _ = role_map_to_gateway_format(role_map) + + # This is exactly the use case from the user's example + assert len(result) == 1 + assert result[0]["map_type"] == "is_superuser" + assert result[0]["triggers"]["groups"]["has_or"] == ["cn=awx_super_users,OU=administration groups,DC=contoso,DC=com"] + assert result[0]["revoke"] is True + assert result[0]["team"] is None + assert result[0]["organization"] is None + + +class TestLdapGroupAllowToGatewayFormat: + """Tests for ldap_group_allow_to_gateway_format function.""" + + def test_none_input_with_empty_result(self): + """Test that None input with empty result returns unchanged result.""" + result = [] + output_result, next_order = ldap_group_allow_to_gateway_format(result, None, deny=False) + + assert output_result == [] + assert next_order == 1 # Default start_order + + def test_none_input_with_existing_result(self): + """Test that None input with existing mappers returns unchanged result.""" + result = [{"existing": "mapper"}] + output_result, next_order = ldap_group_allow_to_gateway_format(result, None, deny=False, start_order=5) + + assert output_result == [{"existing": "mapper"}] + assert next_order == 5 # start_order unchanged + + def test_require_group_mapping(self): + """Test LDAP REQUIRE_GROUP mapping (deny=False).""" + result = [] + ldap_group = "cn=allowed_users,dc=example,dc=com" + + output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=1) + + expected = [ + { + "name": "LDAP-RequireGroup", + "authenticator": -1, + "map_type": "allow", + "revoke": False, + "triggers": {"groups": {"has_and": ["cn=allowed_users,dc=example,dc=com"]}}, + "order": 1, + } + ] + + assert output_result == expected + assert next_order == 2 + + def test_deny_group_mapping(self): + """Test LDAP DENY_GROUP mapping (deny=True).""" + result = [] + ldap_group = "cn=blocked_users,dc=example,dc=com" + + output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=True, start_order=1) + + expected = [ + { + "name": "LDAP-DenyGroup", + "authenticator": -1, + "map_type": "allow", + "revoke": True, + "triggers": {"groups": {"has_or": ["cn=blocked_users,dc=example,dc=com"]}}, + "order": 1, + } + ] + + assert output_result == expected + assert next_order == 2 + + def test_appending_to_existing_result(self): + """Test appending to existing result list.""" + existing_mapper = { + "name": "existing-mapper", + "authenticator": -1, + "map_type": "role", + "order": 1, + } + result = [existing_mapper] + ldap_group = "cn=new_group,dc=example,dc=com" + + output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=2) + + assert len(output_result) == 2 + assert output_result[0] == existing_mapper # Original mapper unchanged + assert output_result[1]["name"] == "LDAP-RequireGroup" + assert output_result[1]["order"] == 2 + assert next_order == 3 + + def test_custom_start_order(self): + """Test that custom start_order is respected.""" + result = [] + ldap_group = "cn=test_group,dc=example,dc=com" + + output_result, next_order = 
ldap_group_allow_to_gateway_format(result, ldap_group, deny=False, start_order=10) + + assert output_result[0]["order"] == 10 + assert next_order == 11 + + def test_require_vs_deny_trigger_differences(self): + """Test the difference between require and deny group triggers.""" + ldap_group = "cn=test_group,dc=example,dc=com" + + # Test require group (deny=False) + require_result, _ = ldap_group_allow_to_gateway_format([], ldap_group, deny=False) + + # Test deny group (deny=True) + deny_result, _ = ldap_group_allow_to_gateway_format([], ldap_group, deny=True) + + # Require group should use has_and + assert require_result[0]["triggers"]["groups"]["has_and"] == ["cn=test_group,dc=example,dc=com"] + assert require_result[0]["revoke"] is False + assert require_result[0]["name"] == "LDAP-RequireGroup" + + # Deny group should use has_or + assert deny_result[0]["triggers"]["groups"]["has_or"] == ["cn=test_group,dc=example,dc=com"] + assert deny_result[0]["revoke"] is True + assert deny_result[0]["name"] == "LDAP-DenyGroup" + + def test_realistic_ldap_dn_format(self): + """Test with realistic LDAP DN format.""" + result = [] + + # Test with require group + require_group = "cn=awx_users,OU=application groups,DC=contoso,DC=com" + output_result, next_order = ldap_group_allow_to_gateway_format(result, require_group, deny=False, start_order=1) + + assert len(output_result) == 1 + assert output_result[0]["triggers"]["groups"]["has_and"] == ["cn=awx_users,OU=application groups,DC=contoso,DC=com"] + assert output_result[0]["name"] == "LDAP-RequireGroup" + assert next_order == 2 + + def test_multiple_sequential_calls(self): + """Test multiple sequential calls to build complex allow mappers.""" + result = [] + + # Add deny group first + result, next_order = ldap_group_allow_to_gateway_format(result, "cn=blocked,dc=example,dc=com", deny=True, start_order=1) + + # Add require group second + result, next_order = ldap_group_allow_to_gateway_format(result, "cn=allowed,dc=example,dc=com", deny=False, start_order=next_order) + + assert len(result) == 2 + + # First mapper should be deny group + assert result[0]["name"] == "LDAP-DenyGroup" + assert result[0]["revoke"] is True + assert result[0]["triggers"]["groups"]["has_or"] == ["cn=blocked,dc=example,dc=com"] + assert result[0]["order"] == 1 + + # Second mapper should be require group + assert result[1]["name"] == "LDAP-RequireGroup" + assert result[1]["revoke"] is False + assert result[1]["triggers"]["groups"]["has_and"] == ["cn=allowed,dc=example,dc=com"] + assert result[1]["order"] == 2 + + assert next_order == 3 + + def test_gateway_format_compliance(self): + """Test that all results comply with Gateway allow mapping format.""" + result = [] + + # Test both deny and require groups + result, _ = ldap_group_allow_to_gateway_format(result, "cn=denied,dc=example,dc=com", deny=True, start_order=1) + result, _ = ldap_group_allow_to_gateway_format(result, "cn=required,dc=example,dc=com", deny=False, start_order=2) + + for mapping in result: + # Required fields per Gateway spec + assert "name" in mapping + assert "authenticator" in mapping + assert "map_type" in mapping + assert "triggers" in mapping + assert "order" in mapping + assert "revoke" in mapping + + # Field types + assert isinstance(mapping["name"], str) + assert isinstance(mapping["authenticator"], int) + assert mapping["map_type"] == "allow" + assert isinstance(mapping["triggers"], dict) + assert isinstance(mapping["order"], int) + assert isinstance(mapping["revoke"], bool) + + # Trigger format validation + 
assert "groups" in mapping["triggers"] + groups_trigger = mapping["triggers"]["groups"] + + # Should have either has_and or has_or, but not both + has_and = "has_and" in groups_trigger + has_or = "has_or" in groups_trigger + assert has_and != has_or # XOR - exactly one should be true + + if has_and: + assert isinstance(groups_trigger["has_and"], list) + assert len(groups_trigger["has_and"]) == 1 + if has_or: + assert isinstance(groups_trigger["has_or"], list) + assert len(groups_trigger["has_or"]) == 1 + + def test_original_result_not_modified_when_none(self): + """Test that original result list is not modified when ldap_group is None.""" + original_result = [{"original": "mapper"}] + result_copy = original_result.copy() + + output_result, _ = ldap_group_allow_to_gateway_format(original_result, None, deny=False) + + # Original list should be unchanged + assert original_result == result_copy + # Output should be the same reference + assert output_result is original_result + + def test_empty_string_group(self): + """Test handling of empty string group.""" + result = [] + + output_result, next_order = ldap_group_allow_to_gateway_format(result, "", deny=False, start_order=1) + + # Should still create a mapper even with empty string + assert len(output_result) == 1 + assert output_result[0]["triggers"]["groups"]["has_and"] == [""] + assert next_order == 2 + + +# Parametrized tests for ldap_group_allow_to_gateway_format +@pytest.mark.parametrize( + "ldap_group,deny,expected_name,expected_revoke,expected_trigger_type", + [ + ("cn=test,dc=example,dc=com", True, "LDAP-DenyGroup", True, "has_or"), + ("cn=test,dc=example,dc=com", False, "LDAP-RequireGroup", False, "has_and"), + ("cn=users,ou=groups,dc=company,dc=com", True, "LDAP-DenyGroup", True, "has_or"), + ("cn=users,ou=groups,dc=company,dc=com", False, "LDAP-RequireGroup", False, "has_and"), + ], +) +def test_ldap_group_parametrized(ldap_group, deny, expected_name, expected_revoke, expected_trigger_type): + """Parametrized test for various LDAP group configurations.""" + result = [] + + output_result, next_order = ldap_group_allow_to_gateway_format(result, ldap_group, deny=deny, start_order=1) + + assert len(output_result) == 1 + mapper = output_result[0] + + assert mapper["name"] == expected_name + assert mapper["revoke"] == expected_revoke + assert expected_trigger_type in mapper["triggers"]["groups"] + assert mapper["triggers"]["groups"][expected_trigger_type] == [ldap_group] + assert next_order == 2 + + +def test_realistic_awx_ldap_migration_scenario(): + """Test realistic scenario from AWX LDAP migration.""" + result = [] + + # Simulate AWX LDAP configuration with both REQUIRE_GROUP and DENY_GROUP + deny_group = "cn=blocked_users,OU=blocked groups,DC=contoso,DC=com" + require_group = "cn=awx_users,OU=application groups,DC=contoso,DC=com" + + # Add deny group first (as in the migrator) + result, next_order = ldap_group_allow_to_gateway_format(result, deny_group, deny=True, start_order=1) + + # Add require group second + result, next_order = ldap_group_allow_to_gateway_format(result, require_group, deny=False, start_order=next_order) + + # Should have 2 allow mappers + assert len(result) == 2 + + # Verify deny group mapper + deny_mapper = result[0] + assert deny_mapper["name"] == "LDAP-DenyGroup" + assert deny_mapper["map_type"] == "allow" + assert deny_mapper["revoke"] is True + assert deny_mapper["triggers"]["groups"]["has_or"] == [deny_group] + assert deny_mapper["order"] == 1 + + # Verify require group mapper + require_mapper = result[1] 
+    assert require_mapper["name"] == "LDAP-RequireGroup"
+    assert require_mapper["map_type"] == "allow"
+    assert require_mapper["revoke"] is False
+    assert require_mapper["triggers"]["groups"]["has_and"] == [require_group]
+    assert require_mapper["order"] == 2
+
+    assert next_order == 3
diff --git a/awx/main/utils/gateway_client.py b/awx/main/utils/gateway_client.py
new file mode 100644
index 0000000000..dad1641553
--- /dev/null
+++ b/awx/main/utils/gateway_client.py
@@ -0,0 +1,511 @@
+"""
+Gateway API client for AAP Gateway interactions.
+
+This module provides a client class to interact with the AAP Gateway REST API,
+specifically for creating authenticators and mapping configurations.
+"""
+
+import requests
+import logging
+from typing import Dict, List, Optional, Any
+from urllib.parse import urljoin
+
+
+logger = logging.getLogger(__name__)
+
+
+class GatewayAPIError(Exception):
+    """Exception raised for Gateway API errors."""
+
+    def __init__(self, message: str, status_code: Optional[int] = None, response_data: Optional[Dict] = None):
+        self.message = message
+        self.status_code = status_code
+        self.response_data = response_data
+        super().__init__(self.message)
+
+
+class GatewayClient:
+    """Client for AAP Gateway REST API interactions."""
+
+    def __init__(self, base_url: str, username: str, password: str, skip_verify: bool = False, skip_session_init: bool = False, command=None):
+        """Initialize Gateway client.
+
+        Args:
+            base_url: Base URL of the AAP Gateway instance
+            username: Username for authentication
+            password: Password for authentication
+            skip_verify: Skip SSL certificate verification
+            skip_session_init: Skip initializing the session. Set this to True only from a subclass that manages its own session.
+            command: The command object. This is used to write output to the console.
+        """
+        self.base_url = base_url.rstrip('/')
+        self.username = username
+        self.password = password
+        self.skip_verify = skip_verify
+        self.command = command
+        self.session_was_not_initialized = skip_session_init
+
+        # Initialize session
+        if not skip_session_init:
+            self.session = requests.Session()
+
+            # Configure SSL verification
+            if skip_verify:
+                self.session.verify = False
+                # Disable SSL warnings when verification is disabled
+                import urllib3
+
+                urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+
+            # Set default headers
+            self.session.headers.update(
+                {
+                    'User-Agent': 'AWX-Gateway-Migration-Client/1.0',
+                    'Accept': 'application/json',
+                    'Content-Type': 'application/json',
+                }
+            )
+        else:
+            self.session = None
+
+        # Authentication state
+        self._authenticated = False
+
+    def authenticate(self) -> bool:
+        """Authenticate with the Gateway using HTTP Basic Authentication.
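+
+        Illustrative usage (the URL and credentials are placeholders):
+            client = GatewayClient("https://gateway.example.com", "admin", "secret")
+            client.authenticate()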
+
+        Returns:
+            bool: True if authentication successful, False otherwise
+
+        Raises:
+            GatewayAPIError: If authentication fails
+        """
+        try:
+            # Set up HTTP Basic Authentication
+            from requests.auth import HTTPBasicAuth
+
+            self.session.auth = HTTPBasicAuth(self.username, self.password)
+
+            # Test authentication by making a simple request to the API
+            test_url = urljoin(self.base_url, '/api/gateway/v1/authenticators/')
+
+            response = self.session.get(test_url)
+
+            if response.status_code in [200, 403]:  # 403 means the credentials were accepted but the account may lack permissions
+                self._authenticated = True
+                logger.info("Successfully authenticated with Gateway using Basic Auth")
+                return True
+            else:
+                error_msg = f"Authentication test failed with status {response.status_code}"
+                error_data = None
+                try:
+                    error_data = response.json()
+                    error_msg += f": {error_data}"
+                except requests.exceptions.JSONDecodeError:
+                    error_msg += f": {response.text}"
+
+                raise GatewayAPIError(error_msg, response.status_code, error_data)
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Network error during authentication: {str(e)}")
+
+    def _ensure_authenticated(self):
+        """Ensure the client is authenticated, authenticate if needed."""
+        if not self._authenticated:
+            self.authenticate()
+
+    def _make_request(self, method: str, endpoint: str, data: Optional[Dict] = None, params: Optional[Dict] = None) -> requests.Response:
+        """Make an authenticated request to the Gateway API.
+
+        Args:
+            method: HTTP method (GET, POST, PUT, DELETE, etc.)
+            endpoint: API endpoint (without base URL)
+            data: JSON data to send in request body
+            params: Query parameters
+
+        Returns:
+            requests.Response: The response object
+
+        Raises:
+            GatewayAPIError: If request fails
+        """
+        self._ensure_authenticated()
+
+        url = urljoin(self.base_url, endpoint.lstrip('/'))
+
+        try:
+            response = self.session.request(method=method.upper(), url=url, json=data, params=params)
+
+            # Log request details
+            logger.debug(f"{method.upper()} {url} - Status: {response.status_code}")
+
+            return response
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Request failed: {str(e)}")
+
+    def create_authenticator(self, authenticator_config: Dict[str, Any]) -> Dict[str, Any]:
+        """Create a new authenticator in Gateway.
+
+        Args:
+            authenticator_config: Authenticator configuration dictionary
+
+        Returns:
+            dict: Created authenticator data
+
+        Raises:
+            GatewayAPIError: If creation fails
+        """
+        endpoint = '/api/gateway/v1/authenticators/'
+
+        try:
+            response = self._make_request('POST', endpoint, data=authenticator_config)
+
+            if response.status_code == 201:
+                result = response.json()
+                logger.info(f"Successfully created authenticator: {result.get('name', 'Unknown')}")
+                return result
+            else:
+                error_msg = f"Failed to create authenticator. Status: {response.status_code}"
+                error_data = None
+                try:
+                    error_data = response.json()
+                    error_msg += f", Error: {error_data}"
+                except requests.exceptions.JSONDecodeError:
+                    error_msg += f", Response: {response.text}"
+
+                raise GatewayAPIError(error_msg, response.status_code, error_data)
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Failed to create authenticator: {str(e)}")
+
+    def update_authenticator(self, authenticator_id: int, authenticator_config: Dict[str, Any]) -> Dict[str, Any]:
+        """Update an existing authenticator in Gateway.
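+
+        Illustrative call (the ID and field are placeholders); this issues a PATCH
+        to /api/gateway/v1/authenticators/42/:
+            client.update_authenticator(42, {"enabled": False})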
+
+        Args:
+            authenticator_id: ID of the authenticator to update
+            authenticator_config: Authenticator configuration dictionary
+
+        Returns:
+            dict: Updated authenticator data
+
+        Raises:
+            GatewayAPIError: If update fails
+        """
+        endpoint = f'/api/gateway/v1/authenticators/{authenticator_id}/'
+
+        try:
+            response = self._make_request('PATCH', endpoint, data=authenticator_config)
+
+            if response.status_code == 200:
+                result = response.json()
+                logger.info(f"Successfully updated authenticator: {result.get('name', 'Unknown')}")
+                return result
+            else:
+                error_msg = f"Failed to update authenticator. Status: {response.status_code}"
+                error_data = None
+                try:
+                    error_data = response.json()
+                    error_msg += f", Error: {error_data}"
+                except requests.exceptions.JSONDecodeError:
+                    error_msg += f", Response: {response.text}"
+
+                raise GatewayAPIError(error_msg, response.status_code, error_data)
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Failed to update authenticator: {str(e)}")
+
+    def create_authenticator_map(self, authenticator_id: int, mapper_config: Dict[str, Any]) -> Dict[str, Any]:
+        """Create a new authenticator map in Gateway.
+
+        Args:
+            authenticator_id: ID of the authenticator to create map for
+            mapper_config: Mapper configuration dictionary
+
+        Returns:
+            dict: Created mapper data
+
+        Raises:
+            GatewayAPIError: If creation fails
+        """
+        endpoint = '/api/gateway/v1/authenticator_maps/'
+
+        try:
+            response = self._make_request('POST', endpoint, data=mapper_config)
+
+            if response.status_code == 201:
+                result = response.json()
+                logger.info(f"Successfully created authenticator map: {result.get('name', 'Unknown')}")
+                return result
+            else:
+                error_msg = f"Failed to create authenticator map. Status: {response.status_code}"
+                error_data = None
+                try:
+                    error_data = response.json()
+                    error_msg += f", Error: {error_data}"
+                except requests.exceptions.JSONDecodeError:
+                    error_msg += f", Response: {response.text}"
+
+                raise GatewayAPIError(error_msg, response.status_code, error_data)
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Failed to create authenticator map: {str(e)}")
+
+    def update_authenticator_map(self, mapper_id: int, mapper_config: Dict[str, Any]) -> Dict[str, Any]:
+        """Update an existing authenticator map in Gateway.
+
+        Args:
+            mapper_id: ID of the authenticator map to update
+            mapper_config: Mapper configuration dictionary
+
+        Returns:
+            dict: Updated mapper data
+
+        Raises:
+            GatewayAPIError: If update fails
+        """
+        endpoint = f'/api/gateway/v1/authenticator_maps/{mapper_id}/'
+
+        try:
+            response = self._make_request('PATCH', endpoint, data=mapper_config)
+
+            if response.status_code == 200:
+                result = response.json()
+                logger.info(f"Successfully updated authenticator map: {result.get('name', 'Unknown')}")
+                return result
+            else:
+                error_msg = f"Failed to update authenticator map. Status: {response.status_code}"
+                error_data = None
+                try:
+                    error_data = response.json()
+                    error_msg += f", Error: {error_data}"
+                except requests.exceptions.JSONDecodeError:
+                    error_msg += f", Response: {response.text}"
+
+                raise GatewayAPIError(error_msg, response.status_code, error_data)
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Failed to update authenticator map: {str(e)}")
+
+    def get_authenticators(self, params: Optional[Dict] = None) -> List[Dict[str, Any]]:
+        """Get list of authenticators from Gateway.
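+
+        Illustrative call (the slug value is a placeholder; this mirrors how
+        get_authenticator_by_slug below filters):
+            matches = client.get_authenticators(params={'slug': 'my-ldap'})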
+ + Args: + params: Optional query parameters + + Returns: + list: List of authenticator configurations + + Raises: + GatewayAPIError: If request fails + """ + endpoint = '/api/gateway/v1/authenticators/' + + try: + response = self._make_request('GET', endpoint, params=params) + + if response.status_code == 200: + result = response.json() + # Handle paginated response + if isinstance(result, dict) and 'results' in result: + return result['results'] + return result + else: + error_msg = f"Failed to get authenticators. Status: {response.status_code}" + raise GatewayAPIError(error_msg, response.status_code) + + except requests.RequestException as e: + raise GatewayAPIError(f"Failed to get authenticators: {str(e)}") + + def get_authenticator_by_slug(self, slug: str) -> Optional[Dict[str, Any]]: + """Get a specific authenticator by slug. + + Args: + slug: The authenticator slug to search for + + Returns: + dict: The authenticator data if found, None otherwise + + Raises: + GatewayAPIError: If request fails + """ + try: + # Use query parameter to filter by slug - more efficient than getting all + authenticators = self.get_authenticators(params={'slug': slug}) + + # Return the first match (slugs should be unique) + if authenticators: + return authenticators[0] + return None + + except GatewayAPIError as e: + # Re-raise Gateway API errors + raise e + except Exception as e: + raise GatewayAPIError(f"Failed to get authenticator by slug: {str(e)}") + + def get_authenticator_maps(self, authenticator_id: int) -> List[Dict[str, Any]]: + """Get list of maps for a specific authenticator. + + Args: + authenticator_id: ID of the authenticator + + Returns: + list: List of authenticator maps + + Raises: + GatewayAPIError: If request fails + """ + endpoint = f'/api/gateway/v1/authenticators/{authenticator_id}/authenticator_maps/' + + try: + response = self._make_request('GET', endpoint) + + if response.status_code == 200: + result = response.json() + # Handle paginated response + if isinstance(result, dict) and 'results' in result: + return result['results'] + return result + else: + error_msg = f"Failed to get authenticator maps. Status: {response.status_code}" + raise GatewayAPIError(error_msg, response.status_code) + + except requests.RequestException as e: + raise GatewayAPIError(f"Failed to get authenticator maps: {str(e)}") + + def create_github_authenticator( + self, name: str, client_id: str, client_secret: str, enabled: bool = True, create_objects: bool = False, remove_users: bool = False + ) -> Dict[str, Any]: + """Create a GitHub authenticator with the specified configuration. + + Args: + name: Name for the authenticator + client_id: GitHub OAuth App Client ID + client_secret: GitHub OAuth App Client Secret + enabled: Whether authenticator should be enabled + create_objects: Whether to create users/orgs/teams automatically + remove_users: Whether to remove users when they lose access + + Returns: + dict: Created authenticator data + """ + config = { + "name": name, + "type": "ansible_base.authentication.authenticator_plugins.github", + "enabled": enabled, + "create_objects": create_objects, + "remove_users": remove_users, + "configuration": {"KEY": client_id, "SECRET": client_secret}, + } + + return self.create_authenticator(config) + + def update_gateway_setting(self, setting_name: str, setting_value: Any) -> Dict[str, Any]: + """Update a Gateway setting via the settings API. 
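+
+        Illustrative call (the setting name and value are placeholders); this sends
+        PUT /api/gateway/v1/settings/all/ with body {"SOME_SETTING": "value"}:
+            client.update_gateway_setting("SOME_SETTING", "value")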
+
+        Args:
+            setting_name: Name of the setting to update
+            setting_value: Value to set for the setting
+
+        Returns:
+            dict: The parsed response body on success; if the response has no usable body, the original payload is returned.
+
+        Raises:
+            GatewayAPIError: If the update fails (any response other than 200 or 204).
+        """
+        endpoint = '/api/gateway/v1/settings/all/'
+
+        # Create the JSON payload with the setting name and value
+        payload = {setting_name: setting_value}
+
+        try:
+            response = self._make_request('PUT', endpoint, data=payload)
+
+            if response.status_code in [200, 204]:
+                logger.info(f"Successfully updated Gateway setting: {setting_name}")
+                # Return the response data if available, otherwise return the payload
+                if response.content:
+                    try:
+                        return response.json()
+                    except requests.exceptions.JSONDecodeError:
+                        return payload
+                return payload
+            else:
+                error_msg = f"Failed to update Gateway setting. Status: {response.status_code}"
+                error_data = None
+                try:
+                    error_data = response.json()
+                    error_msg += f", Error: {error_data}"
+                except requests.exceptions.JSONDecodeError:
+                    error_msg += f", Response: {response.text}"
+
+                raise GatewayAPIError(error_msg, response.status_code, error_data)
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Failed to update Gateway setting: {str(e)}")
+
+    def get_gateway_setting(self, setting_name: str) -> Any:
+        """Get a Gateway setting value via the settings API.
+
+        Args:
+            setting_name: Name of the setting to retrieve
+
+        Returns:
+            Any: The value of the setting, or None if not found
+
+        Raises:
+            GatewayAPIError: If request fails
+        """
+        endpoint = '/api/gateway/v1/settings/all/'
+
+        try:
+            response = self._make_request('GET', endpoint)
+
+            if response.status_code == 200:
+                settings_data = response.json()
+                logger.info("Successfully retrieved Gateway settings")
+
+                # Return the specific setting value or None if not found
+                return settings_data.get(setting_name)
+            else:
+                error_msg = f"Failed to get Gateway settings from '{endpoint}' for '{setting_name}'. Status: {response.status_code}"
+                error_data = response.text
+                try:
+                    error_data = response.json()
+                    error_msg += f", Error: {error_data}"
+                except requests.exceptions.JSONDecodeError:
+                    error_msg += f", Response: {response.text}"
+
+                raise GatewayAPIError(error_msg, response.status_code, error_data)
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Failed to get Gateway settings from '{endpoint}' for '{setting_name}': {str(e)}")
+
+    def get_base_url(self) -> str:
+        """Get the base URL of the Gateway instance.
+
+        Returns:
+            str: The base URL of the Gateway instance
+        """
+        return self.base_url
+
+    def close(self):
+        """Close the session and clean up resources."""
+        if self.session:
+            self.session.close()
+
+    def __enter__(self):
+        """Context manager entry."""
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Context manager exit."""
+        self.close()
+
+    def _write_output(self, message, style=None):
+        """Write output message if command is available."""
+        if self.command:
+            if style == 'success':
+                self.command.stdout.write(self.command.style.SUCCESS(message))
+            elif style == 'warning':
+                self.command.stdout.write(self.command.style.WARNING(message))
+            elif style == 'error':
+                self.command.stdout.write(self.command.style.ERROR(message))
+            else:
+                self.command.stdout.write(message)
diff --git a/awx/main/utils/gateway_client_svc_token.py b/awx/main/utils/gateway_client_svc_token.py
new file mode 100644
index 0000000000..1a7cd8fcac
--- /dev/null
+++ b/awx/main/utils/gateway_client_svc_token.py
@@ -0,0 +1,77 @@
+"""
+Gateway API client for AAP Gateway interactions with Service Tokens.
+
+This module provides a client class to interact with the AAP Gateway REST API,
+specifically for creating authenticators and mapping configurations.
+"""
+
+import requests
+import logging
+from typing import Dict, Optional
+from awx.main.utils.gateway_client import GatewayClient, GatewayAPIError
+
+
+logger = logging.getLogger(__name__)
+
+
+class GatewayClientSVCToken(GatewayClient):
+    """Client for AAP Gateway REST API interactions using JWT service tokens."""
+
+    def __init__(self, resource_api_client=None, command=None):
+        """Initialize Gateway client.
+
+        Args:
+            resource_api_client: Resource API Client for Gateway leveraging service tokens
+            command: The command object. This is used to write output to the console.
+        """
+        super().__init__(
+            base_url=resource_api_client.base_url,
+            username=resource_api_client.jwt_user_id,
+            password="required-in-GatewayClient-authenticate()-but-unused-by-GatewayClientSVCToken",
+            skip_verify=(not resource_api_client.verify_https),
+            skip_session_init=True,
+            command=command,
+        )
+        self.resource_api_client = resource_api_client
+        # Authentication state
+        self._authenticated = True
+
+    def authenticate(self) -> bool:
+        """Override the base class method to always return True.
+
+        Returns:
+            bool: True always
+        """
+        return True
+
+    def _ensure_authenticated(self):
+        """Refresh the JWT service token."""
+        self.resource_api_client.refresh_jwt()
+
+    def _make_request(self, method: str, endpoint: str, data: Optional[Dict] = None, params: Optional[Dict] = None) -> requests.Response:
+        """Make a service-token-authenticated request to the Gateway API.
+
+        Args:
+            method: HTTP method (GET, POST, PUT, DELETE, etc.)
+            endpoint: API endpoint (without base URL)
+            data: JSON data to send in request body
+            params: Query parameters
+
+        Returns:
+            requests.Response: The response object
+
+        Raises:
+            GatewayAPIError: If request fails
+        """
+        self._ensure_authenticated()
+
+        try:
+            response = self.resource_api_client._make_request(method=method, path=endpoint, data=data, params=params)
+
+            # Log request details
+            logger.debug(f"{method.upper()} {self.base_url}{endpoint} - Status: {response.status_code}")
+
+            return response
+
+        except requests.RequestException as e:
+            raise GatewayAPIError(f"Request failed: {str(e)}")
diff --git a/awx/main/utils/gateway_mapping.py b/awx/main/utils/gateway_mapping.py
new file mode 100644
index 0000000000..0d6bd2bd4e
--- /dev/null
+++ b/awx/main/utils/gateway_mapping.py
@@ -0,0 +1,361 @@
+"""
+Gateway mapping conversion utilities.
+ +This module contains functions to convert AWX authentication mappings +(organization and team mappings) to AAP Gateway format. +""" + +import re +from typing import cast, Any, Literal, Pattern, Union + +email_regex = re.compile(r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$") + + +def truncate_name(name: str, max_length: int = 128) -> str: + """Truncate a name to the specified maximum length.""" + if len(name) <= max_length: + return name + return name[:max_length] + + +def build_truncated_name(org_name: str, entity_name: str, trigger_name: str, max_component_length: int = 40) -> str: + """Build a name by truncating each component individually and joining with ' - '.""" + truncated_org = truncate_name(org_name, max_component_length) + truncated_entity = truncate_name(entity_name, max_component_length) + truncated_trigger = truncate_name(trigger_name, max_component_length) + return f"{truncated_org} - {truncated_entity} {truncated_trigger}" + + +def pattern_to_slash_format(pattern: Any) -> str: + """Convert a re.Pattern object to /pattern/flags format.""" + if not isinstance(pattern, re.Pattern): + return str(pattern) + + flags_str = "" + if pattern.flags & re.IGNORECASE: + flags_str += "i" + if pattern.flags & re.MULTILINE: + flags_str += "m" + if pattern.flags & re.DOTALL: + flags_str += "s" + if pattern.flags & re.VERBOSE: + flags_str += "x" + + return f"/{pattern.pattern}/{flags_str}" + + +def process_ldap_user_list( + groups: Union[None, str, bool, list[Union[None, str, bool]]], +) -> list[dict[str, Any]]: + if not isinstance(groups, list): + groups = [groups] + + # Type cast to help mypy understand the type after conversion + groups_list: list[Union[str, bool, None]] = cast(list[Union[str, bool, None]], groups) + + triggers = [] + if groups_list == [None]: + # A None value means we shouldn't update whatever this is based on LDAP values + pass + elif groups_list == []: + # Empty list means no triggers should be created + pass + elif groups_list == [True]: + triggers.append({"name": "Always Allow", "trigger": {"always": {}}}) + elif groups_list == [False]: + triggers.append( + { + "name": "Never Allow", + "trigger": {"never": {}}, + } + ) + else: + triggers.append({"name": "Match User Groups", "trigger": {"groups": {"has_or": groups_list}}}) + return triggers + + +def process_sso_user_list( + users: Union[str, bool, Pattern[str], list[Union[str, bool, Pattern[str]]]], email_attr: str = 'email', username_attr: str = 'username' +) -> dict[str, Union[str, dict[str, dict[str, Union[str, list[str]]]]]]: + """Process SSO user list and return a single consolidated trigger instead of multiple separate ones.""" + if not isinstance(users, list): + users = [users] + + # Type cast to help mypy understand the type after conversion + user_list: list[Union[str, bool, Pattern[str]]] = cast(list[Union[str, bool, Pattern[str]]], users) + + if user_list == ["false"] or user_list == [False]: + return {"name": "Never Allow", "trigger": {"never": {}}} + elif user_list == ["true"] or user_list == [True]: + return {"name": "Always Allow", "trigger": {"always": {}}} + else: + # Group users by type + emails = [] + usernames = [] + regexes_username = [] + regexes_email = [] + + for user_or_email in user_list: + if isinstance(user_or_email, re.Pattern): + pattern_str = pattern_to_slash_format(user_or_email) + regexes_username.append(pattern_str) + regexes_email.append(pattern_str) + elif isinstance(user_or_email, str): + if email_regex.match(user_or_email): + emails.append(user_or_email) + else: + 
usernames.append(user_or_email) + else: + # Convert other objects to string and treat as both + str_val = str(user_or_email) + usernames.append(str_val) + emails.append(str_val) + + # Build consolidated trigger + attributes = {"join_condition": "or"} + + if emails: + if len(emails) == 1: + attributes[email_attr] = {"equals": emails[0]} + else: + attributes[email_attr] = {"in": emails} + + if usernames: + if len(usernames) == 1: + attributes[username_attr] = {"equals": usernames[0]} + else: + attributes[username_attr] = {"in": usernames} + + # For regex patterns, we need to create separate matches conditions since there's no matches_or + for i, pattern in enumerate(regexes_username): + pattern_key = f"{username_attr}_pattern_{i}" if len(regexes_username) > 1 else username_attr + if pattern_key not in attributes: + attributes[pattern_key] = {} + attributes[pattern_key]["matches"] = pattern + + for i, pattern in enumerate(regexes_email): + pattern_key = f"{email_attr}_pattern_{i}" if len(regexes_email) > 1 else email_attr + if pattern_key not in attributes: + attributes[pattern_key] = {} + attributes[pattern_key]["matches"] = pattern + + # Create a deterministic, concise name based on trigger types and counts + name_parts = [] + if emails: + name_parts.append(f"E:{len(emails)}") + if usernames: + name_parts.append(f"U:{len(usernames)}") + if regexes_username: + name_parts.append(f"UP:{len(regexes_username)}") + if regexes_email: + name_parts.append(f"EP:{len(regexes_email)}") + + name = " ".join(name_parts) if name_parts else "Mixed Rules" + + return {"name": name, "trigger": {"attributes": attributes}} + + +def team_map_to_gateway_format(team_map, start_order=1, email_attr: str = 'email', username_attr: str = 'username', auth_type: Literal['sso', 'ldap'] = 'sso'): + """Convert AWX team mapping to Gateway authenticator format. 
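+
+    Illustrative input (the values are placeholder DNs), for auth_type='ldap':
+        {"DevTeam": {"organization": "Eng", "users": "cn=devs,dc=example,dc=com", "remove": True}}
+    yields one mapper with map_type "team", role "Team Member", revoke=True and a
+    groups has_or trigger on the given DN.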
+
+    Args:
+        team_map: The SOCIAL_AUTH_*_TEAM_MAP setting value
+        start_order: Starting order value for the mappers
+        email_attr: The attribute representing the email
+        username_attr: The attribute representing the username
+        auth_type: Whether the mapping comes from an 'sso' or 'ldap' source
+
+    Returns:
+        tuple: (List of Gateway-compatible team mappers, next_order)
+    """
+    if team_map is None:
+        return [], start_order
+
+    result = []
+    order = start_order
+
+    for team_name in team_map.keys():
+        team = team_map[team_name]
+        # TODO: Confirm that if we have None with remove we still won't remove
+        if team['users'] is None:
+            continue
+
+        # Get the organization name
+        organization_name = team.get('organization', 'Unknown')
+
+        # Check for remove flag
+        revoke = team.get('remove', False)
+
+        if auth_type == 'ldap':
+            triggers = process_ldap_user_list(team['users'])
+            for trigger in triggers:
+                result.append(
+                    {
+                        "name": build_truncated_name(organization_name, team_name, trigger['name']),
+                        "map_type": "team",
+                        "order": order,
+                        "authenticator": -1,  # Will be updated when creating the mapper
+                        "triggers": trigger['trigger'],
+                        "organization": organization_name,
+                        "team": team_name,
+                        "role": "Team Member",  # Gateway team member role
+                        "revoke": revoke,
+                    }
+                )
+                order += 1
+
+        if auth_type == 'sso':
+            trigger = process_sso_user_list(team['users'], email_attr=email_attr, username_attr=username_attr)
+            result.append(
+                {
+                    "name": build_truncated_name(organization_name, team_name, trigger['name']),
+                    "map_type": "team",
+                    "order": order,
+                    "authenticator": -1,  # Will be updated when creating the mapper
+                    "triggers": trigger['trigger'],
+                    "organization": organization_name,
+                    "team": team_name,
+                    "role": "Team Member",  # Gateway team member role
+                    "revoke": revoke,
+                }
+            )
+            order += 1
+
+    return result, order
+
+
+def org_map_to_gateway_format(org_map, start_order=1, email_attr: str = 'email', username_attr: str = 'username', auth_type: Literal['sso', 'ldap'] = 'sso'):
+    """Convert AWX organization mapping to Gateway authenticator format.
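+
+    Illustrative input (the AAP-51531 case covered by the unit tests), for auth_type='ldap':
+        {"Networking": {"admins": "cn=networkadmins,ou=groups,dc=example,dc=com",
+                        "users": True, "remove_admins": True, "remove_users": True}}
+    yields two mappers: an Admins "Match User Groups" mapper and a Users
+    "Always Allow" mapper, both with revoke=True.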
+
+    Args:
+        org_map: The SOCIAL_AUTH_*_ORGANIZATION_MAP setting value
+        start_order: Starting order value for the mappers
+        email_attr: The attribute representing the email
+        username_attr: The attribute representing the username
+        auth_type: Whether the mapping comes from an 'sso' or 'ldap' source
+
+    Returns:
+        tuple: (List of Gateway-compatible organization mappers, next_order)
+    """
+    if org_map is None:
+        return [], start_order
+
+    result = []
+    order = start_order
+
+    for organization_name in org_map.keys():
+        organization = org_map[organization_name]
+        for user_type in ['admins', 'users']:
+            if organization.get(user_type, None) is None:
+                # TODO: Confirm that if we have None with remove we still won't remove
+                continue
+
+            # Get the permission type
+            permission_type = user_type.title()
+
+            # Map AWX admin/users to appropriate Gateway organization roles
+            role = "Organization Admin" if user_type == "admins" else "Organization Member"
+
+            # Check for remove flags
+            revoke = False
+            if organization.get(f"remove_{user_type}"):
+                revoke = True
+
+            if auth_type == 'ldap':
+                triggers = process_ldap_user_list(organization[user_type])
+                for trigger in triggers:
+                    result.append(
+                        {
+                            "name": build_truncated_name(organization_name, permission_type, trigger['name']),
+                            "map_type": "organization",
+                            "order": order,
+                            "authenticator": -1,  # Will be updated when creating the mapper
+                            "triggers": trigger['trigger'],
+                            "organization": organization_name,
+                            "team": None,  # Organization-level mapping, not team-specific
+                            "role": role,
+                            "revoke": revoke,
+                        }
+                    )
+                    order += 1
+
+            if auth_type == 'sso':
+                trigger = process_sso_user_list(organization[user_type], email_attr=email_attr, username_attr=username_attr)
+                result.append(
+                    {
+                        "name": build_truncated_name(organization_name, permission_type, trigger['name']),
+                        "map_type": "organization",
+                        "order": order,
+                        "authenticator": -1,  # Will be updated when creating the mapper
+                        "triggers": trigger['trigger'],
+                        "organization": organization_name,
+                        "team": None,  # Organization-level mapping, not team-specific
+                        "role": role,
+                        "revoke": revoke,
+                    }
+                )
+                order += 1
+
+    return result, order
+
+
+def role_map_to_gateway_format(role_map, start_order=1):
+    """Convert AWX role mapping to Gateway authenticator format.
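+
+    Illustrative input (the group is a placeholder DN):
+        {"is_superuser": "cn=admins,dc=example,dc=com"}
+    yields one mapper with map_type "is_superuser" and trigger
+    {"groups": {"has_or": ["cn=admins,dc=example,dc=com"]}}; an "is_system_auditor"
+    entry becomes a generic role mapper for "Platform Auditor".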
+
+
+def role_map_to_gateway_format(role_map, start_order=1):
+    """Convert AWX role mapping to Gateway authenticator format.
+
+    Args:
+        role_map: An LDAP or SAML role mapping
+        start_order: Starting order value for the mappers
+
+    Returns:
+        tuple: (List of Gateway-compatible role mappers, next_order)
+    """
+    if role_map is None:
+        return [], start_order
+
+    result = []
+    order = start_order
+
+    for flag in role_map:
+        groups = role_map[flag]
+        if isinstance(groups, str):
+            groups = [groups]
+
+        if flag == 'is_superuser':
+            # Gateway has a special map_type for superusers
+            result.append(
+                {
+                    "name": f"{flag} - role",
+                    "authenticator": -1,
+                    "revoke": True,
+                    "map_type": flag,
+                    "team": None,
+                    "organization": None,
+                    "triggers": {
+                        "groups": {
+                            "has_or": groups,
+                        }
+                    },
+                    "order": order,
+                }
+            )
+        elif flag == 'is_system_auditor':
+            # Roles other than superuser must be represented as a generic role mapper
+            result.append(
+                {
+                    "name": f"{flag} - role",
+                    "authenticator": -1,
+                    "revoke": True,
+                    "map_type": "role",
+                    "role": "Platform Auditor",
+                    "team": None,
+                    "organization": None,
+                    "triggers": {
+                        "groups": {
+                            "has_or": groups,
+                        }
+                    },
+                    "order": order,
+                }
+            )
+
+        order += 1
+
+    return result, order
diff --git a/awx/main/utils/licensing.py b/awx/main/utils/licensing.py
index 2df0a07e5e..780ad97249 100644
--- a/awx/main/utils/licensing.py
+++ b/awx/main/utils/licensing.py
@@ -249,7 +249,7 @@ class Licenser(object):
                 'GET',
                 host,
                 verify=True,
-                timeout=(5, 20),
+                timeout=(31, 31),
             )
         except requests.RequestException:
             logger.warning("Failed to connect to console.redhat.com using Service Account credentials. Falling back to basic auth.")
@@ -258,7 +258,7 @@
                 host,
                 auth=(client_id, client_secret),
                 verify=True,
-                timeout=(5, 20),
+                timeout=(31, 31),
             )
         subs.raise_for_status()
         subs_formatted = []
diff --git a/awx/playbooks/action_plugins/insights.py b/awx/playbooks/action_plugins/insights.py
index e3f9b9b6e8..b2f8403b64 100644
--- a/awx/playbooks/action_plugins/insights.py
+++ b/awx/playbooks/action_plugins/insights.py
@@ -38,7 +38,7 @@ class ActionModule(ActionBase):
 
     def _obtain_auth_token(self, oidc_endpoint, client_id, client_secret):
         if oidc_endpoint.endswith('/'):
-            oidc_endpoint = oidc_endpoint.rstrip('/')
+            oidc_endpoint = oidc_endpoint[:-1]
         main_url = oidc_endpoint + '/.well-known/openid-configuration'
         response = requests.get(url=main_url, headers={'Accept': 'application/json'})
         data = {}
diff --git a/awx/resource_api.py b/awx/resource_api.py
index 2009dfab8b..f169bf9c5a 100644
--- a/awx/resource_api.py
+++ b/awx/resource_api.py
@@ -1,5 +1,7 @@
 from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
 from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
+from ansible_base.rbac.models import RoleDefinition
+from ansible_base.resource_registry.shared_types import RoleDefinitionType
 
 from awx.main import models
 
@@ -19,4 +21,8 @@ RESOURCE_LIST = (
         shared_resource=SharedResource(serializer=TeamType, is_provider=False),
         parent_resources=[ParentResource(model=models.Organization, field_name="organization")],
     ),
+    ResourceConfig(
+        RoleDefinition,
+        shared_resource=SharedResource(serializer=RoleDefinitionType, is_provider=False),
+    ),
 )
diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py
index 740758bee1..0ed9b96125 100644
--- a/awx/settings/defaults.py
+++ b/awx/settings/defaults.py
@@ -83,7 +83,7 @@ USE_I18N = True
 USE_TZ = True
 
 STATICFILES_DIRS = [
-    os.path.join(BASE_DIR, 'ui', 'build'),
+    os.path.join(BASE_DIR, 'ui', 'build', 'static'),
     os.path.join(BASE_DIR, 
'static'), ] @@ -540,7 +540,7 @@ AWX_AUTO_DEPROVISION_INSTANCES = False # If True, allow users to be assigned to roles that were created via JWT -ALLOW_LOCAL_ASSIGNING_JWT_ROLES = False +ALLOW_LOCAL_ASSIGNING_JWT_ROLES = True # Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed' # Note: This setting may be overridden by database settings. @@ -599,6 +599,11 @@ VMWARE_EXCLUDE_EMPTY_GROUPS = True VMWARE_VALIDATE_CERTS = False +# ----------------- +# -- VMware ESXi -- +# ----------------- +VMWARE_ESXI_EXCLUDE_EMPTY_GROUPS = True + # --------------------------- # -- Google Compute Engine -- # --------------------------- @@ -711,7 +716,7 @@ DISABLE_LOCAL_AUTH = False TOWER_URL_BASE = "https://platformhost" INSIGHTS_URL_BASE = "https://example.org" -INSIGHTS_OIDC_ENDPOINT = "https://sso.example.org" +INSIGHTS_OIDC_ENDPOINT = "https://sso.example.org/" INSIGHTS_AGENT_MIME = 'application/example' # See https://github.com/ansible/awx-facts-playbooks INSIGHTS_SYSTEM_ID_FILE = '/etc/redhat-access-insights/machine-id' @@ -1069,6 +1074,7 @@ ANSIBLE_BASE_CACHE_PARENT_PERMISSIONS = True # Currently features are enabled to keep compatibility with old system, except custom roles ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN = False # ANSIBLE_BASE_ALLOW_CUSTOM_ROLES = True +ANSIBLE_BASE_ALLOW_TEAM_PARENTS = False ANSIBLE_BASE_ALLOW_CUSTOM_TEAM_ROLES = False ANSIBLE_BASE_ALLOW_SINGLETON_USER_ROLES = True ANSIBLE_BASE_ALLOW_SINGLETON_TEAM_ROLES = False # System auditor has always been restricted to users @@ -1089,6 +1095,9 @@ INDIRECT_HOST_QUERY_FALLBACK_GIVEUP_DAYS = 3 # Older records will be cleaned up INDIRECT_HOST_AUDIT_RECORD_MAX_AGE_DAYS = 7 +# setting for Policy as Code feature +FEATURE_POLICY_AS_CODE_ENABLED = False + OPA_HOST = '' # The hostname used to connect to the OPA server. If empty, policy enforcement will be disabled. OPA_PORT = 8181 # The port used to connect to the OPA server. Defaults to 8181. OPA_SSL = False # Enable or disable the use of SSL to connect to the OPA server. Defaults to false. diff --git a/awx/settings/development_defaults.py b/awx/settings/development_defaults.py index 9725b0abb7..bb726cb372 100644 --- a/awx/settings/development_defaults.py +++ b/awx/settings/development_defaults.py @@ -73,5 +73,4 @@ AWX_DISABLE_TASK_MANAGERS = False def set_dev_flags(settings): defaults_flags = settings.get("FLAGS", {}) defaults_flags['FEATURE_INDIRECT_NODE_COUNTING_ENABLED'] = [{'condition': 'boolean', 'value': True}] - defaults_flags['FEATURE_DISPATCHERD_ENABLED'] = [{'condition': 'boolean', 'value': True}] return {'FLAGS': defaults_flags} diff --git a/awx/settings/production_defaults.py b/awx/settings/production_defaults.py index 35bebc1f7b..5599b81753 100644 --- a/awx/settings/production_defaults.py +++ b/awx/settings/production_defaults.py @@ -23,13 +23,8 @@ ALLOWED_HOSTS = [] # only used for deprecated fields and management commands for them BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv") -# Switch to a writable location for the dispatcher sockfile location -DISPATCHERD_DEBUGGING_SOCKFILE = os.path.realpath('/var/run/tower/dispatcherd.sock') - # Very important that this is editable (not read_only) in the API AWX_ISOLATION_SHOW_PATHS = [ '/etc/pki/ca-trust:/etc/pki/ca-trust:O', '/usr/share/pki:/usr/share/pki:O', ] - -del os diff --git a/awx/sso/backends.py b/awx/sso/backends.py new file mode 100644 index 0000000000..572afc3ef0 --- /dev/null +++ b/awx/sso/backends.py @@ -0,0 +1,469 @@ +# Copyright (c) 2015 Ansible, Inc. +# All Rights Reserved. 
+ +# Python +from collections import OrderedDict +import logging +import uuid + +import ldap + +# Django +from django.dispatch import receiver +from django.contrib.auth.models import User +from django.conf import settings as django_settings +from django.core.signals import setting_changed +from django.utils.encoding import force_str +from django.http import HttpResponse + +# django-auth-ldap +from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings +from django_auth_ldap.backend import LDAPBackend as BaseLDAPBackend +from django_auth_ldap.backend import populate_user +from django.core.exceptions import ImproperlyConfigured + +# radiusauth +from radiusauth.backends import RADIUSBackend as BaseRADIUSBackend + +# tacacs+ auth +import tacacs_plus + +# social +from social_core.backends.saml import OID_USERID +from social_core.backends.saml import SAMLAuth as BaseSAMLAuth +from social_core.backends.saml import SAMLIdentityProvider as BaseSAMLIdentityProvider + +# Ansible Tower +from awx.sso.models import UserEnterpriseAuth +from awx.sso.common import create_org_and_teams, reconcile_users_org_team_mappings + +logger = logging.getLogger('awx.sso.backends') + + +class LDAPSettings(BaseLDAPSettings): + defaults = dict(list(BaseLDAPSettings.defaults.items()) + list({'ORGANIZATION_MAP': {}, 'TEAM_MAP': {}, 'GROUP_TYPE_PARAMS': {}}.items())) + + def __init__(self, prefix='AUTH_LDAP_', defaults={}): + super(LDAPSettings, self).__init__(prefix, defaults) + + # If a DB-backed setting is specified that wipes out the + # OPT_NETWORK_TIMEOUT, fall back to a sane default + if ldap.OPT_NETWORK_TIMEOUT not in getattr(self, 'CONNECTION_OPTIONS', {}): + options = getattr(self, 'CONNECTION_OPTIONS', {}) + options[ldap.OPT_NETWORK_TIMEOUT] = 30 + self.CONNECTION_OPTIONS = options + + # when specifying `.set_option()` calls for TLS in python-ldap, the + # *order* in which you invoke them *matters*, particularly in Python3, + # where dictionary insertion order is persisted + # + # specifically, it is *critical* that `ldap.OPT_X_TLS_NEWCTX` be set *last* + # this manual sorting puts `OPT_X_TLS_NEWCTX` *after* other TLS-related + # options + # + # see: https://github.com/python-ldap/python-ldap/issues/55 + newctx_option = self.CONNECTION_OPTIONS.pop(ldap.OPT_X_TLS_NEWCTX, None) + self.CONNECTION_OPTIONS = OrderedDict(self.CONNECTION_OPTIONS) + if newctx_option is not None: + self.CONNECTION_OPTIONS[ldap.OPT_X_TLS_NEWCTX] = newctx_option + + +class LDAPBackend(BaseLDAPBackend): + """ + Custom LDAP backend for AWX. + """ + + settings_prefix = 'AUTH_LDAP_' + + def __init__(self, *args, **kwargs): + self._dispatch_uid = uuid.uuid4() + super(LDAPBackend, self).__init__(*args, **kwargs) + setting_changed.connect(self._on_setting_changed, dispatch_uid=self._dispatch_uid) + + def _on_setting_changed(self, sender, **kwargs): + # If any AUTH_LDAP_* setting changes, force settings to be reloaded for + # this backend instance. 
+        if kwargs.get('setting', '').startswith(self.settings_prefix):
+            self._settings = None
+
+    def _get_settings(self):
+        if self._settings is None:
+            self._settings = LDAPSettings(self.settings_prefix)
+        return self._settings
+
+    def _set_settings(self, settings):
+        self._settings = settings
+
+    settings = property(_get_settings, _set_settings)
+
+    def authenticate(self, request, username, password):
+        if self.settings.START_TLS and ldap.OPT_X_TLS_REQUIRE_CERT in self.settings.CONNECTION_OPTIONS:
+            # with python-ldap, if you want to set connection-specific TLS
+            # parameters, you must also specify OPT_X_TLS_NEWCTX = 0
+            # see: https://stackoverflow.com/a/29722445
+            # see: https://stackoverflow.com/a/38136255
+            self.settings.CONNECTION_OPTIONS[ldap.OPT_X_TLS_NEWCTX] = 0
+
+        if not self.settings.SERVER_URI:
+            return None
+        try:
+            user = User.objects.get(username=username)
+            if user and (not user.profile or not user.profile.ldap_dn):
+                return None
+        except User.DoesNotExist:
+            pass
+
+        try:
+            for setting_name, type_ in [('GROUP_SEARCH', 'LDAPSearch'), ('GROUP_TYPE', 'LDAPGroupType')]:
+                if getattr(self.settings, setting_name) is None:
+                    raise ImproperlyConfigured("{} must be an {} instance.".format(setting_name, type_))
+            ldap_user = super(LDAPBackend, self).authenticate(request, username, password)
+            # If the LDAP user we found has an ldap_user internal object with a bound
+            # connection, force an unbind to close the sticky connection
+            if ldap_user and ldap_user.ldap_user and ldap_user.ldap_user._connection_bound:
+                logger.debug("Forcing LDAP connection to close")
+                try:
+                    ldap_user.ldap_user._connection.unbind_s()
+                    ldap_user.ldap_user._connection_bound = False
+                except Exception:
+                    logger.exception(f"Got unexpected LDAP exception when forcing LDAP disconnect for user {ldap_user}, login will still proceed")
+            return ldap_user
+        except Exception:
+            logger.exception("Encountered an error authenticating to LDAP")
+            return None
+
+    def get_user(self, user_id):
+        if not self.settings.SERVER_URI:
+            return None
+        return super(LDAPBackend, self).get_user(user_id)
+
+    # Disable any LDAP-based authorization / permissions checking.
+
+    def has_perm(self, user, perm, obj=None):
+        return False
+
+    def has_module_perms(self, user, app_label):
+        return False
+
+    def get_all_permissions(self, user, obj=None):
+        return set()
+
+    def get_group_permissions(self, user, obj=None):
+        return set()
+
+
+class LDAPBackend1(LDAPBackend):
+    settings_prefix = 'AUTH_LDAP_1_'
+
+
+class LDAPBackend2(LDAPBackend):
+    settings_prefix = 'AUTH_LDAP_2_'
+
+
+class LDAPBackend3(LDAPBackend):
+    settings_prefix = 'AUTH_LDAP_3_'
+
+
+class LDAPBackend4(LDAPBackend):
+    settings_prefix = 'AUTH_LDAP_4_'
+
+
+class LDAPBackend5(LDAPBackend):
+    settings_prefix = 'AUTH_LDAP_5_'
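+
+# The numbered subclasses let multiple LDAP servers be configured side by side;
+# each instance reads its own settings namespace. A sketch with placeholder values:
+#
+#     AUTH_LDAP_SERVER_URI = 'ldap://ldap1.example.com'    # consumed by LDAPBackend
+#     AUTH_LDAP_2_SERVER_URI = 'ldap://ldap2.example.com'  # consumed by LDAPBackend2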
+
+
+def _decorate_enterprise_user(user, provider):
+    user.set_unusable_password()
+    user.save()
+    enterprise_auth, _ = UserEnterpriseAuth.objects.get_or_create(user=user, provider=provider)
+    return enterprise_auth
+
+
+def _get_or_set_enterprise_user(username, password, provider):
+    created = False
+    try:
+        user = User.objects.prefetch_related('enterprise_auth').get(username=username)
+    except User.DoesNotExist:
+        user = User(username=username)
+        enterprise_auth = _decorate_enterprise_user(user, provider)
+        logger.debug("Created enterprise user %s via %s backend." % (username, enterprise_auth.get_provider_display()))
+        created = True
+    if created or user.is_in_enterprise_category(provider):
+        return user
+    logger.warning("Enterprise user %s already defined in Tower." % username)
+
+
+class RADIUSBackend(BaseRADIUSBackend):
+    """
+    Custom RADIUS backend to verify license status
+    """
+
+    def authenticate(self, request, username, password):
+        if not django_settings.RADIUS_SERVER:
+            return None
+        return super(RADIUSBackend, self).authenticate(request, username, password)
+
+    def get_user(self, user_id):
+        if not django_settings.RADIUS_SERVER:
+            return None
+        user = super(RADIUSBackend, self).get_user(user_id)
+        if not user.has_usable_password():
+            return user
+
+    def get_django_user(self, username, password=None, groups=[], is_staff=False, is_superuser=False):
+        return _get_or_set_enterprise_user(force_str(username), force_str(password), 'radius')
+
+
+class TACACSPlusBackend(object):
+    """
+    Custom TACACS+ auth backend for AWX
+    """
+
+    def authenticate(self, request, username, password):
+        if not django_settings.TACACSPLUS_HOST:
+            return None
+        try:
+            # The upstream TACACS+ client does not accept non-string values, so convert if needed.
+            tacacs_client = tacacs_plus.TACACSClient(
+                django_settings.TACACSPLUS_HOST,
+                django_settings.TACACSPLUS_PORT,
+                django_settings.TACACSPLUS_SECRET,
+                timeout=django_settings.TACACSPLUS_SESSION_TIMEOUT,
+            )
+            auth_kwargs = {'authen_type': tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL]}
+            if django_settings.TACACSPLUS_REM_ADDR:
+                client_ip = self._get_client_ip(request)
+                if client_ip:
+                    auth_kwargs['rem_addr'] = client_ip
+            auth = tacacs_client.authenticate(username, password, **auth_kwargs)
+        except Exception as e:
+            logger.exception("TACACS+ Authentication Error: %s" % str(e))
+            return None
+        if auth.valid:
+            return _get_or_set_enterprise_user(username, password, 'tacacs+')
+
+    def get_user(self, user_id):
+        if not django_settings.TACACSPLUS_HOST:
+            return None
+        try:
+            return User.objects.get(pk=user_id)
+        except User.DoesNotExist:
+            return None
+
+    def _get_client_ip(self, request):
+        if not request or not hasattr(request, 'META'):
+            return None
+
+        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
+        if x_forwarded_for:
+            ip = x_forwarded_for.split(',')[0]
+        else:
+            ip = request.META.get('REMOTE_ADDR')
+        return ip
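+
+# Client-IP precedence in _get_client_ip, assuming a proxied request (addresses
+# are placeholders): the first X-Forwarded-For hop wins over REMOTE_ADDR.
+#
+#     request.META = {'HTTP_X_FORWARDED_FOR': '203.0.113.7, 10.0.0.1', 'REMOTE_ADDR': '10.0.0.1'}
+#     TACACSPlusBackend()._get_client_ip(request)  # -> '203.0.113.7'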
+
+
+class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
+    """
+    Custom Identity Provider to map attributes to what we expect.
+    """
+
+    def get_user_permanent_id(self, attributes):
+        uid = attributes[self.conf.get('attr_user_permanent_id', OID_USERID)]
+        if isinstance(uid, str):
+            return uid
+        return uid[0]
+
+    def get_attr(self, attributes, conf_key, default_attribute):
+        """
+        Get the attribute 'default_attribute' out of the attributes,
+        unless self.conf[conf_key] overrides the default by specifying
+        another attribute to use.
+        """
+        key = self.conf.get(conf_key, default_attribute)
+        value = attributes[key] if key in attributes else None
+        # In certain implementations (like https://pagure.io/ipsilon) this value is a string, not a list
+        if isinstance(value, (list, tuple)):
+            value = value[0]
+        if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None:
+            logger.warning(
+                "Could not map user detail '%s' from SAML attribute '%s'; update SOCIAL_AUTH_SAML_ENABLED_IDPS['%s']['%s'] with the correct SAML attribute.",
+                conf_key[5:],
+                key,
+                self.name,
+                conf_key,
+            )
+        return str(value) if value is not None else value
+
+
+class SAMLAuth(BaseSAMLAuth):
+    """
+    Custom SAMLAuth backend to verify license status
+    """
+
+    def get_idp(self, idp_name):
+        idp_config = self.setting('ENABLED_IDPS')[idp_name]
+        return TowerSAMLIdentityProvider(idp_name, **idp_config)
+
+    def authenticate(self, request, *args, **kwargs):
+        if not all(
+            [
+                django_settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID,
+                django_settings.SOCIAL_AUTH_SAML_SP_PUBLIC_CERT,
+                django_settings.SOCIAL_AUTH_SAML_SP_PRIVATE_KEY,
+                django_settings.SOCIAL_AUTH_SAML_ORG_INFO,
+                django_settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT,
+                django_settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
+                django_settings.SOCIAL_AUTH_SAML_ENABLED_IDPS,
+            ]
+        ):
+            return None
+        pipeline_result = super(SAMLAuth, self).authenticate(request, *args, **kwargs)
+
+        if isinstance(pipeline_result, HttpResponse):
+            return pipeline_result
+        else:
+            user = pipeline_result
+
+        # Comes from https://github.com/omab/python-social-auth/blob/v0.2.21/social/backends/base.py#L91
+        if getattr(user, 'is_new', False):
+            enterprise_auth = _decorate_enterprise_user(user, 'saml')
+            logger.debug("Created enterprise user %s from %s backend." % (user.username, enterprise_auth.get_provider_display()))
+        elif user and not user.is_in_enterprise_category('saml'):
+            return None
+        if user:
+            logger.debug("Enterprise user %s already created in Tower." % user.username)
+        return user
+
+    def get_user(self, user_id):
+        if not all(
+            [
+                django_settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID,
+                django_settings.SOCIAL_AUTH_SAML_SP_PUBLIC_CERT,
+                django_settings.SOCIAL_AUTH_SAML_SP_PRIVATE_KEY,
+                django_settings.SOCIAL_AUTH_SAML_ORG_INFO,
+                django_settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT,
+                django_settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
+                django_settings.SOCIAL_AUTH_SAML_ENABLED_IDPS,
+            ]
+        ):
+            return None
+        return super(SAMLAuth, self).get_user(user_id)
+
+
+def _update_m2m_from_groups(ldap_user, opts, remove=True):
+    """
+    Helper function to evaluate the LDAP team/org options to determine whether the
+    LDAP user should be a member of the team/org based on their LDAP group DNs.
+
+    Returns:
+        True - User should be added
+        False - User should be removed
+        None - User's membership should not be changed
+    """
+    if opts is None:
+        return None
+    elif not opts:
+        pass
+    elif isinstance(opts, bool) and opts is True:
+        return True
+    else:
+        if isinstance(opts, str):
+            opts = [opts]
+        # If any of the user's groups matches any of the list options
+        for group_dn in opts:
+            if not isinstance(group_dn, str):
+                continue
+            if ldap_user._get_groups().is_member_of(group_dn):
+                return True
+    if remove:
+        return False
+    return None
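+
+# Tri-state behavior of _update_m2m_from_groups (the group DN is a placeholder):
+#
+#     _update_m2m_from_groups(ldap_user, None)                 # -> None (leave membership alone)
+#     _update_m2m_from_groups(ldap_user, True)                 # -> True (always a member)
+#     _update_m2m_from_groups(ldap_user, 'cn=ops,dc=example')  # -> True if in the group,
+#                                                              #    else False when remove=True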
+
+
+@receiver(populate_user, dispatch_uid='populate-ldap-user')
+def on_populate_user(sender, **kwargs):
+    """
+    Handle signal from LDAP backend to populate the user object. Update user
+    organization/team memberships according to their LDAP groups.
+    """
+    user = kwargs['user']
+    ldap_user = kwargs['ldap_user']
+    backend = ldap_user.backend
+
+    # Boolean to determine if we should force a user update
+    # to avoid duplicate SQL update statements
+    force_user_update = False
+
+    # Prefetch user's groups to prevent LDAP queries for each org/team when
+    # checking membership.
+    ldap_user._get_groups().get_group_dns()
+
+    # If the LDAP user has a first or last name longer than the field's max length, truncate it
+    for field in ('first_name', 'last_name'):
+        max_len = User._meta.get_field(field).max_length
+        field_len = len(getattr(user, field))
+        if field_len > max_len:
+            setattr(user, field, getattr(user, field)[:max_len])
+            force_user_update = True
+            logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))
+
+    org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
+    team_map_settings = getattr(backend.settings, 'TEAM_MAP', {})
+    orgs_list = list(org_map.keys())
+    team_map = {}
+    for team_name, team_opts in team_map_settings.items():
+        if not team_opts.get('organization', None):
+            # You can't save the LDAP config in the UI without an org (or '' or null as the org),
+            # so if we somehow got this condition it's an error
+            logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))
+            continue
+        team_map[team_name] = team_opts['organization']
+
+    create_org_and_teams(orgs_list, team_map, 'LDAP')
+
+    # Compute in memory what the state is of the different LDAP orgs
+    org_roles_and_ldap_attributes = {'admin_role': 'admins', 'auditor_role': 'auditors', 'member_role': 'users'}
+    desired_org_states = {}
+    for org_name, org_opts in org_map.items():
+        remove = bool(org_opts.get('remove', True))
+        desired_org_states[org_name] = {}
+        for org_role_name in org_roles_and_ldap_attributes.keys():
+            ldap_name = org_roles_and_ldap_attributes[org_role_name]
+            opts = org_opts.get(ldap_name, None)
+            remove = bool(org_opts.get('remove_{}'.format(ldap_name), remove))
+            desired_org_states[org_name][org_role_name] = _update_m2m_from_groups(ldap_user, opts, remove)
+
+        # If everything returned None (because there was no configuration) we can remove this org from our map
+        # This will prevent us from loading the org in the next query
+        if all(desired_org_states[org_name][org_role_name] is None for org_role_name in org_roles_and_ldap_attributes.keys()):
+            del desired_org_states[org_name]
+
+    # Compute in memory what the state is of the different LDAP teams
+    desired_team_states = {}
+    for team_name, team_opts in team_map_settings.items():
+        if 'organization' not in team_opts:
+            continue
+        users_opts = team_opts.get('users', None)
+        remove = bool(team_opts.get('remove', True))
+        state = _update_m2m_from_groups(ldap_user, users_opts, remove)
+        if state is not None:
+            organization = team_opts['organization']
+            if organization not in desired_team_states:
+                desired_team_states[organization] = {}
+            desired_team_states[organization][team_name] = {'member_role': state}
+
+    # Check if user.profile is available, otherwise force user.save()
+    try:
+        _ = user.profile
+    except ValueError:
+        force_user_update = True
+    finally:
+        if force_user_update:
+            user.save()
+
+    # Update user profile to store LDAP DN.
+    profile = user.profile
+    if profile.ldap_dn != ldap_user.dn:
+        profile.ldap_dn = ldap_user.dn
+        profile.save()
+
+    reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, 'LDAP')
diff --git a/awx/sso/middleware.py b/awx/sso/middleware.py
new file mode 100644
index 0000000000..59c2a3c0e3
--- /dev/null
+++ b/awx/sso/middleware.py
@@ -0,0 +1,83 @@
+# Copyright (c) 2015 Ansible, Inc.
+# All Rights Reserved.
+
+# Python
+import urllib.parse
+
+# Django
+from django.conf import settings
+from django.utils.functional import LazyObject
+from django.shortcuts import redirect
+
+# Python Social Auth
+from social_core.exceptions import SocialAuthBaseException
+from social_core.utils import social_logger
+from social_django import utils
+from social_django.middleware import SocialAuthExceptionMiddleware
+
+
+class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
+    def __call__(self, request):
+        return self.process_request(request)
+
+    def process_request(self, request):
+        if request.path.startswith('/sso'):
+            # See upgrade blocker note in requirements/README.md
+            utils.BACKENDS = settings.AUTHENTICATION_BACKENDS
+        token_key = request.COOKIES.get('token', '')
+        token_key = urllib.parse.quote(urllib.parse.unquote(token_key).strip('"'))
+
+        if not hasattr(request, 'successful_authenticator'):
+            request.successful_authenticator = None
+
+        if not request.path.startswith('/sso/') and 'migrations_notran' not in request.path:
+            if request.user and request.user.is_authenticated:
+                # The rest of the code base relies heavily on type/inheritance checks;
+                # a LazyObject sent from the Django auth middleware can be buggy if not
+                # converted back to its original object.
+                if isinstance(request.user, LazyObject) and request.user._wrapped:
+                    request.user = request.user._wrapped
+                request.session.pop('social_auth_error', None)
+                request.session.pop('social_auth_last_backend', None)
+        return self.get_response(request)
+
+    def process_view(self, request, callback, callback_args, callback_kwargs):
+        if request.path.startswith('/sso/login/'):
+            request.session['social_auth_last_backend'] = callback_kwargs['backend']
+
+    def process_exception(self, request, exception):
+        strategy = getattr(request, 'social_strategy', None)
+        if strategy is None or self.raise_exception(request, exception):
+            return
+
+        if isinstance(exception, SocialAuthBaseException) or request.path.startswith('/sso/'):
+            backend = getattr(request, 'backend', None)
+            backend_name = getattr(backend, 'name', 'unknown-backend')
+
+            message = self.get_message(request, exception)
+            if request.session.get('social_auth_last_backend') != backend_name:
+                backend_name = request.session.get('social_auth_last_backend')
+                message = request.GET.get('error_description', message)
+
+            full_backend_name = backend_name
+            try:
+                idp_name = strategy.request_data()['RelayState']
+                full_backend_name = '%s:%s' % (backend_name, idp_name)
+            except KeyError:
+                pass
+
+            social_logger.error(message)
+
+            url = self.get_redirect_uri(request, exception)
+            request.session['social_auth_error'] = (full_backend_name, message)
+            return redirect(url)
+
+    def get_message(self, request, exception):
+        msg = str(exception)
+        if msg and msg[-1] not in '.?!':
+            msg = msg + '.'
+ return msg + + def get_redirect_uri(self, request, exception): + strategy = getattr(request, 'social_strategy', None) + return strategy.session_get('next', '') or strategy.setting('LOGIN_ERROR_URL') diff --git a/awx/sso/tests/conftest.py b/awx/sso/tests/conftest.py new file mode 100644 index 0000000000..825640e902 --- /dev/null +++ b/awx/sso/tests/conftest.py @@ -0,0 +1,150 @@ +import pytest + +from django.contrib.auth.models import User + +from awx.sso.backends import TACACSPlusBackend +from awx.sso.models import UserEnterpriseAuth + + +@pytest.fixture +def tacacsplus_backend(): + return TACACSPlusBackend() + + +@pytest.fixture +def existing_normal_user(): + try: + user = User.objects.get(username="alice") + except User.DoesNotExist: + user = User(username="alice", password="password") + user.save() + return user + + +@pytest.fixture +def existing_tacacsplus_user(): + try: + user = User.objects.get(username="foo") + except User.DoesNotExist: + user = User(username="foo") + user.set_unusable_password() + user.save() + enterprise_auth = UserEnterpriseAuth(user=user, provider='tacacs+') + enterprise_auth.save() + return user + + +@pytest.fixture +def test_radius_config(settings): + settings.RADIUS_SERVER = '127.0.0.1' + settings.RADIUS_PORT = 1812 + settings.RADIUS_SECRET = 'secret' + + +@pytest.fixture +def basic_saml_config(settings): + settings.SAML_SECURITY_CONFIG = { + "wantNameId": True, + "signMetadata": False, + "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256", + "nameIdEncrypted": False, + "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256", + "authnRequestsSigned": False, + "logoutRequestSigned": False, + "wantNameIdEncrypted": False, + "logoutResponseSigned": False, + "wantAssertionsSigned": True, + "requestedAuthnContext": False, + "wantAssertionsEncrypted": False, + } + settings.SOCIAL_AUTH_SAML_ENABLED_IDPS = { + "example": { + "attr_email": "email", + "attr_first_name": "first_name", + "attr_last_name": "last_name", + "attr_user_permanent_id": "username", + "attr_username": "username", + "entity_id": "https://www.example.com/realms/sample", + "url": "https://www.example.com/realms/sample/protocol/saml", + "x509cert": "A" * 64 + "B" * 64 + "C" * 23, + } + } + + settings.SOCIAL_AUTH_SAML_TEAM_ATTR = { + "remove": False, + "saml_attr": "group_name", + "team_org_map": [ + {"team": "internal:unix:domain:admins", "team_alias": "Administrators", "organization": "Default"}, + {"team": "East Coast", "organization": "North America"}, + {"team": "developers", "organization": "North America"}, + {"team": "developers", "organization": "South America"}, + ], + } + + settings.SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR = { + "is_superuser_role": ["wilma"], + "is_superuser_attr": "friends", + "is_superuser_value": ["barney", "fred"], + "remove_superusers": False, + "is_system_auditor_role": ["fred"], + "is_system_auditor_attr": "auditor", + "is_system_auditor_value": ["bamm-bamm"], + } + + settings.SOCIAL_AUTH_SAML_ORGANIZATION_ATTR = {"saml_attr": "member-of", "remove": True, "saml_admin_attr": "admin-of", "remove_admins": False} + + +@pytest.fixture +def test_tacacs_config(settings): + settings.TACACSPLUS_HOST = "tacacshost" + settings.TACACSPLUS_PORT = 49 + settings.TACACSPLUS_SECRET = "secret" + settings.TACACSPLUS_SESSION_TIMEOUT = 10 + settings.TACACSPLUS_AUTH_PROTOCOL = "pap" + settings.TACACSPLUS_REM_ADDR = True + + +@pytest.fixture +def saml_config_user_flags_no_value(settings): + settings.SAML_SECURITY_CONFIG = { + "wantNameId": True, + "signMetadata": 
False, + "digestAlgorithm": "http://www.w3.org/2001/04/xmlenc#sha256", + "nameIdEncrypted": False, + "signatureAlgorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256", + "authnRequestsSigned": False, + "logoutRequestSigned": False, + "wantNameIdEncrypted": False, + "logoutResponseSigned": False, + "wantAssertionsSigned": True, + "requestedAuthnContext": False, + "wantAssertionsEncrypted": False, + } + settings.SOCIAL_AUTH_SAML_ENABLED_IDPS = { + "example": { + "attr_email": "email", + "attr_first_name": "first_name", + "attr_last_name": "last_name", + "attr_user_permanent_id": "username", + "attr_username": "username", + "entity_id": "https://www.example.com/realms/sample", + "url": "https://www.example.com/realms/sample/protocol/saml", + "x509cert": "A" * 64 + "B" * 64 + "C" * 23, + } + } + + settings.SOCIAL_AUTH_SAML_TEAM_ATTR = { + "remove": False, + "saml_attr": "group_name", + "team_org_map": [ + {"team": "internal:unix:domain:admins", "team_alias": "Administrators", "organization": "Default"}, + {"team": "East Coast", "organization": "North America"}, + {"team": "developers", "organization": "North America"}, + {"team": "developers", "organization": "South America"}, + ], + } + + settings.SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR = { + "is_superuser_role": ["wilma"], + "is_superuser_attr": "friends", + } diff --git a/awx/sso/tests/unit/test_google_oauth2_migrator.py b/awx/sso/tests/unit/test_google_oauth2_migrator.py new file mode 100644 index 0000000000..ec7fca5939 --- /dev/null +++ b/awx/sso/tests/unit/test_google_oauth2_migrator.py @@ -0,0 +1,104 @@ +import pytest +from unittest.mock import MagicMock +from awx.sso.utils.google_oauth2_migrator import GoogleOAuth2Migrator + + +@pytest.fixture +def test_google_config(settings): + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = "test_key" + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = "test_secret" + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL = "https://tower.example.com/sso/complete/google-oauth2/" + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP = {"My Org": {"users": True}} + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP = {"My Team": {"organization": "My Org", "users": True}} + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE = ["profile", "email"] + + +@pytest.mark.django_db +def test_get_controller_config(test_google_config): + gateway_client = MagicMock() + command_obj = MagicMock() + obj = GoogleOAuth2Migrator(gateway_client, command_obj) + + result = obj.get_controller_config() + assert len(result) == 1 + config = result[0] + assert config['category'] == 'Google OAuth2' + settings = config['settings'] + assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_KEY'] == 'test_key' + assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET'] == 'test_secret' + assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL'] == "https://tower.example.com/sso/complete/google-oauth2/" + assert settings['SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE'] == ["profile", "email"] + # Assert that other settings are not present in the returned config + assert 'SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP' not in settings + assert 'SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP' not in settings + + +@pytest.mark.django_db +def test_create_gateway_authenticator(mocker, test_google_config): + mocker.patch('django.conf.settings.LOGGING', {}) + + gateway_client = MagicMock() + command_obj = MagicMock() + obj = GoogleOAuth2Migrator(gateway_client, command_obj) + mock_submit = MagicMock(return_value=True) + obj.submit_authenticator = mock_submit + + configs = obj.get_controller_config() + result = 
obj.create_gateway_authenticator(configs[0]) + + assert result is True + mock_submit.assert_called_once() + + # Assert payload sent to gateway + payload = mock_submit.call_args[0][0] + assert payload['name'] == 'google' + assert payload['slug'] == 'aap-google-oauth2-google-oauth2' + assert payload['type'] == 'ansible_base.authentication.authenticator_plugins.google_oauth2' + assert payload['enabled'] is False + assert payload['create_objects'] is True + assert payload['remove_users'] is False + + # Assert configuration details + configuration = payload['configuration'] + assert configuration['KEY'] == 'test_key' + assert configuration['SECRET'] == 'test_secret' + assert configuration['CALLBACK_URL'] == 'https://tower.example.com/sso/complete/google-oauth2/' + assert configuration['SCOPE'] == ['profile', 'email'] + + # Assert mappers + assert len(payload['mappers']) == 2 + assert payload['mappers'][0]['map_type'] == 'organization' + assert payload['mappers'][1]['map_type'] == 'team' + + # Assert ignore_keys + ignore_keys = mock_submit.call_args[0][1] + assert ignore_keys == ["ACCESS_TOKEN_METHOD", "REVOKE_TOKEN_METHOD"] + + +@pytest.mark.django_db +def test_create_gateway_authenticator_no_optional_values(mocker, settings): + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = "test_key" + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = "test_secret" + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP = {} + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP = {} + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE = None + settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL = None + + mocker.patch('django.conf.settings.LOGGING', {}) + + gateway_client = MagicMock() + command_obj = MagicMock() + obj = GoogleOAuth2Migrator(gateway_client, command_obj) + mock_submit = MagicMock(return_value=True) + obj.submit_authenticator = mock_submit + + configs = obj.get_controller_config() + obj.create_gateway_authenticator(configs[0]) + + payload = mock_submit.call_args[0][0] + assert 'CALLBACK_URL' not in payload['configuration'] + assert 'SCOPE' not in payload['configuration'] + + ignore_keys = mock_submit.call_args[0][1] + assert 'CALLBACK_URL' in ignore_keys + assert 'SCOPE' in ignore_keys diff --git a/awx/sso/tests/unit/test_radius_migrator.py b/awx/sso/tests/unit/test_radius_migrator.py new file mode 100644 index 0000000000..ffc1bbed36 --- /dev/null +++ b/awx/sso/tests/unit/test_radius_migrator.py @@ -0,0 +1,17 @@ +import pytest +from unittest.mock import MagicMock +from awx.sso.utils.radius_migrator import RADIUSMigrator + + +@pytest.mark.django_db +def test_get_controller_config(test_radius_config): + gateway_client = MagicMock() + command_obj = MagicMock() + obj = RADIUSMigrator(gateway_client, command_obj) + + result = obj.get_controller_config() + config = result[0]['settings']['configuration'] + assert config['SERVER'] == '127.0.0.1' + assert config['PORT'] == 1812 + assert config['SECRET'] == 'secret' + assert len(config) == 3 diff --git a/awx/sso/tests/unit/test_saml_migrator.py b/awx/sso/tests/unit/test_saml_migrator.py new file mode 100644 index 0000000000..97dca5c607 --- /dev/null +++ b/awx/sso/tests/unit/test_saml_migrator.py @@ -0,0 +1,272 @@ +import pytest +from unittest.mock import MagicMock, patch +from awx.sso.utils.saml_migrator import SAMLMigrator + + +@pytest.mark.django_db +def test_get_controller_config(basic_saml_config): + gateway_client = MagicMock() + command_obj = MagicMock() + obj = SAMLMigrator(gateway_client, command_obj) + + result = obj.get_controller_config() + lines = 
result[0]['settings']['configuration']['IDP_X509_CERT'].splitlines() + assert lines[0] == '-----BEGIN CERTIFICATE-----' + assert lines[1] == "A" * 64 + assert lines[2] == "B" * 64 + assert lines[3] == "C" * 23 + assert lines[-1] == '-----END CERTIFICATE-----' + + +@pytest.mark.django_db +def test_get_controller_config_with_mapper(saml_config_user_flags_no_value): + gateway_client = MagicMock() + command_obj = MagicMock() + obj = SAMLMigrator(gateway_client, command_obj) + + result = obj.get_controller_config() + expected_maps = [ + { + 'map_type': 'team', + 'role': 'Team Member', + 'organization': 'Default', + 'team': 'Administrators', + 'name': 'Team-Administrators-Default', + 'revoke': False, + 'authenticator': -1, + 'triggers': {'attributes': {'group_name': {'in': ['internal:unix:domain:admins']}, 'join_condition': 'or'}}, + 'order': 1, + }, + { + 'map_type': 'team', + 'role': 'Team Member', + 'organization': 'North America', + 'team': 'East Coast', + 'name': 'Team-East Coast-North America', + 'revoke': False, + 'authenticator': -1, + 'triggers': {'attributes': {'group_name': {'in': ['East Coast']}, 'join_condition': 'or'}}, + 'order': 2, + }, + { + 'map_type': 'team', + 'role': 'Team Member', + 'organization': 'North America', + 'team': 'developers', + 'name': 'Team-developers-North America', + 'revoke': False, + 'authenticator': -1, + 'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}}, + 'order': 3, + }, + { + 'map_type': 'team', + 'role': 'Team Member', + 'organization': 'South America', + 'team': 'developers', + 'name': 'Team-developers-South America', + 'revoke': False, + 'authenticator': -1, + 'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}}, + 'order': 4, + }, + { + 'map_type': 'is_superuser', + 'role': None, + 'name': 'Role-is_superuser', + 'organization': None, + 'team': None, + 'revoke': True, + 'order': 5, + 'authenticator': -1, + 'triggers': {'attributes': {'Role': {'in': ['wilma']}, 'join_condition': 'or'}}, + }, + { + 'map_type': 'is_superuser', + 'role': None, + 'name': 'Role-is_superuser-attr', + 'organization': None, + 'team': None, + 'revoke': True, + 'order': 6, + 'authenticator': -1, + 'triggers': {'attributes': {'friends': {}, 'join_condition': 'or'}}, + }, + ] + assert result[0]['team_mappers'] == expected_maps + extra_data = result[0]['settings']['configuration']['EXTRA_DATA'] + assert ['Role', 'Role'] in extra_data + assert ['friends', 'friends'] in extra_data + assert ['group_name', 'group_name'] in extra_data + + +@pytest.mark.django_db +def test_get_controller_config_with_roles(basic_saml_config): + gateway_client = MagicMock() + command_obj = MagicMock() + obj = SAMLMigrator(gateway_client, command_obj) + + result = obj.get_controller_config() + + expected_maps = [ + { + 'map_type': 'team', + 'role': 'Team Member', + 'organization': 'Default', + 'team': 'Administrators', + 'name': 'Team-Administrators-Default', + 'revoke': False, + 'authenticator': -1, + 'triggers': {'attributes': {'group_name': {'in': ['internal:unix:domain:admins']}, 'join_condition': 'or'}}, + 'order': 1, + }, + { + 'map_type': 'team', + 'role': 'Team Member', + 'organization': 'North America', + 'team': 'East Coast', + 'name': 'Team-East Coast-North America', + 'revoke': False, + 'authenticator': -1, + 'triggers': {'attributes': {'group_name': {'in': ['East Coast']}, 'join_condition': 'or'}}, + 'order': 2, + }, + { + 'map_type': 'team', + 'role': 'Team Member', + 'organization': 'North America', + 'team': 
'developers', + 'name': 'Team-developers-North America', + 'revoke': False, + 'authenticator': -1, + 'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}}, + 'order': 3, + }, + { + 'map_type': 'team', + 'role': 'Team Member', + 'organization': 'South America', + 'team': 'developers', + 'name': 'Team-developers-South America', + 'revoke': False, + 'authenticator': -1, + 'triggers': {'attributes': {'group_name': {'in': ['developers']}, 'join_condition': 'or'}}, + 'order': 4, + }, + { + 'map_type': 'is_superuser', + 'role': None, + 'name': 'Role-is_superuser', + 'organization': None, + 'team': None, + 'revoke': False, + 'order': 5, + 'authenticator': -1, + 'triggers': {'attributes': {'Role': {'in': ['wilma']}, 'join_condition': 'or'}}, + }, + { + 'map_type': 'role', + 'role': 'Platform Auditor', + 'name': 'Role-Platform Auditor', + 'organization': None, + 'team': None, + 'revoke': True, + 'order': 6, + 'authenticator': -1, + 'triggers': {'attributes': {'Role': {'in': ['fred']}, 'join_condition': 'or'}}, + }, + { + 'map_type': 'is_superuser', + 'role': None, + 'name': 'Role-is_superuser-attr', + 'organization': None, + 'team': None, + 'revoke': False, + 'order': 7, + 'authenticator': -1, + 'triggers': {'attributes': {'friends': {'in': ['barney', 'fred']}, 'join_condition': 'or'}}, + }, + { + 'map_type': 'role', + 'role': 'Platform Auditor', + 'name': 'Role-Platform Auditor-attr', + 'organization': None, + 'team': None, + 'revoke': True, + 'order': 8, + 'authenticator': -1, + 'triggers': {'attributes': {'auditor': {'in': ['bamm-bamm']}, 'join_condition': 'or'}}, + }, + { + 'map_type': 'organization', + 'role': 'Organization Member', + 'name': 'Role-Organization Member-attr', + 'organization': "{% for_attr_value('member-of') %}", + 'team': None, + 'revoke': True, + 'order': 9, + 'authenticator': -1, + 'triggers': {'attributes': {'member-of': {}, 'join_condition': 'or'}}, + }, + { + 'map_type': 'organization', + 'role': 'Organization Admin', + 'name': 'Role-Organization Admin-attr', + 'organization': "{% for_attr_value('admin-of') %}", + 'team': None, + 'revoke': False, + 'order': 10, + 'authenticator': -1, + 'triggers': {'attributes': {'admin-of': {}, 'join_condition': 'or'}}, + }, + ] + + assert result[0]['team_mappers'] == expected_maps + extra_data = result[0]['settings']['configuration']['EXTRA_DATA'] + extra_data_items = [ + ['member-of', 'member-of'], + ['admin-of', 'admin-of'], + ['Role', 'Role'], + ['friends', 'friends'], + ['group_name', 'group_name'], + ] + for item in extra_data_items: + assert item in extra_data + assert extra_data.count(item) == 1 + + +@pytest.mark.django_db +def test_get_controller_config_enabled_false(basic_saml_config): + """SAML controller export marks settings.enabled False by default.""" + gateway_client = MagicMock() + command_obj = MagicMock() + obj = SAMLMigrator(gateway_client, command_obj) + + result = obj.get_controller_config() + assert isinstance(result, list) and len(result) >= 1 + assert result[0]['settings']['enabled'] is False + + +@pytest.mark.django_db +def test_create_gateway_authenticator_submits_disabled(basic_saml_config): + """Submitted Gateway authenticator config must have enabled=False and correct ignore keys.""" + gateway_client = MagicMock() + command_obj = MagicMock() + obj = SAMLMigrator(gateway_client, command_obj) + + config = obj.get_controller_config()[0] + + with patch.object( + obj, + 'submit_authenticator', + return_value={'success': True, 'action': 'created', 'error': None}, + ) as 
submit_mock: + obj.create_gateway_authenticator(config) + + # Extract submitted args: gateway_config, ignore_keys, original_config + submitted_gateway_config = submit_mock.call_args[0][0] + ignore_keys = submit_mock.call_args[0][1] + + assert submitted_gateway_config['enabled'] is False + assert 'CALLBACK_URL' in ignore_keys + assert 'SP_PRIVATE_KEY' in ignore_keys diff --git a/awx/sso/tests/unit/test_settings_migrator.py b/awx/sso/tests/unit/test_settings_migrator.py new file mode 100644 index 0000000000..f7bd5fc342 --- /dev/null +++ b/awx/sso/tests/unit/test_settings_migrator.py @@ -0,0 +1,384 @@ +""" +Unit tests for SettingsMigrator class. +""" + +import pytest +from unittest.mock import Mock, patch +from awx.sso.utils.settings_migrator import SettingsMigrator + + +class TestSettingsMigrator: + """Tests for SettingsMigrator class.""" + + def setup_method(self): + """Set up test fixtures.""" + self.gateway_client = Mock() + self.command = Mock() + self.migrator = SettingsMigrator(self.gateway_client, self.command) + + def test_get_authenticator_type(self): + """Test that get_authenticator_type returns 'Settings'.""" + assert self.migrator.get_authenticator_type() == "Settings" + + @pytest.mark.parametrize( + "input_name,expected_output", + [ + ('CUSTOM_LOGIN_INFO', 'custom_login_info'), + ('CUSTOM_LOGO', 'custom_logo'), + ('UNKNOWN_SETTING', 'UNKNOWN_SETTING'), + ('ANOTHER_UNKNOWN', 'ANOTHER_UNKNOWN'), + ], + ) + def test_convert_setting_name(self, input_name, expected_output): + """Test setting name conversion.""" + result = self.migrator._convert_setting_name(input_name) + assert result == expected_output + + @pytest.mark.parametrize( + "transformer_method,test_values", + [ + ('_transform_social_auth_username_is_full_email', [True, False]), + ('_transform_allow_oauth2_for_external_users', [True, False]), + ], + ) + def test_boolean_transformers(self, transformer_method, test_values): + """Test that boolean transformers return values as-is.""" + transformer = getattr(self.migrator, transformer_method) + for value in test_values: + assert transformer(value) is value + + @pytest.mark.parametrize( + "settings_values,expected_count", + [ + # Test case: all settings are None + ( + { + 'SESSION_COOKIE_AGE': None, + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None, + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None, + 'LOGIN_REDIRECT_OVERRIDE': None, + 'ORG_ADMINS_CAN_SEE_ALL_USERS': None, + 'MANAGE_ORGANIZATION_AUTH': None, + }, + 0, + ), + # Test case: all settings are empty strings + ( + { + 'SESSION_COOKIE_AGE': "", + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': "", + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': "", + 'LOGIN_REDIRECT_OVERRIDE': "", + 'ORG_ADMINS_CAN_SEE_ALL_USERS': "", + 'MANAGE_ORGANIZATION_AUTH': "", + }, + 0, + ), + # Test case: only new settings have values + ( + { + 'SESSION_COOKIE_AGE': None, + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None, + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None, + 'LOGIN_REDIRECT_OVERRIDE': None, + 'ORG_ADMINS_CAN_SEE_ALL_USERS': True, + 'MANAGE_ORGANIZATION_AUTH': False, + }, + 2, + ), + ], + ) + @patch('awx.sso.utils.settings_migrator.settings') + def test_get_controller_config_various_scenarios(self, mock_settings, settings_values, expected_count): + """Test get_controller_config with various setting combinations.""" + # Apply the settings values to the mock + for setting_name, setting_value in settings_values.items(): + setattr(mock_settings, setting_name, setting_value) + + result = self.migrator.get_controller_config() + assert len(result) == expected_count + + # Verify 
structure if we have results + if result: + for config in result: + assert config['category'] == 'global-settings' + assert 'setting_name' in config + assert 'setting_value' in config + assert config['org_mappers'] == [] + assert config['team_mappers'] == [] + assert config['role_mappers'] == [] + assert config['allow_mappers'] == [] + + @patch('awx.sso.utils.settings_migrator.settings') + def test_get_controller_config_with_all_settings(self, mock_settings): + """Test get_controller_config with all settings configured.""" + # Mock all settings with valid values + mock_settings.SESSION_COOKIE_AGE = 3600 + mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True + mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = False + mock_settings.LOGIN_REDIRECT_OVERRIDE = "https://example.com/login" + mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = True + mock_settings.MANAGE_ORGANIZATION_AUTH = False + + # Mock the login redirect override to not be set by migrator + with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False): + result = self.migrator.get_controller_config() + + assert len(result) == 6 + + # Check that all expected settings are present + setting_names = [config['setting_name'] for config in result] + expected_settings = [ + 'SESSION_COOKIE_AGE', + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS', + 'LOGIN_REDIRECT_OVERRIDE', + 'ORG_ADMINS_CAN_SEE_ALL_USERS', + 'MANAGE_ORGANIZATION_AUTH', + ] + + for setting in expected_settings: + assert setting in setting_names + + # Verify structure of returned configs + for config in result: + assert config['category'] == 'global-settings' + assert 'setting_name' in config + assert 'setting_value' in config + assert config['org_mappers'] == [] + assert config['team_mappers'] == [] + assert config['role_mappers'] == [] + assert config['allow_mappers'] == [] + + @patch('awx.sso.utils.settings_migrator.settings') + def test_get_controller_config_with_new_settings_only(self, mock_settings): + """Test get_controller_config with only the new settings configured.""" + # Mock only the new settings + mock_settings.SESSION_COOKIE_AGE = None + mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = None + mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None + mock_settings.LOGIN_REDIRECT_OVERRIDE = None + mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = True + mock_settings.MANAGE_ORGANIZATION_AUTH = False + + result = self.migrator.get_controller_config() + + assert len(result) == 2 + + # Check the new settings are present + setting_names = [config['setting_name'] for config in result] + assert 'ORG_ADMINS_CAN_SEE_ALL_USERS' in setting_names + assert 'MANAGE_ORGANIZATION_AUTH' in setting_names + + # Verify the values + org_admins_config = next(c for c in result if c['setting_name'] == 'ORG_ADMINS_CAN_SEE_ALL_USERS') + assert org_admins_config['setting_value'] is True + + manage_org_auth_config = next(c for c in result if c['setting_name'] == 'MANAGE_ORGANIZATION_AUTH') + assert manage_org_auth_config['setting_value'] is False + + @patch('awx.sso.utils.settings_migrator.settings') + def test_get_controller_config_with_login_redirect_override_from_migrator(self, mock_settings): + """Test get_controller_config when LOGIN_REDIRECT_OVERRIDE is set by migrator.""" + # Mock settings + mock_settings.SESSION_COOKIE_AGE = None + mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = None + mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None + mock_settings.LOGIN_REDIRECT_OVERRIDE = "https://original.com/login" + 
mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = None + mock_settings.MANAGE_ORGANIZATION_AUTH = None + + # Mock the login redirect override to be set by migrator + with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', True): + with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_new_url', 'https://new.com/login'): + result = self.migrator.get_controller_config() + + assert len(result) == 1 + assert result[0]['setting_name'] == 'LOGIN_REDIRECT_OVERRIDE' + assert result[0]['setting_value'] == 'https://new.com/login' # Should use the migrator URL + + @pytest.mark.parametrize( + "config,current_value,expected_action,should_update", + [ + # Test case: setting needs update + ({'setting_name': 'ORG_ADMINS_CAN_SEE_ALL_USERS', 'setting_value': True}, False, 'updated', True), + # Test case: setting is unchanged + ({'setting_name': 'MANAGE_ORGANIZATION_AUTH', 'setting_value': False}, False, 'skipped', False), + # Test case: another setting needs update + ({'setting_name': 'SESSION_COOKIE_AGE', 'setting_value': 7200}, 3600, 'updated', True), + # Test case: another setting is unchanged + ({'setting_name': 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', 'setting_value': True}, True, 'skipped', False), + ], + ) + def test_create_gateway_authenticator_success_scenarios(self, config, current_value, expected_action, should_update): + """Test create_gateway_authenticator success scenarios.""" + # Mock gateway client methods + self.gateway_client.get_gateway_setting.return_value = current_value + self.gateway_client.update_gateway_setting.return_value = None + + result = self.migrator.create_gateway_authenticator(config) + + assert result['success'] is True + assert result['action'] == expected_action + assert result['error'] is None + + # Verify gateway client calls + expected_setting_name = config['setting_name'] + self.gateway_client.get_gateway_setting.assert_called_once_with(expected_setting_name) + + if should_update: + self.gateway_client.update_gateway_setting.assert_called_once_with(expected_setting_name, config['setting_value']) + else: + self.gateway_client.update_gateway_setting.assert_not_called() + + # Reset mocks for next iteration + self.gateway_client.reset_mock() + + def test_create_gateway_authenticator_with_setting_name_conversion(self): + """Test create_gateway_authenticator with setting name that needs conversion.""" + config = {'setting_name': 'CUSTOM_LOGIN_INFO', 'setting_value': 'Some custom info'} + + # Mock gateway client methods + self.gateway_client.get_gateway_setting.return_value = 'Old info' # Different value + self.gateway_client.update_gateway_setting.return_value = None + + result = self.migrator.create_gateway_authenticator(config) + + assert result['success'] is True + assert result['action'] == 'updated' + + # Verify gateway client was called with converted name + self.gateway_client.get_gateway_setting.assert_called_once_with('custom_login_info') + self.gateway_client.update_gateway_setting.assert_called_once_with('custom_login_info', 'Some custom info') + + def test_create_gateway_authenticator_failure(self): + """Test create_gateway_authenticator when gateway update fails.""" + config = {'setting_name': 'SESSION_COOKIE_AGE', 'setting_value': 7200} + + # Mock gateway client to raise exception + self.gateway_client.get_gateway_setting.return_value = 3600 + self.gateway_client.update_gateway_setting.side_effect = Exception("Gateway error") + + result = self.migrator.create_gateway_authenticator(config) + + assert 
result['success'] is False + assert result['action'] == 'failed' + assert result['error'] == 'Gateway error' + + @pytest.mark.parametrize( + "scenario,settings_config,gateway_responses,update_side_effects,expected_counts", + [ + # Scenario 1: No settings configured + ( + "no_settings", + { + 'SESSION_COOKIE_AGE': None, + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None, + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None, + 'LOGIN_REDIRECT_OVERRIDE': None, + 'ORG_ADMINS_CAN_SEE_ALL_USERS': None, + 'MANAGE_ORGANIZATION_AUTH': None, + }, + [], # No gateway calls expected + [], # No update calls expected + {'settings_created': 0, 'settings_updated': 0, 'settings_unchanged': 0, 'settings_failed': 0}, + ), + # Scenario 2: All updates successful + ( + "successful_updates", + { + 'SESSION_COOKIE_AGE': None, + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None, + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None, + 'LOGIN_REDIRECT_OVERRIDE': None, + 'ORG_ADMINS_CAN_SEE_ALL_USERS': True, + 'MANAGE_ORGANIZATION_AUTH': False, + }, + [False, True], # Different values to trigger updates + [None, None], # Successful updates + {'settings_created': 0, 'settings_updated': 2, 'settings_unchanged': 0, 'settings_failed': 0}, + ), + # Scenario 3: One unchanged, one updated + ( + "mixed_results", + { + 'SESSION_COOKIE_AGE': None, + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': None, + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': None, + 'LOGIN_REDIRECT_OVERRIDE': None, + 'ORG_ADMINS_CAN_SEE_ALL_USERS': True, + 'MANAGE_ORGANIZATION_AUTH': False, + }, + [True, True], # Gateway returns: ORG_ADMINS_CAN_SEE_ALL_USERS=True (unchanged), MANAGE_ORGANIZATION_AUTH=True (needs update) + [ValueError("Update failed")], # Only one update call (for MANAGE_ORGANIZATION_AUTH), and it fails + {'settings_created': 0, 'settings_updated': 0, 'settings_unchanged': 1, 'settings_failed': 1}, + ), + ], + ) + @patch('awx.sso.utils.settings_migrator.settings') + def test_migrate_scenarios(self, mock_settings, scenario, settings_config, gateway_responses, update_side_effects, expected_counts): + """Test migrate method with various scenarios.""" + # Apply settings configuration + for setting_name, setting_value in settings_config.items(): + setattr(mock_settings, setting_name, setting_value) + + # Mock gateway client responses + if gateway_responses: + self.gateway_client.get_gateway_setting.side_effect = gateway_responses + if update_side_effects: + self.gateway_client.update_gateway_setting.side_effect = update_side_effects + + # Mock the login redirect override to not be set by migrator for these tests + with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False): + result = self.migrator.migrate() + + # Verify expected counts + for key, expected_value in expected_counts.items(): + assert result[key] == expected_value, f"Scenario {scenario}: Expected {key}={expected_value}, got {result[key]}" + + # All authenticator/mapper counts should be 0 since settings don't have them + authenticator_mapper_keys = ['created', 'updated', 'unchanged', 'failed', 'mappers_created', 'mappers_updated', 'mappers_failed'] + for key in authenticator_mapper_keys: + assert result[key] == 0, f"Scenario {scenario}: Expected {key}=0, got {result[key]}" + + def test_setting_transformers_defined(self): + """Test that setting transformers are properly defined.""" + expected_transformers = {'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS'} + + actual_transformers = set(self.migrator.setting_transformers.keys()) + assert actual_transformers == 
expected_transformers + + @pytest.mark.parametrize( + "transformer_return_value,expected_result_count", + [ + (None, 0), # Transformer returns None - should be excluded + ("", 0), # Transformer returns empty string - should be excluded + (True, 1), # Transformer returns valid value - should be included + ], + ) + @patch('awx.sso.utils.settings_migrator.settings') + def test_get_controller_config_transformer_edge_cases(self, mock_settings, transformer_return_value, expected_result_count): + """Test get_controller_config when transformer returns various edge case values.""" + # Mock settings - only one setting with a value that has a transformer + mock_settings.SESSION_COOKIE_AGE = None + mock_settings.SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True + mock_settings.ALLOW_OAUTH2_FOR_EXTERNAL_USERS = None + mock_settings.LOGIN_REDIRECT_OVERRIDE = None + mock_settings.ORG_ADMINS_CAN_SEE_ALL_USERS = None + mock_settings.MANAGE_ORGANIZATION_AUTH = None + + # Mock transformer to return the specified value + # We need to patch the transformer in the dictionary, not just the method + original_transformer = self.migrator.setting_transformers.get('SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL') + self.migrator.setting_transformers['SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL'] = lambda x: transformer_return_value + + try: + # Mock the login redirect override to not be set by migrator + with patch.object(self.migrator.__class__.__bases__[0], 'login_redirect_override_set_by_migrator', False): + result = self.migrator.get_controller_config() + finally: + # Restore the original transformer + if original_transformer: + self.migrator.setting_transformers['SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL'] = original_transformer + + assert len(result) == expected_result_count diff --git a/awx/sso/tests/unit/test_tacacs_migrator.py b/awx/sso/tests/unit/test_tacacs_migrator.py new file mode 100644 index 0000000000..05c075d33b --- /dev/null +++ b/awx/sso/tests/unit/test_tacacs_migrator.py @@ -0,0 +1,37 @@ +import pytest +from unittest.mock import MagicMock +from awx.sso.utils.tacacs_migrator import TACACSMigrator + + +@pytest.mark.django_db +def test_get_controller_config(test_tacacs_config): + gateway_client = MagicMock() + command_obj = MagicMock() + obj = TACACSMigrator(gateway_client, command_obj) + + result = obj.get_controller_config() + assert len(result) == 1 + config = result[0] + assert config['category'] == 'TACACSPLUS' + settings_data = config['settings'] + assert settings_data['name'] == 'default' + assert settings_data['type'] == 'ansible_base.authentication.authenticator_plugins.tacacs' + + configuration = settings_data['configuration'] + assert configuration['HOST'] == 'tacacshost' + assert configuration['PORT'] == 49 + assert configuration['SECRET'] == 'secret' + assert configuration['SESSION_TIMEOUT'] == 10 + assert configuration['AUTH_PROTOCOL'] == 'pap' + assert configuration['REM_ADDR'] is True + + +@pytest.mark.django_db +def test_get_controller_config_no_host(settings): + settings.TACACSPLUS_HOST = "" + gateway_client = MagicMock() + command_obj = MagicMock() + obj = TACACSMigrator(gateway_client, command_obj) + + result = obj.get_controller_config() + assert len(result) == 0 diff --git a/awx/sso/utils/__init__.py b/awx/sso/utils/__init__.py new file mode 100644 index 0000000000..4d9f494723 --- /dev/null +++ b/awx/sso/utils/__init__.py @@ -0,0 +1,17 @@ +from awx.sso.utils.azure_ad_migrator import AzureADMigrator +from awx.sso.utils.github_migrator import GitHubMigrator +from awx.sso.utils.google_oauth2_migrator import 
GoogleOAuth2Migrator
+from awx.sso.utils.ldap_migrator import LDAPMigrator
+from awx.sso.utils.oidc_migrator import OIDCMigrator
+from awx.sso.utils.radius_migrator import RADIUSMigrator
+from awx.sso.utils.saml_migrator import SAMLMigrator
+
+__all__ = [
+    'AzureADMigrator',
+    'GitHubMigrator',
+    'GoogleOAuth2Migrator',
+    'LDAPMigrator',
+    'OIDCMigrator',
+    'RADIUSMigrator',
+    'SAMLMigrator',
+]
diff --git a/awx/sso/utils/azure_ad_migrator.py b/awx/sso/utils/azure_ad_migrator.py
new file mode 100644
index 0000000000..121fa59fc9
--- /dev/null
+++ b/awx/sso/utils/azure_ad_migrator.py
@@ -0,0 +1,97 @@
+"""
+Azure AD authenticator migrator.
+
+This module handles the migration of Azure AD authenticators from AWX to Gateway.
+"""
+
+from django.conf import settings
+from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
+from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
+
+
+class AzureADMigrator(BaseAuthenticatorMigrator):
+    """
+    Handles the migration of Azure AD authenticators from AWX to Gateway.
+    """
+
+    def get_authenticator_type(self):
+        """Get the human-readable authenticator type name."""
+        return "Azure AD"
+
+    def get_controller_config(self):
+        """
+        Export Azure AD authenticators. An Azure AD authenticator is only exported if
+        KEY and SECRET are configured.
+
+        Returns:
+            list: List of configured Azure AD authentication providers with their settings
+        """
+        key_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', None)
+        secret_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET', None)
+
+        # Skip this category if the Azure AD key and/or secret are not configured
+        if not key_value or not secret_value:
+            return []
+
+        # If we have both key and secret, collect all settings
+        org_map_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP', None)
+        team_map_value = getattr(settings, 'SOCIAL_AUTH_AZUREAD_OAUTH2_TEAM_MAP', None)
+        login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)
+
+        # Convert Azure AD org and team mappings from AWX to the Gateway format
+        # Start with order 1 and maintain sequence across both org and team mappers
+        org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1)
+        team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order)
+
+        category = 'AzureAD'
+
+        # Generate authenticator name and slug
+        authenticator_name = "Azure AD"
+        authenticator_slug = self._generate_authenticator_slug("azure_ad", category)
+
+        return [
+            {
+                'category': category,
+                'settings': {
+                    "name": authenticator_name,
+                    "slug": authenticator_slug,
+                    "type": "ansible_base.authentication.authenticator_plugins.azuread",
+                    "enabled": False,
+                    "create_objects": True,
+                    "remove_users": False,
+                    "configuration": {
+                        "KEY": key_value,
+                        "SECRET": secret_value,
+                        "GROUPS_CLAIM": "groups",
+                    },
+                },
+                'org_mappers': org_mappers,
+                'team_mappers': team_mappers,
+                'login_redirect_override': login_redirect_override,
+            }
+        ]
+
+    def create_gateway_authenticator(self, config):
+        """Create an Azure AD authenticator in Gateway."""
+
+        category = config["category"]
+        gateway_config = config["settings"]
+
+        self._write_output(f"\n--- Processing {category} authenticator ---")
+        self._write_output(f"Name: {gateway_config['name']}")
+        self._write_output(f"Slug: {gateway_config['slug']}")
+        self._write_output(f"Type: {gateway_config['type']}")
+
+        # CALLBACK_URL - automatically created by Gateway
+        # GROUPS_CLAIM - Not an AWX feature
+        # ADDITIONAL_UNVERIFIED_ARGS - Not an AWX feature
+        ignore_keys = ["CALLBACK_URL", "GROUPS_CLAIM", "ADDITIONAL_UNVERIFIED_ARGS"]
+
+        # Submit the authenticator (create or update as needed)
+        result = self.submit_authenticator(gateway_config, ignore_keys, config)
+
+        # Handle LOGIN_REDIRECT_OVERRIDE if applicable
+        valid_login_urls = ['/sso/login/azuread-oauth2']
+        self.handle_login_override(config, valid_login_urls)
+
+        return result
diff --git a/awx/sso/utils/base_migrator.py b/awx/sso/utils/base_migrator.py
new file mode 100644
index 0000000000..2d3c9246cf
--- /dev/null
+++ b/awx/sso/utils/base_migrator.py
@@ -0,0 +1,679 @@
+"""
+Base authenticator migrator class.
+
+This module defines the contract that all specific authenticator migrators must follow.
+"""
+
+from urllib.parse import urlparse, parse_qs, urlencode
+from django.conf import settings
+from awx.main.utils.gateway_client import GatewayAPIError
+
+
+class BaseAuthenticatorMigrator:
+    """
+    Base class for all authenticator migrators.
+    Defines the contract that all specific authenticator migrators must follow.
+    """
+
+    KEYS_TO_PRESERVE = ['idp']
+    # Class-level flag to track if LOGIN_REDIRECT_OVERRIDE was set by any migrator
+    login_redirect_override_set_by_migrator = False
+    # Class-level variable to store the new LOGIN_REDIRECT_OVERRIDE URL computed by migrators
+    login_redirect_override_new_url = None
+
+    def __init__(self, gateway_client=None, command=None, force=False):
+        """
+        Initialize the authenticator migrator.
+
+        Args:
+            gateway_client: GatewayClient instance for API calls
+            command: Optional Django management command instance (for styled output)
+            force: If True, force migration even if configurations already exist
+        """
+        self.gateway_client = gateway_client
+        self.command = command
+        self.force = force
+        self.encrypted_fields = [
+            # LDAP Fields
+            'BIND_PASSWORD',
+            # The following authenticators all use the same key to store encrypted information:
+            # Generic OIDC
+            # RADIUS
+            # TACACS+
+            # GitHub OAuth2
+            # Azure AD OAuth2
+            # Google OAuth2
+            'SECRET',
+            # SAML Fields
+            'SP_PRIVATE_KEY',
+        ]
+
+    def migrate(self):
+        """
+        Main entry point - orchestrates the migration process.
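+
+        Example return value (illustrative; the keys mirror the summary dict
+        built at the end of this method):
+
+            {'created': 1, 'updated': 0, 'unchanged': 0, 'failed': 0,
+             'mappers_created': 2, 'mappers_updated': 0, 'mappers_failed': 0,
+             'settings_created': 0, 'settings_updated': 0,
+             'settings_unchanged': 0, 'settings_failed': 0}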
+
+        Returns:
+            dict: Summary of migration results
+        """
+        # Get configuration from AWX/Controller
+        configs = self.get_controller_config()
+
+        if not configs:
+            self._write_output(f'No {self.get_authenticator_type()} authenticators found to migrate.', 'warning')
+            return {'created': 0, 'updated': 0, 'unchanged': 0, 'failed': 0, 'mappers_created': 0, 'mappers_updated': 0, 'mappers_failed': 0}
+
+        self._write_output(f'Found {len(configs)} {self.get_authenticator_type()} authentication configuration(s).', 'success')
+
+        # Process each authenticator configuration
+        created_authenticators = []
+        updated_authenticators = []
+        unchanged_authenticators = []
+        failed_authenticators = []
+
+        for config in configs:
+            result = self.create_gateway_authenticator(config)
+            if result['success']:
+                if result['action'] == 'created':
+                    created_authenticators.append(config)
+                elif result['action'] == 'updated':
+                    updated_authenticators.append(config)
+                elif result['action'] == 'skipped':
+                    unchanged_authenticators.append(config)
+            else:
+                failed_authenticators.append(config)
+
+        # Process mappers for successfully created/updated/unchanged authenticators
+        mappers_created = 0
+        mappers_updated = 0
+        mappers_failed = 0
+        successful_authenticators = created_authenticators + updated_authenticators + unchanged_authenticators
+        if successful_authenticators:
+            self._write_output('\n=== Processing Authenticator Mappers ===', 'success')
+            for config in successful_authenticators:
+                mapper_result = self._process_gateway_mappers(config)
+                mappers_created += mapper_result['created']
+                mappers_updated += mapper_result['updated']
+                mappers_failed += mapper_result['failed']
+
+        # Authenticators don't have settings, so settings counts are always 0
+        return {
+            'created': len(created_authenticators),
+            'updated': len(updated_authenticators),
+            'unchanged': len(unchanged_authenticators),
+            'failed': len(failed_authenticators),
+            'mappers_created': mappers_created,
+            'mappers_updated': mappers_updated,
+            'mappers_failed': mappers_failed,
+            'settings_created': 0,
+            'settings_updated': 0,
+            'settings_unchanged': 0,
+            'settings_failed': 0,
+        }
+
+    def get_controller_config(self):
+        """
+        Gather configuration from AWX/Controller.
+
+        Returns:
+            list: List of configuration dictionaries
+        """
+        raise NotImplementedError("Subclasses must implement get_controller_config()")
+
+    def create_gateway_authenticator(self, config):
+        """
+        Create authenticator in Gateway.
+
+        Args:
+            config: Configuration dictionary from get_controller_config()
+
+        Returns:
+            dict: Result with 'success' (bool), 'action' ('created', 'updated', 'skipped', or a failure label), 'error' (str or None)
+        """
+        raise NotImplementedError("Subclasses must implement create_gateway_authenticator()")
+
+    def get_authenticator_type(self):
+        """
+        Get the human-readable authenticator type name.
+
+        Returns:
+            str: Authenticator type name for logging
+        """
+        raise NotImplementedError("Subclasses must implement get_authenticator_type()")
+
+    def _generate_authenticator_slug(self, auth_type, category):
+        """Generate a deterministic slug for an authenticator."""
+        return f"aap-{auth_type}-{category}".lower()
+
+    def submit_authenticator(self, gateway_config, ignore_keys=None, config=None):
+        """
+        Submit an authenticator to Gateway - either create new or update existing.
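+
+        Example (illustrative only; the payload shape follows the migrators in
+        this package and the return shape matches the dicts built below):
+
+            result = self.submit_authenticator(
+                {'name': 'oidc', 'slug': 'aap-oidc-oidc', 'type': '...', 'configuration': {...}},
+                ignore_keys=['CALLBACK_URL'],
+                config=config,
+            )
+            # result -> {'success': True, 'action': 'created', 'error': None}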
+
+        Args:
+            gateway_config: Complete Gateway authenticator configuration
+            ignore_keys: List of configuration keys to ignore during comparison (defaults to an empty list)
+            config: Optional AWX config dict to store result data
+
+        Returns:
+            dict: Result with 'success' (bool), 'action' ('created', 'updated', 'skipped'), 'error' (str or None)
+        """
+        # Guard against mutable default arguments: a shared default dict would
+        # leak gateway_authenticator_id/gateway_authenticator across calls
+        if ignore_keys is None:
+            ignore_keys = []
+        if config is None:
+            config = {}
+
+        authenticator_slug = gateway_config.get('slug')
+        if not authenticator_slug:
+            self._write_output('Gateway config missing slug, cannot submit authenticator', 'error')
+            return {'success': False, 'action': None, 'error': 'Missing slug'}
+
+        try:
+            # Check if authenticator already exists by slug
+            existing_authenticator = self.gateway_client.get_authenticator_by_slug(authenticator_slug)
+
+            if existing_authenticator:
+                # Authenticator exists, check if configuration matches
+                authenticator_id = existing_authenticator.get('id')
+
+                configs_match, differences = self._authenticator_configs_match(existing_authenticator, gateway_config, ignore_keys)
+
+                if configs_match:
+                    self._write_output(f'⚠ Authenticator already exists with matching configuration (ID: {authenticator_id})', 'warning')
+                    # Store the existing result for mapper creation
+                    config['gateway_authenticator_id'] = authenticator_id
+                    config['gateway_authenticator'] = existing_authenticator
+                    return {'success': True, 'action': 'skipped', 'error': None}
+                else:
+                    self._write_output(f'⚠ Authenticator exists but configuration differs (ID: {authenticator_id})', 'warning')
+                    self._write_output('  Configuration comparison:')
+
+                    # Log differences between the existing and the new configuration in case of an update
+                    for difference in differences:
+                        self._write_output(f'  {difference}')
+
+                    # Update the existing authenticator
+                    self._write_output('  Updating authenticator with new configuration...')
+                    try:
+                        # Don't include the slug in the update since it shouldn't change
+                        update_config = gateway_config.copy()
+                        if 'slug' in update_config:
+                            del update_config['slug']
+
+                        result = self.gateway_client.update_authenticator(authenticator_id, update_config)
+                        self._write_output(f'✓ Successfully updated authenticator with ID: {authenticator_id}', 'success')
+
+                        # Store the updated result for mapper creation
+                        config['gateway_authenticator_id'] = authenticator_id
+                        config['gateway_authenticator'] = result
+                        return {'success': True, 'action': 'updated', 'error': None}
+                    except GatewayAPIError as e:
+                        self._write_output(f'✗ Failed to update authenticator: {e.message}', 'error')
+                        if e.response_data:
+                            self._write_output(f'  Details: {e.response_data}', 'error')
+                        return {'success': False, 'action': 'update_failed', 'error': e.message}
+            else:
+                # Authenticator doesn't exist, create it
+                self._write_output('Creating new authenticator...')
+
+                # Create the authenticator
+                result = self.gateway_client.create_authenticator(gateway_config)
+
+                self._write_output(f'✓ Successfully created authenticator with ID: {result.get("id")}', 'success')
+
+                # Store the result for potential mapper creation later
+                config['gateway_authenticator_id'] = result.get('id')
+                config['gateway_authenticator'] = result
+                return {'success': True, 'action': 'created', 'error': None}
+
+        except GatewayAPIError as e:
+            self._write_output(f'✗ Failed to submit authenticator: {e.message}', 'error')
+            if e.response_data:
+                self._write_output(f'  Details: {e.response_data}', 'error')
+            return {'success': False, 'action': 'failed', 'error': e.message}
+        except Exception as e:
+            self._write_output(f'✗ Unexpected error submitting authenticator: {str(e)}', 'error')
+            return {'success': False, 'action': 'failed', 'error': str(e)}
+
+    def _authenticator_configs_match(self, existing_auth, new_config, ignore_keys=None):
+        """
+        Compare existing authenticator configuration with new configuration.
+
+        Args:
+            existing_auth: Existing authenticator data from Gateway
+            new_config: New authenticator configuration to be created
+            ignore_keys: List of configuration keys to ignore during comparison
+                         (e.g., ['CALLBACK_URL'] for auto-generated fields)
+
+        Returns:
+            tuple: (bool indicating whether the configurations match, list of human-readable differences)
+        """
+        if ignore_keys is None:
+            ignore_keys = []
+
+        # Add encrypted fields to ignore_keys if force flag is not set
+        # This prevents secrets from being updated unless explicitly forced
+        effective_ignore_keys = ignore_keys.copy()
+        if not self.force:
+            effective_ignore_keys.extend(self.encrypted_fields)
+
+        # Keep track of the differences between the existing and the new configuration
+        # Logging them makes debugging much easier
+        differences = []
+
+        # Compare each top-level field independently (an elif chain would stop at
+        # the first difference and hide the rest)
+        if existing_auth.get('name') != new_config.get('name'):
+            differences.append(f'  name: existing="{existing_auth.get("name")}" vs new="{new_config.get("name")}"')
+        if existing_auth.get('type') != new_config.get('type'):
+            differences.append(f'  type: existing="{existing_auth.get("type")}" vs new="{new_config.get("type")}"')
+        if existing_auth.get('enabled') != new_config.get('enabled'):
+            differences.append(f'  enabled: existing="{existing_auth.get("enabled")}" vs new="{new_config.get("enabled")}"')
+        if existing_auth.get('create_objects') != new_config.get('create_objects'):
+            differences.append(f'  create_objects: existing="{existing_auth.get("create_objects")}" vs new="{new_config.get("create_objects")}"')
+        if existing_auth.get('remove_users') != new_config.get('remove_users'):
+            differences.append(f'  remove_users: existing="{existing_auth.get("remove_users")}" vs new="{new_config.get("remove_users")}"')
+
+        # Compare configuration section
+        existing_config = existing_auth.get('configuration', {})
+        new_config_section = new_config.get('configuration', {})
+
+        # Helper function to check if a key should be ignored
+        def should_ignore_key(config_key):
+            return config_key in effective_ignore_keys
+
+        # Check if all keys in new config exist in existing config with same values
+        for key, value in new_config_section.items():
+            if should_ignore_key(key):
+                continue
+            if key not in existing_config:
+                differences.append(f'  {key}: existing=<missing> vs new="{value}"')
+            elif existing_config[key] != value:
+                differences.append(f'  {key}: existing="{existing_config.get(key)}" vs new="{value}"')
+
+        # Check if existing config has extra keys that new config doesn't have
+        # (this might indicate configuration drift), but ignore keys in ignore_keys
+        for key in existing_config:
+            if should_ignore_key(key):
+                continue
+            if key not in new_config_section:
+                differences.append(f'  {key}: existing="{existing_config.get(key)}" vs new=<missing>')
+
+        return len(differences) == 0, differences
+
+    def _compare_mapper_lists(self, existing_mappers, new_mappers, ignore_keys=None):
+        """
+        Compare existing and new mapper lists to determine which need updates vs creation.
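+
+        Example (illustrative; 'name' is the structural key used by
+        _mappers_match_structurally below):
+
+            existing = [{'id': 7, 'name': 'GitHub-Org-1', 'order': 1}]
+            new = [{'name': 'GitHub-Org-1', 'order': 2}, {'name': 'GitHub-Team-1', 'order': 3}]
+            to_update, to_create = self._compare_mapper_lists(existing, new, ignore_keys=['id'])
+            # to_update -> [(existing[0], new[0])]; to_create -> [new[1]]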
+
+        Args:
+            existing_mappers: List of existing mapper configurations from Gateway
+            new_mappers: List of new mapper configurations to be created/updated
+            ignore_keys: List of keys to ignore during comparison (e.g., auto-generated fields)
+
+        Returns:
+            tuple: (mappers_to_update, mappers_to_create)
+                mappers_to_update: List of tuples (existing_mapper, new_mapper) for updates
+                mappers_to_create: List of new_mapper configs that don't match any existing
+        """
+        if ignore_keys is None:
+            ignore_keys = []
+
+        mappers_to_update = []
+        mappers_to_create = []
+
+        for new_mapper in new_mappers:
+            matched_existing = None
+
+            # Try to find a matching existing mapper
+            for existing_mapper in existing_mappers:
+                if self._mappers_match_structurally(existing_mapper, new_mapper):
+                    matched_existing = existing_mapper
+                    break
+
+            if matched_existing:
+                # Check if the configuration actually differs (ignoring auto-generated fields)
+                if not self._mapper_configs_match(matched_existing, new_mapper, ignore_keys):
+                    mappers_to_update.append((matched_existing, new_mapper))
+                # If configs match exactly, no action needed (mapper is up to date)
+            else:
+                # No matching existing mapper found, needs to be created
+                mappers_to_create.append(new_mapper)
+
+        return mappers_to_update, mappers_to_create
+
+    def _mappers_match_structurally(self, existing_mapper, new_mapper):
+        """
+        Check if two mappers match structurally, i.e. whether they represent the
+        same logical mapping. Structural identity is currently determined by the
+        mapper name alone.
+
+        Args:
+            existing_mapper: Existing mapper configuration from Gateway
+            new_mapper: New mapper configuration
+
+        Returns:
+            bool: True if mappers represent the same logical mapping
+        """
+
+        # Compare key structural fields that identify the same logical mapper
+        structural_fields = ['name']
+
+        for field in structural_fields:
+            if existing_mapper.get(field) != new_mapper.get(field):
+                return False
+
+        return True
+
+    def _mapper_configs_match(self, existing_mapper, new_mapper, ignore_keys=None):
+        """
+        Compare mapper configurations to check if they are identical.
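+
+        Example (illustrative):
+
+            self._mapper_configs_match(
+                {'id': 5, 'name': 'x', 'order': 1},
+                {'name': 'x', 'order': 1},
+                ignore_keys=['id'],
+            )
+            # -> True ('id' is ignored; all remaining keys are equal)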
+ + Args: + existing_mapper: Existing mapper configuration from Gateway + new_mapper: New mapper configuration + ignore_keys: List of keys to ignore during comparison + + Returns: + bool: True if configurations match, False otherwise + """ + if ignore_keys is None: + ignore_keys = [] + + # Helper function to check if a key should be ignored + def should_ignore_key(config_key): + return config_key in ignore_keys + + # Compare all mapper fields except ignored ones + all_keys = set(existing_mapper.keys()) | set(new_mapper.keys()) + + for key in all_keys: + if should_ignore_key(key): + continue + + existing_value = existing_mapper.get(key) + new_value = new_mapper.get(key) + + if existing_value != new_value: + return False + + return True + + def _process_gateway_mappers(self, config): + """Process authenticator mappers in Gateway from AWX config - create or update as needed.""" + authenticator_id = config.get('gateway_authenticator_id') + if not authenticator_id: + self._write_output(f'No authenticator ID found for {config["category"]}, skipping mappers', 'error') + return {'created': 0, 'updated': 0, 'failed': 0} + + category = config['category'] + org_mappers = config.get('org_mappers', []) + team_mappers = config.get('team_mappers', []) + role_mappers = config.get('role_mappers', []) + allow_mappers = config.get('allow_mappers', []) + all_new_mappers = org_mappers + team_mappers + role_mappers + allow_mappers + + if len(all_new_mappers) == 0: + self._write_output(f'No mappers to process for {category} authenticator') + return {'created': 0, 'updated': 0, 'failed': 0} + + self._write_output(f'\n--- Processing mappers for {category} authenticator (ID: {authenticator_id}) ---') + self._write_output(f'Organization mappers: {len(org_mappers)}') + self._write_output(f'Team mappers: {len(team_mappers)}') + self._write_output(f'Role mappers: {len(role_mappers)}') + self._write_output(f'Allow mappers: {len(allow_mappers)}') + + # Get existing mappers from Gateway + try: + existing_mappers = self.gateway_client.get_authenticator_maps(authenticator_id) + except GatewayAPIError as e: + self._write_output(f'Failed to retrieve existing mappers: {e.message}', 'error') + return {'created': 0, 'updated': 0, 'failed': len(all_new_mappers)} + + # Define mapper-specific ignore keys (can be overridden by subclasses) + ignore_keys = self._get_mapper_ignore_keys() + + # Compare existing vs new mappers + mappers_to_update, mappers_to_create = self._compare_mapper_lists(existing_mappers, all_new_mappers, ignore_keys) + + self._write_output(f'Mappers to create: {len(mappers_to_create)}') + self._write_output(f'Mappers to update: {len(mappers_to_update)}') + + created_count = 0 + updated_count = 0 + failed_count = 0 + + # Process updates + for existing_mapper, new_mapper in mappers_to_update: + if self._update_single_mapper(existing_mapper, new_mapper): + updated_count += 1 + else: + failed_count += 1 + + # Process creations + for new_mapper in mappers_to_create: + mapper_type = new_mapper.get('map_type', 'unknown') + if self._create_single_mapper(authenticator_id, new_mapper, mapper_type): + created_count += 1 + else: + failed_count += 1 + + # Summary + self._write_output(f'Mappers created: {created_count}, updated: {updated_count}, failed: {failed_count}') + return {'created': created_count, 'updated': updated_count, 'failed': failed_count} + + def _get_mapper_ignore_keys(self): + """ + Get list of mapper keys to ignore during comparison. + Can be overridden by subclasses for mapper-specific ignore keys. 
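+
+        Example override in a subclass (illustrative sketch; 'ugroup' is a
+        made-up field name):
+
+            def _get_mapper_ignore_keys(self):
+                return super()._get_mapper_ignore_keys() + ['ugroup']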
+ + Returns: + list: List of keys to ignore (e.g., auto-generated fields) + """ + return ['id', 'authenticator', 'created', 'modified', 'summary_fields', 'modified_by', 'created_by', 'related', 'url'] + + def _update_single_mapper(self, existing_mapper, new_mapper): + """Update a single mapper in Gateway. + + Args: + existing_mapper: Existing mapper data from Gateway + new_mapper: New mapper configuration to update to + + Returns: + bool: True if mapper was updated successfully, False otherwise + """ + try: + mapper_id = existing_mapper.get('id') + if not mapper_id: + self._write_output(' ✗ Existing mapper missing ID, cannot update', 'error') + return False + + # Prepare update config - don't include fields that shouldn't be updated + update_config = new_mapper.copy() + + # Remove fields that shouldn't be updated (read-only or auto-generated) + fields_to_remove = ['id', 'authenticator', 'created', 'modified'] + for field in fields_to_remove: + update_config.pop(field, None) + + # Update the mapper + self.gateway_client.update_authenticator_map(mapper_id, update_config) + + mapper_name = new_mapper.get('name', 'Unknown') + self._write_output(f' ✓ Updated mapper: {mapper_name}', 'success') + return True + + except GatewayAPIError as e: + mapper_name = new_mapper.get('name', 'Unknown') + self._write_output(f' ✗ Failed to update mapper "{mapper_name}": {e.message}', 'error') + if e.response_data: + self._write_output(f' Details: {e.response_data}', 'error') + return False + except Exception as e: + mapper_name = new_mapper.get('name', 'Unknown') + self._write_output(f' ✗ Unexpected error updating mapper "{mapper_name}": {str(e)}', 'error') + return False + + def _create_single_mapper(self, authenticator_id, mapper_config, mapper_type): + """Create a single mapper in Gateway.""" + try: + # Update the mapper config with the correct authenticator ID + mapper_config = mapper_config.copy() # Don't modify the original + mapper_config['authenticator'] = authenticator_id + + # Create the mapper + self.gateway_client.create_authenticator_map(authenticator_id, mapper_config) + + mapper_name = mapper_config.get('name', 'Unknown') + self._write_output(f' ✓ Created {mapper_type} mapper: {mapper_name}', 'success') + return True + + except GatewayAPIError as e: + mapper_name = mapper_config.get('name', 'Unknown') + self._write_output(f' ✗ Failed to create {mapper_type} mapper "{mapper_name}": {e.message}', 'error') + if e.response_data: + self._write_output(f' Details: {e.response_data}', 'error') + return False + except Exception as e: + mapper_name = mapper_config.get('name', 'Unknown') + self._write_output(f' ✗ Unexpected error creating {mapper_type} mapper "{mapper_name}": {str(e)}', 'error') + return False + + def get_social_org_map(self, authenticator_setting_name=None): + """ + Get social auth organization map with fallback to global setting. 
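+
+        Example (illustrative; falls back to the global SOCIAL_AUTH_ORGANIZATION_MAP
+        when the authenticator-specific setting is unset or empty):
+
+            org_map = self.get_social_org_map('SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP')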
+ + Args: + authenticator_setting_name: Name of the authenticator-specific organization map setting + (e.g., 'SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP') + + Returns: + dict: Organization mapping configuration, with fallback to global setting + """ + # Try authenticator-specific setting first + if authenticator_setting_name: + if authenticator_map := getattr(settings, authenticator_setting_name, None): + return authenticator_map + + # Fall back to global setting + global_map = getattr(settings, 'SOCIAL_AUTH_ORGANIZATION_MAP', {}) + return global_map + + def get_social_team_map(self, authenticator_setting_name=None): + """ + Get social auth team map with fallback to global setting. + + Args: + authenticator_setting_name: Name of the authenticator-specific team map setting + (e.g., 'SOCIAL_AUTH_GITHUB_TEAM_MAP') + + Returns: + dict: Team mapping configuration, with fallback to global setting + """ + # Try authenticator-specific setting first + if authenticator_setting_name: + if authenticator_map := getattr(settings, authenticator_setting_name, None): + return authenticator_map + + # Fall back to global setting + global_map = getattr(settings, 'SOCIAL_AUTH_TEAM_MAP', {}) + return global_map + + def handle_login_override(self, config, valid_login_urls): + """ + Handle LOGIN_REDIRECT_OVERRIDE setting for this authenticator. + + This method checks if the login_redirect_override from the config matches + any of the provided valid_login_urls. If it matches, it updates the + LOGIN_REDIRECT_OVERRIDE setting in Gateway with the new authenticator's + URL and sets the class flag to indicate it was handled. + + Args: + config: Configuration dictionary containing: + - login_redirect_override: The current LOGIN_REDIRECT_OVERRIDE value + - gateway_authenticator: The created/updated authenticator info + valid_login_urls: List of URL patterns to match against + """ + # Check if another migrator has already handled login redirect override + if BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator: + raise RuntimeError("LOGIN_REDIRECT_OVERRIDE has already been handled by another migrator") + + login_redirect_override = config.get('login_redirect_override') + if not login_redirect_override: + return + + # Check if the login_redirect_override matches any of the provided valid URLs + url_matches = False + parsed_redirect = urlparse(login_redirect_override) + self.redirect_query_dict = parse_qs(parsed_redirect.query, keep_blank_values=True) if parsed_redirect.query else {} + + for valid_url in valid_login_urls: + parsed_valid = urlparse(valid_url) + + # Compare path: redirect path should match or contain the valid path at proper boundaries + if parsed_redirect.path == parsed_valid.path: + path_matches = True + elif parsed_redirect.path.startswith(parsed_valid.path): + # Ensure the match is at a path boundary (followed by '/' or end of string) + next_char_pos = len(parsed_valid.path) + if next_char_pos >= len(parsed_redirect.path) or parsed_redirect.path[next_char_pos] in ['/', '?']: + path_matches = True + else: + path_matches = False + else: + path_matches = False + + # Compare query: if valid URL has query params, they should be present in redirect URL + query_matches = True + if parsed_valid.query: + # Parse query parameters for both URLs + valid_params = parse_qs(parsed_valid.query, keep_blank_values=True) + + # All valid URL query params must be present in redirect URL with same values + query_matches = all(param in self.redirect_query_dict and self.redirect_query_dict[param] == values for param, 
values in valid_params.items())
+
+            if path_matches and query_matches:
+                url_matches = True
+                break
+
+        if not url_matches:
+            return
+
+        # Extract the created authenticator from config
+        gateway_authenticator = config.get('gateway_authenticator')
+        if not gateway_authenticator:
+            return
+
+        sso_login_url = gateway_authenticator.get('sso_login_url')
+        if not sso_login_url:
+            return
+
+        # Compute the new LOGIN_REDIRECT_OVERRIDE URL with the Gateway URL
+        gateway_base_url = self.gateway_client.get_base_url()
+        parsed_sso = urlparse(sso_login_url)
+        parsed_gw = urlparse(gateway_base_url)
+        updated_query = self._updated_query_string(parsed_sso)
+        complete_url = parsed_redirect._replace(scheme=parsed_gw.scheme, path=parsed_sso.path, netloc=parsed_gw.netloc, query=updated_query).geturl()
+        self._write_output(f'LOGIN_REDIRECT_OVERRIDE will be updated to: {complete_url}')
+
+        # Store the new URL in class variable for settings migrator to use
+        BaseAuthenticatorMigrator.login_redirect_override_new_url = complete_url
+
+        # Set the class-level flag to indicate LOGIN_REDIRECT_OVERRIDE was handled by a migrator
+        BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator = True
+
+    def _updated_query_string(self, parsed_sso):
+        if parsed_sso.query:
+            parsed_sso_dict = parse_qs(parsed_sso.query, keep_blank_values=True)
+        else:
+            parsed_sso_dict = {}
+
+        result = {}
+        for k, v in self.redirect_query_dict.items():
+            if k in self.KEYS_TO_PRESERVE and k in parsed_sso_dict:
+                v = parsed_sso_dict[k]
+
+            if isinstance(v, list) and len(v) == 1:
+                result[k] = v[0]
+            else:
+                result[k] = v
+
+        return urlencode(result, doseq=True) if result else ""
+
+    def _write_output(self, message, style=None):
+        """Write output message if command is available."""
+        if self.command:
+            if style == 'success':
+                self.command.stdout.write(self.command.style.SUCCESS(message))
+            elif style == 'warning':
+                self.command.stdout.write(self.command.style.WARNING(message))
+            elif style == 'error':
+                self.command.stdout.write(self.command.style.ERROR(message))
+            else:
+                self.command.stdout.write(message)
diff --git a/awx/sso/utils/github_migrator.py b/awx/sso/utils/github_migrator.py
new file mode 100644
index 0000000000..01057740c2
--- /dev/null
+++ b/awx/sso/utils/github_migrator.py
@@ -0,0 +1,217 @@
+"""
+GitHub authenticator migrator.
+
+This module handles the migration of GitHub authenticators from AWX to Gateway.
+"""
+
+from django.conf import settings
+from awx.conf import settings_registry
+from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
+from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
+import re
+
+
+class GitHubMigrator(BaseAuthenticatorMigrator):
+    """
+    Handles the migration of GitHub authenticators from AWX to Gateway.
+    """
+
+    def get_authenticator_type(self):
+        """Get the human-readable authenticator type name."""
+        return "GitHub"
+
+    def get_controller_config(self):
+        """
+        Export all GitHub authenticators. A GitHub authenticator is only exported
+        if both its OAuth2 key and secret are defined; otherwise it is skipped.
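+
+        Example of one returned entry (illustrative):
+
+            {
+                'category': 'github-org',
+                'settings': {...},  # all registered settings for the category
+                'org_mappers': [...],
+                'team_mappers': [...],
+                'login_redirect_override': None,
+            }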
+
+        Returns:
+            list: List of configured GitHub authentication providers with their settings
+        """
+        github_categories = ['github', 'github-org', 'github-team', 'github-enterprise', 'github-enterprise-org', 'github-enterprise-team']
+        login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)
+
+        found_configs = []
+
+        for category in github_categories:
+            try:
+                category_settings = settings_registry.get_registered_settings(category_slug=category)
+                if category_settings:
+                    config_data = {}
+
+                    key_setting = None
+                    secret_setting = None
+
+                    # Ensure category_settings is iterable and contains strings
+                    if isinstance(category_settings, re.Pattern) or not hasattr(category_settings, '__iter__') or isinstance(category_settings, str):
+                        continue
+
+                    for setting_name in category_settings:
+                        # Skip if setting_name is not a string (e.g., regex pattern)
+                        if not isinstance(setting_name, str):
+                            continue
+                        if setting_name.endswith('_KEY'):
+                            key_setting = setting_name
+                        elif setting_name.endswith('_SECRET'):
+                            secret_setting = setting_name
+
+                    # Skip this category if KEY or SECRET is missing or empty
+                    if not key_setting or not secret_setting:
+                        continue
+
+                    key_value = getattr(settings, key_setting, None)
+                    secret_value = getattr(settings, secret_setting, None)
+
+                    # Skip this category if the GitHub key and/or secret are not configured
+                    if not key_value or not secret_value:
+                        continue
+
+                    # If we have both key and secret, collect all settings
+                    org_map_setting_name = None
+                    team_map_setting_name = None
+
+                    for setting_name in category_settings:
+                        # Skip if setting_name is not a string (e.g., regex pattern)
+                        if not isinstance(setting_name, str):
+                            continue
+                        value = getattr(settings, setting_name, None)
+                        config_data[setting_name] = value
+
+                        # Capture org and team map setting names for special processing
+                        if setting_name.endswith('_ORGANIZATION_MAP'):
+                            org_map_setting_name = setting_name
+                        elif setting_name.endswith('_TEAM_MAP'):
+                            team_map_setting_name = setting_name
+
+                    # Get org and team mappings using the new fallback functions
+                    org_map_value = self.get_social_org_map(org_map_setting_name) if org_map_setting_name else {}
+                    team_map_value = self.get_social_team_map(team_map_setting_name) if team_map_setting_name else {}
+
+                    # Convert GitHub org and team mappings from AWX to the Gateway format
+                    # Start with order 1 and maintain sequence across both org and team mappers
+                    org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1)
+                    team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order)
+
+                    found_configs.append(
+                        {
+                            'category': category,
+                            'settings': config_data,
+                            'org_mappers': org_mappers,
+                            'team_mappers': team_mappers,
+                            'login_redirect_override': login_redirect_override,
+                        }
+                    )
+
+            except Exception as e:
+                raise Exception(f'Could not retrieve {category} settings: {str(e)}')
+
+        return found_configs
+
+    def create_gateway_authenticator(self, config):
+        """Create a GitHub authenticator in Gateway."""
+        category = config['category']
+        config_settings = config['settings']  # don't shadow the module-level django settings import
+
+        # Extract the OAuth2 credentials
+        key_value = None
+        secret_value = None
+
+        for setting_name, value in config_settings.items():
+            if setting_name.endswith('_KEY') and value:
+                key_value = value
+            elif setting_name.endswith('_SECRET') and value:
+                secret_value = value
+
+        if not key_value or not secret_value:
+            self._write_output(f'Skipping {category}: missing OAuth2 credentials', 'warning')
+            return {'success': False, 'action': 'skipped', 'error': 'Missing OAuth2 credentials'}
+
+        # Generate authenticator name and slug
+        authenticator_name = category
+        authenticator_slug = self._generate_authenticator_slug('github', category)
+
+        # Map AWX category to Gateway authenticator type
+        type_mapping = {
+            'github': 'ansible_base.authentication.authenticator_plugins.github',
+            'github-org': 'ansible_base.authentication.authenticator_plugins.github_org',
+            'github-team': 'ansible_base.authentication.authenticator_plugins.github_team',
+            'github-enterprise': 'ansible_base.authentication.authenticator_plugins.github_enterprise',
+            'github-enterprise-org': 'ansible_base.authentication.authenticator_plugins.github_enterprise_org',
+            'github-enterprise-team': 'ansible_base.authentication.authenticator_plugins.github_enterprise_team',
+        }
+
+        authenticator_type = type_mapping.get(category)
+        if not authenticator_type:
+            self._write_output(f'Unknown category {category}, skipping', 'warning')
+            return {'success': False, 'action': 'skipped', 'error': f'Unknown category {category}'}
+
+        self._write_output(f'\n--- Processing {category} authenticator ---')
+        self._write_output(f'Name: {authenticator_name}')
+        self._write_output(f'Slug: {authenticator_slug}')
+        self._write_output(f'Type: {authenticator_type}')
+        self._write_output(f'Client ID: {key_value}')
+        self._write_output(f'Client Secret: {"*" * 8}')
+
+        # Build Gateway authenticator configuration
+        gateway_config = {
+            "name": authenticator_name,
+            "slug": authenticator_slug,
+            "type": authenticator_type,
+            "enabled": False,
+            "create_objects": True,  # Allow Gateway to create users/orgs/teams
+            "remove_users": False,  # Don't remove users by default
+            "configuration": {"KEY": key_value, "SECRET": secret_value},
+        }
+
+        # Add any additional configuration based on AWX settings
+        additional_config = self._build_additional_config(category, config_settings)
+        gateway_config["configuration"].update(additional_config)
+
+        # GitHub authenticators have auto-generated fields that should be ignored during comparison
+        # CALLBACK_URL - automatically created by Gateway
+        # SCOPE - relevant for mappers with team/org requirement, allows to read the org or team
+        # SECRET is not listed here: encrypted fields are already skipped by the
+        # base class comparison unless force is set, since the decrypted value
+        # cannot be read back from Gateway
+        ignore_keys = ['CALLBACK_URL', 'SCOPE']
+
+        # Submit the authenticator (create or update as needed)
+        result = self.submit_authenticator(gateway_config, ignore_keys, config)
+
+        # Handle LOGIN_REDIRECT_OVERRIDE if applicable
+        valid_login_urls = [f'/sso/login/{category}', f'/sso/login/{category}/']
+        self.handle_login_override(config, valid_login_urls)
+
+        return result
+
+    def _build_additional_config(self, category, config_settings):
+        """Build additional configuration for specific authenticator types."""
+        additional_config = {}
+
+        # Add scope configuration if present
+        for setting_name, value in config_settings.items():
+            if setting_name.endswith('_SCOPE') and value:
+                additional_config['SCOPE'] = value
+                break
+
+        # Add GitHub Enterprise URL if present
+        if 'enterprise' in category:
+            for setting_name, value in config_settings.items():
+                if setting_name.endswith('_API_URL') and value:
+                    additional_config['API_URL'] = value
+                elif setting_name.endswith('_URL') and value:
+                    additional_config['URL'] = value
+
+        # Add organization name for org-specific authenticators
+        if 'org' in category:
+            for setting_name, value in config_settings.items():
+                if setting_name.endswith('_NAME') and value:
+                    additional_config['NAME'] = value
+                    break
+
+        # Add team ID for team-specific authenticators
+        if 'team' in category:
+            for setting_name, value in config_settings.items():
+                if setting_name.endswith('_ID') and value:
+                    additional_config['ID'] = value
+                    break
+
+        return additional_config
diff --git a/awx/sso/utils/google_oauth2_migrator.py b/awx/sso/utils/google_oauth2_migrator.py
new file mode 100644
index 0000000000..7d47f532a3
--- /dev/null
+++ b/awx/sso/utils/google_oauth2_migrator.py
@@ -0,0 +1,102 @@
+"""
+Google OAuth2 authenticator migrator.
+
+This module handles the migration of Google OAuth2 authenticators from AWX to Gateway.
+"""
+
+from django.conf import settings
+from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format
+from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator
+
+
+class GoogleOAuth2Migrator(BaseAuthenticatorMigrator):
+    """
+    Handles the migration of Google OAuth2 authenticators from AWX to Gateway.
+    """
+
+    def get_authenticator_type(self):
+        """Get the human-readable authenticator type name."""
+        return "Google OAuth2"
+
+    def get_controller_config(self):
+        """
+        Export Google OAuth2 authenticators. A Google OAuth2 authenticator is only exported if
+        KEY and SECRET are configured.
+
+        Returns:
+            list: List of configured Google OAuth2 authentication providers with their settings
+        """
+        # Check both KEY and SECRET, matching the contract stated above
+        if not getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None) or not getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET', None):
+            return []
+
+        config_data = {
+            'SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL,
+            'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_KEY,
+            'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET,
+            'SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE': settings.SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE,
+        }
+
+        login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None)
+
+        return [
+            {
+                "category": self.get_authenticator_type(),
+                "settings": config_data,
+                "login_redirect_override": login_redirect_override,
+            }
+        ]
+
+    def _build_mappers(self):
+        org_map = self.get_social_org_map('SOCIAL_AUTH_GOOGLE_OAUTH2_ORGANIZATION_MAP')
+        team_map = self.get_social_team_map('SOCIAL_AUTH_GOOGLE_OAUTH2_TEAM_MAP')
+
+        mappers, order = org_map_to_gateway_format(org_map, 1)
+        team_mappers, _ = team_map_to_gateway_format(team_map, order)
+
+        mappers.extend(team_mappers)
+
+        return mappers
+
+    def create_gateway_authenticator(self, config):
+        """Create a Google OAuth2 authenticator in Gateway."""
+        category = config["category"]
+        config_settings = config['settings']
+
+        authenticator_slug = self._generate_authenticator_slug('google-oauth2', category.replace(" ", "-"))
+
+        self._write_output(f"\n--- Processing {category} authenticator ---")
+
+        gateway_config = {
+            "name": "google",
+            "slug": authenticator_slug,
+            "type": "ansible_base.authentication.authenticator_plugins.google_oauth2",
+            "enabled": False,
+            "create_objects": True,  # Allow Gateway to create users/orgs/teams
+            "remove_users": False,  # Don't remove users by default
+            "configuration": {
+                "KEY": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY'),
+                "SECRET": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET'),
+                "REDIRECT_STATE": True,
+            },
+            "mappers": self._build_mappers(),
+        }
+
+        ignore_keys = ["ACCESS_TOKEN_METHOD", "REVOKE_TOKEN_METHOD"]
+        optional = {
+            "CALLBACK_URL": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL'),
+            "SCOPE": config_settings.get('SOCIAL_AUTH_GOOGLE_OAUTH2_SCOPE'),
+        }
+        for key, value in optional.items():
+            if value:
+                gateway_config["configuration"][key] = value
+            else:
+                ignore_keys.append(key)
+
+        result
= self.submit_authenticator(gateway_config, ignore_keys, config) + + # Handle LOGIN_REDIRECT_OVERRIDE if applicable + valid_login_urls = ['/sso/login/google-oauth2'] + self.handle_login_override(config, valid_login_urls) + + return result diff --git a/awx/sso/utils/ldap_migrator.py b/awx/sso/utils/ldap_migrator.py new file mode 100644 index 0000000000..de06d34888 --- /dev/null +++ b/awx/sso/utils/ldap_migrator.py @@ -0,0 +1,368 @@ +""" +LDAP authenticator migrator. + +This module handles the migration of LDAP authenticators from AWX to Gateway. +""" + +from django.conf import settings +from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format, role_map_to_gateway_format +from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator +import ldap + + +class LDAPMigrator(BaseAuthenticatorMigrator): + """ + Handles the migration of LDAP authenticators from AWX to Gateway. + """ + + def get_authenticator_type(self): + """Get the human-readable authenticator type name.""" + return "LDAP" + + def get_controller_config(self): + """ + Export all LDAP authenticators. An LDAP authenticator is only exported if + SERVER_URI is configured. Otherwise it will be skipped. + + Returns: + list: List of configured LDAP authentication providers with their settings + """ + # AWX supports up to 6 LDAP configurations: AUTH_LDAP (default) and AUTH_LDAP_1 through AUTH_LDAP_5 + ldap_instances = [None, 1, 2, 3, 4, 5] # None represents the default AUTH_LDAP_ configuration + found_configs = [] + + for instance in ldap_instances: + # Build the prefix for this LDAP instance + prefix = f"AUTH_LDAP_{instance}_" if instance is not None else "AUTH_LDAP_" + # The authenticator category is always "ldap" + category = "ldap" + + try: + # Get all LDAP settings for this instance + config_data = self._get_ldap_instance_config(prefix) + except Exception as e: + raise Exception(f'Could not retrieve {category} settings: {str(e)}') + + # Skip if SERVER_URI is not configured (required for LDAP to function) + if not config_data.get('SERVER_URI'): + continue + + # Convert organization, team, and role mappings to Gateway format + org_map_value = config_data.get('ORGANIZATION_MAP', {}) + team_map_value = config_data.get('TEAM_MAP', {}) + role_map_value = config_data.get('USER_FLAGS_BY_GROUP', {}) + require_group_value = config_data.get('REQUIRE_GROUP', {}) + deny_group_value = config_data.get('DENY_GROUP', {}) + + allow_mappers = [] + + # Start with order 1 and maintain sequence across org, team, and role mappers + allow_mappers, next_order = self._ldap_group_allow_to_gateway_format(allow_mappers, deny_group_value, deny=True, start_order=1) + allow_mappers, next_order = self._ldap_group_allow_to_gateway_format(allow_mappers, require_group_value, deny=False, start_order=next_order) + + org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=next_order, auth_type='ldap') + team_mappers, next_order = team_map_to_gateway_format(team_map_value, start_order=next_order, auth_type='ldap') + role_mappers, _ = role_map_to_gateway_format(role_map_value, start_order=next_order) + + found_configs.append( + { + 'category': category, + 'settings': config_data, + 'org_mappers': org_mappers, + 'team_mappers': team_mappers, + 'role_mappers': role_mappers, + 'allow_mappers': allow_mappers, + } + ) + + return found_configs + + def _get_ldap_instance_config(self, prefix): + """ + Get all LDAP configuration settings for a specific instance. 
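+
+        Example (illustrative; the prefix selects one of the up to six
+        AUTH_LDAP configurations enumerated above, and the server address is a
+        placeholder):
+
+            config = self._get_ldap_instance_config('AUTH_LDAP_1_')
+            # e.g. config['SERVER_URI'] -> ['ldaps://ldap.example.org']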
+
+        Args:
+            prefix: The setting prefix (e.g., 'AUTH_LDAP_' or 'AUTH_LDAP_1_')
+
+        Returns:
+            dict: Dictionary of LDAP configuration settings
+        """
+        # Define all LDAP setting keys
+        ldap_keys = [
+            'SERVER_URI',  # Required: LDAP server URI(s)
+            'BIND_DN',  # Optional: Bind DN for authentication
+            'BIND_PASSWORD',  # Optional: Bind password
+            'START_TLS',  # Optional: Enable TLS
+            'CONNECTION_OPTIONS',  # Optional: LDAP connection options
+            'USER_SEARCH',  # Optional: User search configuration
+            'USER_DN_TEMPLATE',  # Optional: User DN template
+            'USER_ATTR_MAP',  # Optional: User attribute mapping
+            'GROUP_SEARCH',  # Optional: Group search configuration
+            'GROUP_TYPE',  # Optional: Group type class
+            'GROUP_TYPE_PARAMS',  # Optional: Group type parameters
+            'REQUIRE_GROUP',  # Optional: Required group DN
+            'DENY_GROUP',  # Optional: Denied group DN
+            'USER_FLAGS_BY_GROUP',  # Optional: User flags mapping
+            'ORGANIZATION_MAP',  # Optional: Organization mapping
+            'TEAM_MAP',  # Optional: Team mapping
+        ]
+
+        config_data = {}
+
+        for key in ldap_keys:
+            setting_name = f"{prefix}{key}"
+            value = getattr(settings, setting_name, None)
+
+            # Handle special field types that need conversion
+            if key == 'GROUP_TYPE' and value:
+                # Convert GROUP_TYPE class to string representation
+                config_data[key] = type(value).__name__
+            elif key == 'SERVER_URI' and value:
+                # Convert SERVER_URI to list format if it's a comma-separated string
+                config_data[key] = [uri.strip() for uri in value.split(',')]
+            elif key in ['USER_SEARCH', 'GROUP_SEARCH'] and value:
+                # Convert LDAPSearch objects to list format [base_dn, scope, filter]
+                if hasattr(value, 'base_dn') and hasattr(value, 'filterstr'):
+                    # Use the search's actual scope; ldap.SCOPE_SUBTREE is the default
+                    scope = getattr(value, 'scope', ldap.SCOPE_SUBTREE)
+                    scope_name = {ldap.SCOPE_BASE: 'SCOPE_BASE', ldap.SCOPE_ONELEVEL: 'SCOPE_ONELEVEL', ldap.SCOPE_SUBTREE: 'SCOPE_SUBTREE'}.get(
+                        scope, 'SCOPE_SUBTREE'
+                    )
+                    config_data[key] = [value.base_dn, scope_name, value.filterstr]
+                else:
+                    config_data[key] = value
+            elif key in ['USER_ATTR_MAP', 'GROUP_TYPE_PARAMS', 'USER_FLAGS_BY_GROUP', 'ORGANIZATION_MAP', 'TEAM_MAP']:
+                # Ensure dict fields are properly handled
+                config_data[key] = value if value is not None else {}
+            elif key == 'CONNECTION_OPTIONS' and value:
+                # CONNECTION_OPTIONS is a dict of LDAP options (value is truthy here)
+                config_data[key] = value
+            else:
+                # Store the value as-is for other fields
+                config_data[key] = value
+
+        return config_data
+
+    def create_gateway_authenticator(self, config):
+        """Create an LDAP authenticator in Gateway."""
+        category = config['category']
+        config_settings = config['settings']  # don't shadow the module-level django settings import
+
+        # Generate a deterministic slug for this authenticator
+        authenticator_slug = self._generate_authenticator_slug('ldap', category)
+
+        # Build the gateway payload
+        gateway_config = {
+            'name': category,
+            'slug': authenticator_slug,
+            'type': 'ansible_base.authentication.authenticator_plugins.ldap',
+            'create_objects': True,
+            'remove_users': False,
+            'enabled': True,
+            'configuration': self._build_ldap_configuration(config_settings),
+        }
+
+        self._write_output(f'Creating LDAP authenticator: {gateway_config["name"]}')
+
+        # BIND_PASSWORD is encrypted in Gateway and cannot be compared; it is
+        # already skipped by the base class comparison (encrypted fields) unless
+        # force is set, so nothing extra needs to be ignored here
+        ignore_keys = []
+
+        # Submit the authenticator using the base class method
+        return self.submit_authenticator(gateway_config, config=config, ignore_keys=ignore_keys)
+
+    def
_build_ldap_configuration(self, settings): + """Build the LDAP configuration section for Gateway.""" + config = {} + + # Server URI is required + if settings.get('SERVER_URI'): + config['SERVER_URI'] = settings['SERVER_URI'] + + # Authentication settings + if settings.get('BIND_DN'): + config['BIND_DN'] = settings['BIND_DN'] + if settings.get('BIND_PASSWORD'): + config['BIND_PASSWORD'] = settings['BIND_PASSWORD'] + + # TLS settings + if settings.get('START_TLS') is not None: + config['START_TLS'] = settings['START_TLS'] + + # User search configuration + if settings.get('USER_SEARCH'): + config['USER_SEARCH'] = settings['USER_SEARCH'] + + # User attribute mapping + if settings.get('USER_ATTR_MAP'): + config['USER_ATTR_MAP'] = settings['USER_ATTR_MAP'] + + # Group search configuration + if settings.get('GROUP_SEARCH'): + config['GROUP_SEARCH'] = settings['GROUP_SEARCH'] + + # Group type and parameters + if settings.get('GROUP_TYPE'): + config['GROUP_TYPE'] = settings['GROUP_TYPE'] + if settings.get('GROUP_TYPE_PARAMS'): + config['GROUP_TYPE_PARAMS'] = settings['GROUP_TYPE_PARAMS'] + + # Connection options - convert numeric LDAP constants to string keys + if settings.get('CONNECTION_OPTIONS'): + config['CONNECTION_OPTIONS'] = self._convert_ldap_connection_options(settings['CONNECTION_OPTIONS']) + + # User DN template + if settings.get('USER_DN_TEMPLATE'): + config['USER_DN_TEMPLATE'] = settings['USER_DN_TEMPLATE'] + + # REQUIRE_GROUP and DENY_GROUP are handled as allow mappers, not included in config + # USER_FLAGS_BY_GROUP is handled as role mappers, not included in config + + return config + + def _convert_ldap_connection_options(self, connection_options): + """ + Convert numeric LDAP connection option constants to their string representations. + Uses the actual constants from the python-ldap library. 
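+
+        Example (illustrative):
+
+            self._convert_ldap_connection_options({ldap.OPT_NETWORK_TIMEOUT: 30})
+            # -> {'OPT_NETWORK_TIMEOUT': 30}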
+ + Args: + connection_options: Dictionary with numeric LDAP option keys + + Returns: + dict: Dictionary with string LDAP option keys + """ + # Comprehensive mapping using LDAP constants as keys + ldap_option_map = { + # Basic LDAP options + ldap.OPT_API_INFO: 'OPT_API_INFO', + ldap.OPT_DEREF: 'OPT_DEREF', + ldap.OPT_SIZELIMIT: 'OPT_SIZELIMIT', + ldap.OPT_TIMELIMIT: 'OPT_TIMELIMIT', + ldap.OPT_REFERRALS: 'OPT_REFERRALS', + ldap.OPT_RESULT_CODE: 'OPT_RESULT_CODE', + ldap.OPT_ERROR_NUMBER: 'OPT_ERROR_NUMBER', + ldap.OPT_RESTART: 'OPT_RESTART', + ldap.OPT_PROTOCOL_VERSION: 'OPT_PROTOCOL_VERSION', + ldap.OPT_SERVER_CONTROLS: 'OPT_SERVER_CONTROLS', + ldap.OPT_CLIENT_CONTROLS: 'OPT_CLIENT_CONTROLS', + ldap.OPT_API_FEATURE_INFO: 'OPT_API_FEATURE_INFO', + ldap.OPT_HOST_NAME: 'OPT_HOST_NAME', + ldap.OPT_DESC: 'OPT_DESC', + ldap.OPT_DIAGNOSTIC_MESSAGE: 'OPT_DIAGNOSTIC_MESSAGE', + ldap.OPT_ERROR_STRING: 'OPT_ERROR_STRING', + ldap.OPT_MATCHED_DN: 'OPT_MATCHED_DN', + ldap.OPT_DEBUG_LEVEL: 'OPT_DEBUG_LEVEL', + ldap.OPT_TIMEOUT: 'OPT_TIMEOUT', + ldap.OPT_REFHOPLIMIT: 'OPT_REFHOPLIMIT', + ldap.OPT_NETWORK_TIMEOUT: 'OPT_NETWORK_TIMEOUT', + ldap.OPT_URI: 'OPT_URI', + # TLS options + ldap.OPT_X_TLS: 'OPT_X_TLS', + ldap.OPT_X_TLS_CTX: 'OPT_X_TLS_CTX', + ldap.OPT_X_TLS_CACERTFILE: 'OPT_X_TLS_CACERTFILE', + ldap.OPT_X_TLS_CACERTDIR: 'OPT_X_TLS_CACERTDIR', + ldap.OPT_X_TLS_CERTFILE: 'OPT_X_TLS_CERTFILE', + ldap.OPT_X_TLS_KEYFILE: 'OPT_X_TLS_KEYFILE', + ldap.OPT_X_TLS_REQUIRE_CERT: 'OPT_X_TLS_REQUIRE_CERT', + ldap.OPT_X_TLS_CIPHER_SUITE: 'OPT_X_TLS_CIPHER_SUITE', + ldap.OPT_X_TLS_RANDOM_FILE: 'OPT_X_TLS_RANDOM_FILE', + ldap.OPT_X_TLS_DHFILE: 'OPT_X_TLS_DHFILE', + ldap.OPT_X_TLS_NEVER: 'OPT_X_TLS_NEVER', + ldap.OPT_X_TLS_HARD: 'OPT_X_TLS_HARD', + ldap.OPT_X_TLS_DEMAND: 'OPT_X_TLS_DEMAND', + ldap.OPT_X_TLS_ALLOW: 'OPT_X_TLS_ALLOW', + ldap.OPT_X_TLS_TRY: 'OPT_X_TLS_TRY', + ldap.OPT_X_TLS_CRL_NONE: 'OPT_X_TLS_CRL_NONE', + ldap.OPT_X_TLS_CRL_PEER: 'OPT_X_TLS_CRL_PEER', + ldap.OPT_X_TLS_CRL_ALL: 'OPT_X_TLS_CRL_ALL', + # SASL options + ldap.OPT_X_SASL_MECH: 'OPT_X_SASL_MECH', + ldap.OPT_X_SASL_REALM: 'OPT_X_SASL_REALM', + ldap.OPT_X_SASL_AUTHCID: 'OPT_X_SASL_AUTHCID', + ldap.OPT_X_SASL_AUTHZID: 'OPT_X_SASL_AUTHZID', + ldap.OPT_X_SASL_SSF: 'OPT_X_SASL_SSF', + ldap.OPT_X_SASL_SSF_EXTERNAL: 'OPT_X_SASL_SSF_EXTERNAL', + ldap.OPT_X_SASL_SECPROPS: 'OPT_X_SASL_SECPROPS', + ldap.OPT_X_SASL_SSF_MIN: 'OPT_X_SASL_SSF_MIN', + ldap.OPT_X_SASL_SSF_MAX: 'OPT_X_SASL_SSF_MAX', + } + + # Add optional options that may not be available in all versions + optional_options = [ + 'OPT_TCP_USER_TIMEOUT', + 'OPT_DEFBASE', + 'OPT_X_TLS_VERSION', + 'OPT_X_TLS_CIPHER', + 'OPT_X_TLS_PEERCERT', + 'OPT_X_TLS_CRLCHECK', + 'OPT_X_TLS_CRLFILE', + 'OPT_X_TLS_NEWCTX', + 'OPT_X_TLS_PROTOCOL_MIN', + 'OPT_X_TLS_PACKAGE', + 'OPT_X_TLS_ECNAME', + 'OPT_X_TLS_REQUIRE_SAN', + 'OPT_X_TLS_PROTOCOL_MAX', + 'OPT_X_TLS_PROTOCOL_SSL3', + 'OPT_X_TLS_PROTOCOL_TLS1_0', + 'OPT_X_TLS_PROTOCOL_TLS1_1', + 'OPT_X_TLS_PROTOCOL_TLS1_2', + 'OPT_X_TLS_PROTOCOL_TLS1_3', + 'OPT_X_SASL_NOCANON', + 'OPT_X_SASL_USERNAME', + 'OPT_CONNECT_ASYNC', + 'OPT_X_KEEPALIVE_IDLE', + 'OPT_X_KEEPALIVE_PROBES', + 'OPT_X_KEEPALIVE_INTERVAL', + ] + + for option_name in optional_options: + if hasattr(ldap, option_name): + ldap_option_map[getattr(ldap, option_name)] = option_name + + converted_options = {} + + for key, value in connection_options.items(): + if key in ldap_option_map: + converted_options[ldap_option_map[key]] = value + + return converted_options + + def 
_ldap_group_allow_to_gateway_format(self, result: list, ldap_group: str, deny=False, start_order=1): + """Convert an LDAP require or deny group to a Gateway mapper + + Args: + result: array to append the mapper to + ldap_group: An LDAP group query + deny: Whether the mapper denies or requires users to be in the group + start_order: Starting order value for the mappers + + Returns: + tuple: (List of Gateway-compatible organization mappers, next_order) + """ + if ldap_group is None: + return result, start_order + + if deny: + result.append( + { + "name": "LDAP-DenyGroup", + "authenticator": -1, + "map_type": "allow", + "revoke": True, + "triggers": {"groups": {"has_or": [ldap_group]}}, + "order": start_order, + } + ) + else: + result.append( + { + "name": "LDAP-RequireGroup", + "authenticator": -1, + "map_type": "allow", + "revoke": False, + "triggers": {"groups": {"has_and": [ldap_group]}}, + "order": start_order, + } + ) + + return result, start_order + 1 diff --git a/awx/sso/utils/oidc_migrator.py b/awx/sso/utils/oidc_migrator.py new file mode 100644 index 0000000000..f0802234d7 --- /dev/null +++ b/awx/sso/utils/oidc_migrator.py @@ -0,0 +1,113 @@ +""" +Generic OIDC authenticator migrator. + +This module handles the migration of generic OIDC authenticators from AWX to Gateway. +""" + +from django.conf import settings +from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format +from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator + + +class OIDCMigrator(BaseAuthenticatorMigrator): + """ + Handles the migration of generic OIDC authenticators from AWX to Gateway. + """ + + CATEGORY = "OIDC" + AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.oidc" + + def get_authenticator_type(self): + """Get the human-readable authenticator type name.""" + return "OIDC" + + def get_controller_config(self): + """ + Export generic OIDC authenticators. An OIDC authenticator is only exported if both, + key and secret, are defined. Otherwise it will be skipped. 
+ + Returns: + list: List of configured OIDC authentication providers with their settings + """ + key_value = getattr(settings, "SOCIAL_AUTH_OIDC_KEY", None) + secret_value = getattr(settings, "SOCIAL_AUTH_OIDC_SECRET", None) + oidc_endpoint = getattr(settings, "SOCIAL_AUTH_OIDC_OIDC_ENDPOINT", None) + + # Skip if required settings are not configured + if not key_value or not secret_value or not oidc_endpoint: + return [] + + # Get additional OIDC configuration + verify_ssl = getattr(settings, "SOCIAL_AUTH_OIDC_VERIFY_SSL", True) + + # Get organization and team mappings + org_map_value = self.get_social_org_map() + team_map_value = self.get_social_team_map() + + # Convert org and team mappings from AWX to the Gateway format + # Start with order 1 and maintain sequence across both org and team mappers + org_mappers, next_order = org_map_to_gateway_format(org_map_value, start_order=1) + team_mappers, _ = team_map_to_gateway_format(team_map_value, start_order=next_order) + + config_data = { + "name": "default", + "type": self.AUTH_TYPE, + "enabled": False, + "create_objects": True, + "remove_users": False, + "configuration": { + "OIDC_ENDPOINT": oidc_endpoint, + "KEY": key_value, + "SECRET": secret_value, + "VERIFY_SSL": verify_ssl, + }, + } + + return [ + { + "category": self.CATEGORY, + "settings": config_data, + "org_mappers": org_mappers, + "team_mappers": team_mappers, + } + ] + + def create_gateway_authenticator(self, config): + """Create a generic OIDC authenticator in Gateway.""" + category = config["category"] + config_settings = config["settings"] + + # Generate authenticator name and slug + authenticator_name = "oidc" + authenticator_slug = self._generate_authenticator_slug("oidc", category) + + self._write_output(f"\n--- Processing {category} authenticator ---") + self._write_output(f"Name: {authenticator_name}") + self._write_output(f"Slug: {authenticator_slug}") + self._write_output(f"Type: {config_settings['type']}") + + # Build Gateway authenticator configuration + gateway_config = { + "name": authenticator_name, + "slug": authenticator_slug, + "type": config_settings["type"], + "enabled": config_settings["enabled"], + "create_objects": config_settings["create_objects"], + "remove_users": config_settings["remove_users"], + "configuration": config_settings["configuration"], + } + + # OIDC authenticators have auto-generated fields that should be ignored during comparison + # CALLBACK_URL - automatically created by Gateway + # SCOPE - defaults are set by Gateway plugin + # SECRET - the secret is encrypted in Gateway, we have no way of comparing the decrypted value + ignore_keys = ['CALLBACK_URL', 'SCOPE'] + + # Submit the authenticator (create or update as needed) + result = self.submit_authenticator(gateway_config, ignore_keys, config) + + # Handle LOGIN_REDIRECT_OVERRIDE if applicable + valid_login_urls = ['/sso/login/oidc'] + self.handle_login_override(config, valid_login_urls) + + return result diff --git a/awx/sso/utils/radius_migrator.py b/awx/sso/utils/radius_migrator.py new file mode 100644 index 0000000000..e61702a109 --- /dev/null +++ b/awx/sso/utils/radius_migrator.py @@ -0,0 +1,85 @@ +""" +RADIUS authenticator migrator. + +This module handles the migration of RADIUS authenticators from AWX to Gateway. +""" + +from django.conf import settings + +from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator + + +class RADIUSMigrator(BaseAuthenticatorMigrator): + """ + Handles the migration of RADIUS authenticators from AWX to Gateway. 
+ """ + + CATEGORY = "RADIUS" + AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.radius" + + def get_authenticator_type(self): + """Get the human-readable authenticator type name.""" + return "RADIUS" + + def get_controller_config(self): + """ + Export RADIUS authenticators. A RADIUS authenticator is only exported if + required configuration is present. + + Returns: + list: List of configured RADIUS authentication providers with their settings + """ + server = getattr(settings, "RADIUS_SERVER", None) + if not server: + return [] + + port = getattr(settings, "RADIUS_PORT", 1812) + secret = getattr(settings, "RADIUS_SECRET", "") + + config_data = { + "name": "default", + "type": self.AUTH_TYPE, + "enabled": True, + "create_objects": True, + "remove_users": False, + "configuration": { + "SERVER": server, + "PORT": port, + "SECRET": secret, + }, + } + + return [ + { + "category": self.CATEGORY, + "settings": config_data, + } + ] + + def create_gateway_authenticator(self, config): + """Create a RADIUS authenticator in Gateway.""" + category = config["category"] + config_settings = config["settings"] + + # Generate authenticator name and slug + authenticator_name = "radius" + authenticator_slug = self._generate_authenticator_slug("radius", category) + + self._write_output(f"\n--- Processing {category} authenticator ---") + self._write_output(f"Name: {authenticator_name}") + self._write_output(f"Slug: {authenticator_slug}") + self._write_output(f"Type: {config_settings['type']}") + + # Build Gateway authenticator configuration + gateway_config = { + "name": authenticator_name, + "slug": authenticator_slug, + "type": config_settings["type"], + "enabled": config_settings["enabled"], + "create_objects": config_settings["create_objects"], + "remove_users": config_settings["remove_users"], + "configuration": config_settings["configuration"], + } + + # Submit the authenticator (create or update as needed) + return self.submit_authenticator(gateway_config, config=config) diff --git a/awx/sso/utils/saml_migrator.py b/awx/sso/utils/saml_migrator.py new file mode 100644 index 0000000000..736cb4e843 --- /dev/null +++ b/awx/sso/utils/saml_migrator.py @@ -0,0 +1,308 @@ +""" +SAML authenticator migrator. + +This module handles the migration of SAML authenticators from AWX to Gateway. 
+""" + +from django.conf import settings + +from awx.main.utils.gateway_mapping import org_map_to_gateway_format, team_map_to_gateway_format +from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator + +ROLE_MAPPER = { + "is_superuser_role": {"role": None, "map_type": "is_superuser", "revoke": "remove_superusers"}, + "is_system_auditor_role": {"role": "Platform Auditor", "map_type": "role", "revoke": "remove_system_auditors"}, +} + +ATTRIBUTE_VALUE_MAPPER = { + "is_superuser_attr": {"role": None, "map_type": "is_superuser", "value": "is_superuser_value", "revoke": "remove_superusers"}, + "is_system_auditor_attr": {"role": "Platform Auditor", "map_type": "role", "value": "is_system_auditor_value", "revoke": "remove_system_auditors"}, +} + +ORG_ATTRIBUTE_MAPPER = { + "saml_attr": {"role": "Organization Member", "revoke": "remove"}, + "saml_admin_attr": {"role": "Organization Admin", "revoke": "remove_admins"}, +} + + +def _split_chunks(data: str, length: int = 64) -> list[str]: + return [data[i : i + length] for i in range(0, len(data), length)] + + +def _to_pem_cert(data: str) -> list[str]: + items = ["-----BEGIN CERTIFICATE-----"] + items += _split_chunks(data) + items.append("-----END CERTIFICATE-----") + return items + + +class SAMLMigrator(BaseAuthenticatorMigrator): + """ + Handles the migration of SAML authenticators from AWX to Gateway. + """ + + CATEGORY = "SAML" + AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.saml" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.next_order = 1 + self.team_mappers = [] + + def get_authenticator_type(self): + """Get the human-readable authenticator type name.""" + return "SAML" + + def get_controller_config(self): + """ + Export SAML authenticators. A SAML authenticator is only exported if + required configuration is present. 
+ + Returns: + list: List of configured SAML authentication providers with their settings + """ + found_configs = [] + + enabled = False + remove_users = True + create_objects = getattr(settings, "SAML_AUTO_CREATE_OBJECTS", True) + idps = getattr(settings, "SOCIAL_AUTH_SAML_ENABLED_IDPS", {}) + security_config = getattr(settings, "SOCIAL_AUTH_SAML_SECURITY_CONFIG", {}) + + # Get org and team mappings using the new fallback functions + org_map_value = self.get_social_org_map("SOCIAL_AUTH_SAML_ORGANIZATION_MAP") + team_map_value = self.get_social_team_map("SOCIAL_AUTH_SAML_TEAM_MAP") + self.extra_data = getattr(settings, "SOCIAL_AUTH_SAML_EXTRA_DATA", []) + self._add_to_extra_data(['Role', 'Role']) + + support_contact = getattr(settings, "SOCIAL_AUTH_SAML_SUPPORT_CONTACT", {}) + technical_contact = getattr(settings, "SOCIAL_AUTH_SAML_TECHNICAL_CONTACT", {}) + org_info = getattr(settings, "SOCIAL_AUTH_SAML_ORG_INFO", {}) + + sp_private_key = getattr(settings, "SOCIAL_AUTH_SAML_SP_PRIVATE_KEY", None) + sp_public_cert = getattr(settings, "SOCIAL_AUTH_SAML_SP_PUBLIC_CERT", None) + sp_entity_id = getattr(settings, "SOCIAL_AUTH_SAML_SP_ENTITY_ID", None) + sp_extra = getattr(settings, "SOCIAL_AUTH_SAML_SP_EXTRA", {}) + saml_team_attr = getattr(settings, "SOCIAL_AUTH_SAML_TEAM_ATTR", {}) + org_attr = getattr(settings, "SOCIAL_AUTH_SAML_ORGANIZATION_ATTR", {}) + user_flags_by_attr = getattr(settings, "SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR", {}) + login_redirect_override = getattr(settings, "LOGIN_REDIRECT_OVERRIDE", None) + + org_mappers, self.next_order = org_map_to_gateway_format(org_map_value, start_order=self.next_order) + self.team_mappers, self.next_order = team_map_to_gateway_format(team_map_value, start_order=self.next_order) + + self._team_attr_to_gateway_format(saml_team_attr) + self._user_flags_by_role_to_gateway_format(user_flags_by_attr) + self._user_flags_by_attr_value_to_gateway_format(user_flags_by_attr) + self._org_attr_to_gateway_format(org_attr) + + for name, value in idps.items(): + config_data = { + "name": name, + "type": self.AUTH_TYPE, + "enabled": enabled, + "create_objects": create_objects, + "remove_users": remove_users, + "configuration": { + "IDP_URL": value.get("url"), + "IDP_X509_CERT": "\n".join(_to_pem_cert(value.get("x509cert"))), + "IDP_ENTITY_ID": value.get("entity_id"), + "IDP_ATTR_EMAIL": value.get("attr_email"), + "IDP_ATTR_USERNAME": value.get("attr_username"), + "IDP_ATTR_FIRST_NAME": value.get("attr_first_name"), + "IDP_ATTR_LAST_NAME": value.get("attr_last_name"), + "IDP_ATTR_USER_PERMANENT_ID": value.get("attr_user_permanent_id"), + "IDP_GROUPS": value.get("attr_groups"), + "SP_ENTITY_ID": sp_entity_id, + "SP_PUBLIC_CERT": sp_public_cert, + "SP_PRIVATE_KEY": sp_private_key, + "ORG_INFO": org_info, + "TECHNICAL_CONTACT": technical_contact, + "SUPPORT_CONTACT": support_contact, + "SECURITY_CONFIG": security_config, + "SP_EXTRA": sp_extra, + "EXTRA_DATA": self.extra_data, + }, + } + + found_configs.append( + { + "category": self.CATEGORY, + "settings": config_data, + "org_mappers": org_mappers, + "team_mappers": self.team_mappers, + "login_redirect_override": login_redirect_override, + } + ) + return found_configs + + def create_gateway_authenticator(self, config): + """Create a SAML authenticator in Gateway.""" + category = config["category"] + config_settings = config["settings"] + name = config_settings["name"] + + # Generate authenticator name and slug + authenticator_name = f"{category.replace('-', '_').title()}-{name}" + authenticator_slug = 
self._generate_authenticator_slug("saml", name) + + self._write_output(f"\n--- Processing {category} authenticator ---") + self._write_output(f"Name: {authenticator_name}") + self._write_output(f"Slug: {authenticator_slug}") + self._write_output(f"Type: {config_settings['type']}") + + # Build Gateway authenticator configuration + gateway_config = { + "name": authenticator_name, + "slug": authenticator_slug, + "type": config_settings["type"], + "enabled": False, + "create_objects": True, # Allow Gateway to create users/orgs/teams + "remove_users": False, # Don't remove users by default + "configuration": config_settings["configuration"], + } + + # CALLBACK_URL - automatically created by Gateway + ignore_keys = ["CALLBACK_URL", "SP_PRIVATE_KEY"] + + # Submit the authenticator (create or update as needed) + result = self.submit_authenticator(gateway_config, ignore_keys, config) + + # Handle LOGIN_REDIRECT_OVERRIDE if applicable + valid_login_urls = [f'/sso/login/saml/?idp={name}', f'/sso/login/saml/?idp={name}/'] + self.handle_login_override(config, valid_login_urls) + + return result + + def _team_attr_to_gateway_format(self, saml_team_attr): + saml_attr = saml_team_attr.get("saml_attr") + if not saml_attr: + return + + revoke = saml_team_attr.get('remove', True) + self._add_to_extra_data([saml_attr, saml_attr]) + + for item in saml_team_attr["team_org_map"]: + team_list = item["team"] + if isinstance(team_list, str): + team_list = [team_list] + team = item.get("team_alias") or item["team"] + self.team_mappers.append( + { + "map_type": "team", + "role": "Team Member", + "organization": item["organization"], + "team": team, + "name": "Team" + "-" + team + "-" + item["organization"], + "revoke": revoke, + "authenticator": -1, + "triggers": {"attributes": {saml_attr: {"in": team_list}, "join_condition": "or"}}, + "order": self.next_order, + } + ) + self.next_order += 1 + + def _user_flags_by_role_to_gateway_format(self, user_flags_by_attr): + for k, v in ROLE_MAPPER.items(): + if k in user_flags_by_attr: + if v['role']: + name = f"Role-{v['role']}" + else: + name = f"Role-{v['map_type']}" + + revoke = user_flags_by_attr.get(v['revoke'], True) + self.team_mappers.append( + { + "map_type": v["map_type"], + "role": v["role"], + "name": name, + "organization": None, + "team": None, + "revoke": revoke, + "order": self.next_order, + "authenticator": -1, + "triggers": { + "attributes": { + "Role": {"in": user_flags_by_attr[k]}, + "join_condition": "or", + } + }, + } + ) + self.next_order += 1 + + def _user_flags_by_attr_value_to_gateway_format(self, user_flags_by_attr): + for k, v in ATTRIBUTE_VALUE_MAPPER.items(): + if k in user_flags_by_attr: + value = user_flags_by_attr.get(v['value']) + + if value: + if isinstance(value, list): + value = {'in': value} + else: + value = {'in': [value]} + else: + value = {} + + revoke = user_flags_by_attr.get(v['revoke'], True) + attr_name = user_flags_by_attr[k] + self._add_to_extra_data([attr_name, attr_name]) + + if v['role']: + name = f"Role-{v['role']}-attr" + else: + name = f"Role-{v['map_type']}-attr" + + self.team_mappers.append( + { + "map_type": v["map_type"], + "role": v["role"], + "name": name, + "organization": None, + "team": None, + "revoke": revoke, + "order": self.next_order, + "authenticator": -1, + "triggers": { + "attributes": { + attr_name: value, + "join_condition": "or", + } + }, + } + ) + self.next_order += 1 + + def _org_attr_to_gateway_format(self, org_attr): + for k, v in ORG_ATTRIBUTE_MAPPER.items(): + if k in org_attr: + attr_name = 
org_attr.get(k) + organization = "{% " + f"for_attr_value('{attr_name}')" + " %}" + revoke = org_attr.get(v['revoke'], True) + + self._add_to_extra_data([attr_name, attr_name]) + + name = f"Role-{v['role']}-attr" + self.team_mappers.append( + { + "map_type": 'organization', + "role": v['role'], + "name": name, + "organization": organization, + "team": None, + "revoke": revoke, + "order": self.next_order, + "authenticator": -1, + "triggers": { + "attributes": { + attr_name: {}, + "join_condition": "or", + } + }, + } + ) + self.next_order += 1 + + def _add_to_extra_data(self, item: list): + if item not in self.extra_data: + self.extra_data.append(item) diff --git a/awx/sso/utils/settings_migrator.py b/awx/sso/utils/settings_migrator.py new file mode 100644 index 0000000000..e501d7aa77 --- /dev/null +++ b/awx/sso/utils/settings_migrator.py @@ -0,0 +1,197 @@ +""" +Settings migrator. + +This module handles the migration of AWX settings to Gateway. +""" + +from django.conf import settings +from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator + + +class SettingsMigrator(BaseAuthenticatorMigrator): + """ + Handles the migration of AWX settings to Gateway. + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # Define transformer functions for each setting + self.setting_transformers = { + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL': self._transform_social_auth_username_is_full_email, + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS': self._transform_allow_oauth2_for_external_users, + } + + def _convert_setting_name(self, setting): + keys = { + "CUSTOM_LOGIN_INFO": "custom_login_info", + "CUSTOM_LOGO": "custom_logo", + } + return keys.get(setting, setting) + + def _transform_social_auth_username_is_full_email(self, value): + # SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL is a boolean and does not need to be transformed + return value + + def _transform_allow_oauth2_for_external_users(self, value): + # ALLOW_OAUTH2_FOR_EXTERNAL_USERS is a boolean and does not need to be transformed + return value + + def get_authenticator_type(self): + """Get the human-readable authenticator type name.""" + return "Settings" + + def get_controller_config(self): + """ + Export relevant AWX settings that need to be migrated to Gateway. 
+ + Returns: + list: List of configured settings that need to be migrated + """ + # Define settings that should be migrated from AWX to Gateway + settings_to_migrate = [ + 'SESSION_COOKIE_AGE', + 'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL', + 'ALLOW_OAUTH2_FOR_EXTERNAL_USERS', + 'LOGIN_REDIRECT_OVERRIDE', + 'ORG_ADMINS_CAN_SEE_ALL_USERS', + 'MANAGE_ORGANIZATION_AUTH', + ] + + found_configs = [] + + for setting_name in settings_to_migrate: + # Handle LOGIN_REDIRECT_OVERRIDE specially + if setting_name == 'LOGIN_REDIRECT_OVERRIDE': + if BaseAuthenticatorMigrator.login_redirect_override_set_by_migrator: + # Use the URL computed by the authenticator migrator + setting_value = BaseAuthenticatorMigrator.login_redirect_override_new_url + else: + # Use the original controller setting value + setting_value = getattr(settings, setting_name, None) + else: + setting_value = getattr(settings, setting_name, None) + + # Only include settings that have non-None and non-empty values + if setting_value is not None and setting_value != "": + # Apply transformer function if available + transformer = self.setting_transformers.get(setting_name) + if transformer: + setting_value = transformer(setting_value) + + # Skip migration if transformer returned None or empty string + if setting_value is not None and setting_value != "": + found_configs.append( + { + 'category': 'global-settings', + 'setting_name': setting_name, + 'setting_value': setting_value, + 'org_mappers': [], # Settings don't have mappers + 'team_mappers': [], # Settings don't have mappers + 'role_mappers': [], # Settings don't have mappers + 'allow_mappers': [], # Settings don't have mappers + } + ) + else: + self._write_output(f'\nIgnoring {setting_name} because it is None or empty after transformation') + else: + self._write_output(f'\nIgnoring {setting_name} because it is None or empty') + + return found_configs + + def create_gateway_authenticator(self, config): + """ + Migrate AWX settings to Gateway. + + Note: This doesn't create authenticators, but updates Gateway settings. + """ + setting_name = config['setting_name'] + setting_value = config['setting_value'] + + self._write_output(f'\n--- Migrating setting: {setting_name} ---') + + try: + gateway_setting_name = self._convert_setting_name(setting_name) + + # Get current gateway setting value to check if update is needed + current_gateway_value = self.gateway_client.get_gateway_setting(gateway_setting_name) + + # Compare current gateway value with controller value + if current_gateway_value == setting_value: + self._write_output(f'↷ Setting unchanged: {setting_name} (value already matches)', 'warning') + return {'success': True, 'action': 'skipped', 'error': None} + + self._write_output(f'Current value: {current_gateway_value}') + self._write_output(f'New value: {setting_value}') + + # Use the new update_gateway_setting method + self.gateway_client.update_gateway_setting(gateway_setting_name, setting_value) + + self._write_output(f'✓ Successfully migrated setting: {setting_name}', 'success') + + # Return success result in the expected format + return {'success': True, 'action': 'updated', 'error': None} + + except Exception as e: + self._write_output(f'✗ Failed to migrate setting {setting_name}: {str(e)}', 'error') + return {'success': False, 'action': 'failed', 'error': str(e)} + + def migrate(self): + """ + Main entry point - orchestrates the settings migration process. 
+ + Returns: + dict: Summary of migration results + """ + # Get settings from AWX/Controller + configs = self.get_controller_config() + + if not configs: + self._write_output('No settings found to migrate.', 'warning') + return { + 'created': 0, + 'updated': 0, + 'unchanged': 0, + 'failed': 0, + 'mappers_created': 0, + 'mappers_updated': 0, + 'mappers_failed': 0, + 'settings_created': 0, + 'settings_updated': 0, + 'settings_unchanged': 0, + 'settings_failed': 0, + } + + self._write_output(f'Found {len(configs)} setting(s) to migrate.', 'success') + + # Process each setting + created_settings = [] + updated_settings = [] + unchanged_settings = [] + failed_settings = [] + + for config in configs: + result = self.create_gateway_authenticator(config) + if result['success']: + if result['action'] == 'created': + created_settings.append(config) + elif result['action'] == 'updated': + updated_settings.append(config) + elif result['action'] == 'skipped': + unchanged_settings.append(config) + else: + failed_settings.append(config) + + # Settings don't have mappers, or authenticators, so authenticator and mapper counts are always 0 + return { + 'created': 0, + 'updated': 0, + 'unchanged': 0, + 'failed': 0, + 'mappers_created': 0, + 'mappers_updated': 0, + 'mappers_failed': 0, + 'settings_created': len(created_settings), + 'settings_updated': len(updated_settings), + 'settings_unchanged': len(unchanged_settings), + 'settings_failed': len(failed_settings), + } diff --git a/awx/sso/utils/tacacs_migrator.py b/awx/sso/utils/tacacs_migrator.py new file mode 100644 index 0000000000..39666a7097 --- /dev/null +++ b/awx/sso/utils/tacacs_migrator.py @@ -0,0 +1,93 @@ +""" +TACACS+ authenticator migrator. + +This module handles the migration of TACACS+ authenticators from AWX to Gateway. +""" + +from django.conf import settings + +from awx.sso.utils.base_migrator import BaseAuthenticatorMigrator + + +class TACACSMigrator(BaseAuthenticatorMigrator): + """ + Handles the migration of TACACS+ authenticators from AWX to Gateway. + """ + + CATEGORY = "TACACSPLUS" + AUTH_TYPE = "ansible_base.authentication.authenticator_plugins.tacacs" + + def get_authenticator_type(self): + """Get the human-readable authenticator type name. + Named TACACSPLUS because `+` is not allowed in authenticator slug. + """ + return "TACACSPLUS" + + def get_controller_config(self): + """ + Export TACACS+ authenticator. A TACACS+ authenticator is only exported if + required configuration is present. 
+ + Returns: + list: List of configured TACACS+ authentication providers with their settings + """ + host = getattr(settings, "TACACSPLUS_HOST", None) + if not host: + return [] + + port = getattr(settings, "TACACSPLUS_PORT", 49) + secret = getattr(settings, "TACACSPLUS_SECRET", "") + session_timeout = getattr(settings, "TACACSPLUS_SESSION_TIMEOUT", 5) + auth_protocol = getattr(settings, "TACACSPLUS_AUTH_PROTOCOL", "ascii") + rem_addr = getattr(settings, "TACACSPLUS_REM_ADDR", False) + + config_data = { + "name": "default", + "type": self.AUTH_TYPE, + "enabled": True, + "create_objects": True, + "remove_users": False, + "configuration": { + "HOST": host, + "PORT": port, + "SECRET": secret, + "SESSION_TIMEOUT": session_timeout, + "AUTH_PROTOCOL": auth_protocol, + "REM_ADDR": rem_addr, + }, + } + + return [ + { + "category": self.CATEGORY, + "settings": config_data, + } + ] + + def create_gateway_authenticator(self, config): + """Create a TACACS+ authenticator in Gateway.""" + category = config["category"] + config_settings = config["settings"] + + # Generate authenticator name and slug + authenticator_name = "tacacs" + authenticator_slug = self._generate_authenticator_slug("tacacs", category) + + self._write_output(f"\n--- Processing {category} authenticator ---") + self._write_output(f"Name: {authenticator_name}") + self._write_output(f"Slug: {authenticator_slug}") + self._write_output(f"Type: {config_settings['type']}") + + # Build Gateway authenticator configuration + gateway_config = { + "name": authenticator_name, + "slug": authenticator_slug, + "type": config_settings["type"], + "enabled": config_settings["enabled"], + "create_objects": config_settings["create_objects"], + "remove_users": config_settings["remove_users"], + "configuration": config_settings["configuration"], + } + + # Submit the authenticator (create or update as needed) + return self.submit_authenticator(gateway_config, config=config) diff --git a/awx/urls.py b/awx/urls.py index 862dc5dcd9..9fe1ea0286 100644 --- a/awx/urls.py +++ b/awx/urls.py @@ -5,6 +5,7 @@ from django.conf import settings from django.urls import re_path, include, path from ansible_base.lib.dynamic_config.dynamic_urls import api_urls, api_version_urls, root_urls +from ansible_base.rbac.service_api.urls import rbac_service_urls from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls @@ -23,6 +24,7 @@ def get_urlpatterns(prefix=None): urlpatterns += [ path(f'api{prefix}v2/', include(resource_api_urls)), + path(f'api{prefix}v2/', include(rbac_service_urls)), path(f'api{prefix}v2/', include(api_version_urls)), path(f'api{prefix}', include(api_urls)), path('', include(root_urls)), diff --git a/awx_collection/README.md b/awx_collection/README.md index 767073c4ef..d730c627ed 100644 --- a/awx_collection/README.md +++ b/awx_collection/README.md @@ -32,11 +32,12 @@ Installing the `tar.gz` involves no special instructions. ## Running Non-deprecated modules in this collection have no Python requirements, but -may require the official [AWX CLI](https://pypi.org/project/awxkit/) +may require the AWX CLI in the future. The `DOCUMENTATION` for each module will report this. You can specify authentication by host, username, and password. 
+<<<<<<< HEAD These can be specified via (from highest to lowest precedence): - direct module parameters @@ -54,6 +55,8 @@ verify_ssl = true username = foo password = bar ``` +======= +>>>>>>> tower/test_stable-2.6 ## Release and Upgrade Notes diff --git a/awx_collection/meta/runtime.yml b/awx_collection/meta/runtime.yml index c663b95daa..cdb5714b36 100644 --- a/awx_collection/meta/runtime.yml +++ b/awx_collection/meta/runtime.yml @@ -263,7 +263,14 @@ plugin_routing: removal_date: '2022-01-23' warning_text: The tower_* modules have been deprecated, use awx.awx.workflow_node_wait instead. redirect: awx.awx.workflow_node_wait +<<<<<<< HEAD role: deprecation: removal_version: '25.0.0' warning_text: This is replaced by the DAB role system, via the role_definition module. +======= + application: + deprecation: + removal_version: '25.0.0' + warning_text: The application module manages a legacy authentication feature that is being phased out, migrate to token-based authentication instead. +>>>>>>> tower/test_stable-2.6 diff --git a/awx_collection/plugins/doc_fragments/auth.py b/awx_collection/plugins/doc_fragments/auth.py index d5f3f1a295..9508577fb9 100644 --- a/awx_collection/plugins/doc_fragments/auth.py +++ b/awx_collection/plugins/doc_fragments/auth.py @@ -40,6 +40,7 @@ options: - A dictionary structure as returned by the token module. - If value not set, will try environment variable C(CONTROLLER_OAUTH_TOKEN) and then config files type: raw + aliases: [ controller_oauthtoken ] version_added: "3.7.0" validate_certs: description: @@ -60,7 +61,7 @@ options: - Path to the controller config file. - If provided, the other locations for config files will not be considered. type: path - aliases: [tower_config_file] + aliases: [ tower_config_file ] notes: - If no I(config_file) is provided we will attempt to use the tower-cli library diff --git a/awx_collection/plugins/doc_fragments/auth_plugin.py b/awx_collection/plugins/doc_fragments/auth_plugin.py index 44ad326eda..14c3c12f69 100644 --- a/awx_collection/plugins/doc_fragments/auth_plugin.py +++ b/awx_collection/plugins/doc_fragments/auth_plugin.py @@ -40,11 +40,20 @@ options: version: '4.0.0' why: Collection name change alternatives: 'TOWER_PASSWORD, AAP_PASSWORD' +<<<<<<< HEAD aap_token: +======= + oauth_token: +>>>>>>> tower/test_stable-2.6 description: - The OAuth token to use. 
env: - name: AAP_TOKEN +<<<<<<< HEAD +======= + - name: CONTROLLER_OAUTH_TOKEN + - name: TOWER_OAUTH_TOKEN +>>>>>>> tower/test_stable-2.6 deprecated: collection_name: 'awx.awx' version: '4.0.0' diff --git a/awx_collection/plugins/lookup/schedule_rrule.py b/awx_collection/plugins/lookup/schedule_rrule.py index c50d4bf716..64b41ddff9 100644 --- a/awx_collection/plugins/lookup/schedule_rrule.py +++ b/awx_collection/plugins/lookup/schedule_rrule.py @@ -73,9 +73,15 @@ DOCUMENTATION = """ """ EXAMPLES = """ +<<<<<<< HEAD - name: Create a string for a schedule debug: msg: "{{ lookup('awx.awx.schedule_rrule', 'none', start_date='1979-09-13 03:45:07') }}" +======= + - name: Create a string for a schedule + debug: + msg: "{{ lookup('awx.awx.schedule_rrule', 'none', start_date='1979-09-13 03:45:07') }}" +>>>>>>> tower/test_stable-2.6 """ RETURN = """ diff --git a/awx_collection/plugins/lookup/schedule_rruleset.py b/awx_collection/plugins/lookup/schedule_rruleset.py index 21ca283c2c..12f58f0fbb 100644 --- a/awx_collection/plugins/lookup/schedule_rruleset.py +++ b/awx_collection/plugins/lookup/schedule_rruleset.py @@ -107,6 +107,7 @@ DOCUMENTATION = """ """ EXAMPLES = """ +<<<<<<< HEAD - name: Create a ruleset for everyday except Sundays set_fact: complex_rule: "{{ lookup(awx.awx.schedule_rruleset, '2022-04-30 10:30:45', rules=rrules, timezone='UTC' ) }}" @@ -118,6 +119,19 @@ EXAMPLES = """ interval: 1 byweekday: 'sunday' include: false +======= + - name: Create a ruleset for everyday except Sundays + set_fact: + complex_rule: "{{ lookup(awx.awx.schedule_rruleset, '2022-04-30 10:30:45', rules=rrules, timezone='UTC' ) }}" + vars: + rrules: + - frequency: 'day' + interval: 1 + - frequency: 'day' + interval: 1 + byweekday: 'sunday' + include: False +>>>>>>> tower/test_stable-2.6 """ RETURN = """ diff --git a/awx_collection/plugins/module_utils/controller_api.py b/awx_collection/plugins/module_utils/controller_api.py index 5bca328453..ee7d22b4a7 100644 --- a/awx_collection/plugins/module_utils/controller_api.py +++ b/awx_collection/plugins/module_utils/controller_api.py @@ -75,6 +75,10 @@ class ControllerModule(AnsibleModule): aap_token=dict( type='raw', no_log=True, +<<<<<<< HEAD +======= + aliases=['controller_oauthtoken',], +>>>>>>> tower/test_stable-2.6 required=False, fallback=(env_fallback, ['CONTROLLER_OAUTH_TOKEN', 'TOWER_OAUTH_TOKEN', 'AAP_TOKEN']) ), @@ -132,6 +136,23 @@ class ControllerModule(AnsibleModule): if direct_value is not None: setattr(self, short_param, direct_value) +<<<<<<< HEAD +======= + # Perform magic depending on whether aap_token is a string or a dict + if self.params.get('aap_token'): + token_param = self.params.get('aap_token') + if isinstance(token_param, dict): + if 'token' in token_param: + self.oauth_token = self.params.get('aap_token')['token'] + else: + self.fail_json(msg="The provided dict in aap_token did not properly contain the token entry") + elif isinstance(token_param, string_types): + self.oauth_token = self.params.get('aap_token') + else: + error_msg = "The provided aap_token type was not valid ({0}). 
Valid options are str or dict.".format(type(token_param).__name__) + self.fail_json(msg=error_msg) + +>>>>>>> tower/test_stable-2.6 # Perform some basic validation if not self.host.startswith(("https://", "http://")): # NOSONAR self.host = "https://{0}".format(self.host) @@ -538,7 +559,18 @@ class ControllerAPIModule(ControllerModule): self.fail_json(msg='Invalid authentication credentials for {0} (HTTP 401).'.format(url.path)) # Sanity check: Did we get a forbidden response, which means that the user isn't allowed to do this? Report that. elif he.code == 403: - self.fail_json(msg="You don't have permission to {1} to {0} (HTTP 403).".format(url.path, method)) + # Hack: Tell the customer to use the platform supported collection when interacting with Org, Team, User Controller endpoints + err_msg = he.fp.read().decode('utf-8') + try: + # Defensive coding. Handle json responses and non-json responses + err_msg = loads(err_msg) + err_msg = err_msg['detail'] + # JSONDecodeError only available on Python 3.5+ + except ValueError: + pass + prepend_msg = " Use the collection ansible.platform to modify resources Organization, User, or Team." if ( + "this resource via the platform ingress") in err_msg else "" + self.fail_json(msg="You don't have permission to {1} to {0} (HTTP 403).{2}".format(url.path, method, prepend_msg)) # Sanity check: Did we get a 404 response? # Requests with primary keys will return a 404 if there is no response, and we want to consistently trap these. elif he.code == 404: diff --git a/awx_collection/plugins/modules/application.py b/awx_collection/plugins/modules/application.py new file mode 100644 index 0000000000..9e28195db1 --- /dev/null +++ b/awx_collection/plugins/modules/application.py @@ -0,0 +1,166 @@ +#!/usr/bin/python +# coding: utf-8 -*- + +# (c) 2020,Geoffrey Bachelot +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} + + +DOCUMENTATION = ''' +--- +module: application +author: "Geoffrey Bacheot (@jffz)" +short_description: create, update, or destroy Automation Platform Controller applications +deprecated: + removed_in: '25.0.0' + why: This module manages a legacy authentication feature that is being phased out. + alternative: Migrate to token-based authentication. +description: + - Create, update, or destroy Automation Platform Controller applications. See + U(https://www.ansible.com/tower) for an overview. +options: + name: + description: + - Name of the application. + required: True + type: str + new_name: + description: + - Setting this option will change the existing name (looked up via the name field). + type: str + description: + description: + - Description of the application. + type: str + authorization_grant_type: + description: + - The grant type the user must use for acquire tokens for this application. + choices: ["password", "authorization-code"] + type: str + required: False + client_type: + description: + - Set to public or confidential depending on how secure the client device is. + choices: ["public", "confidential"] + type: str + required: False + organization: + description: + - Name, ID, or named URL of organization for application. + type: str + required: True + redirect_uris: + description: + - Allowed urls list, space separated. 
Required when authorization-grant-type=authorization-code + type: list + elements: str + state: + description: + - Desired state of the resource. + default: "present" + choices: ["present", "absent", "exists"] + type: str + skip_authorization: + description: + - Set True to skip authorization step for completely trusted applications. + type: bool + +extends_documentation_fragment: awx.awx.auth +''' + + +EXAMPLES = ''' +- name: Add Foo application + application: + name: "Foo" + description: "Foo bar application" + organization: "test" + state: present + authorization_grant_type: password + client_type: public + +- name: Add Foo application + application: + name: "Foo" + description: "Foo bar application" + organization: "test" + state: present + authorization_grant_type: authorization-code + client_type: confidential + redirect_uris: + - http://tower.com/api/v2/ +''' + +from ..module_utils.controller_api import ControllerAPIModule + + +def main(): + # Any additional arguments that are not fields of the item can be added here + argument_spec = dict( + name=dict(required=True), + new_name=dict(), + description=dict(), + authorization_grant_type=dict(choices=["password", "authorization-code"]), + client_type=dict(choices=['public', 'confidential']), + organization=dict(required=True), + redirect_uris=dict(type="list", elements='str'), + state=dict(choices=['present', 'absent', 'exists'], default='present'), + skip_authorization=dict(type='bool'), + ) + + # Create a module for ourselves + module = ControllerAPIModule(argument_spec=argument_spec) + + # Extract our parameters + name = module.params.get('name') + new_name = module.params.get("new_name") + description = module.params.get('description') + authorization_grant_type = module.params.get('authorization_grant_type') + client_type = module.params.get('client_type') + organization = module.params.get('organization') + redirect_uris = module.params.get('redirect_uris') + skip_authorization = module.params.get('skip_authorization') + state = module.params.get('state') + + # Attempt to look up the related items the user specified (these will fail the module if not found) + org_id = module.resolve_name_to_id('organizations', organization) + + # Attempt to look up application based on the provided name and org ID + application = module.get_one('applications', name_or_id=name, check_exists=(state == 'exists'), **{'data': {'organization': org_id}}) + + if state == 'absent': + # If the state was absent we can let the module delete it if needed, the module will handle exiting from this + module.delete_if_needed(application) + + # Create the data that gets sent for create and update + application_fields = { + 'name': new_name if new_name else (module.get_item_name(application) if application else name), + 'organization': org_id, + } + if authorization_grant_type is not None: + application_fields['authorization_grant_type'] = authorization_grant_type + if client_type is not None: + application_fields['client_type'] = client_type + if description is not None: + application_fields['description'] = description + if redirect_uris is not None: + application_fields['redirect_uris'] = ' '.join(redirect_uris) + if skip_authorization is not None: + application_fields['skip_authorization'] = skip_authorization + + response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False) + if 'client_id' in response: + module.json_output['client_id'] = response['client_id'] + if 
'client_secret' in response: + module.json_output['client_secret'] = response['client_secret'] + module.exit_json(**module.json_output) + + +if __name__ == '__main__': + main() diff --git a/awx_collection/plugins/modules/schedule.py b/awx_collection/plugins/modules/schedule.py index 2d651805a7..c25e0024b8 100644 --- a/awx_collection/plugins/modules/schedule.py +++ b/awx_collection/plugins/modules/schedule.py @@ -180,8 +180,13 @@ EXAMPLES = ''' - frequency: 'day' interval: 1 - frequency: 'day' +<<<<<<< HEAD interval: 1 byweekday: 'sunday' +======= + every: 1 + on_days: 'sunday' +>>>>>>> tower/test_stable-2.6 include: false - name: Delete 'my_schedule' schedule for my_workflow diff --git a/awx_collection/test/awx/conftest.py b/awx_collection/test/awx/conftest.py index 371ae66014..1fcaf362b0 100644 --- a/awx_collection/test/awx/conftest.py +++ b/awx_collection/test/awx/conftest.py @@ -19,8 +19,11 @@ from ansible.module_utils.six import raise_from from ansible_base.rbac.models import RoleDefinition, DABPermission from ansible_base.rbac import permission_registry +<<<<<<< HEAD from awx.main.tests.conftest import load_all_credentials # noqa: F401; pylint: disable=unused-import +======= +>>>>>>> tower/test_stable-2.6 from awx.main.tests.functional.conftest import _request from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh # noqa: F401; pylint: disable=unused-import from awx.main.models import ( diff --git a/awx_collection/test/awx/test_export.py b/awx_collection/test/awx/test_export.py index 7438c6ba1c..b9b3dceb0e 100644 --- a/awx_collection/test/awx/test_export.py +++ b/awx_collection/test/awx/test_export.py @@ -65,7 +65,7 @@ def test_export(run_module, admin_user): all_assets_except_users = {k: v for k, v in assets.items() if k != 'users'} for k, v in all_assets_except_users.items(): - assert v == [], f"Expected resource {k} to be empty. Instead it is {v}" + assert v == [] or v is None, f"Expected resource {k} to be empty. 
Instead it is {v}" @pytest.mark.django_db diff --git a/awx_collection/test/awx/test_organization.py b/awx_collection/test/awx/test_organization.py index a38ca3dbe1..c87245372f 100644 --- a/awx_collection/test/awx/test_organization.py +++ b/awx_collection/test/awx/test_organization.py @@ -20,6 +20,10 @@ def test_create_organization(run_module, admin_user): 'controller_username': None, 'controller_password': None, 'validate_certs': None, +<<<<<<< HEAD +======= + 'aap_token': None, +>>>>>>> tower/test_stable-2.6 'controller_config_file': None, } @@ -52,6 +56,10 @@ def test_galaxy_credential_order(run_module, admin_user): 'controller_username': None, 'controller_password': None, 'validate_certs': None, +<<<<<<< HEAD +======= + 'aap_token': None, +>>>>>>> tower/test_stable-2.6 'controller_config_file': None, 'galaxy_credentials': cred_ids, } @@ -76,6 +84,10 @@ def test_galaxy_credential_order(run_module, admin_user): 'controller_username': None, 'controller_password': None, 'validate_certs': None, +<<<<<<< HEAD +======= + 'aap_token': None, +>>>>>>> tower/test_stable-2.6 'controller_config_file': None, 'galaxy_credentials': cred_ids, } diff --git a/awx_collection/test/awx/test_token.py b/awx_collection/test/awx/test_token.py new file mode 100644 index 0000000000..cc8f880e26 --- /dev/null +++ b/awx_collection/test/awx/test_token.py @@ -0,0 +1,30 @@ +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import pytest + +from awx.main.models import OAuth2AccessToken + + +@pytest.mark.django_db +def test_create_token(run_module, admin_user): + + module_args = { + 'description': 'barfoo', + 'state': 'present', + 'scope': 'read', + 'controller_host': None, + 'controller_username': None, + 'controller_password': None, + 'validate_certs': None, + 'aap_token': None, + 'controller_config_file': None, + } + + result = run_module('token', module_args, admin_user) + assert result.get('changed'), result + + tokens = OAuth2AccessToken.objects.filter(description='barfoo') + assert len(tokens) == 1, 'Tokens with description of barfoo != 0: {0}'.format(len(tokens)) + assert tokens[0].scope == 'read', 'Token was not given read access' diff --git a/awx_collection/tests/integration/targets/credential/tasks/main.yml b/awx_collection/tests/integration/targets/credential/tasks/main.yml index 1b96b2b856..4d499c139c 100644 --- a/awx_collection/tests/integration/targets/credential/tasks/main.yml +++ b/awx_collection/tests/integration/targets/credential/tasks/main.yml @@ -775,6 +775,33 @@ - "result is changed" when: insights_found +- name: Create a valid Insights token credential + credential: + name: "{{ insights_cred_name2 }}" + organization: Default + state: present + credential_type: Insights + inputs: + client_id: joe + client_secret: secret + register: result + +- assert: + that: + - "result is changed" + +- name: Delete an Insights token credential + credential: + name: "{{ insights_cred_name2 }}" + organization: Default + state: absent + credential_type: Insights + register: result + +- assert: + that: + - "result is changed" + - name: Create a valid Tower-to-Tower credential credential: name: "{{ tower_cred_name1 }}" diff --git a/awx_collection/tests/integration/targets/params/tasks/main.yml b/awx_collection/tests/integration/targets/params/tasks/main.yml index 4becc6c3e0..431136bd66 100644 --- a/awx_collection/tests/integration/targets/params/tasks/main.yml +++ b/awx_collection/tests/integration/targets/params/tasks/main.yml @@ -4,7 +4,7 @@ name: "Demo Inventory" 
organization: Default aap_hostname: https://foohostbar.invalid - ignore_errors: yes + ignore_errors: true register: result - assert: diff --git a/awx_collection/tests/integration/targets/schedule_rrule/tasks/main.yml b/awx_collection/tests/integration/targets/schedule_rrule/tasks/main.yml index d6c7cd788e..90b37c927a 100644 --- a/awx_collection/tests/integration/targets/schedule_rrule/tasks/main.yml +++ b/awx_collection/tests/integration/targets/schedule_rrule/tasks/main.yml @@ -7,10 +7,16 @@ ansible.builtin.set_fact: plugin_name: "{{ controller_meta.prefix }}.schedule_rrule" +<<<<<<< HEAD - name: Lookup with too many parameters (should fail) ansible.builtin.set_fact: _rrule: "{{ query(plugin_name, days_of_week=[1, 2], days_of_month=[15]) }}" register: result_too_many_params +======= +- name: Test too many params (failure from validation of terms) + ansible.builtin.debug: + msg: "{{ lookup(plugin_name | string, 'none', 'weekly', start_date='2020-4-16 03:45:07') }}" +>>>>>>> tower/test_stable-2.6 ignore_errors: true - name: Assert proper error is reported for too many parameters @@ -21,8 +27,12 @@ - name: Attempt invalid schedule_rrule lookup with bad frequency ansible.builtin.debug: +<<<<<<< HEAD msg: "{{ lookup(plugin_name, 'john', start_date='2020-04-16 03:45:07') }}" register: result_bad_freq +======= + msg: "{{ lookup(plugin_name, 'john', start_date='2020-4-16 03:45:07') }}" +>>>>>>> tower/test_stable-2.6 ignore_errors: true - name: Assert proper error is reported for bad frequency @@ -34,7 +44,10 @@ - name: Test an invalid start date ansible.builtin.debug: msg: "{{ lookup(plugin_name, 'none', start_date='invalid') }}" +<<<<<<< HEAD register: result_bad_date +======= +>>>>>>> tower/test_stable-2.6 ignore_errors: true - name: Assert plugin error message for invalid start date @@ -46,7 +59,11 @@ - name: Test end_on as count (generic success case) ansible.builtin.debug: msg: "{{ lookup(plugin_name, 'minute', start_date='2020-4-16 03:45:07', end_on='2') }}" +<<<<<<< HEAD register: result_success +======= + register: result +>>>>>>> tower/test_stable-2.6 - name: Assert successful rrule generation ansible.builtin.assert: diff --git a/awx_collection/tests/integration/targets/token/tasks/main.yml b/awx_collection/tests/integration/targets/token/tasks/main.yml new file mode 100644 index 0000000000..92aace82cc --- /dev/null +++ b/awx_collection/tests/integration/targets/token/tasks/main.yml @@ -0,0 +1,154 @@ +--- +- name: Generate a test ID + set_fact: + test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" + when: test_id is not defined + +- name: Generate names + set_fact: + token_description: "AWX-Collection-tests-token-description-{{ test_id }}" + +- name: Try to use a token as a dict which is missing the token parameter + job_list: + controller_oauthtoken: + not_token: "This has no token entry" + register: results + ignore_errors: true + +- assert: + that: + - results is failed + - '"The provided dict in aap_token did not properly contain the token entry" == results.msg' + +- name: Try to use a token as a list + job_list: + controller_oauthtoken: + - dummy_token + register: results + ignore_errors: true + +- assert: + that: + - results is failed + - '"The provided aap_token type was not valid (list). Valid options are str or dict." 
== results.msg' + +- name: Try to delete a token with no existing_token or existing_token_id + token: + state: absent + register: results + ignore_errors: true + +- assert: + that: + - results is failed + # We don't assert a message here because it's handled by ansible + +- name: Try to delete a token with both existing_token or existing_token_id + token: + existing_token: + id: 1234 + existing_token_id: 1234 + state: absent + register: results + ignore_errors: true + +- assert: + that: + - results is failed + # We don't assert a message here because it's handled by ansible + + +- block: + - name: Create a Token + token: + description: '{{ token_description }}' + scope: "write" + state: present + register: new_token + + - name: Validate our token works by token + job_list: + controller_oauthtoken: "{{ controller_token.token }}" + register: job_list + + - name: Validate our token works by object + job_list: + controller_oauthtoken: "{{ controller_token }}" + register: job_list + + always: + - name: Delete our Token with our own token + token: + existing_token: "{{ controller_token }}" + controller_oauthtoken: "{{ controller_token }}" + state: absent + when: controller_token is defined + register: results + + - assert: + that: + - results is changed or results is skipped + +- block: + - name: Create a second token + token: + description: '{{ token_description }}' + scope: "write" + state: present + register: results + + - assert: + that: + - results is changed + + always: + - name: Delete the second Token with our own token + token: + existing_token_id: "{{ controller_token['id'] }}" + controller_oauthtoken: "{{ controller_token }}" + state: absent + when: controller_token is defined + register: results + + - assert: + that: + - results is changed or resuslts is skipped + +- block: + - name: Create a less privileged token (read) + token: + description: '{{ token_description }}' + scope: "read" + state: present + register: read_only_token + + - debug: + msg: "{{read_only_token}}" + + - name: Exercise the aap_token parameter with the new token. + job_list: + aap_token: "{{ read_only_token.ansible_facts.controller_token.token }}" + + - name: Ensure the new token is being used and not the default token for the tests. + token: + aap_token: "{{ read_only_token.ansible_facts.controller_token.token }}" + scope: "write" + state: present + ignore_errors: true + register: result + + - assert: + that: + - "'You don\\'t have permission to POST' in result.msg" + + always: + - name: Delete the less privileged token + token: + existing_token_id: "{{ read_only_token['id'] }}" + state: absent + when: read_only_token is defined + register: result + + - assert: + that: + - result is changed diff --git a/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 b/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 index bffd3eefab..74ed3b56a7 100644 --- a/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 +++ b/awx_collection/tools/roles/template_galaxy/templates/README.md.j2 @@ -119,7 +119,11 @@ The following notes are changes that may require changes to playbooks: - The `notification_configuration` parameter of `tower_notification_template` has changed from a string to a dict. Please use the `lookup` plugin to read an existing file into a dict. - `tower_credential` no longer supports passing a file name to `ssh_key_data`. - The HipChat `notification_type` has been removed and can no longer be created using the `tower_notification_template` module. 
+<<<<<<< HEAD
 - Lookup plugins now always return a list, and if you want a scalar value use `lookup` as opposed to `query`
+=======
+ - Lookup plugins now always return a list, and if you want a scalar value use `lookup` as opposed to `query`
+>>>>>>> tower/test_stable-2.6
 
 {% if collection_package | lower() == "awx" %}
 ## Running Unit Tests
diff --git a/licenses/pbr.txt b/licenses/pbr.txt
new file mode 100644
index 0000000000..68c771a099
--- /dev/null
+++ b/licenses/pbr.txt
@@ -0,0 +1,176 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
diff --git a/licenses/pygithub-2.6.0.tar.gz b/licenses/pygithub-2.6.0.tar.gz
new file mode 100644
index 0000000000..91a2591c13
Binary files /dev/null and b/licenses/pygithub-2.6.0.tar.gz differ
diff --git a/requirements/README.md b/requirements/README.md
index 141973be69..35c73350dd 100644
--- a/requirements/README.md
+++ b/requirements/README.md
@@ -74,3 +74,4 @@ If modifying this library make sure testing with the offline build is performed
 
 Version 4.8 makes us a little bit nervous with changes to `searchwindowsize` https://github.com/pexpect/pexpect/pull/579/files
 Pin to `pexpect==4.7.x` until we have more time to move to `4.8` and test.
+
diff --git a/requirements/requirements.in b/requirements/requirements.in
index 3afb0fef06..48ddac2b47 100644
--- a/requirements/requirements.in
+++ b/requirements/requirements.in
@@ -1,4 +1,3 @@
-
 aiohttp>=3.9.4  # CVE-2024-30251
 ansi2html  # Used to format the stdout from jobs into html for display
 jq  # used for indirect host counting feature
@@ -29,7 +28,7 @@ filelock
 GitPython>=3.1.37  # CVE-2023-41040
 grpcio
 irc
-jinja2>=3.1.3  # CVE-2024-22195
+jinja2>=3.1.6  # CVE-2025-27516
 JSON-log-formatter
 jsonschema
 Markdown  # used for formatting API help
@@ -47,9 +46,10 @@ prometheus_client
 psycopg
 psutil
 pygerduty
+PyGithub <= 2.6.0
 pyopenssl>=23.2.0  # resolve dep conflict from cryptography pin above
 pyparsing==2.4.6  # Upgrading to v3 of pyparsing introduce errors on smart host filtering: Expected 'or' term, found 'or'  (at char 15), (line:1, col:16)
-python-daemon>3.0.0
+python-daemon
 python-dsv-sdk>=1.0.4
 python-tss-sdk>=1.2.1
 pyyaml>=6.0.2  # require packing fix for cython 3 or higher
@@ -60,14 +60,20 @@ redis[hiredis]
 requests
 slack-sdk
 twilio
-twisted[tls]>=23.10.0  # CVE-2023-46137
-uWSGI
+twisted[tls]>=24.7.0  # CVE-2024-41810
+urllib3>=1.26.19  # CVE-2024-37891
+uWSGI>=2.0.28
 uwsgitop
 wheel>=0.38.1  # CVE-2022-40898
 pip==21.2.4  # see UPGRADE BLOCKERs
-setuptools<71.0.0  # see UPGRADE BLOCKERs, path hack in v71 breaks irc deps
+setuptools==80.9.0  # see UPGRADE BLOCKERs
 setuptools_scm[toml]  # see UPGRADE BLOCKERs, xmlsec build dep
 setuptools-rust>=0.11.4  # cryptography build dep
 pkgconfig>=1.5.1  # xmlsec build dep - needed for offline build
 django-flags>=5.0.13
 dispatcherd  # tasking system, previously part of AWX code base
+protobuf>=4.25.8  # CVE-2025-4565
+idna>=3.10  # CVE-2024-3651
+# Temporarily added to use ansible-runner from git branch, to be removed
+# when ansible-runner moves from requirements_git.txt to here
+pbr
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index eedef91c1a..09edb99ce9 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -104,7 +104,7 @@ click==8.1.8
     # via receptorctl
 constantly==23.10.4
     # via twisted
-cryptography==41.0.7
+cryptography==42.0.8
     # via
     #   -r /awx_devel/requirements/requirements.in
     #   adal
@@ -210,6 +210,7 @@ hyperlink==21.0.0
     #   twisted
 idna==3.10
     # via
+    #   -r /awx_devel/requirements/requirements.in
     #   hyperlink
     #   requests
     #   twisted
@@ -364,6 +365,11 @@ propcache==0.2.1
     #   yarl
 protobuf==5.29.3
     # via
+    #   aiohttp
+    #   yarl
+protobuf==4.25.8
+    # via
+    #   -r /awx_devel/requirements/requirements.in
     #   googleapis-common-protos
     #   opentelemetry-proto
 psutil==6.1.1
@@ -494,7 +500,6 @@ smmap==5.0.2
     # via gitdb
 sqlparse==0.5.3
     # via
-    #   -r /awx_devel/requirements/requirements.in
     #   django
     #   django-ansible-base
 tempora==5.8.0
@@ -523,6 +528,7 @@ typing-extensions==4.12.2
     #   twisted
 urllib3==2.3.0
     # via
+    #   -r /awx_devel/requirements/requirements.in
     #   botocore
     #   django-ansible-base
     #   kubernetes
diff --git a/requirements/requirements_git.credentials.txt b/requirements/requirements_git.credentials.txt
new file mode 100644
index 0000000000..1984112ab9
--- /dev/null
+++ b/requirements/requirements_git.credentials.txt
@@ -0,0 +1,2 @@
+# If sources in requirements_git.txt require authentication, add git credentials to this file. Example:
+# https://x-access-token:${PAT}@github.com
diff --git a/requirements/requirements_git.txt b/requirements/requirements_git.txt
index 51a0f4f697..49e435be2f 100644
--- a/requirements/requirements_git.txt
+++ b/requirements/requirements_git.txt
@@ -1,5 +1,11 @@
 git+https://github.com/ansible/system-certifi.git@devel#egg=certifi
+<<<<<<< HEAD
 git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner
 awx-plugins-core @ git+https://github.com/ansible/awx-plugins.git@devel#egg=awx-plugins-core[credentials-github-app]
 django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest-filters,jwt_consumer,resource-registry,rbac,feature-flags]
 awx_plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.git
+=======
+# Remove pbr from requirements.in when moving ansible-runner to requirements.in
+git+https://github.com/ansible/python3-saml.git@devel#egg=python3-saml
+django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest-filters,jwt_consumer,resource-registry,rbac,feature-flags]
+>>>>>>> tower/test_stable-2.6
diff --git a/setup.cfg b/setup.cfg
index 6d7d0873e5..1430906129 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -13,4 +13,14 @@ include_package_data = True
 [options.entry_points]
 console_scripts =
     awx-manage = awx:manage
-
+awx.credential_plugins =
+    conjur = awx.main.credential_plugins.conjur:conjur_plugin
+    hashivault_kv = awx.main.credential_plugins.hashivault:hashivault_kv_plugin
+    hashivault_ssh = awx.main.credential_plugins.hashivault:hashivault_ssh_plugin
+    azure_kv = awx.main.credential_plugins.azure_kv:azure_keyvault_plugin
+    aim = awx.main.credential_plugins.aim:aim_plugin
+    centrify_vault_kv = awx.main.credential_plugins.centrify_vault:centrify_plugin
+    thycotic_dsv = awx.main.credential_plugins.dsv:dsv_plugin
+    thycotic_tss = awx.main.credential_plugins.tss:tss_plugin
+    aws_secretsmanager_credential = awx.main.credential_plugins.aws_secretsmanager:aws_secretmanager_plugin
+    github_app = awx.main.credential_plugins.github_app:github_app_lookup_plugin
diff --git a/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 b/tools/ansible/roles/dockerfile/templates/Dockerfile.j2
index 2f685c2a60..0ae7f8b997 100644
--- a/tools/ansible/roles/dockerfile/templates/Dockerfile.j2
+++ b/tools/ansible/roles/dockerfile/templates/Dockerfile.j2
@@ -34,6 +34,7 @@ RUN rpm --import /etc/pki/rpm-gpg/RPM-GPG-KEY-centosofficial
 RUN dnf -y update && dnf install -y 'dnf-command(config-manager)' && \
     dnf config-manager --set-enabled crb && \
     dnf -y install \
+    openssh-clients \
     iputils \
     gcc \
     gcc-c++ \
@@ -58,6 +59,10 @@ RUN dnf -y update && dnf install -y 'dnf-command(config-manager)' && \
     xmlsec1-devel \
     xmlsec1-openssl-devel
 
+# Add github.com to known hosts
+RUN mkdir -p ~/.ssh && chmod 0700 ~/.ssh
+RUN ssh-keyscan github.com > ~/.ssh/known_hosts
+
 RUN pip3.11 install -vv build
 
 {% if image_architecture == 'ppc64le' %}
@@ -74,9 +79,11 @@ RUN mkdir /tmp/requirements
 ADD requirements/requirements.txt \
     requirements/requirements_tower_uninstall.txt \
     requirements/requirements_git.txt \
+    requirements/requirements_git.credentials.txt \
     /tmp/requirements/
 
-RUN cd /tmp && make requirements_awx
+RUN git config --global credential.helper "store --file=/tmp/requirements/requirements_git.credentials.txt"
+RUN --mount=type=ssh cd /tmp && make requirements_awx
 
 ARG VERSION
 ARG SETUPTOOLS_SCM_PRETEND_VERSION
@@ -102,6 +109,7 @@ RUN DJANGO_SETTINGS_MODULE=awx.settings.defaults SKIP_SECRET_KEY_CHECK=yes SKIP_
 
 {% endif %}
 
+RUN rm /tmp/requirements/requirements_git.credentials.txt
 
 # Final container(s)
 FROM quay.io/centos/centos:stream9
diff --git a/tools/scripts/firehose_tasks.py b/tools/scripts/firehose_tasks.py
old mode 100755
new mode 100644
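
Notes (reviewer-added sketches; illustrative only, not part of the patch):

The conflicted collection README line records standard Ansible behavior: `query`
(or `lookup(..., wantlist=True)`) always returns a list, while plain `lookup`
returns the bare value. Assuming a host with ansible-core installed, the
difference is easy to demonstrate ad hoc:

    # query returns a list; lookup returns a scalar/string
    ansible localhost -m ansible.builtin.debug \
      -a "msg={{ query('ansible.builtin.env', 'HOME') }}"    # => ["/home/you"]
    ansible localhost -m ansible.builtin.debug \
      -a "msg={{ lookup('ansible.builtin.env', 'HOME') }}"   # => "/home/you"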
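The Dockerfile.j2 hunks wire up two ways to fetch private pip requirements
during the image build: a BuildKit SSH mount (`RUN --mount=type=ssh`, with
github.com pre-seeded into known_hosts) for git+ssh sources, and a git
credential store seeded from requirements_git.credentials.txt (deleted again
before the final stage) for HTTPS sources. A sketch of a matching build
invocation; the image tag and key path below are placeholders:

    # --mount=type=ssh requires BuildKit and only sees keys forwarded via --ssh
    export DOCKER_BUILDKIT=1
    ssh-add ~/.ssh/id_ed25519        # any key authorized for the private repos
    docker build --ssh default -t awx-devel:local .

    # HTTPS alternative: the credential store read by
    # `git config credential.helper "store --file=..."` holds one URL per line,
    # e.g. https://x-access-token:${PAT}@github.com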
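The setup.cfg hunk registers the credential plugins under the
`awx.credential_plugins` entry-point group, so consumers can discover them by
group name at runtime instead of hard-coding import paths. A minimal sketch of
that discovery using only the standard library (Python 3.10+ entry_points
selection API, run in an environment where awx is installed):

    # print every plugin registered under the awx.credential_plugins group
    python3 -c 'from importlib.metadata import entry_points as eps; [print(ep.name, "->", ep.value) for ep in eps(group="awx.credential_plugins")]'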