Compare commits

..

1 Commits

Author SHA1 Message Date
Elyézer Rezende
0928571777 Pin ansible-core for collection tests
Signed-off-by: Elyézer Rezende <elyezermr@gmail.com>
2025-07-25 10:42:50 -04:00
107 changed files with 1143 additions and 2256 deletions

View File

@@ -36,7 +36,7 @@ runs:
- name: Upgrade ansible-core - name: Upgrade ansible-core
shell: bash shell: bash
run: python3 -m pip install --upgrade ansible-core run: python3 -m pip install --upgrade 'ansible-core<2.18'
- name: Install system deps - name: Install system deps
shell: bash shell: bash

View File

@@ -172,10 +172,9 @@ jobs:
repository: ansible/awx-operator repository: ansible/awx-operator
path: awx-operator path: awx-operator
- name: Setup python, referencing action at awx relative path - uses: ./awx/.github/actions/setup-python
uses: ./awx/.github/actions/setup-python
with: with:
python-version: '3.x' working-directory: awx
- name: Install playbook dependencies - name: Install playbook dependencies
run: | run: |
@@ -362,7 +361,7 @@ jobs:
python-version: '3.x' python-version: '3.x'
- name: Upgrade ansible-core - name: Upgrade ansible-core
run: python3 -m pip install --upgrade ansible-core run: python3 -m pip install --upgrade 'ansible-core<2.18'
- name: Download coverage artifacts - name: Download coverage artifacts
uses: actions/download-artifact@v4 uses: actions/download-artifact@v4

View File

@@ -10,7 +10,6 @@ on:
- devel - devel
- release_* - release_*
- feature_* - feature_*
- stable-*
jobs: jobs:
push-development-images: push-development-images:
runs-on: ubuntu-latest runs-on: ubuntu-latest

View File

@@ -85,11 +85,9 @@ jobs:
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/ cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
- name: Setup node and npm for new UI build - name: Setup node and npm for new UI build
uses: actions/setup-node@v4 uses: actions/setup-node@v2
with: with:
node-version: '18' node-version: '18'
cache: 'npm'
cache-dependency-path: awx/awx/ui/**/package-lock.json
- name: Prebuild new UI for awx image (to speed up build process) - name: Prebuild new UI for awx image (to speed up build process)
working-directory: awx working-directory: awx

View File

@@ -11,7 +11,6 @@ on:
- devel - devel
- release_** - release_**
- feature_** - feature_**
- stable-**
jobs: jobs:
push: push:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@@ -24,25 +23,35 @@ jobs:
with: with:
show-progress: false show-progress: false
- name: Build awx_devel image to use for schema gen - uses: ./.github/actions/setup-python
uses: ./.github/actions/awx_devel_image
- name: Log in to registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- uses: ./.github/actions/setup-ssh-agent
with: with:
github-token: ${{ secrets.GITHUB_TOKEN }} ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
private-github-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
- name: Pre-pull image to warm build cache
run: |
docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
- name: Build image
run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
- name: Generate API Schema - name: Generate API Schema
run: | run: |
DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} \
COMPOSE_TAG=${{ github.base_ref || github.ref_name }} \
docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \ docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
--workdir=/awx_devel `make print-DEVEL_IMAGE_NAME` /start_tests.sh genschema --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} /start_tests.sh genschema
- name: Upload API Schema - name: Upload API Schema
uses: keithweaver/aws-s3-github-action@v1.0.0 env:
with: AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
command: cp AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
source: ${{ github.workspace }}/schema.json AWS_REGION: 'us-east-1'
destination: s3://awx-public-ci-files/${{ github.ref_name }}/schema.json run: |
aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY }} ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
aws_secret_access_key: ${{ secrets.AWS_SECRET_KEY }} ansible localhost -c local -m aws_s3 \
aws_region: us-east-1 -a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"

View File

@@ -77,7 +77,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
# These should be upgraded in the AWX and Ansible venv before attempting # These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements # to install the actual requirements
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==80.9.0 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==3.1.3 VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==70.3.0 setuptools_scm[toml]==8.1.0 wheel==0.45.1 cython==3.0.11
NAME ?= awx NAME ?= awx
@@ -378,7 +378,7 @@ test_collection:
if [ "$(VENV_BASE)" ]; then \ if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \ . $(VENV_BASE)/awx/bin/activate; \
fi && \ fi && \
if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install "ansible-core<2.19"; fi
ansible --version ansible --version
py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v py.test $(COLLECTION_TEST_DIRS) $(COVERAGE_ARGS) -v
@if [ "${GITHUB_ACTIONS}" = "true" ]; \ @if [ "${GITHUB_ACTIONS}" = "true" ]; \
@@ -417,7 +417,7 @@ install_collection: build_collection
test_collection_sanity: test_collection_sanity:
rm -rf awx_collection_build/ rm -rf awx_collection_build/
rm -rf $(COLLECTION_INSTALL) rm -rf $(COLLECTION_INSTALL)
if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi if ! [ -x "$(shell command -v ansible-test)" ]; then pip install "ansible-core<2.19"; fi
ansible --version ansible --version
COLLECTION_VERSION=1.0.0 $(MAKE) install_collection COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
cd $(COLLECTION_INSTALL) && \ cd $(COLLECTION_INSTALL) && \

View File

@@ -844,7 +844,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED: if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True)) ancestors = set(RoleEvaluation.objects.filter(content_type_id=content_type.id, object_id=obj.id).values_list('role_id', flat=True))
qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True) qs = User.objects.filter(has_roles__in=ancestors) | User.objects.filter(is_superuser=True)
auditor_role = RoleDefinition.objects.filter(name="Platform Auditor").first() auditor_role = RoleDefinition.objects.filter(name="Controller System Auditor").first()
if auditor_role: if auditor_role:
qs |= User.objects.filter(role_assignments__role_definition=auditor_role) qs |= User.objects.filter(role_assignments__role_definition=auditor_role)
return qs.distinct() return qs.distinct()

View File

@@ -10,7 +10,7 @@ from rest_framework import permissions
# AWX # AWX
from awx.main.access import check_user_access from awx.main.access import check_user_access
from awx.main.models import Inventory, UnifiedJob, Organization from awx.main.models import Inventory, UnifiedJob
from awx.main.utils import get_object_or_400 from awx.main.utils import get_object_or_400
logger = logging.getLogger('awx.api.permissions') logger = logging.getLogger('awx.api.permissions')
@@ -228,19 +228,12 @@ class InventoryInventorySourcesUpdatePermission(ModelAccessPermission):
class UserPermission(ModelAccessPermission): class UserPermission(ModelAccessPermission):
def check_post_permissions(self, request, view, obj=None): def check_post_permissions(self, request, view, obj=None):
if not request.data: if not request.data:
return Organization.access_qs(request.user, 'change').exists() return request.user.admin_of_organizations.exists()
elif request.user.is_superuser: elif request.user.is_superuser:
return True return True
raise PermissionDenied() raise PermissionDenied()
class IsSystemAdmin(permissions.BasePermission):
def has_permission(self, request, view):
if not (request.user and request.user.is_authenticated):
return False
return request.user.is_superuser
class IsSystemAdminOrAuditor(permissions.BasePermission): class IsSystemAdminOrAuditor(permissions.BasePermission):
""" """
Allows write access only to system admin users. Allows write access only to system admin users.

View File

@@ -2839,7 +2839,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
{ {
"role": { "role": {
"id": None, "id": None,
"name": _("Platform Auditor"), "name": _("Controller System Auditor"),
"description": _("Can view all aspects of the system"), "description": _("Can view all aspects of the system"),
"user_capabilities": {"unattach": False}, "user_capabilities": {"unattach": False},
}, },
@@ -3027,6 +3027,11 @@ class CredentialSerializer(BaseSerializer):
ret.remove(field) ret.remove(field)
return ret return ret
def validate_organization(self, org):
if self.instance and (not self.instance.managed) and self.instance.credential_type.kind == 'galaxy' and org is None:
raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
return org
def validate_credential_type(self, credential_type): def validate_credential_type(self, credential_type):
if self.instance and credential_type.pk != self.instance.credential_type.pk: if self.instance and credential_type.pk != self.instance.credential_type.pk:
for related_objects in ( for related_objects in (
@@ -3102,6 +3107,9 @@ class CredentialSerializerCreate(CredentialSerializer):
if attrs.get('team'): if attrs.get('team'):
attrs['organization'] = attrs['team'].organization attrs['organization'] = attrs['team'].organization
if 'credential_type' in attrs and attrs['credential_type'].kind == 'galaxy' and list(owner_fields) != ['organization']:
raise serializers.ValidationError({"organization": _("Galaxy credentials must be owned by an Organization.")})
return super(CredentialSerializerCreate, self).validate(attrs) return super(CredentialSerializerCreate, self).validate(attrs)
def create(self, validated_data): def create(self, validated_data):
@@ -5998,7 +6006,7 @@ class InstanceGroupSerializer(BaseSerializer):
if self.instance and not self.instance.is_container_group: if self.instance and not self.instance.is_container_group:
raise serializers.ValidationError(_('pod_spec_override is only valid for container groups')) raise serializers.ValidationError(_('pod_spec_override is only valid for container groups'))
pod_spec_override_json = {} pod_spec_override_json = None
# defect if the value is yaml or json if yaml convert to json # defect if the value is yaml or json if yaml convert to json
try: try:
# convert yaml to json # convert yaml to json

View File

@@ -55,7 +55,8 @@ from wsgiref.util import FileWrapper
# django-ansible-base # django-ansible-base
from ansible_base.lib.utils.requests import get_remote_hosts from ansible_base.lib.utils.requests import get_remote_hosts
from ansible_base.rbac.models import RoleEvaluation from ansible_base.rbac.models import RoleEvaluation, ObjectRole
from ansible_base.rbac import permission_registry
# AWX # AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
@@ -84,6 +85,7 @@ from awx.api.generics import (
from awx.api.views.labels import LabelSubListCreateAttachDetachView from awx.api.views.labels import LabelSubListCreateAttachDetachView
from awx.api.versioning import reverse from awx.api.versioning import reverse
from awx.main import models from awx.main import models
from awx.main.models.rbac import get_role_definition
from awx.main.utils import ( from awx.main.utils import (
camelcase_to_underscore, camelcase_to_underscore,
extract_ansible_vars, extract_ansible_vars,
@@ -749,9 +751,17 @@ class TeamProjectsList(SubListAPIView):
def get_queryset(self): def get_queryset(self):
team = self.get_parent_object() team = self.get_parent_object()
self.check_parent_access(team) self.check_parent_access(team)
my_qs = self.model.accessible_objects(self.request.user, 'read_role') model_ct = permission_registry.content_type_model.objects.get_for_model(self.model)
team_qs = models.Project.accessible_objects(team, 'read_role') parent_ct = permission_registry.content_type_model.objects.get_for_model(self.parent_model)
return my_qs & team_qs
rd = get_role_definition(team.member_role)
role = ObjectRole.objects.filter(object_id=team.id, content_type=parent_ct, role_definition=rd).first()
if role is None:
# Team has no permissions, therefore team has no projects
return self.model.objects.none()
else:
project_qs = self.model.accessible_objects(self.request.user, 'read_role')
return project_qs.filter(id__in=RoleEvaluation.objects.filter(content_type_id=model_ct.id, role=role).values_list('object_id'))
class TeamActivityStreamList(SubListAPIView): class TeamActivityStreamList(SubListAPIView):
@@ -866,23 +876,13 @@ class ProjectTeamsList(ListAPIView):
serializer_class = serializers.TeamSerializer serializer_class = serializers.TeamSerializer
def get_queryset(self): def get_queryset(self):
parent = get_object_or_404(models.Project, pk=self.kwargs['pk']) p = get_object_or_404(models.Project, pk=self.kwargs['pk'])
if not self.request.user.can_access(models.Project, 'read', parent): if not self.request.user.can_access(models.Project, 'read', p):
raise PermissionDenied() raise PermissionDenied()
project_ct = ContentType.objects.get_for_model(models.Project)
project_ct = ContentType.objects.get_for_model(parent)
team_ct = ContentType.objects.get_for_model(self.model) team_ct = ContentType.objects.get_for_model(self.model)
all_roles = models.Role.objects.filter(Q(descendents__content_type=project_ct) & Q(descendents__object_id=p.pk), content_type=team_ct)
roles_on_project = models.Role.objects.filter( return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in all_roles])
content_type=project_ct,
object_id=parent.pk,
)
team_member_parent_roles = models.Role.objects.filter(children__in=roles_on_project, role_field='member_role', content_type=team_ct).distinct()
team_ids = team_member_parent_roles.values_list('object_id', flat=True)
my_qs = self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=team_ids)
return my_qs
class ProjectSchedulesList(SubListCreateAPIView): class ProjectSchedulesList(SubListCreateAPIView):
@@ -1152,6 +1152,7 @@ class UserOrganizationsList(OrganizationCountsMixin, SubListAPIView):
model = models.Organization model = models.Organization
serializer_class = serializers.OrganizationSerializer serializer_class = serializers.OrganizationSerializer
parent_model = models.User parent_model = models.User
relationship = 'organizations'
def get_queryset(self): def get_queryset(self):
parent = self.get_parent_object() parent = self.get_parent_object()
@@ -1165,6 +1166,7 @@ class UserAdminOfOrganizationsList(OrganizationCountsMixin, SubListAPIView):
model = models.Organization model = models.Organization
serializer_class = serializers.OrganizationSerializer serializer_class = serializers.OrganizationSerializer
parent_model = models.User parent_model = models.User
relationship = 'admin_of_organizations'
def get_queryset(self): def get_queryset(self):
parent = self.get_parent_object() parent = self.get_parent_object()

View File

@@ -12,7 +12,7 @@ import re
import asn1 import asn1
from awx.api import serializers from awx.api import serializers
from awx.api.generics import GenericAPIView, Response from awx.api.generics import GenericAPIView, Response
from awx.api.permissions import IsSystemAdmin from awx.api.permissions import IsSystemAdminOrAuditor
from awx.main import models from awx.main import models
from cryptography import x509 from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives import hashes, serialization
@@ -48,7 +48,7 @@ class InstanceInstallBundle(GenericAPIView):
name = _('Install Bundle') name = _('Install Bundle')
model = models.Instance model = models.Instance
serializer_class = serializers.InstanceSerializer serializer_class = serializers.InstanceSerializer
permission_classes = (IsSystemAdmin,) permission_classes = (IsSystemAdminOrAuditor,)
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
instance_obj = self.get_object() instance_obj = self.get_object()

View File

@@ -8,8 +8,6 @@ import operator
from collections import OrderedDict from collections import OrderedDict
from django.conf import settings from django.conf import settings
from django.core.cache import cache
from django.db import connection
from django.utils.encoding import smart_str from django.utils.encoding import smart_str
from django.utils.decorators import method_decorator from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie from django.views.decorators.csrf import ensure_csrf_cookie
@@ -28,7 +26,6 @@ from awx.api.generics import APIView
from awx.conf.registry import settings_registry from awx.conf.registry import settings_registry
from awx.main.analytics import all_collectors from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment from awx.main.ha import is_ha_environment
from awx.main.tasks.system import clear_setting_cache
from awx.main.utils import get_awx_version, get_custom_venv_choices from awx.main.utils import get_awx_version, get_custom_venv_choices
from awx.main.utils.licensing import validate_entitlement_manifest from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import URLPathVersioning, reverse, drf_reverse from awx.api.versioning import URLPathVersioning, reverse, drf_reverse
@@ -224,12 +221,8 @@ class ApiV2AttachView(APIView):
subscription_id = data.get('subscription_id', None) subscription_id = data.get('subscription_id', None)
if not subscription_id: if not subscription_id:
return Response({"error": _("No subscription ID provided.")}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": _("No subscription ID provided.")}, status=status.HTTP_400_BAD_REQUEST)
# Ensure we always use the latest subscription credentials
cache.delete_many(['SUBSCRIPTIONS_CLIENT_ID', 'SUBSCRIPTIONS_CLIENT_SECRET'])
user = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None) user = getattr(settings, 'SUBSCRIPTIONS_CLIENT_ID', None)
pw = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None) pw = getattr(settings, 'SUBSCRIPTIONS_CLIENT_SECRET', None)
if not (user and pw):
return Response({"error": _("Missing subscription credentials")}, status=status.HTTP_400_BAD_REQUEST)
if subscription_id and user and pw: if subscription_id and user and pw:
data = request.data.copy() data = request.data.copy()
try: try:
@@ -252,7 +245,6 @@ class ApiV2AttachView(APIView):
if sub['subscription_id'] == subscription_id: if sub['subscription_id'] == subscription_id:
sub['valid_key'] = True sub['valid_key'] = True
settings.LICENSE = sub settings.LICENSE = sub
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
return Response(sub) return Response(sub)
return Response({"error": _("Error processing subscription metadata.")}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": _("Error processing subscription metadata.")}, status=status.HTTP_400_BAD_REQUEST)
@@ -272,6 +264,7 @@ class ApiV2ConfigView(APIView):
'''Return various sitewide configuration settings''' '''Return various sitewide configuration settings'''
license_data = get_licenser().validate() license_data = get_licenser().validate()
if not license_data.get('valid_key', False): if not license_data.get('valid_key', False):
license_data = {} license_data = {}
@@ -335,7 +328,6 @@ class ApiV2ConfigView(APIView):
try: try:
license_data_validated = get_licenser().license_from_manifest(license_data) license_data_validated = get_licenser().license_from_manifest(license_data)
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
except Exception: except Exception:
logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username)) logger.warning(smart_str(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
@@ -354,7 +346,6 @@ class ApiV2ConfigView(APIView):
def delete(self, request): def delete(self, request):
try: try:
settings.LICENSE = {} settings.LICENSE = {}
connection.on_commit(lambda: clear_setting_cache.delay(['LICENSE']))
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
except Exception: except Exception:
# FIX: Log # FIX: Log

View File

@@ -639,9 +639,7 @@ class UserAccess(BaseAccess):
prefetch_related = ('resource',) prefetch_related = ('resource',)
def filtered_queryset(self): def filtered_queryset(self):
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and ( if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
Organization.access_qs(self.user, 'change').exists() or Organization.access_qs(self.user, 'audit').exists()
):
qs = User.objects.all() qs = User.objects.all()
else: else:
qs = ( qs = (
@@ -1226,9 +1224,7 @@ class TeamAccess(BaseAccess):
) )
def filtered_queryset(self): def filtered_queryset(self):
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and ( if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
Organization.access_qs(self.user, 'change').exists() or Organization.access_qs(self.user, 'audit').exists()
):
return self.model.objects.all() return self.model.objects.all()
return self.model.objects.filter( return self.model.objects.filter(
Q(organization__in=Organization.accessible_pk_qs(self.user, 'member_role')) | Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role')) Q(organization__in=Organization.accessible_pk_qs(self.user, 'member_role')) | Q(pk__in=self.model.accessible_pk_qs(self.user, 'read_role'))
@@ -2568,7 +2564,7 @@ class NotificationTemplateAccess(BaseAccess):
if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED: if settings.ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED:
return self.model.access_qs(self.user, 'view') return self.model.access_qs(self.user, 'view')
return self.model.objects.filter( return self.model.objects.filter(
Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=Organization.access_qs(self.user, 'audit')) Q(organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) | Q(organization__in=self.user.auditor_of_organizations)
).distinct() ).distinct()
@check_superuser @check_superuser
@@ -2603,7 +2599,7 @@ class NotificationAccess(BaseAccess):
def filtered_queryset(self): def filtered_queryset(self):
return self.model.objects.filter( return self.model.objects.filter(
Q(notification_template__organization__in=Organization.access_qs(self.user, 'add_notificationtemplate')) Q(notification_template__organization__in=Organization.access_qs(self.user, 'add_notificationtemplate'))
| Q(notification_template__organization__in=Organization.access_qs(self.user, 'audit')) | Q(notification_template__organization__in=self.user.auditor_of_organizations)
).distinct() ).distinct()
def can_delete(self, obj): def can_delete(self, obj):

View File

@@ -1094,13 +1094,3 @@ register(
category=('PolicyAsCode'), category=('PolicyAsCode'),
category_slug='policyascode', category_slug='policyascode',
) )
def policy_as_code_validate(serializer, attrs):
opa_host = attrs.get('OPA_HOST', '')
if opa_host and (opa_host.startswith('http://') or opa_host.startswith('https://')):
raise serializers.ValidationError({'OPA_HOST': _("OPA_HOST should not include 'http://' or 'https://' prefixes. Please enter only the hostname.")})
return attrs
register_validate('policyascode', policy_as_code_validate)

View File

@@ -4,7 +4,6 @@
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from django.db import transaction from django.db import transaction
from crum import impersonate from crum import impersonate
from ansible_base.resource_registry.signals.handlers import no_reverse_sync
from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
from awx.main.signals import disable_computed_fields from awx.main.signals import disable_computed_fields
@@ -17,9 +16,8 @@ class Command(BaseCommand):
def handle(self, *args, **kwargs): def handle(self, *args, **kwargs):
# Wrap the operation in an atomic block, so we do not on accident # Wrap the operation in an atomic block, so we do not on accident
# create the organization but not create the project, etc. # create the organization but not create the project, etc.
with no_reverse_sync(): with transaction.atomic():
with transaction.atomic(): self._handle()
self._handle()
def _handle(self): def _handle(self):
changed = False changed = False

View File

@@ -26,11 +26,6 @@ def change_inventory_source_org_unique(apps, schema_editor):
logger.info(f'Set database constraint rule for {r} inventory source objects') logger.info(f'Set database constraint rule for {r} inventory source objects')
def rename_wfjt(apps, schema_editor):
cls = apps.get_model('main', 'WorkflowJobTemplate')
_rename_duplicates(cls)
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
@@ -45,7 +40,6 @@ class Migration(migrations.Migration):
name='org_unique', name='org_unique',
field=models.BooleanField(blank=True, default=True, editable=False, help_text='Used internally to selectively enforce database constraint on name'), field=models.BooleanField(blank=True, default=True, editable=False, help_text='Used internally to selectively enforce database constraint on name'),
), ),
migrations.RunPython(rename_wfjt, migrations.RunPython.noop),
migrations.RunPython(change_inventory_source_org_unique, migrations.RunPython.noop), migrations.RunPython(change_inventory_source_org_unique, migrations.RunPython.noop),
migrations.AddConstraint( migrations.AddConstraint(
model_name='unifiedjobtemplate', model_name='unifiedjobtemplate',

View File

@@ -1,20 +1,9 @@
from django.db import migrations from django.db import migrations
# AWX
from awx.main.models import CredentialType
from awx.main.utils.common import set_current_apps
def setup_tower_managed_defaults(apps, schema_editor):
set_current_apps(apps)
CredentialType.setup_tower_managed_defaults(apps)
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('main', '0200_template_name_constraint'), ('main', '0200_template_name_constraint'),
] ]
operations = [ operations = []
migrations.RunPython(setup_tower_managed_defaults),
]

View File

@@ -1,102 +0,0 @@
# Generated by Django migration for converting Controller role definitions
from ansible_base.rbac.migrations._utils import give_permissions
from django.db import migrations
def convert_controller_role_definitions(apps, schema_editor):
"""
Convert Controller role definitions to regular role definitions:
- Controller Organization Admin -> Organization Admin
- Controller Organization Member -> Organization Member
- Controller Team Admin -> Team Admin
- Controller Team Member -> Team Member
- Controller System Auditor -> Platform Auditor
Then delete the old Controller role definitions.
"""
RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment')
Permission = apps.get_model('dab_rbac', 'DABPermission')
# Mapping of old Controller role names to new role names
role_mappings = {
'Controller Organization Admin': 'Organization Admin',
'Controller Organization Member': 'Organization Member',
'Controller Team Admin': 'Team Admin',
'Controller Team Member': 'Team Member',
}
for old_name, new_name in role_mappings.items():
# Find the old Controller role definition
old_role = RoleDefinition.objects.filter(name=old_name).first()
if not old_role:
continue # Skip if the old role doesn't exist
# Find the new role definition
new_role = RoleDefinition.objects.get(name=new_name)
# Collect all the assignments that need to be migrated
# Group by object (content_type + object_id) to batch the give_permissions calls
assignments_by_object = {}
# Get user assignments
user_assignments = RoleUserAssignment.objects.filter(role_definition=old_role).select_related('object_role')
for assignment in user_assignments:
key = (assignment.object_role.content_type_id, assignment.object_role.object_id)
if key not in assignments_by_object:
assignments_by_object[key] = {'users': [], 'teams': []}
assignments_by_object[key]['users'].append(assignment.user)
# Get team assignments
team_assignments = RoleTeamAssignment.objects.filter(role_definition=old_role).select_related('object_role')
for assignment in team_assignments:
key = (assignment.object_role.content_type_id, assignment.object_role.object_id)
if key not in assignments_by_object:
assignments_by_object[key] = {'users': [], 'teams': []}
assignments_by_object[key]['teams'].append(assignment.team.id)
# Use give_permissions to create new assignments with the new role definition
for (content_type_id, object_id), data in assignments_by_object.items():
if data['users'] or data['teams']:
give_permissions(
apps,
new_role,
users=data['users'],
teams=data['teams'],
object_id=object_id,
content_type_id=content_type_id,
)
# Delete the old role definition (this will cascade to delete old assignments and ObjectRoles)
old_role.delete()
# Create or get Platform Auditor
auditor_rd, created = RoleDefinition.objects.get_or_create(
name='Platform Auditor',
defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
)
if created:
auditor_rd.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))
old_rd = RoleDefinition.objects.filter(name='Controller System Auditor').first()
if old_rd:
for assignment in RoleUserAssignment.objects.filter(role_definition=old_rd):
RoleUserAssignment.objects.create(
user=assignment.user,
role_definition=auditor_rd,
)
# Delete the Controller System Auditor role
RoleDefinition.objects.filter(name='Controller System Auditor').delete()
class Migration(migrations.Migration):
dependencies = [
('main', '0201_create_managed_creds'),
]
operations = [
migrations.RunPython(convert_controller_role_definitions),
]

View File

@@ -1,55 +1,34 @@
# Generated by Django 4.2.10 on 2024-09-16 10:22 # Generated by Django 4.2.10 on 2024-09-16 10:22
from django.db import migrations, models from django.db import migrations, models
from awx.main.migrations._create_system_jobs import delete_clear_tokens_sjt from awx.main.migrations._create_system_jobs import delete_clear_tokens_sjt
# --- START of function merged from 0203_rename_github_app_kind.py ---
def update_github_app_kind(apps, schema_editor):
    """
    Data migration step: rename the CredentialType kind 'github_app' to
    'github_app_lookup'.

    The entry point key for the GitHub App credential plugin changed, so any
    stored CredentialType rows still carrying the old kind are rewritten with
    a single bulk UPDATE against the database this migration runs on.
    """
    credential_type_model = apps.get_model('main', 'CredentialType')
    database_alias = schema_editor.connection.alias
    stale_rows = credential_type_model.objects.using(database_alias).filter(kind='github_app')
    stale_rows.update(kind='github_app_lookup')
# --- END of function merged from 0203_rename_github_app_kind.py ---
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [
('main', '0203_remove_team_of_teams'), ('main', '0201_create_managed_creds'),
] ]
operations = [ operations = [
migrations.DeleteModel( migrations.DeleteModel(
name='Profile', name='Profile',
), ),
# Remove SSO app content # Remove SSO app content
# delete all sso application migrations # delete all sso application migrations
# Added reverse_sql=migrations.RunSQL.noop to make this reversible for tests migrations.RunSQL("DELETE FROM django_migrations WHERE app = 'sso';"),
migrations.RunSQL("DELETE FROM django_migrations WHERE app = 'sso';", reverse_sql=migrations.RunSQL.noop),
# delete all sso application content group permissions # delete all sso application content group permissions
# Added reverse_sql=migrations.RunSQL.noop to make this reversible for tests
migrations.RunSQL( migrations.RunSQL(
"DELETE FROM auth_group_permissions " "DELETE FROM auth_group_permissions "
"WHERE permission_id IN " "WHERE permission_id IN "
"(SELECT id FROM auth_permission WHERE content_type_id in (SELECT id FROM django_content_type WHERE app_label = 'sso'));", "(SELECT id FROM auth_permission WHERE content_type_id in (SELECT id FROM django_content_type WHERE app_label = 'sso'));"
reverse_sql=migrations.RunSQL.noop,
), ),
# delete all sso application content permissions # delete all sso application content permissions
# Added reverse_sql=migrations.RunSQL.noop to make this reversible for tests migrations.RunSQL("DELETE FROM auth_permission " "WHERE content_type_id IN (SELECT id FROM django_content_type WHERE app_label = 'sso');"),
migrations.RunSQL(
"DELETE FROM auth_permission " "WHERE content_type_id IN (SELECT id FROM django_content_type WHERE app_label = 'sso');",
reverse_sql=migrations.RunSQL.noop,
),
# delete sso application content type # delete sso application content type
# Added reverse_sql=migrations.RunSQL.noop to make this reversible for tests migrations.RunSQL("DELETE FROM django_content_type WHERE app_label = 'sso';"),
migrations.RunSQL("DELETE FROM django_content_type WHERE app_label = 'sso';", reverse_sql=migrations.RunSQL.noop),
# drop sso application created table # drop sso application created table
# Added reverse_sql=migrations.RunSQL.noop to make this reversible for tests migrations.RunSQL("DROP TABLE IF EXISTS sso_userenterpriseauth;"),
migrations.RunSQL("DROP TABLE IF EXISTS sso_userenterpriseauth;", reverse_sql=migrations.RunSQL.noop),
# Alter inventory source source field # Alter inventory source source field
migrations.AlterField( migrations.AlterField(
model_name='inventorysource', model_name='inventorysource',
@@ -118,7 +97,4 @@ class Migration(migrations.Migration):
max_length=32, max_length=32,
), ),
), ),
# --- START of operations merged from 0203_rename_github_app_kind.py ---
migrations.RunPython(update_github_app_kind, migrations.RunPython.noop),
# --- END of operations merged from 0203_rename_github_app_kind.py ---
] ]

View File

@@ -1,22 +0,0 @@
import logging
from django.db import migrations
from awx.main.migrations._dab_rbac import consolidate_indirect_user_roles
logger = logging.getLogger('awx.main.migrations')
class Migration(migrations.Migration):
    # Data migration: flattens nested (team-in-team) memberships into direct
    # user memberships via consolidate_indirect_user_roles.
    dependencies = [
        ('main', '0202_convert_controller_role_definitions'),
    ]
    # The DAB RBAC app makes substantial model changes which by change-ordering comes after this
    # not including run_before might sometimes work but this enforces a more strict and stable order
    # for both applying migrations forwards and backwards
    run_before = [("dab_rbac", "0004_remote_permissions_additions")]
    operations = [
        # Reverse is a no-op: removed team-to-team assignments are not
        # reconstructed on rollback.
        migrations.RunPython(consolidate_indirect_user_roles, migrations.RunPython.noop),
    ]

View File

@@ -1,6 +1,5 @@
import logging import logging
logger = logging.getLogger('awx.main.migrations') logger = logging.getLogger('awx.main.migrations')

View File

@@ -1,6 +1,5 @@
import json import json
import logging import logging
from collections import defaultdict
from django.apps import apps as global_apps from django.apps import apps as global_apps
from django.db.models import ForeignKey from django.db.models import ForeignKey
@@ -18,7 +17,6 @@ logger = logging.getLogger('awx.main.migrations._dab_rbac')
def create_permissions_as_operation(apps, schema_editor): def create_permissions_as_operation(apps, schema_editor):
logger.info('Running data migration create_permissions_as_operation')
# NOTE: the DAB ContentType changes adjusted how they fire # NOTE: the DAB ContentType changes adjusted how they fire
# before they would fire on every app config, like contenttypes # before they would fire on every app config, like contenttypes
create_dab_permissions(global_apps.get_app_config("main"), apps=apps) create_dab_permissions(global_apps.get_app_config("main"), apps=apps)
@@ -168,15 +166,11 @@ def migrate_to_new_rbac(apps, schema_editor):
This method moves the assigned permissions from the old rbac.py models This method moves the assigned permissions from the old rbac.py models
to the new RoleDefinition and ObjectRole models to the new RoleDefinition and ObjectRole models
""" """
logger.info('Running data migration migrate_to_new_rbac')
Role = apps.get_model('main', 'Role') Role = apps.get_model('main', 'Role')
RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition') RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment') RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
Permission = apps.get_model('dab_rbac', 'DABPermission') Permission = apps.get_model('dab_rbac', 'DABPermission')
if Permission.objects.count() == 0:
raise RuntimeError('Running migrate_to_new_rbac requires DABPermission objects created first')
# remove add premissions that are not valid for migrations from old versions # remove add premissions that are not valid for migrations from old versions
for perm_str in ('add_organization', 'add_jobtemplate'): for perm_str in ('add_organization', 'add_jobtemplate'):
perm = Permission.objects.filter(codename=perm_str).first() perm = Permission.objects.filter(codename=perm_str).first()
@@ -256,14 +250,11 @@ def migrate_to_new_rbac(apps, schema_editor):
# Create new replacement system auditor role # Create new replacement system auditor role
new_system_auditor, created = RoleDefinition.objects.get_or_create( new_system_auditor, created = RoleDefinition.objects.get_or_create(
name='Platform Auditor', name='Controller System Auditor',
defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True}, defaults={'description': 'Migrated singleton role giving read permission to everything', 'managed': True},
) )
new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view'))) new_system_auditor.permissions.add(*list(Permission.objects.filter(codename__startswith='view')))
if created:
logger.info(f'Created RoleDefinition {new_system_auditor.name} pk={new_system_auditor.pk} with {new_system_auditor.permissions.count()} permissions')
# migrate is_system_auditor flag, because it is no longer handled by a system role # migrate is_system_auditor flag, because it is no longer handled by a system role
old_system_auditor = Role.objects.filter(singleton_name='system_auditor').first() old_system_auditor = Role.objects.filter(singleton_name='system_auditor').first()
if old_system_auditor: if old_system_auditor:
@@ -292,9 +283,8 @@ def get_or_create_managed(name, description, ct, permissions, RoleDefinition):
def setup_managed_role_definitions(apps, schema_editor): def setup_managed_role_definitions(apps, schema_editor):
""" """
Idempotent method to create or sync the managed role definitions Idepotent method to create or sync the managed role definitions
""" """
logger.info('Running data migration setup_managed_role_definitions')
to_create = { to_create = {
'object_admin': '{cls.__name__} Admin', 'object_admin': '{cls.__name__} Admin',
'org_admin': 'Organization Admin', 'org_admin': 'Organization Admin',
@@ -336,6 +326,16 @@ def setup_managed_role_definitions(apps, schema_editor):
to_create['object_admin'].format(cls=cls), f'Has all permissions to a single {cls._meta.verbose_name}', ct, indiv_perms, RoleDefinition to_create['object_admin'].format(cls=cls), f'Has all permissions to a single {cls._meta.verbose_name}', ct, indiv_perms, RoleDefinition
) )
) )
if cls_name == 'team':
managed_role_definitions.append(
get_or_create_managed(
'Controller Team Admin',
f'Has all permissions to a single {cls._meta.verbose_name}',
ct,
indiv_perms,
RoleDefinition,
)
)
if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')): if 'org_children' in to_create and (cls_name not in ('organization', 'instancegroup', 'team')):
org_child_perms = object_perms.copy() org_child_perms = object_perms.copy()
@@ -376,6 +376,18 @@ def setup_managed_role_definitions(apps, schema_editor):
RoleDefinition, RoleDefinition,
) )
) )
if action == 'member' and cls_name in ('organization', 'team'):
suffix = to_create['special'].format(cls=cls, action=action.title())
rd_name = f'Controller {suffix}'
managed_role_definitions.append(
get_or_create_managed(
rd_name,
f'Has {action} permissions to a single {cls._meta.verbose_name}',
ct,
perm_list,
RoleDefinition,
)
)
if 'org_admin' in to_create: if 'org_admin' in to_create:
managed_role_definitions.append( managed_role_definitions.append(
@@ -387,6 +399,15 @@ def setup_managed_role_definitions(apps, schema_editor):
RoleDefinition, RoleDefinition,
) )
) )
managed_role_definitions.append(
get_or_create_managed(
'Controller Organization Admin',
'Has all permissions to a single organization and all objects inside of it',
org_ct,
org_perms,
RoleDefinition,
)
)
# Special "organization action" roles # Special "organization action" roles
audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')] audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
@@ -427,115 +448,3 @@ def setup_managed_role_definitions(apps, schema_editor):
for role_definition in unexpected_role_definitions: for role_definition in unexpected_role_definitions:
logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}') logger.info(f'Deleting old managed role definition {role_definition.name}, pk={role_definition.pk}')
role_definition.delete() role_definition.delete()
def get_team_to_team_relationships(apps, team_member_role):
    """
    Map each parent team to the teams that are direct members of it.

    Scans every RoleTeamAssignment carrying the given 'Team Member' role
    definition: the assignment's object_id identifies the parent team and
    assignment.team the child team.

    Returns a defaultdict(list): parent_team_id -> [child_team_id, ...].
    """
    RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment')
    relationships = defaultdict(list)
    member_assignments = RoleTeamAssignment.objects.filter(role_definition=team_member_role).select_related('team')
    for member_assignment in member_assignments:
        # object_id is stored as text on the assignment; normalize to int keys
        relationships[int(member_assignment.object_id)].append(member_assignment.team.id)
    return relationships
def get_all_user_members_of_team(apps, team_member_role, team_id, team_to_team_map, visited=None):
    """
    Recursively find all users who are members of a team, including through nested teams.

    Args:
        apps: Django app registry (historical models) used to look up RoleUserAssignment.
        team_member_role: the 'Team Member' RoleDefinition to match assignments on.
        team_id: id of the team whose direct and inherited user members are wanted.
        team_to_team_map: dict of parent_team_id -> [child_team_id, ...]
            (as built by get_team_to_team_relationships).
        visited: set of team ids already expanded; internal cycle protection,
            callers normally omit it.

    Returns:
        set of user objects (whatever the assignment's ``.user`` yields).
    """
    if visited is None:
        visited = set()
    if team_id in visited:
        return set()  # already expanded (or being expanded) -- breaks membership cycles
    visited.add(team_id)
    all_users = set()
    # Direct user assignments to this team
    RoleUserAssignment = apps.get_model('dab_rbac', 'RoleUserAssignment')
    user_assignments = RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_id).select_related('user')
    for assignment in user_assignments:
        all_users.add(assignment.user)
    # Recurse into child teams. The visited set is shared (not copied) across
    # recursive calls: the previous implementation passed visited.copy(), which
    # re-expands any team reachable through multiple paths and degrades
    # exponentially on dense membership graphs. Sharing is result-identical,
    # since a skipped team's users were already added to the union by whichever
    # path expanded it first.
    for child_team_id in team_to_team_map.get(team_id, []):
        all_users.update(get_all_user_members_of_team(apps, team_member_role, child_team_id, team_to_team_map, visited))
    return all_users
def remove_team_to_team_assignment(apps, team_member_role, parent_team_id, child_team_id):
    """
    Delete the direct membership of one team (child) inside another (parent).

    Removes both the DAB RBAC RoleTeamAssignment and the mirrored entry in the
    legacy Role graph (parent's member_role children).
    """
    Team = apps.get_model('main', 'Team')
    RoleTeamAssignment = apps.get_model('dab_rbac', 'RoleTeamAssignment')
    parent = Team.objects.get(id=parent_team_id)
    child = Team.objects.get(id=child_team_id)
    # Drop the DAB RBAC assignment linking the child team to the parent team
    RoleTeamAssignment.objects.filter(role_definition=team_member_role, object_id=parent_team_id, team=child).delete()
    # Keep the mirrored legacy Team model in sync: drop the child from member_role children
    parent.member_role.children.filter(object_id=child_team_id).delete()
def consolidate_indirect_user_roles(apps, schema_editor):
    """
    A user should have a member role for every team they were indirectly
    a member of. ex. Team A is a member of Team B. All users in Team A
    previously were only members of Team A. They should now be members of
    Team A and Team B.

    Runs as a data migration: flattens nested team memberships into direct
    RoleUserAssignments (via give_permissions), mirrors them onto the legacy
    Team.member_role, then deletes the team-to-team assignments themselves.
    """
    # get models for membership on teams
    RoleDefinition = apps.get_model('dab_rbac', 'RoleDefinition')
    Team = apps.get_model('main', 'Team')
    team_member_role = RoleDefinition.objects.get(name='Team Member')
    team_to_team_map = get_team_to_team_relationships(apps, team_member_role)
    if not team_to_team_map:
        return  # No team-to-team relationships to consolidate
    # Get content type for Team - needed for give_permissions
    # NOTE(review): get_for_model on a historical (migration-state) model goes
    # through the real ContentType cache -- confirm it resolves the same row as
    # the contenttypes model from `apps` would.
    try:
        from django.contrib.contenttypes.models import ContentType
        team_content_type = ContentType.objects.get_for_model(Team)
    except ImportError:
        # Fallback if ContentType is not available
        ContentType = apps.get_model('contenttypes', 'ContentType')
        team_content_type = ContentType.objects.get_for_model(Team)
    # Get all users who should be direct members of a team
    for parent_team_id, child_team_ids in team_to_team_map.items():
        all_users = get_all_user_members_of_team(apps, team_member_role, parent_team_id, team_to_team_map)
        # Create direct RoleUserAssignments for all users
        if all_users:
            give_permissions(apps=apps, rd=team_member_role, users=list(all_users), object_id=parent_team_id, content_type_id=team_content_type.id)
            # Mirror assignments to Team model
            parent_team = Team.objects.get(id=parent_team_id)
            for user in all_users:
                parent_team.member_role.members.add(user.id)
        # Remove all team-to-team assignments for parent team
        for child_team_id in child_team_ids:
            remove_team_to_team_assignment(apps, team_member_role, parent_team_id, child_team_id)

View File

@@ -172,17 +172,35 @@ def cleanup_created_modified_by(sender, **kwargs):
pre_delete.connect(cleanup_created_modified_by, sender=User) pre_delete.connect(cleanup_created_modified_by, sender=User)
@property
def user_get_organizations(user):
    # Organizations the user holds the RBAC 'member' permission on; exposed as
    # a read-only attribute on User via add_to_class elsewhere in this module.
    return Organization.access_qs(user, 'member')
@property
def user_get_admin_of_organizations(user):
    # Organizations the user holds the RBAC 'change' permission on; exposed as
    # a read-only attribute on User via add_to_class elsewhere in this module.
    return Organization.access_qs(user, 'change')
@property
def user_get_auditor_of_organizations(user):
    # Organizations the user holds the RBAC 'audit' permission on; exposed as
    # a read-only attribute on User via add_to_class elsewhere in this module.
    return Organization.access_qs(user, 'audit')
@property @property
def created(user): def created(user):
return user.date_joined return user.date_joined
# Attach the RBAC-derived organization querysets as read-only attributes on
# the (externally defined) User model.
User.add_to_class('organizations', user_get_organizations)
User.add_to_class('admin_of_organizations', user_get_admin_of_organizations)
User.add_to_class('auditor_of_organizations', user_get_auditor_of_organizations)
User.add_to_class('created', created) User.add_to_class('created', created)
def get_system_auditor_role(): def get_system_auditor_role():
rd, created = RoleDefinition.objects.get_or_create( rd, created = RoleDefinition.objects.get_or_create(
name='Platform Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'} name='Controller System Auditor', defaults={'description': 'Migrated singleton role giving read permission to everything'}
) )
if created: if created:
rd.permissions.add(*list(permission_registry.permission_qs.filter(codename__startswith='view'))) rd.permissions.add(*list(permission_registry.permission_qs.filter(codename__startswith='view')))

View File

@@ -1024,10 +1024,7 @@ class InventorySourceOptions(BaseModel):
# If a credential was provided, it's important that it matches # If a credential was provided, it's important that it matches
# the actual inventory source being used (Amazon requires Amazon # the actual inventory source being used (Amazon requires Amazon
# credentials; Rackspace requires Rackspace credentials; etc...) # credentials; Rackspace requires Rackspace credentials; etc...)
# TODO: AAP-53978 check that this matches new awx-plugin content for ESXI if source.replace('ec2', 'aws') != cred.kind:
if source == 'vmware_esxi' and source.replace('vmware_esxi', 'vmware') != cred.kind:
return _('VMWARE inventory sources (such as %s) require credentials for the matching cloud service.') % source
if source == 'ec2' and source.replace('ec2', 'aws') != cred.kind:
return _('Cloud-based inventory sources (such as %s) require credentials for the matching cloud service.') % source return _('Cloud-based inventory sources (such as %s) require credentials for the matching cloud service.') % source
# Allow an EC2 source to omit the credential. If Tower is running on # Allow an EC2 source to omit the credential. If Tower is running on
# an EC2 instance with an IAM Role assigned, boto will use credentials # an EC2 instance with an IAM Role assigned, boto will use credentials

View File

@@ -27,9 +27,7 @@ from django.conf import settings
# Ansible_base app # Ansible_base app
from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment, RoleTeamAssignment from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment, RoleTeamAssignment
from ansible_base.rbac.sync import maybe_reverse_sync_assignment, maybe_reverse_sync_unassignment, maybe_reverse_sync_role_definition
from ansible_base.rbac import permission_registry from ansible_base.rbac import permission_registry
from ansible_base.resource_registry.signals.handlers import no_reverse_sync
from ansible_base.lib.utils.models import get_type_for_model from ansible_base.lib.utils.models import get_type_for_model
# AWX # AWX
@@ -562,27 +560,34 @@ def get_role_definition(role):
f = obj._meta.get_field(role.role_field) f = obj._meta.get_field(role.role_field)
action_name = f.name.rsplit("_", 1)[0] action_name = f.name.rsplit("_", 1)[0]
model_print = type(obj).__name__ model_print = type(obj).__name__
rd_name = f'{model_print} {action_name.title()} Compat'
perm_list = get_role_codenames(role) perm_list = get_role_codenames(role)
defaults = { defaults = {
'content_type': permission_registry.content_type_model.objects.get_by_natural_key(role.content_type.app_label, role.content_type.model), 'content_type': permission_registry.content_type_model.objects.get_by_natural_key(role.content_type.app_label, role.content_type.model),
'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility', 'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
} }
# use Controller-specific role definitions for Team/Organization and member/admin
# instead of platform role definitions
# these should exist in the system already, so just do a lookup by role definition name
if model_print in ['Team', 'Organization'] and action_name in ['member', 'admin']:
rd_name = f'Controller {model_print} {action_name.title()}'
rd = RoleDefinition.objects.filter(name=rd_name).first()
if rd:
return rd
else:
return RoleDefinition.objects.create_from_permissions(permissions=perm_list, name=rd_name, managed=True, **defaults)
else:
rd_name = f'{model_print} {action_name.title()} Compat'
with impersonate(None): with impersonate(None):
try: try:
with no_reverse_sync(): rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
except ValidationError: except ValidationError:
# This is a tricky case - practically speaking, users should not be allowed to create team roles # This is a tricky case - practically speaking, users should not be allowed to create team roles
# or roles that include the team member permission. # or roles that include the team member permission.
# If we need to create this for compatibility purposes then we will create it as a managed non-editable role # If we need to create this for compatibility purposes then we will create it as a managed non-editable role
defaults['managed'] = True defaults['managed'] = True
with no_reverse_sync(): rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
if created and rbac_sync_enabled.enabled:
maybe_reverse_sync_role_definition(rd, action='create')
return rd return rd
@@ -596,6 +601,12 @@ def get_role_from_object_role(object_role):
model_name, role_name, _ = rd.name.split() model_name, role_name, _ = rd.name.split()
role_name = role_name.lower() role_name = role_name.lower()
role_name += '_role' role_name += '_role'
elif rd.name.startswith('Controller') and rd.name.endswith(' Admin'):
# Controller Organization Admin and Controller Team Admin
role_name = 'admin_role'
elif rd.name.startswith('Controller') and rd.name.endswith(' Member'):
# Controller Organization Member and Controller Team Member
role_name = 'member_role'
elif rd.name.endswith(' Admin') and rd.name.count(' ') == 2: elif rd.name.endswith(' Admin') and rd.name.count(' ') == 2:
# cases like "Organization Project Admin" # cases like "Organization Project Admin"
model_name, target_model_name, role_name = rd.name.split() model_name, target_model_name, role_name = rd.name.split()
@@ -622,14 +633,12 @@ def get_role_from_object_role(object_role):
return getattr(object_role.content_object, role_name) return getattr(object_role.content_object, role_name)
def give_or_remove_permission(role, actor, giving=True, rd=None): def give_or_remove_permission(role, actor, giving=True):
obj = role.content_object obj = role.content_object
if obj is None: if obj is None:
return return
if not rd: rd = get_role_definition(role)
rd = get_role_definition(role) rd.give_or_remove_permission(actor, obj, giving=giving)
assignment = rd.give_or_remove_permission(actor, obj, giving=giving)
return assignment
class SyncEnabled(threading.local): class SyncEnabled(threading.local):
@@ -681,15 +690,7 @@ def sync_members_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs)
role = Role.objects.get(pk=user_or_role_id) role = Role.objects.get(pk=user_or_role_id)
else: else:
user = get_user_model().objects.get(pk=user_or_role_id) user = get_user_model().objects.get(pk=user_or_role_id)
rd = get_role_definition(role) give_or_remove_permission(role, user, giving=is_giving)
assignment = give_or_remove_permission(role, user, giving=is_giving, rd=rd)
# sync to resource server
if rbac_sync_enabled.enabled:
if is_giving:
maybe_reverse_sync_assignment(assignment)
else:
maybe_reverse_sync_unassignment(rd, user, role.content_object)
def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs): def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs):
@@ -732,19 +733,12 @@ def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs)
from awx.main.models.organization import Team from awx.main.models.organization import Team
team = Team.objects.get(pk=parent_role.object_id) team = Team.objects.get(pk=parent_role.object_id)
rd = get_role_definition(child_role) give_or_remove_permission(child_role, team, giving=is_giving)
assignment = give_or_remove_permission(child_role, team, giving=is_giving, rd=rd)
# sync to resource server
if rbac_sync_enabled.enabled:
if is_giving:
maybe_reverse_sync_assignment(assignment)
else:
maybe_reverse_sync_unassignment(rd, team, child_role.content_object)
ROLE_DEFINITION_TO_ROLE_FIELD = { ROLE_DEFINITION_TO_ROLE_FIELD = {
'Organization Member': 'member_role', 'Organization Member': 'member_role',
'Controller Organization Member': 'member_role',
'WorkflowJobTemplate Admin': 'admin_role', 'WorkflowJobTemplate Admin': 'admin_role',
'Organization WorkflowJobTemplate Admin': 'workflow_admin_role', 'Organization WorkflowJobTemplate Admin': 'workflow_admin_role',
'WorkflowJobTemplate Execute': 'execute_role', 'WorkflowJobTemplate Execute': 'execute_role',
@@ -769,8 +763,11 @@ ROLE_DEFINITION_TO_ROLE_FIELD = {
'Organization Credential Admin': 'credential_admin_role', 'Organization Credential Admin': 'credential_admin_role',
'Credential Use': 'use_role', 'Credential Use': 'use_role',
'Team Admin': 'admin_role', 'Team Admin': 'admin_role',
'Controller Team Admin': 'admin_role',
'Team Member': 'member_role', 'Team Member': 'member_role',
'Controller Team Member': 'member_role',
'Organization Admin': 'admin_role', 'Organization Admin': 'admin_role',
'Controller Organization Admin': 'admin_role',
'Organization Audit': 'auditor_role', 'Organization Audit': 'auditor_role',
'Organization Execute': 'execute_role', 'Organization Execute': 'execute_role',
'Organization Approval': 'approval_role', 'Organization Approval': 'approval_role',

View File

@@ -1200,13 +1200,6 @@ class UnifiedJob(
fd = StringIO(fd.getvalue().replace('\\r\\n', '\n')) fd = StringIO(fd.getvalue().replace('\\r\\n', '\n'))
return fd return fd
def _fix_double_escapes(self, content):
    """
    Collapse double-escaped sequences into single-escaped form.

    Strips one backslash in front of any of the characters ' " \\ n r t,
    so e.g. a literal backslash-backslash-n becomes backslash-n.
    NOTE(review): the character class matches the literal letters n/r/t, not
    escape sequences -- an already single-escaped '\\n' in the input is
    reduced to a bare 'n'. Confirm inputs are always double-escaped before
    relying on this.
    """
    # Replace \\ followed by one of ' " \ n r t
    return re.sub(r'\\([\'"\\nrt])', r'\1', content)
def _escape_ascii(self, content): def _escape_ascii(self, content):
# Remove ANSI escape sequences used to embed event data. # Remove ANSI escape sequences used to embed event data.
content = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', content) content = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', content)
@@ -1214,14 +1207,12 @@ class UnifiedJob(
content = re.sub(r'\x1b[^m]*m', '', content) content = re.sub(r'\x1b[^m]*m', '', content)
return content return content
def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False, fix_escapes=False): def _result_stdout_raw(self, redact_sensitive=False, escape_ascii=False):
content = self.result_stdout_raw_handle().read() content = self.result_stdout_raw_handle().read()
if redact_sensitive: if redact_sensitive:
content = UriCleaner.remove_sensitive(content) content = UriCleaner.remove_sensitive(content)
if escape_ascii: if escape_ascii:
content = self._escape_ascii(content) content = self._escape_ascii(content)
if fix_escapes:
content = self._fix_double_escapes(content)
return content return content
@property @property
@@ -1230,10 +1221,9 @@ class UnifiedJob(
@property @property
def result_stdout(self): def result_stdout(self):
# Human-facing output should fix escapes return self._result_stdout_raw(escape_ascii=True)
return self._result_stdout_raw(escape_ascii=True, fix_escapes=True)
def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False, fix_escapes=False): def _result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=True, escape_ascii=False):
return_buffer = StringIO() return_buffer = StringIO()
if end_line is not None: if end_line is not None:
end_line = int(end_line) end_line = int(end_line)
@@ -1256,18 +1246,14 @@ class UnifiedJob(
return_buffer = UriCleaner.remove_sensitive(return_buffer) return_buffer = UriCleaner.remove_sensitive(return_buffer)
if escape_ascii: if escape_ascii:
return_buffer = self._escape_ascii(return_buffer) return_buffer = self._escape_ascii(return_buffer)
if fix_escapes:
return_buffer = self._fix_double_escapes(return_buffer)
return return_buffer, start_actual, end_actual, absolute_end return return_buffer, start_actual, end_actual, absolute_end
def result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=False): def result_stdout_raw_limited(self, start_line=0, end_line=None, redact_sensitive=False):
# Raw should NOT fix escapes
return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive) return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive)
def result_stdout_limited(self, start_line=0, end_line=None, redact_sensitive=False): def result_stdout_limited(self, start_line=0, end_line=None, redact_sensitive=False):
# Human-facing should fix escapes return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive, escape_ascii=True)
return self._result_stdout_raw_limited(start_line, end_line, redact_sensitive, escape_ascii=True, fix_escapes=True)
@property @property
def workflow_job_id(self): def workflow_job_id(self):

View File

@@ -53,8 +53,8 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
): ):
super(GrafanaBackend, self).__init__(fail_silently=fail_silently) super(GrafanaBackend, self).__init__(fail_silently=fail_silently)
self.grafana_key = grafana_key self.grafana_key = grafana_key
self.dashboardId = int(dashboardId) if dashboardId != '' else None self.dashboardId = int(dashboardId) if dashboardId is not None and panelId != "" else None
self.panelId = int(panelId) if panelId != '' else None self.panelId = int(panelId) if panelId is not None and panelId != "" else None
self.annotation_tags = annotation_tags if annotation_tags is not None else [] self.annotation_tags = annotation_tags if annotation_tags is not None else []
self.grafana_no_verify_ssl = grafana_no_verify_ssl self.grafana_no_verify_ssl = grafana_no_verify_ssl
self.isRegion = isRegion self.isRegion = isRegion

View File

@@ -5,6 +5,8 @@ import time
import ssl import ssl
import logging import logging
import irc.client
from django.utils.encoding import smart_str from django.utils.encoding import smart_str
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
@@ -14,19 +16,6 @@ from awx.main.notifications.custom_notification_base import CustomNotificationBa
logger = logging.getLogger('awx.main.notifications.irc_backend') logger = logging.getLogger('awx.main.notifications.irc_backend')
def _irc():
    """
    Prime the real jaraco namespace before importing irc.* so that
    setuptools' vendored 'setuptools._vendor.jaraco' doesn't shadow
    external 'jaraco.*' packages (e.g., jaraco.stream).

    Returns:
        tuple: (irc.client module, irc.connection module). Imported lazily so
        merely importing this backend module does not require irc installed.
    """
    # Import order matters here: jaraco.stream must be imported first.
    import jaraco.stream  # ensure the namespace package is established  # noqa: F401
    import irc.client as irc_client
    import irc.connection as irc_connection
    return irc_client, irc_connection
class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase): class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = { init_parameters = {
"server": {"label": "IRC Server Address", "type": "string"}, "server": {"label": "IRC Server Address", "type": "string"},
@@ -51,15 +40,12 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
def open(self): def open(self):
if self.connection is not None: if self.connection is not None:
return False return False
irc_client, irc_connection = _irc()
if self.use_ssl: if self.use_ssl:
connection_factory = irc_connection.Factory(wrapper=ssl.wrap_socket) connection_factory = irc.connection.Factory(wrapper=ssl.wrap_socket)
else: else:
connection_factory = irc_connection.Factory() connection_factory = irc.connection.Factory()
try: try:
self.reactor = irc_client.Reactor() self.reactor = irc.client.Reactor()
self.connection = self.reactor.server().connect( self.connection = self.reactor.server().connect(
self.server, self.server,
self.port, self.port,
@@ -67,7 +53,7 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
password=self.password, password=self.password,
connect_factory=connection_factory, connect_factory=connection_factory,
) )
except irc_client.ServerConnectionError as e: except irc.client.ServerConnectionError as e:
logger.error(smart_str(_("Exception connecting to irc server: {}").format(e))) logger.error(smart_str(_("Exception connecting to irc server: {}").format(e)))
if not self.fail_silently: if not self.fail_silently:
raise raise
@@ -79,9 +65,8 @@ class IrcBackend(AWXBaseEmailBackend, CustomNotificationBase):
self.connection = None self.connection = None
def on_connect(self, connection, event): def on_connect(self, connection, event):
irc_client, _ = _irc()
for c in self.channels: for c in self.channels:
if irc_client.is_channel(c): if irc.client.is_channel(c):
connection.join(c) connection.join(c)
else: else:
for m in self.channels[c]: for m in self.channels[c]:

View File

@@ -12,7 +12,7 @@ from django.db import transaction
# Django flags # Django flags
from flags.state import flag_enabled from flags.state import flag_enabled
from awx.main.dispatch.publish import task from awx.main.dispatch.publish import task as task_awx
from awx.main.dispatch import get_task_queuename from awx.main.dispatch import get_task_queuename
from awx.main.models.indirect_managed_node_audit import IndirectManagedNodeAudit from awx.main.models.indirect_managed_node_audit import IndirectManagedNodeAudit
from awx.main.models.event_query import EventQuery from awx.main.models.event_query import EventQuery
@@ -159,7 +159,7 @@ def cleanup_old_indirect_host_entries() -> None:
IndirectManagedNodeAudit.objects.filter(created__lt=limit).delete() IndirectManagedNodeAudit.objects.filter(created__lt=limit).delete()
@task(queue=get_task_queuename) @task_awx(queue=get_task_queuename)
def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None: def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> None:
try: try:
job = Job.objects.get(id=job_id) job = Job.objects.get(id=job_id)
@@ -201,7 +201,7 @@ def save_indirect_host_entries(job_id: int, wait_for_events: bool = True) -> Non
logger.exception(f'Error processing indirect host data for job_id={job_id}') logger.exception(f'Error processing indirect host data for job_id={job_id}')
@task(queue=get_task_queuename) @task_awx(queue=get_task_queuename)
def cleanup_and_save_indirect_host_entries_fallback() -> None: def cleanup_and_save_indirect_host_entries_fallback() -> None:
if not flag_enabled("FEATURE_INDIRECT_NODE_COUNTING_ENABLED"): if not flag_enabled("FEATURE_INDIRECT_NODE_COUNTING_ENABLED"):
return return

View File

@@ -21,8 +21,6 @@ from django.db import transaction
# Shared code for the AWX platform # Shared code for the AWX platform
from awx_plugins.interfaces._temporary_private_container_api import CONTAINER_ROOT, get_incontainer_path from awx_plugins.interfaces._temporary_private_container_api import CONTAINER_ROOT, get_incontainer_path
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import PermissionDenied
# Runner # Runner
import ansible_runner import ansible_runner
@@ -89,6 +87,8 @@ from awx.main.utils.common import (
from awx.conf.license import get_license from awx.conf.license import get_license
from awx.main.utils.handlers import SpecialInventoryHandler from awx.main.utils.handlers import SpecialInventoryHandler
from awx.main.utils.update_model import update_model from awx.main.utils.update_model import update_model
from rest_framework.exceptions import PermissionDenied
from django.utils.translation import gettext_lazy as _
# Django flags # Django flags
from flags.state import flag_enabled from flags.state import flag_enabled

View File

@@ -1224,30 +1224,6 @@ def test_custom_credential_type_create(get, post, organization, admin):
assert decrypt_field(cred, 'api_token') == 'secret' assert decrypt_field(cred, 'api_token') == 'secret'
@pytest.mark.django_db
def test_galaxy_create_ok(post, organization, admin):
params = {
'credential_type': 1,
'name': 'Galaxy credential',
'inputs': {
'url': 'https://galaxy.ansible.com',
'token': 'some_galaxy_token',
},
}
galaxy = CredentialType.defaults['galaxy_api_token']()
galaxy.save()
params['user'] = admin.id
params['credential_type'] = galaxy.pk
response = post(reverse('api:credential_list'), params, admin)
assert response.status_code == 201
assert Credential.objects.count() == 1
cred = Credential.objects.all()[:1].get()
assert cred.credential_type == galaxy
assert cred.inputs['url'] == 'https://galaxy.ansible.com'
assert decrypt_field(cred, 'token') == 'some_galaxy_token'
# #
# misc xfail conditions # misc xfail conditions
# #

View File

@@ -1,5 +1,3 @@
from unittest import mock
import pytest import pytest
from awx.api.versioning import reverse from awx.api.versioning import reverse
@@ -7,9 +5,6 @@ from awx.main.models.activity_stream import ActivityStream
from awx.main.models.ha import Instance from awx.main.models.ha import Instance
from django.test.utils import override_settings from django.test.utils import override_settings
from django.http import HttpResponse
from rest_framework import status
INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, node_type='execution', memory=36000000000, cpu_capacity=6, mem_capacity=42) INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, node_type='execution', memory=36000000000, cpu_capacity=6, mem_capacity=42)
@@ -92,11 +87,3 @@ def test_custom_hostname_regex(post, admin_user):
"peers": [], "peers": [],
} }
post(url=url, user=admin_user, data=data, expect=value[1]) post(url=url, user=admin_user, data=data, expect=value[1])
def test_instance_install_bundle(get, admin_user, system_auditor):
instance = Instance.objects.create(**INSTANCE_KWARGS)
url = reverse('api:instance_install_bundle', kwargs={'pk': instance.pk})
with mock.patch('awx.api.views.instance_install_bundle.InstanceInstallBundle.get', return_value=HttpResponse({'test': 'data'}, status=status.HTTP_200_OK)):
get(url=url, user=admin_user, expect=200)
get(url=url, user=system_auditor, expect=403)

View File

@@ -521,20 +521,6 @@ class TestInventorySourceCredential:
patch(url=inv_src.get_absolute_url(), data={'credential': aws_cred.pk}, expect=200, user=admin_user) patch(url=inv_src.get_absolute_url(), data={'credential': aws_cred.pk}, expect=200, user=admin_user)
assert list(inv_src.credentials.values_list('id', flat=True)) == [aws_cred.pk] assert list(inv_src.credentials.values_list('id', flat=True)) == [aws_cred.pk]
@pytest.mark.skip(reason="Delay until AAP-53978 completed")
def test_vmware_cred_create_esxi_source(self, inventory, admin_user, organization, post, get):
"""Test that a vmware esxi source can be added with a vmware credential"""
from awx.main.models.credential import Credential, CredentialType
vmware = CredentialType.defaults['vmware']()
vmware.save()
vmware_cred = Credential.objects.create(credential_type=vmware, name="bar", organization=organization)
inv_src = InventorySource.objects.create(inventory=inventory, name='foobar', source='vmware_esxi')
r = post(url=reverse('api:inventory_source_credentials_list', kwargs={'pk': inv_src.pk}), data={'id': vmware_cred.pk}, expect=204, user=admin_user)
g = get(inv_src.get_absolute_url(), admin_user)
assert r.status_code == 204
assert g.data['credential'] == vmware_cred.pk
@pytest.mark.django_db @pytest.mark.django_db
class TestControlledBySCM: class TestControlledBySCM:

View File

@@ -1,191 +0,0 @@
import pytest
from unittest.mock import patch, MagicMock
from awx.api.versioning import reverse
# Generated by Cursor (claude-4-sonnet)
@pytest.mark.django_db
class TestLicenseCacheClearing:
"""Test cache clearing for LICENSE setting changes"""
def test_license_from_manifest_clears_cache(self, admin_user, post):
"""Test that posting a manifest to /api/v2/config/ clears the LICENSE cache"""
# Mock the licenser and clear_setting_cache
with patch('awx.api.views.root.get_licenser') as mock_get_licenser, patch('awx.api.views.root.validate_entitlement_manifest') as mock_validate, patch(
'awx.api.views.root.clear_setting_cache'
) as mock_clear_cache, patch('django.db.connection.on_commit') as mock_on_commit:
# Set up mock license data
mock_license_data = {'valid_key': True, 'license_type': 'enterprise', 'instance_count': 100, 'subscription_name': 'Test Enterprise License'}
# Mock the validation and license processing
mock_validate.return_value = [{'some': 'manifest_data'}]
mock_licenser = MagicMock()
mock_licenser.license_from_manifest.return_value = mock_license_data
mock_get_licenser.return_value = mock_licenser
# Prepare the request data (base64 encoded manifest)
manifest_data = {'manifest': 'ZmFrZS1tYW5pZmVzdC1kYXRh'} # base64 for "fake-manifest-data"
# Make the POST request
url = reverse('api:api_v2_config_view')
response = post(url, manifest_data, admin_user, expect=200)
# Verify the response
assert response.data == mock_license_data
# Verify license_from_manifest was called
mock_licenser.license_from_manifest.assert_called_once()
# Verify on_commit was called (may be multiple times due to other settings)
assert mock_on_commit.call_count >= 1
# Execute all on_commit callbacks to trigger cache clearing
for call_args in mock_on_commit.call_args_list:
callback = call_args[0][0]
callback()
# Verify that clear_setting_cache.delay was called with ['LICENSE']
mock_clear_cache.delay.assert_any_call(['LICENSE'])
def test_config_delete_clears_cache(self, admin_user, delete):
"""Test that DELETE /api/v2/config/ clears the LICENSE cache"""
with patch('awx.api.views.root.clear_setting_cache') as mock_clear_cache, patch('django.db.connection.on_commit') as mock_on_commit:
# Make the DELETE request
url = reverse('api:api_v2_config_view')
delete(url, admin_user, expect=204)
# Verify on_commit was called at least once
assert mock_on_commit.call_count >= 1
# Execute all on_commit callbacks to trigger cache clearing
for call_args in mock_on_commit.call_args_list:
callback = call_args[0][0]
callback()
mock_clear_cache.delay.assert_called_once_with(['LICENSE'])
def test_attach_view_clears_cache(self, admin_user, post):
"""Test that posting to /api/v2/config/attach/ clears the LICENSE cache"""
with patch('awx.api.views.root.get_licenser') as mock_get_licenser, patch('awx.api.views.root.clear_setting_cache') as mock_clear_cache, patch(
'django.db.connection.on_commit'
) as mock_on_commit, patch('awx.api.views.root.settings') as mock_settings:
# Set up subscription credentials in settings
mock_settings.SUBSCRIPTIONS_CLIENT_ID = 'test-client-id'
mock_settings.SUBSCRIPTIONS_CLIENT_SECRET = 'test-client-secret'
# Set up mock licenser with validated subscriptions
mock_licenser = MagicMock()
subscription_data = {'subscription_id': 'test-subscription-123', 'valid_key': False, 'license_type': 'enterprise', 'instance_count': 50}
mock_licenser.validate_rh.return_value = [subscription_data]
mock_get_licenser.return_value = mock_licenser
# Prepare request data
request_data = {'subscription_id': 'test-subscription-123'}
# Make the POST request
url = reverse('api:api_v2_attach_view')
response = post(url, request_data, admin_user, expect=200)
# Verify the response includes valid_key=True
assert response.data['valid_key'] is True
assert response.data['subscription_id'] == 'test-subscription-123'
# Verify settings.LICENSE was set
expected_license = subscription_data.copy()
expected_license['valid_key'] = True
assert mock_settings.LICENSE == expected_license
# Verify cache clearing was scheduled
mock_on_commit.assert_called_once()
call_args = mock_on_commit.call_args[0][0] # Get the lambda function
# Execute the lambda to verify it calls clear_setting_cache
call_args()
mock_clear_cache.delay.assert_called_once_with(['LICENSE'])
def test_attach_view_subscription_not_found_no_cache_clear(self, admin_user, post):
"""Test that attach view doesn't clear cache when subscription is not found"""
with patch('awx.api.views.root.get_licenser') as mock_get_licenser, patch('awx.api.views.root.clear_setting_cache') as mock_clear_cache, patch(
'django.db.connection.on_commit'
) as mock_on_commit:
# Set up mock licenser with different subscription
mock_licenser = MagicMock()
subscription_data = {'subscription_id': 'different-subscription-456', 'valid_key': False, 'license_type': 'enterprise'} # Different ID
mock_licenser.validate_rh.return_value = [subscription_data]
mock_get_licenser.return_value = mock_licenser
# Request data with non-matching subscription ID
request_data = {
'subscription_id': 'test-subscription-123', # This won't match
}
# Make the POST request
url = reverse('api:api_v2_attach_view')
response = post(url, request_data, admin_user, expect=400)
# Verify error response
assert 'error' in response.data
# Verify cache clearing was NOT called (no matching subscription)
mock_on_commit.assert_not_called()
mock_clear_cache.delay.assert_not_called()
def test_manifest_validation_error_no_cache_clear(self, admin_user, post):
"""Test that config view doesn't clear cache when manifest validation fails"""
with patch('awx.api.views.root.validate_entitlement_manifest') as mock_validate, patch(
'awx.api.views.root.clear_setting_cache'
) as mock_clear_cache, patch('django.db.connection.on_commit') as mock_on_commit:
# Mock validation to raise ValueError
mock_validate.side_effect = ValueError("Invalid manifest")
# Prepare request data
manifest_data = {'manifest': 'aW52YWxpZC1tYW5pZmVzdA=='} # base64 for "invalid-manifest"
# Make the POST request
url = reverse('api:api_v2_config_view')
response = post(url, manifest_data, admin_user, expect=400)
# Verify error response
assert response.data['error'] == 'Invalid manifest'
# Verify cache clearing was NOT called (validation failed)
mock_on_commit.assert_not_called()
mock_clear_cache.delay.assert_not_called()
def test_license_processing_error_no_cache_clear(self, admin_user, post):
"""Test that config view doesn't clear cache when license processing fails"""
with patch('awx.api.views.root.get_licenser') as mock_get_licenser, patch('awx.api.views.root.validate_entitlement_manifest') as mock_validate, patch(
'awx.api.views.root.clear_setting_cache'
) as mock_clear_cache, patch('django.db.connection.on_commit') as mock_on_commit:
# Mock validation to succeed but license processing to fail
mock_validate.return_value = [{'some': 'manifest_data'}]
mock_licenser = MagicMock()
mock_licenser.license_from_manifest.side_effect = Exception("License processing failed")
mock_get_licenser.return_value = mock_licenser
# Prepare request data
manifest_data = {'manifest': 'ZmFrZS1tYW5pZmVzdA=='} # base64 for "fake-manifest"
# Make the POST request
url = reverse('api:api_v2_config_view')
response = post(url, manifest_data, admin_user, expect=400)
# Verify error response
assert response.data['error'] == 'Invalid License'
# Verify cache clearing was NOT called (license processing failed)
mock_on_commit.assert_not_called()
mock_clear_cache.delay.assert_not_called()

View File

@@ -5,6 +5,10 @@ import pytest
from django.contrib.sessions.middleware import SessionMiddleware from django.contrib.sessions.middleware import SessionMiddleware
from django.test.utils import override_settings from django.test.utils import override_settings
from django.contrib.auth.models import AnonymousUser
from ansible_base.lib.utils.response import get_relative_url
from ansible_base.lib.testing.fixtures import settings_override_mutable # NOQA: F401 imported to be a pytest fixture
from awx.main.models import User from awx.main.models import User
from awx.api.versioning import reverse from awx.api.versioning import reverse
@@ -17,6 +21,33 @@ from awx.api.versioning import reverse
EXAMPLE_USER_DATA = {"username": "affable", "first_name": "a", "last_name": "a", "email": "a@a.com", "is_superuser": False, "password": "r$TyKiOCb#ED"} EXAMPLE_USER_DATA = {"username": "affable", "first_name": "a", "last_name": "a", "email": "a@a.com", "is_superuser": False, "password": "r$TyKiOCb#ED"}
@pytest.mark.django_db
def test_validate_local_user(post, admin_user, settings, settings_override_mutable): # NOQA: F811 this is how you use a pytest fixture
"Copy of the test by same name in django-ansible-base for integration and compatibility testing"
url = get_relative_url('validate-local-account')
admin_user.set_password('password')
admin_user.save()
data = {
"username": admin_user.username,
"password": "password",
}
with override_settings(RESOURCE_SERVER={"URL": "https://foo.invalid", "SECRET_KEY": "foobar"}):
response = post(url=url, data=data, user=AnonymousUser(), expect=200)
assert 'ansible_id' in response.data
assert response.data['auth_code'] is not None, response.data
# No resource server, return coherent response but can not provide auth code
response = post(url=url, data=data, user=AnonymousUser(), expect=200)
assert 'ansible_id' in response.data
assert response.data['auth_code'] is None
# wrong password
data['password'] = 'foobar'
response = post(url=url, data=data, user=AnonymousUser(), expect=401)
# response.data may be none here, this is just testing that we get no server error
@pytest.mark.django_db @pytest.mark.django_db
def test_user_create(post, admin): def test_user_create(post, admin):
response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock())) response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock()))
@@ -258,19 +289,3 @@ def test_user_verify_attribute_created(admin, get):
for op, count in (('gt', 1), ('lt', 0)): for op, count in (('gt', 1), ('lt', 0)):
resp = get(reverse('api:user_list') + f'?created__{op}={past}', admin) resp = get(reverse('api:user_list') + f'?created__{op}={past}', admin)
assert resp.data['count'] == count assert resp.data['count'] == count
@pytest.mark.django_db
def test_org_not_shown_in_admin_user_sublists(admin_user, get, organization):
for view_name in ('user_admin_of_organizations_list', 'user_organizations_list'):
url = reverse(f'api:{view_name}', kwargs={'pk': admin_user.pk})
r = get(url, user=admin_user, expect=200)
assert organization.pk not in [org['id'] for org in r.data['results']]
@pytest.mark.django_db
def test_admin_user_not_shown_in_org_users(admin_user, get, organization):
for view_name in ('organization_users_list', 'organization_admins_list'):
url = reverse(f'api:{view_name}', kwargs={'pk': organization.pk})
r = get(url, user=admin_user, expect=200)
assert admin_user.pk not in [u['id'] for u in r.data['results']]

View File

@@ -1,5 +1,3 @@
import logging
# Python # Python
import pytest import pytest
from unittest import mock from unittest import mock
@@ -10,7 +8,7 @@ import importlib
# Django # Django
from django.urls import resolve from django.urls import resolve
from django.http import Http404 from django.http import Http404
from django.apps import apps as global_apps from django.apps import apps
from django.core.handlers.exception import response_for_exception from django.core.handlers.exception import response_for_exception
from django.contrib.auth.models import User from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder from django.core.serializers.json import DjangoJSONEncoder
@@ -49,8 +47,6 @@ from awx.main.models.ad_hoc_commands import AdHocCommand
from awx.main.models.execution_environments import ExecutionEnvironment from awx.main.models.execution_environments import ExecutionEnvironment
from awx.main.utils import is_testing from awx.main.utils import is_testing
logger = logging.getLogger(__name__)
__SWAGGER_REQUESTS__ = {} __SWAGGER_REQUESTS__ = {}
@@ -58,17 +54,8 @@ __SWAGGER_REQUESTS__ = {}
dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial') dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial')
def create_service_id(app_config, apps=global_apps, **kwargs):
try:
apps.get_model("dab_resource_registry", "ServiceID")
except LookupError:
logger.info('Looks like reverse migration, not creating resource registry ServiceID')
return
dab_rr_initial.create_service_id(apps, None)
if is_testing(): if is_testing():
post_migrate.connect(create_service_id) post_migrate.connect(lambda **kwargs: dab_rr_initial.create_service_id(apps, None))
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
@@ -139,7 +126,7 @@ def execution_environment():
@pytest.fixture @pytest.fixture
def setup_managed_roles(): def setup_managed_roles():
"Run the migration script to pre-create managed role definitions" "Run the migration script to pre-create managed role definitions"
setup_managed_role_definitions(global_apps, None) setup_managed_role_definitions(apps, None)
@pytest.fixture @pytest.fixture

View File

@@ -1,147 +0,0 @@
import pytest
from django.contrib.contenttypes.models import ContentType
from django.test import override_settings
from django.apps import apps
from ansible_base.rbac.models import RoleDefinition, RoleUserAssignment, RoleTeamAssignment
from ansible_base.rbac.migrations._utils import give_permissions
from awx.main.models import User, Team
from awx.main.migrations._dab_rbac import consolidate_indirect_user_roles
@pytest.mark.django_db
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True)
def test_consolidate_indirect_user_roles_with_nested_teams(setup_managed_roles, organization):
"""
Test the consolidate_indirect_user_roles function with a nested team hierarchy.
Setup:
- Users: A, B, C, D
- Teams: E, F, G
- Direct assignments: A→(E,F,G), B→E, C→F, D→G
- Team hierarchy: F→E (F is member of E), G→F (G is member of F)
Expected result after consolidation:
- Team E should have users: A, B, C, D (A directly, B directly, C through F, D through G→F)
- Team F should have users: A, C, D (A directly, C directly, D through G)
- Team G should have users: A, D (A directly, D directly)
"""
user_a = User.objects.create_user(username='user_a')
user_b = User.objects.create_user(username='user_b')
user_c = User.objects.create_user(username='user_c')
user_d = User.objects.create_user(username='user_d')
team_e = Team.objects.create(name='Team E', organization=organization)
team_f = Team.objects.create(name='Team F', organization=organization)
team_g = Team.objects.create(name='Team G', organization=organization)
# Get role definition and content type for give_permissions
team_member_role = RoleDefinition.objects.get(name='Team Member')
team_content_type = ContentType.objects.get_for_model(Team)
# Assign users to teams
give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_e.id, content_type_id=team_content_type.id)
give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_f.id, content_type_id=team_content_type.id)
give_permissions(apps=apps, rd=team_member_role, users=[user_a], object_id=team_g.id, content_type_id=team_content_type.id)
give_permissions(apps=apps, rd=team_member_role, users=[user_b], object_id=team_e.id, content_type_id=team_content_type.id)
give_permissions(apps=apps, rd=team_member_role, users=[user_c], object_id=team_f.id, content_type_id=team_content_type.id)
give_permissions(apps=apps, rd=team_member_role, users=[user_d], object_id=team_g.id, content_type_id=team_content_type.id)
# Mirror user assignments in the old RBAC system because signals don't run in tests
team_e.member_role.members.add(user_a.id, user_b.id)
team_f.member_role.members.add(user_a.id, user_c.id)
team_g.member_role.members.add(user_a.id, user_d.id)
# Setup team-to-team relationships
give_permissions(apps=apps, rd=team_member_role, teams=[team_f], object_id=team_e.id, content_type_id=team_content_type.id)
give_permissions(apps=apps, rd=team_member_role, teams=[team_g], object_id=team_f.id, content_type_id=team_content_type.id)
# Verify initial direct assignments
team_e_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_e.id).values_list('user_id', flat=True))
assert team_e_users_before == {user_a.id, user_b.id}
team_f_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_f.id).values_list('user_id', flat=True))
assert team_f_users_before == {user_a.id, user_c.id}
team_g_users_before = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_g.id).values_list('user_id', flat=True))
assert team_g_users_before == {user_a.id, user_d.id}
# Verify team-to-team relationships exist
assert RoleTeamAssignment.objects.filter(role_definition=team_member_role, team=team_f, object_id=team_e.id).exists()
assert RoleTeamAssignment.objects.filter(role_definition=team_member_role, team=team_g, object_id=team_f.id).exists()
# Run the consolidation function
consolidate_indirect_user_roles(apps, None)
# Verify consolidation
team_e_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_e.id).values_list('user_id', flat=True))
assert team_e_users_after == {user_a.id, user_b.id, user_c.id, user_d.id}, f"Team E should have users A, B, C, D but has {team_e_users_after}"
team_f_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_f.id).values_list('user_id', flat=True))
assert team_f_users_after == {user_a.id, user_c.id, user_d.id}, f"Team F should have users A, C, D but has {team_f_users_after}"
team_g_users_after = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_g.id).values_list('user_id', flat=True))
assert team_g_users_after == {user_a.id, user_d.id}, f"Team G should have users A, D but has {team_g_users_after}"
# Verify team member changes are mirrored to the old RBAC system
assert team_e_users_after == set(team_e.member_role.members.all().values_list('id', flat=True))
assert team_f_users_after == set(team_f.member_role.members.all().values_list('id', flat=True))
assert team_g_users_after == set(team_g.member_role.members.all().values_list('id', flat=True))
# Verify team-to-team relationships are removed after consolidation
assert not RoleTeamAssignment.objects.filter(
role_definition=team_member_role, team=team_f, object_id=team_e.id
).exists(), "Team-to-team relationship F→E should be removed"
assert not RoleTeamAssignment.objects.filter(
role_definition=team_member_role, team=team_g, object_id=team_f.id
).exists(), "Team-to-team relationship G→F should be removed"
@pytest.mark.django_db
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True)
def test_consolidate_indirect_user_roles_no_team_relationships(setup_managed_roles, organization):
"""
Test that the function handles the case where there are no team-to-team relationships.
It should return early without making any changes.
"""
# Create a user and team with direct assignment
user = User.objects.create_user(username='test_user')
team = Team.objects.create(name='Test Team', organization=organization)
team_member_role = RoleDefinition.objects.get(name='Team Member')
team_content_type = ContentType.objects.get_for_model(Team)
give_permissions(apps=apps, rd=team_member_role, users=[user], object_id=team.id, content_type_id=team_content_type.id)
# Compare count of assignments before and after consolidation
assignments_before = RoleUserAssignment.objects.filter(role_definition=team_member_role).count()
consolidate_indirect_user_roles(apps, None)
assignments_after = RoleUserAssignment.objects.filter(role_definition=team_member_role).count()
assert assignments_before == assignments_after, "Number of assignments should not change when there are no team-to-team relationships"
@pytest.mark.django_db
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_PARENTS=True)
def test_consolidate_indirect_user_roles_circular_reference(setup_managed_roles, organization):
"""
Test that the function handles circular team references without infinite recursion.
"""
team_a = Team.objects.create(name='Team A', organization=organization)
team_b = Team.objects.create(name='Team B', organization=organization)
# Create a user assigned to team A
user = User.objects.create_user(username='test_user')
team_member_role = RoleDefinition.objects.get(name='Team Member')
team_content_type = ContentType.objects.get_for_model(Team)
give_permissions(apps=apps, rd=team_member_role, users=[user], object_id=team_a.id, content_type_id=team_content_type.id)
# Create circular team relationships: A → B → A
give_permissions(apps=apps, rd=team_member_role, teams=[team_b], object_id=team_a.id, content_type_id=team_content_type.id)
give_permissions(apps=apps, rd=team_member_role, teams=[team_a], object_id=team_b.id, content_type_id=team_content_type.id)
# Run the consolidation function - should not raise an exception
consolidate_indirect_user_roles(apps, None)
# Both teams should have the user assigned
team_a_users = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_a.id).values_list('user_id', flat=True))
team_b_users = set(RoleUserAssignment.objects.filter(role_definition=team_member_role, object_id=team_b.id).values_list('user_id', flat=True))
assert user.id in team_a_users, "User should be assigned to team A"
assert user.id in team_b_users, "User should be assigned to team B"

View File

@@ -151,6 +151,14 @@ def test_assign_credential_to_user_of_another_org(setup_managed_roles, credentia
post(url=url, data={"user": org_admin.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201) post(url=url, data={"user": org_admin.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201)
@pytest.mark.django_db
def test_team_member_role_not_assignable(team, rando, post, admin_user, setup_managed_roles):
member_rd = RoleDefinition.objects.get(name='Organization Member')
url = django_reverse('roleuserassignment-list')
r = post(url, data={'object_id': team.id, 'role_definition': member_rd.id, 'user': rando.id}, user=admin_user, expect=400)
assert 'Not managed locally' in str(r.data)
@pytest.mark.django_db @pytest.mark.django_db
def test_adding_user_to_org_member_role(setup_managed_roles, organization, admin, bob, post, get): def test_adding_user_to_org_member_role(setup_managed_roles, organization, admin, bob, post, get):
''' '''
@@ -170,17 +178,10 @@ def test_adding_user_to_org_member_role(setup_managed_roles, organization, admin
@pytest.mark.django_db @pytest.mark.django_db
@pytest.mark.parametrize('actor', ['user', 'team']) @pytest.mark.parametrize('actor', ['user', 'team'])
@pytest.mark.parametrize('role_name', ['Organization Admin', 'Organization Member', 'Team Admin', 'Team Member']) @pytest.mark.parametrize('role_name', ['Organization Admin', 'Organization Member', 'Team Admin', 'Team Member'])
def test_adding_actor_to_platform_roles(setup_managed_roles, role_name, actor, organization, team, admin, bob, post): def test_prevent_adding_actor_to_platform_roles(setup_managed_roles, role_name, actor, organization, team, admin, bob, post):
''' '''
Allow user to be added to platform-level roles Prevent user or team from being added to platform-level roles
Exceptions:
- Team cannot be added to Organization Member or Admin role
- Team cannot be added to Team Admin or Team Member role
''' '''
if actor == 'team':
expect = 400
else:
expect = 201
rd = RoleDefinition.objects.get(name=role_name) rd = RoleDefinition.objects.get(name=role_name)
endpoint = 'roleuserassignment-list' if actor == 'user' else 'roleteamassignment-list' endpoint = 'roleuserassignment-list' if actor == 'user' else 'roleteamassignment-list'
url = django_reverse(endpoint) url = django_reverse(endpoint)
@@ -188,9 +189,37 @@ def test_adding_actor_to_platform_roles(setup_managed_roles, role_name, actor, o
data = {'object_id': object_id, 'role_definition': rd.id} data = {'object_id': object_id, 'role_definition': rd.id}
actor_id = bob.id if actor == 'user' else team.id actor_id = bob.id if actor == 'user' else team.id
data[actor] = actor_id data[actor] = actor_id
r = post(url, data=data, user=admin, expect=expect) r = post(url, data=data, user=admin, expect=400)
if expect == 400: assert 'Not managed locally' in str(r.data)
if 'Organization' in role_name:
assert 'Assigning organization member permission to teams is not allowed' in str(r.data)
if 'Team' in role_name: @pytest.mark.django_db
assert 'Assigning team permissions to other teams is not allowed' in str(r.data) @pytest.mark.parametrize('role_name', ['Controller Team Admin', 'Controller Team Member'])
def test_adding_user_to_controller_team_roles(setup_managed_roles, role_name, team, admin, bob, post, get):
'''
Allow user to be added to Controller Team Admin or Controller Team Member
'''
url_detail = reverse('api:team_detail', kwargs={'pk': team.id})
get(url_detail, user=bob, expect=403)
rd = RoleDefinition.objects.get(name=role_name)
url = django_reverse('roleuserassignment-list')
post(url, data={'object_id': team.id, 'role_definition': rd.id, 'user': bob.id}, user=admin, expect=201)
get(url_detail, user=bob, expect=200)
@pytest.mark.django_db
@pytest.mark.parametrize('role_name', ['Controller Organization Admin', 'Controller Organization Member'])
def test_adding_user_to_controller_organization_roles(setup_managed_roles, role_name, organization, admin, bob, post, get):
'''
Allow user to be added to Controller Organization Admin or Controller Organization Member
'''
url_detail = reverse('api:organization_detail', kwargs={'pk': organization.id})
get(url_detail, user=bob, expect=403)
rd = RoleDefinition.objects.get(name=role_name)
url = django_reverse('roleuserassignment-list')
post(url, data={'object_id': organization.id, 'role_definition': rd.id, 'user': bob.id}, user=admin, expect=201)
get(url, user=bob, expect=200)

View File

@@ -15,14 +15,6 @@ def test_roles_to_not_create(setup_managed_roles):
raise Exception(f'Found RoleDefinitions that should not exist: {bad_names}') raise Exception(f'Found RoleDefinitions that should not exist: {bad_names}')
@pytest.mark.django_db
def test_org_admin_role(setup_managed_roles):
rd = RoleDefinition.objects.get(name='Organization Admin')
codenames = list(rd.permissions.values_list('codename', flat=True))
assert 'view_inventory' in codenames
assert 'change_inventory' in codenames
@pytest.mark.django_db @pytest.mark.django_db
def test_project_update_role(setup_managed_roles): def test_project_update_role(setup_managed_roles):
"""Role to allow updating a project on the object-level should exist""" """Role to allow updating a project on the object-level should exist"""
@@ -39,18 +31,32 @@ def test_org_child_add_permission(setup_managed_roles):
assert not DABPermission.objects.filter(codename='add_jobtemplate').exists() assert not DABPermission.objects.filter(codename='add_jobtemplate').exists()
@pytest.mark.django_db
def test_controller_specific_roles_have_correct_permissions(setup_managed_roles):
'''
Controller specific roles should have the same permissions as the platform roles
e.g. Controller Team Admin should have same permission set as Team Admin
'''
for rd_name in ['Controller Team Admin', 'Controller Team Member', 'Controller Organization Member', 'Controller Organization Admin']:
rd = RoleDefinition.objects.get(name=rd_name)
rd_platform = RoleDefinition.objects.get(name=rd_name.split('Controller ')[1])
assert set(rd.permissions.all()) == set(rd_platform.permissions.all())
@pytest.mark.django_db @pytest.mark.django_db
@pytest.mark.parametrize('resource_name', ['Team', 'Organization']) @pytest.mark.parametrize('resource_name', ['Team', 'Organization'])
@pytest.mark.parametrize('action', ['Member', 'Admin']) @pytest.mark.parametrize('action', ['Member', 'Admin'])
def test_legacy_RBAC_uses_platform_roles(setup_managed_roles, resource_name, action, team, bob, organization): def test_legacy_RBAC_uses_controller_specific_roles(setup_managed_roles, resource_name, action, team, bob, organization):
''' '''
Assignment to legacy RBAC roles should use platform role definitions Assignment to legacy RBAC roles should use controller specific role definitions
e.g. Team Admin, Team Member, Organization Member, Organization Admin e.g. Controller Team Admin, Controller Team Member, Controller Organization Member, Controller Organization Admin
''' '''
resource = team if resource_name == 'Team' else organization resource = team if resource_name == 'Team' else organization
if action == 'Member': if action == 'Member':
resource.member_role.members.add(bob) resource.member_role.members.add(bob)
else: else:
resource.admin_role.members.add(bob) resource.admin_role.members.add(bob)
rd = RoleDefinition.objects.get(name=f'{resource_name} {action}') rd = RoleDefinition.objects.get(name=f'Controller {resource_name} {action}')
rd_platform = RoleDefinition.objects.get(name=f'{resource_name} {action}')
assert RoleUserAssignment.objects.filter(role_definition=rd, user=bob, object_id=resource.id).exists() assert RoleUserAssignment.objects.filter(role_definition=rd, user=bob, object_id=resource.id).exists()
assert not RoleUserAssignment.objects.filter(role_definition=rd_platform, user=bob, object_id=resource.id).exists()

View File

@@ -173,6 +173,20 @@ def test_creator_permission(rando, admin_user, inventory, setup_managed_roles):
assert rando in inventory.admin_role.members.all() assert rando in inventory.admin_role.members.all()
@pytest.mark.django_db
def test_team_team_read_role(rando, team, admin_user, post, setup_managed_roles):
orgs = [Organization.objects.create(name=f'foo-{i}') for i in range(2)]
teams = [Team.objects.create(name=f'foo-{i}', organization=orgs[i]) for i in range(2)]
teams[1].member_role.members.add(rando)
# give second team read permission to first team through the API for regression testing
url = reverse('api:role_teams_list', kwargs={'pk': teams[0].read_role.pk, 'version': 'v2'})
post(url, {'id': teams[1].id}, user=admin_user)
# user should be able to view the first team
assert rando in teams[0].read_role
@pytest.mark.django_db @pytest.mark.django_db
def test_implicit_parents_no_assignments(organization): def test_implicit_parents_no_assignments(organization):
"""Through the normal course of creating models, we should not be changing DAB RBAC permissions""" """Through the normal course of creating models, we should not be changing DAB RBAC permissions"""
@@ -186,25 +200,25 @@ def test_user_auditor_rel(organization, rando, setup_managed_roles):
assert rando not in organization.auditor_role assert rando not in organization.auditor_role
audit_rd = RoleDefinition.objects.get(name='Organization Audit') audit_rd = RoleDefinition.objects.get(name='Organization Audit')
audit_rd.give_permission(rando, organization) audit_rd.give_permission(rando, organization)
assert list(Organization.access_qs(rando, 'audit')) == [organization] assert list(rando.auditor_of_organizations) == [organization]
@pytest.mark.django_db @pytest.mark.django_db
@pytest.mark.parametrize('resource_name', ['Organization', 'Team']) @pytest.mark.parametrize('resource_name', ['Organization', 'Team'])
@pytest.mark.parametrize('role_name', ['Member', 'Admin']) @pytest.mark.parametrize('role_name', ['Member', 'Admin'])
def test_mapping_from_role_definitions_to_roles(organization, team, rando, role_name, resource_name, setup_managed_roles): def test_mapping_from_controller_role_definitions_to_roles(organization, team, rando, role_name, resource_name, setup_managed_roles):
""" """
ensure mappings for platform roles are correct ensure mappings for controller roles are correct
e.g. e.g.
Organization Member > organization.member_role Controller Organization Member > organization.member_role
Organization Admin > organization.admin_role Controller Organization Admin > organization.admin_role
Team Member > team.member_role Controller Team Member > team.member_role
Team Admin > team.admin_role Controller Team Admin > team.admin_role
""" """
resource = organization if resource_name == 'Organization' else team resource = organization if resource_name == 'Organization' else team
old_role_name = f"{role_name.lower()}_role" old_role_name = f"{role_name.lower()}_role"
getattr(resource, old_role_name).members.add(rando) getattr(resource, old_role_name).members.add(rando)
assignment = RoleUserAssignment.objects.get(user=rando) assignment = RoleUserAssignment.objects.get(user=rando)
assert assignment.role_definition.name == f'{resource_name} {role_name}' assert assignment.role_definition.name == f'Controller {resource_name} {role_name}'
old_role = get_role_from_object_role(assignment.object_role) old_role = get_role_from_object_role(assignment.object_role)
assert old_role.id == getattr(resource, old_role_name).id assert old_role.id == getattr(resource, old_role_name).id

View File

@@ -35,21 +35,21 @@ class TestNewToOld:
def test_new_to_old_rbac_team_member_addition(self, admin, post, team, bob, setup_managed_roles): def test_new_to_old_rbac_team_member_addition(self, admin, post, team, bob, setup_managed_roles):
''' '''
Assign user to Team Member role definition, should be added to team.member_role.members Assign user to Controller Team Member role definition, should be added to team.member_role.members
''' '''
rd = RoleDefinition.objects.get(name='Team Member') rd = RoleDefinition.objects.get(name='Controller Team Member')
url = get_relative_url('roleuserassignment-list') url = get_relative_url('roleuserassignment-list')
post(url, user=admin, data={'role_definition': rd.id, 'user': bob.id, 'object_id': team.id}, expect=201) post(url, user=admin, data={'role_definition': rd.id, 'user': bob.id, 'object_id': team.id}, expect=201)
assert bob in team.member_role.members.all() assert bob in team.member_role.members.all()
def test_new_to_old_rbac_team_member_removal(self, admin, delete, team, bob, setup_managed_roles): def test_new_to_old_rbac_team_member_removal(self, admin, delete, team, bob):
''' '''
Remove user from Team Member role definition, should be deleted from team.member_role.members Remove user from Controller Team Member role definition, should be deleted from team.member_role.members
''' '''
team.member_role.members.add(bob) team.member_role.members.add(bob)
rd = RoleDefinition.objects.get(name='Team Member') rd = RoleDefinition.objects.get(name='Controller Team Member')
user_assignment = RoleUserAssignment.objects.get(user=bob, role_definition=rd, object_id=team.id) user_assignment = RoleUserAssignment.objects.get(user=bob, role_definition=rd, object_id=team.id)
url = get_relative_url('roleuserassignment-detail', kwargs={'pk': user_assignment.id}) url = get_relative_url('roleuserassignment-detail', kwargs={'pk': user_assignment.id})

View File

@@ -50,11 +50,13 @@ def test_org_factory_roles(organization_factory):
teams=['team1', 'team2'], teams=['team1', 'team2'],
users=['team1:foo', 'bar'], users=['team1:foo', 'bar'],
projects=['baz', 'bang'], projects=['baz', 'bang'],
roles=['team2.member_role:foo', 'team1.admin_role:bar', 'baz.admin_role:foo'], roles=['team2.member_role:foo', 'team1.admin_role:bar', 'team1.member_role:team2.admin_role', 'baz.admin_role:foo'],
) )
assert objects.users.bar in objects.teams.team1.admin_role
assert objects.users.bar in objects.teams.team2.admin_role
assert objects.users.foo in objects.projects.baz.admin_role assert objects.users.foo in objects.projects.baz.admin_role
assert objects.users.foo in objects.teams.team1.member_role assert objects.users.foo in objects.teams.team1.member_role
assert objects.teams.team2.admin_role in objects.teams.team1.member_role.children.all()
@pytest.mark.django_db @pytest.mark.django_db

View File

@@ -49,6 +49,7 @@ def credential_kind(source):
"""Given the inventory source kind, return expected credential kind""" """Given the inventory source kind, return expected credential kind"""
if source == 'openshift_virtualization': if source == 'openshift_virtualization':
return 'kubernetes_bearer_token' return 'kubernetes_bearer_token'
return source.replace('ec2', 'aws') return source.replace('ec2', 'aws')
@@ -222,10 +223,6 @@ def test_inventory_update_injected_content(product_name, this_kind, inventory, f
private_data_dir = envvars.pop('AWX_PRIVATE_DATA_DIR') private_data_dir = envvars.pop('AWX_PRIVATE_DATA_DIR')
assert envvars.pop('ANSIBLE_INVENTORY_ENABLED') == 'auto' assert envvars.pop('ANSIBLE_INVENTORY_ENABLED') == 'auto'
set_files = bool(os.getenv("MAKE_INVENTORY_REFERENCE_FILES", 'false').lower()[0] not in ['f', '0']) set_files = bool(os.getenv("MAKE_INVENTORY_REFERENCE_FILES", 'false').lower()[0] not in ['f', '0'])
# Ensure the directory exists before trying to list/read it
os.makedirs(private_data_dir, exist_ok=True)
env, content = read_content(private_data_dir, envvars, inventory_update) env, content = read_content(private_data_dir, envvars, inventory_update)
# Assert inventory plugin inventory file is in private_data_dir # Assert inventory plugin inventory file is in private_data_dir

View File

@@ -8,6 +8,7 @@ Most tests that live in here can probably be deleted at some point. They are mai
for a developer. When AWX versions that users upgrade from falls out of support that for a developer. When AWX versions that users upgrade from falls out of support that
is when migration tests can be deleted. This is also a good time to squash. Squashing is when migration tests can be deleted. This is also a good time to squash. Squashing
will likely mess with the tests that live here. will likely mess with the tests that live here.
The smoke test should be kept in here. The smoke test ensures that our migrations The smoke test should be kept in here. The smoke test ensures that our migrations
continue to work when sqlite is the backing database (vs. the default DB of postgres). continue to work when sqlite is the backing database (vs. the default DB of postgres).
""" """
@@ -18,22 +19,27 @@ class TestMigrationSmoke:
def test_happy_path(self, migrator): def test_happy_path(self, migrator):
""" """
This smoke test runs all the migrations. This smoke test runs all the migrations.
Example of how to use django-test-migration to invoke particular migration(s) Example of how to use django-test-migration to invoke particular migration(s)
while weaving in object creation and assertions. while weaving in object creation and assertions.
Note that this is more than just an example. It is a smoke test because it runs ALL Note that this is more than just an example. It is a smoke test because it runs ALL
the migrations. Our "normal" unit tests subvert the migrations running because it is slow. the migrations. Our "normal" unit tests subvert the migrations running because it is slow.
""" """
migration_nodes = all_migrations('default') migration_nodes = all_migrations('default')
migration_tuples = nodes_to_tuples(migration_nodes) migration_tuples = nodes_to_tuples(migration_nodes)
final_migration = migration_tuples[-1] final_migration = migration_tuples[-1]
migrator.apply_initial_migration(('main', None)) migrator.apply_initial_migration(('main', None))
# I just picked a newish migration at the time of writing this. # I just picked a newish migration at the time of writing this.
# If someone from the future finds themselves here because the are squashing migrations # If someone from the future finds themselves here because the are squashing migrations
# it is fine to change the 0180_... below to some other newish migration # it is fine to change the 0180_... below to some other newish migration
intermediate_state = migrator.apply_tested_migration(('main', '0180_add_hostmetric_fields')) intermediate_state = migrator.apply_tested_migration(('main', '0180_add_hostmetric_fields'))
Instance = intermediate_state.apps.get_model('main', 'Instance') Instance = intermediate_state.apps.get_model('main', 'Instance')
# Create any old object in the database # Create any old object in the database
Instance.objects.create(hostname='foobar', node_type='control') Instance.objects.create(hostname='foobar', node_type='control')
final_state = migrator.apply_tested_migration(final_migration) final_state = migrator.apply_tested_migration(final_migration)
Instance = final_state.apps.get_model('main', 'Instance') Instance = final_state.apps.get_model('main', 'Instance')
assert Instance.objects.filter(hostname='foobar').count() == 1 assert Instance.objects.filter(hostname='foobar').count() == 1
@@ -46,16 +52,20 @@ class TestMigrationSmoke:
foo = Instance.objects.create(hostname='foo', node_type='execution', listener_port=1234) foo = Instance.objects.create(hostname='foo', node_type='execution', listener_port=1234)
bar = Instance.objects.create(hostname='bar', node_type='execution', listener_port=None) bar = Instance.objects.create(hostname='bar', node_type='execution', listener_port=None)
bar.peers.add(foo) bar.peers.add(foo)
new_state = migrator.apply_tested_migration( new_state = migrator.apply_tested_migration(
('main', '0189_inbound_hop_nodes'), ('main', '0189_inbound_hop_nodes'),
) )
Instance = new_state.apps.get_model('main', 'Instance') Instance = new_state.apps.get_model('main', 'Instance')
ReceptorAddress = new_state.apps.get_model('main', 'ReceptorAddress') ReceptorAddress = new_state.apps.get_model('main', 'ReceptorAddress')
# We can now test how our migration worked, new field is there: # We can now test how our migration worked, new field is there:
assert ReceptorAddress.objects.filter(address='foo', port=1234).count() == 1 assert ReceptorAddress.objects.filter(address='foo', port=1234).count() == 1
assert not ReceptorAddress.objects.filter(address='bar').exists() assert not ReceptorAddress.objects.filter(address='bar').exists()
bar = Instance.objects.get(hostname='bar') bar = Instance.objects.get(hostname='bar')
fooaddr = ReceptorAddress.objects.get(address='foo') fooaddr = ReceptorAddress.objects.get(address='foo')
bar_peers = bar.peers.all() bar_peers = bar.peers.all()
assert len(bar_peers) == 1 assert len(bar_peers) == 1
assert fooaddr in bar_peers assert fooaddr in bar_peers
@@ -65,6 +75,7 @@ class TestMigrationSmoke:
Organization = old_state.apps.get_model('main', 'Organization') Organization = old_state.apps.get_model('main', 'Organization')
Team = old_state.apps.get_model('main', 'Team') Team = old_state.apps.get_model('main', 'Team')
User = old_state.apps.get_model('auth', 'User') User = old_state.apps.get_model('auth', 'User')
org = Organization.objects.create(name='arbitrary-org', created=now(), modified=now()) org = Organization.objects.create(name='arbitrary-org', created=now(), modified=now())
user = User.objects.create(username='random-user') user = User.objects.create(username='random-user')
org.read_role.members.add(user) org.read_role.members.add(user)
@@ -76,10 +87,11 @@ class TestMigrationSmoke:
new_state = migrator.apply_tested_migration( new_state = migrator.apply_tested_migration(
('main', '0192_custom_roles'), ('main', '0192_custom_roles'),
) )
RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment') RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment')
assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists() assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists()
assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Organization Member', object_id=org.id).exists() assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Controller Organization Member', object_id=org.id).exists()
assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Team Member', object_id=team.id).exists() assert RoleUserAssignment.objects.filter(user=user.id, role_definition__name='Controller Team Member', object_id=team.id).exists()
# Regression testing for bug that comes from current vs past models mismatch # Regression testing for bug that comes from current vs past models mismatch
RoleDefinition = new_state.apps.get_model('dab_rbac', 'RoleDefinition') RoleDefinition = new_state.apps.get_model('dab_rbac', 'RoleDefinition')
@@ -87,6 +99,7 @@ class TestMigrationSmoke:
# Test special cases in managed role creation # Test special cases in managed role creation
assert not RoleDefinition.objects.filter(name='Organization Team Admin').exists() assert not RoleDefinition.objects.filter(name='Organization Team Admin').exists()
assert not RoleDefinition.objects.filter(name='Organization InstanceGroup Admin').exists() assert not RoleDefinition.objects.filter(name='Organization InstanceGroup Admin').exists()
# Test that a removed EE model permission has been deleted # Test that a removed EE model permission has been deleted
new_state = migrator.apply_tested_migration( new_state = migrator.apply_tested_migration(
('main', '0195_EE_permissions'), ('main', '0195_EE_permissions'),
@@ -97,35 +110,21 @@ class TestMigrationSmoke:
# Test create a Project with a duplicate name # Test create a Project with a duplicate name
Organization = new_state.apps.get_model('main', 'Organization') Organization = new_state.apps.get_model('main', 'Organization')
Project = new_state.apps.get_model('main', 'Project') Project = new_state.apps.get_model('main', 'Project')
WorkflowJobTemplate = new_state.apps.get_model('main', 'WorkflowJobTemplate')
org = Organization.objects.create(name='duplicate-obj-organization', created=now(), modified=now()) org = Organization.objects.create(name='duplicate-obj-organization', created=now(), modified=now())
proj_ids = [] proj_ids = []
for i in range(3): for i in range(3):
proj = Project.objects.create(name='duplicate-project-name', organization=org, created=now(), modified=now()) proj = Project.objects.create(name='duplicate-project-name', organization=org, created=now(), modified=now())
proj_ids.append(proj.id) proj_ids.append(proj.id)
# Test create WorkflowJobTemplate with duplicate names
wfjt_ids = []
for i in range(3):
wfjt = WorkflowJobTemplate.objects.create(name='duplicate-workflow-name', organization=org, created=now(), modified=now())
wfjt_ids.append(wfjt.id)
# The uniqueness rules will not apply to InventorySource # The uniqueness rules will not apply to InventorySource
Inventory = new_state.apps.get_model('main', 'Inventory') Inventory = new_state.apps.get_model('main', 'Inventory')
InventorySource = new_state.apps.get_model('main', 'InventorySource') InventorySource = new_state.apps.get_model('main', 'InventorySource')
inv = Inventory.objects.create(name='migration-test-inv', organization=org, created=now(), modified=now()) inv = Inventory.objects.create(name='migration-test-inv', organization=org, created=now(), modified=now())
InventorySource.objects.create(name='migration-test-src', source='file', inventory=inv, organization=org, created=now(), modified=now()) InventorySource.objects.create(name='migration-test-src', source='file', inventory=inv, organization=org, created=now(), modified=now())
# Apply migration 0200 which should rename duplicates
new_state = migrator.apply_tested_migration( new_state = migrator.apply_tested_migration(
('main', '0200_template_name_constraint'), ('main', '0200_template_name_constraint'),
) )
# Get the models from the new state for verification
Project = new_state.apps.get_model('main', 'Project')
WorkflowJobTemplate = new_state.apps.get_model('main', 'WorkflowJobTemplate')
InventorySource = new_state.apps.get_model('main', 'InventorySource')
for i, proj_id in enumerate(proj_ids): for i, proj_id in enumerate(proj_ids):
proj = Project.objects.get(id=proj_id) proj = Project.objects.get(id=proj_id)
if i == 0: if i == 0:
@@ -134,36 +133,10 @@ class TestMigrationSmoke:
assert proj.name != 'duplicate-project-name' assert proj.name != 'duplicate-project-name'
assert proj.name.startswith('duplicate-project-name') assert proj.name.startswith('duplicate-project-name')
# Verify WorkflowJobTemplate duplicates are renamed
for i, wfjt_id in enumerate(wfjt_ids):
wfjt = WorkflowJobTemplate.objects.get(id=wfjt_id)
if i == 0:
assert wfjt.name == 'duplicate-workflow-name'
else:
assert wfjt.name != 'duplicate-workflow-name'
assert wfjt.name.startswith('duplicate-workflow-name')
# The inventory source had this field set to avoid the constrains # The inventory source had this field set to avoid the constrains
InventorySource = new_state.apps.get_model('main', 'InventorySource')
inv_src = InventorySource.objects.get(name='migration-test-src') inv_src = InventorySource.objects.get(name='migration-test-src')
assert inv_src.org_unique is False assert inv_src.org_unique is False
Project = new_state.apps.get_model('main', 'Project')
for proj in Project.objects.all(): for proj in Project.objects.all():
assert proj.org_unique is True assert proj.org_unique is True
# Piggyback test for the new credential types
validate_exists = ['GitHub App Installation Access Token Lookup', 'Terraform backend configuration']
CredentialType = new_state.apps.get_model('main', 'CredentialType')
# simulate an upgrade by deleting existing types with these names
for expected_name in validate_exists:
ct = CredentialType.objects.filter(name=expected_name).first()
if ct:
ct.delete()
new_state = migrator.apply_tested_migration(
('main', '0201_create_managed_creds'),
)
CredentialType = new_state.apps.get_model('main', 'CredentialType')
for expected_name in validate_exists:
assert CredentialType.objects.filter(
name=expected_name
).exists(), f'Could not find {expected_name} credential type name, all names: {list(CredentialType.objects.values_list("name", flat=True))}'

View File

@@ -334,69 +334,6 @@ def test_team_project_list(get, team_project_list):
) )
@pytest.mark.django_db
def test_project_teams_list_multiple_roles_distinct(get, organization_factory):
# test projects with multiple roles on the same team
objects = organization_factory(
'org1',
superusers=['admin'],
teams=['teamA'],
projects=['proj1'],
roles=[
'teamA.member_role:proj1.admin_role',
'teamA.member_role:proj1.use_role',
'teamA.member_role:proj1.update_role',
'teamA.member_role:proj1.read_role',
],
)
admin = objects.superusers.admin
proj1 = objects.projects.proj1
res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data
names = [t['name'] for t in res['results']]
assert names == ['teamA']
@pytest.mark.django_db
def test_project_teams_list_multiple_teams(get, organization_factory):
# test projects with multiple teams
objs = organization_factory(
'org1',
superusers=['admin'],
teams=['teamA', 'teamB', 'teamC', 'teamD'],
projects=['proj1'],
roles=[
'teamA.member_role:proj1.admin_role',
'teamB.member_role:proj1.update_role',
'teamC.member_role:proj1.use_role',
'teamD.member_role:proj1.read_role',
],
)
admin = objs.superusers.admin
proj1 = objs.projects.proj1
res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data
names = sorted([t['name'] for t in res['results']])
assert names == ['teamA', 'teamB', 'teamC', 'teamD']
@pytest.mark.django_db
def test_project_teams_list_no_direct_assignments(get, organization_factory):
# test projects with no direct team assignments
objects = organization_factory(
'org1',
superusers=['admin'],
teams=['teamA'],
projects=['proj1'],
roles=[],
)
admin = objects.superusers.admin
proj1 = objects.projects.proj1
res = get(reverse('api:project_teams_list', kwargs={'pk': proj1.pk}), admin).data
assert res['count'] == 0
@pytest.mark.parametrize("u,expected_status_code", [('rando', 403), ('org_member', 403), ('org_admin', 201), ('admin', 201)]) @pytest.mark.parametrize("u,expected_status_code", [('rando', 403), ('org_member', 403), ('org_admin', 201), ('admin', 201)])
@pytest.mark.django_db @pytest.mark.django_db
def test_create_project(post, organization, org_admin, org_member, admin, rando, u, expected_status_code): def test_create_project(post, organization, org_admin, org_member, admin, rando, u, expected_status_code):

View File

@@ -1,14 +1,20 @@
import pytest import pytest
from awx.main.tests.live.tests.conftest import wait_for_events from awx.main.tests.live.tests.conftest import wait_for_events, wait_for_job
from awx.main.models import Job, Inventory from awx.main.models import Job, Inventory
@pytest.fixture
def facts_project(live_tmp_folder, project_factory):
return project_factory(scm_url=f'file://{live_tmp_folder}/facts')
def assert_facts_populated(name): def assert_facts_populated(name):
job = Job.objects.filter(name__icontains=name).order_by('-created').first() job = Job.objects.filter(name__icontains=name).order_by('-created').first()
assert job is not None assert job is not None
wait_for_events(job) wait_for_events(job)
wait_for_job(job)
inventory = job.inventory inventory = job.inventory
assert inventory.hosts.count() > 0 # sanity assert inventory.hosts.count() > 0 # sanity
@@ -17,24 +23,24 @@ def assert_facts_populated(name):
@pytest.fixture @pytest.fixture
def general_facts_test(live_tmp_folder, run_job_from_playbook): def general_facts_test(facts_project, run_job_from_playbook):
def _rf(slug, jt_params): def _rf(slug, jt_params):
jt_params['use_fact_cache'] = True jt_params['use_fact_cache'] = True
standard_kwargs = dict(scm_url=f'file://{live_tmp_folder}/facts', jt_params=jt_params) standard_kwargs = dict(jt_params=jt_params)
# GATHER FACTS # GATHER FACTS
name = f'test_gather_ansible_facts_{slug}' name = f'test_gather_ansible_facts_{slug}'
run_job_from_playbook(name, 'gather.yml', **standard_kwargs) run_job_from_playbook(name, 'gather.yml', proj=facts_project, **standard_kwargs)
assert_facts_populated(name) assert_facts_populated(name)
# KEEP FACTS # KEEP FACTS
name = f'test_clear_ansible_facts_{slug}' name = f'test_clear_ansible_facts_{slug}'
run_job_from_playbook(name, 'no_op.yml', **standard_kwargs) run_job_from_playbook(name, 'no_op.yml', proj=facts_project, **standard_kwargs)
assert_facts_populated(name) assert_facts_populated(name)
# CLEAR FACTS # CLEAR FACTS
name = f'test_clear_ansible_facts_{slug}' name = f'test_clear_ansible_facts_{slug}'
run_job_from_playbook(name, 'clear.yml', **standard_kwargs) run_job_from_playbook(name, 'clear.yml', proj=facts_project, **standard_kwargs)
job = Job.objects.filter(name__icontains=name).order_by('-created').first() job = Job.objects.filter(name__icontains=name).order_by('-created').first()
assert job is not None assert job is not None

View File

@@ -125,6 +125,9 @@ def test_finish_job_fact_cache_clear(hosts, mocker, ref_time, tmpdir):
for host in (hosts[0], hosts[2], hosts[3]): for host in (hosts[0], hosts[2], hosts[3]):
assert host.ansible_facts == {"a": 1, "b": 2} assert host.ansible_facts == {"a": 1, "b": 2}
assert host.ansible_facts_modified == ref_time assert host.ansible_facts_modified == ref_time
# Verify facts were cleared for host with deleted cache file
assert hosts[1].ansible_facts == {}
assert hosts[1].ansible_facts_modified > ref_time assert hosts[1].ansible_facts_modified > ref_time
# Current implementation skips the call entirely if hosts_to_update == [] # Current implementation skips the call entirely if hosts_to_update == []

View File

@@ -13,7 +13,7 @@ def test_send_messages():
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat() m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat() m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject" m['subject'] = "test subject"
backend = grafana_backend.GrafanaBackend("testapikey", dashboardId='', panelId='') backend = grafana_backend.GrafanaBackend("testapikey")
message = EmailMessage( message = EmailMessage(
m['subject'], m['subject'],
{"started": m['started'], "finished": m['finished']}, {"started": m['started'], "finished": m['finished']},
@@ -43,7 +43,7 @@ def test_send_messages_with_no_verify_ssl():
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat() m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat() m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject" m['subject'] = "test subject"
backend = grafana_backend.GrafanaBackend("testapikey", dashboardId='', panelId='', grafana_no_verify_ssl=True) backend = grafana_backend.GrafanaBackend("testapikey", grafana_no_verify_ssl=True)
message = EmailMessage( message = EmailMessage(
m['subject'], m['subject'],
{"started": m['started'], "finished": m['finished']}, {"started": m['started'], "finished": m['finished']},
@@ -74,7 +74,7 @@ def test_send_messages_with_dashboardid(dashboardId):
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat() m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat() m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject" m['subject'] = "test subject"
backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=dashboardId, panelId='') backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=dashboardId)
message = EmailMessage( message = EmailMessage(
m['subject'], m['subject'],
{"started": m['started'], "finished": m['finished']}, {"started": m['started'], "finished": m['finished']},
@@ -97,7 +97,7 @@ def test_send_messages_with_dashboardid(dashboardId):
assert sent_messages == 1 assert sent_messages == 1
@pytest.mark.parametrize("panelId", ['42', '0']) @pytest.mark.parametrize("panelId", [42, 0])
def test_send_messages_with_panelid(panelId): def test_send_messages_with_panelid(panelId):
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock: with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200 requests_mock.post.return_value.status_code = 200
@@ -105,7 +105,7 @@ def test_send_messages_with_panelid(panelId):
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat() m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat() m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject" m['subject'] = "test subject"
backend = grafana_backend.GrafanaBackend("testapikey", dashboardId='', panelId=panelId) backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=None, panelId=panelId)
message = EmailMessage( message = EmailMessage(
m['subject'], m['subject'],
{"started": m['started'], "finished": m['finished']}, {"started": m['started'], "finished": m['finished']},
@@ -122,7 +122,7 @@ def test_send_messages_with_panelid(panelId):
requests_mock.post.assert_called_once_with( requests_mock.post.assert_called_once_with(
'https://example.com/api/annotations', 'https://example.com/api/annotations',
headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'}, headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': int(panelId), 'time': 60000}, json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'panelId': panelId, 'time': 60000},
verify=True, verify=True,
) )
assert sent_messages == 1 assert sent_messages == 1
@@ -135,7 +135,7 @@ def test_send_messages_with_bothids():
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat() m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat() m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject" m['subject'] = "test subject"
backend = grafana_backend.GrafanaBackend("testapikey", dashboardId='42', panelId='42') backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=42, panelId=42)
message = EmailMessage( message = EmailMessage(
m['subject'], m['subject'],
{"started": m['started'], "finished": m['finished']}, {"started": m['started'], "finished": m['finished']},
@@ -158,36 +158,6 @@ def test_send_messages_with_bothids():
assert sent_messages == 1 assert sent_messages == 1
def test_send_messages_with_emptyids():
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200
m = {}
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject"
backend = grafana_backend.GrafanaBackend("testapikey", dashboardId='', panelId='')
message = EmailMessage(
m['subject'],
{"started": m['started'], "finished": m['finished']},
[],
[
'https://example.com',
],
)
sent_messages = backend.send_messages(
[
message,
]
)
requests_mock.post.assert_called_once_with(
'https://example.com/api/annotations',
headers={'Content-Type': 'application/json', 'Authorization': 'Bearer testapikey'},
json={'text': 'test subject', 'isRegion': True, 'timeEnd': 120000, 'time': 60000},
verify=True,
)
assert sent_messages == 1
def test_send_messages_with_tags(): def test_send_messages_with_tags():
with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock: with mock.patch('awx.main.notifications.grafana_backend.requests') as requests_mock:
requests_mock.post.return_value.status_code = 200 requests_mock.post.return_value.status_code = 200
@@ -195,7 +165,7 @@ def test_send_messages_with_tags():
m['started'] = dt.datetime.utcfromtimestamp(60).isoformat() m['started'] = dt.datetime.utcfromtimestamp(60).isoformat()
m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat() m['finished'] = dt.datetime.utcfromtimestamp(120).isoformat()
m['subject'] = "test subject" m['subject'] = "test subject"
backend = grafana_backend.GrafanaBackend("testapikey", dashboardId='', panelId='', annotation_tags=["ansible"]) backend = grafana_backend.GrafanaBackend("testapikey", dashboardId=None, panelId=None, annotation_tags=["ansible"])
message = EmailMessage( message = EmailMessage(
m['subject'], m['subject'],
{"started": m['started'], "finished": m['finished']}, {"started": m['started'], "finished": m['finished']},

View File

@@ -249,7 +249,7 @@ class Licenser(object):
'GET', 'GET',
host, host,
verify=True, verify=True,
timeout=(31, 31), timeout=(5, 20),
) )
except requests.RequestException: except requests.RequestException:
logger.warning("Failed to connect to console.redhat.com using Service Account credentials. Falling back to basic auth.") logger.warning("Failed to connect to console.redhat.com using Service Account credentials. Falling back to basic auth.")
@@ -258,7 +258,7 @@ class Licenser(object):
host, host,
auth=(client_id, client_secret), auth=(client_id, client_secret),
verify=True, verify=True,
timeout=(31, 31), timeout=(5, 20),
) )
subs.raise_for_status() subs.raise_for_status()
subs_formatted = [] subs_formatted = []

View File

@@ -38,7 +38,7 @@ class ActionModule(ActionBase):
def _obtain_auth_token(self, oidc_endpoint, client_id, client_secret): def _obtain_auth_token(self, oidc_endpoint, client_id, client_secret):
if oidc_endpoint.endswith('/'): if oidc_endpoint.endswith('/'):
oidc_endpoint = oidc_endpoint[:-1] oidc_endpoint = oidc_endpoint.rstrip('/')
main_url = oidc_endpoint + '/.well-known/openid-configuration' main_url = oidc_endpoint + '/.well-known/openid-configuration'
response = requests.get(url=main_url, headers={'Accept': 'application/json'}) response = requests.get(url=main_url, headers={'Accept': 'application/json'})
data = {} data = {}

View File

@@ -1,7 +1,5 @@
from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
from ansible_base.rbac.models import RoleDefinition
from ansible_base.resource_registry.shared_types import RoleDefinitionType
from awx.main import models from awx.main import models
@@ -21,8 +19,4 @@ RESOURCE_LIST = (
shared_resource=SharedResource(serializer=TeamType, is_provider=False), shared_resource=SharedResource(serializer=TeamType, is_provider=False),
parent_resources=[ParentResource(model=models.Organization, field_name="organization")], parent_resources=[ParentResource(model=models.Organization, field_name="organization")],
), ),
ResourceConfig(
RoleDefinition,
shared_resource=SharedResource(serializer=RoleDefinitionType, is_provider=False),
),
) )

View File

@@ -83,7 +83,7 @@ USE_I18N = True
USE_TZ = True USE_TZ = True
STATICFILES_DIRS = [ STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'ui', 'build', 'static'), os.path.join(BASE_DIR, 'ui', 'build'),
os.path.join(BASE_DIR, 'static'), os.path.join(BASE_DIR, 'static'),
] ]
@@ -540,7 +540,7 @@ AWX_AUTO_DEPROVISION_INSTANCES = False
# If True, allow users to be assigned to roles that were created via JWT # If True, allow users to be assigned to roles that were created via JWT
ALLOW_LOCAL_ASSIGNING_JWT_ROLES = True ALLOW_LOCAL_ASSIGNING_JWT_ROLES = False
# Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed' # Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed'
# Note: This setting may be overridden by database settings. # Note: This setting may be overridden by database settings.
@@ -599,12 +599,6 @@ VMWARE_EXCLUDE_EMPTY_GROUPS = True
VMWARE_VALIDATE_CERTS = False VMWARE_VALIDATE_CERTS = False
# -----------------
# -- VMware ESXi --
# -----------------
# TODO: Verify matches with AAP-53978 solution in awx-plugins
VMWARE_ESXI_EXCLUDE_EMPTY_GROUPS = True
# --------------------------- # ---------------------------
# -- Google Compute Engine -- # -- Google Compute Engine --
# --------------------------- # ---------------------------
@@ -717,7 +711,7 @@ DISABLE_LOCAL_AUTH = False
TOWER_URL_BASE = "https://platformhost" TOWER_URL_BASE = "https://platformhost"
INSIGHTS_URL_BASE = "https://example.org" INSIGHTS_URL_BASE = "https://example.org"
INSIGHTS_OIDC_ENDPOINT = "https://sso.example.org/" INSIGHTS_OIDC_ENDPOINT = "https://sso.example.org"
INSIGHTS_AGENT_MIME = 'application/example' INSIGHTS_AGENT_MIME = 'application/example'
# See https://github.com/ansible/awx-facts-playbooks # See https://github.com/ansible/awx-facts-playbooks
INSIGHTS_SYSTEM_ID_FILE = '/etc/redhat-access-insights/machine-id' INSIGHTS_SYSTEM_ID_FILE = '/etc/redhat-access-insights/machine-id'
@@ -1075,7 +1069,6 @@ ANSIBLE_BASE_CACHE_PARENT_PERMISSIONS = True
# Currently features are enabled to keep compatibility with old system, except custom roles # Currently features are enabled to keep compatibility with old system, except custom roles
ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN = False ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN = False
# ANSIBLE_BASE_ALLOW_CUSTOM_ROLES = True # ANSIBLE_BASE_ALLOW_CUSTOM_ROLES = True
ANSIBLE_BASE_ALLOW_TEAM_PARENTS = False
ANSIBLE_BASE_ALLOW_CUSTOM_TEAM_ROLES = False ANSIBLE_BASE_ALLOW_CUSTOM_TEAM_ROLES = False
ANSIBLE_BASE_ALLOW_SINGLETON_USER_ROLES = True ANSIBLE_BASE_ALLOW_SINGLETON_USER_ROLES = True
ANSIBLE_BASE_ALLOW_SINGLETON_TEAM_ROLES = False # System auditor has always been restricted to users ANSIBLE_BASE_ALLOW_SINGLETON_TEAM_ROLES = False # System auditor has always been restricted to users
@@ -1096,9 +1089,6 @@ INDIRECT_HOST_QUERY_FALLBACK_GIVEUP_DAYS = 3
# Older records will be cleaned up # Older records will be cleaned up
INDIRECT_HOST_AUDIT_RECORD_MAX_AGE_DAYS = 7 INDIRECT_HOST_AUDIT_RECORD_MAX_AGE_DAYS = 7
# setting for Policy as Code feature
FEATURE_POLICY_AS_CODE_ENABLED = False
OPA_HOST = '' # The hostname used to connect to the OPA server. If empty, policy enforcement will be disabled. OPA_HOST = '' # The hostname used to connect to the OPA server. If empty, policy enforcement will be disabled.
OPA_PORT = 8181 # The port used to connect to the OPA server. Defaults to 8181. OPA_PORT = 8181 # The port used to connect to the OPA server. Defaults to 8181.
OPA_SSL = False # Enable or disable the use of SSL to connect to the OPA server. Defaults to false. OPA_SSL = False # Enable or disable the use of SSL to connect to the OPA server. Defaults to false.

View File

@@ -87,14 +87,7 @@ ui/src/webpack: $(UI_DIR)/src/node_modules/webpack
## True target for ui/src/webpack. ## True target for ui/src/webpack.
$(UI_DIR)/src/node_modules/webpack: $(UI_DIR)/src/node_modules/webpack:
@echo "=== Installing webpack ===" @echo "=== Installing webpack ==="
@cd $(UI_DIR)/src && \ @cd $(UI_DIR)/src && n 18 && npm install webpack
maj=$$(node -p "process.versions.node.split('.')[0]"); \
if [ "$$maj" != "18" ]; then \
echo "Error: Need Node 18.x; found $$(node -v)" >&2; \
exit 1; \
fi; \
npm install webpack
.PHONY: clean/ui .PHONY: clean/ui
## Clean ui ## Clean ui

View File

@@ -5,7 +5,6 @@ from django.conf import settings
from django.urls import re_path, include, path from django.urls import re_path, include, path
from ansible_base.lib.dynamic_config.dynamic_urls import api_urls, api_version_urls, root_urls from ansible_base.lib.dynamic_config.dynamic_urls import api_urls, api_version_urls, root_urls
from ansible_base.rbac.service_api.urls import rbac_service_urls
from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls
@@ -24,7 +23,6 @@ def get_urlpatterns(prefix=None):
urlpatterns += [ urlpatterns += [
path(f'api{prefix}v2/', include(resource_api_urls)), path(f'api{prefix}v2/', include(resource_api_urls)),
path(f'api{prefix}v2/', include(rbac_service_urls)),
path(f'api{prefix}v2/', include(api_version_urls)), path(f'api{prefix}v2/', include(api_version_urls)),
path(f'api{prefix}', include(api_urls)), path(f'api{prefix}', include(api_urls)),
path('', include(root_urls)), path('', include(root_urls)),

View File

@@ -32,7 +32,7 @@ Installing the `tar.gz` involves no special instructions.
## Running ## Running
Non-deprecated modules in this collection have no Python requirements, but Non-deprecated modules in this collection have no Python requirements, but
may require the AWX CLI may require the official [AWX CLI](https://pypi.org/project/awxkit/)
in the future. The `DOCUMENTATION` for each module will report this. in the future. The `DOCUMENTATION` for each module will report this.
You can specify authentication by host, username, and password. You can specify authentication by host, username, and password.

View File

@@ -60,7 +60,7 @@ options:
- Path to the controller config file. - Path to the controller config file.
- If provided, the other locations for config files will not be considered. - If provided, the other locations for config files will not be considered.
type: path type: path
aliases: [ tower_config_file ] aliases: [tower_config_file]
notes: notes:
- If no I(config_file) is provided we will attempt to use the tower-cli library - If no I(config_file) is provided we will attempt to use the tower-cli library

View File

@@ -4,7 +4,6 @@ __metaclass__ = type
from .controller_api import ControllerModule from .controller_api import ControllerModule
from ansible.module_utils.basic import missing_required_lib from ansible.module_utils.basic import missing_required_lib
from os import getenv
try: try:
from awxkit.api.client import Connection from awxkit.api.client import Connection
@@ -43,13 +42,7 @@ class ControllerAWXKitModule(ControllerModule):
if not self.apiV2Ref: if not self.apiV2Ref:
if not self.authenticated: if not self.authenticated:
self.authenticate() self.authenticate()
prefix = getenv('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', '/api/') v2_index = get_registered_page('/api/v2/')(self.connection).get()
if not prefix.startswith('/'):
prefix = f"/{prefix}"
if not prefix.endswith('/'):
prefix = f"{prefix}/"
v2_path = f"{prefix}v2/"
v2_index = get_registered_page(v2_path)(self.connection).get()
self.api_ref = ApiV2(connection=self.connection, **{'json': v2_index}) self.api_ref = ApiV2(connection=self.connection, **{'json': v2_index})
return self.api_ref return self.api_ref

View File

@@ -538,18 +538,7 @@ class ControllerAPIModule(ControllerModule):
self.fail_json(msg='Invalid authentication credentials for {0} (HTTP 401).'.format(url.path)) self.fail_json(msg='Invalid authentication credentials for {0} (HTTP 401).'.format(url.path))
# Sanity check: Did we get a forbidden response, which means that the user isn't allowed to do this? Report that. # Sanity check: Did we get a forbidden response, which means that the user isn't allowed to do this? Report that.
elif he.code == 403: elif he.code == 403:
# Hack: Tell the customer to use the platform supported collection when interacting with Org, Team, User Controller endpoints self.fail_json(msg="You don't have permission to {1} to {0} (HTTP 403).".format(url.path, method))
err_msg = he.fp.read().decode('utf-8')
try:
# Defensive coding. Handle json responses and non-json responses
err_msg = loads(err_msg)
err_msg = err_msg['detail']
# JSONDecodeError only available on Python 3.5+
except ValueError:
pass
prepend_msg = " Use the collection ansible.platform to modify resources Organization, User, or Team." if (
"this resource via the platform ingress") in err_msg else ""
self.fail_json(msg="You don't have permission to {1} to {0} (HTTP 403).{2}".format(url.path, method, prepend_msg))
# Sanity check: Did we get a 404 response? # Sanity check: Did we get a 404 response?
# Requests with primary keys will return a 404 if there is no response, and we want to consistently trap these. # Requests with primary keys will return a 404 if there is no response, and we want to consistently trap these.
elif he.code == 404: elif he.code == 404:

View File

@@ -67,7 +67,6 @@ EXAMPLES = '''
''' '''
import base64 import base64
from ..module_utils.controller_api import ControllerAPIModule from ..module_utils.controller_api import ControllerAPIModule
@@ -121,17 +120,11 @@ def main():
# Do the actual install, if we need to # Do the actual install, if we need to
if perform_install: if perform_install:
if module.params.get('manifest', None):
response = module.post_endpoint('config', data={'manifest': manifest.decode()})
else:
response = module.post_endpoint('config/attach', data={'subscription_id': module.params.get('subscription_id')})
# Check API response for errors (AAP-44277 fix)
if response and response.get('status_code') and response.get('status_code') != 200:
error_msg = response.get('json', {}).get('error', 'License operation failed')
module.fail_json(msg=error_msg)
json_output['changed'] = True json_output['changed'] = True
if module.params.get('manifest', None):
module.post_endpoint('config', data={'manifest': manifest.decode()})
else:
module.post_endpoint('config/attach', data={'subscription_id': module.params.get('subscription_id')})
module.exit_json(**json_output) module.exit_json(**json_output)

View File

@@ -344,10 +344,7 @@ def main():
unified_job_template = module.params.get('unified_job_template') unified_job_template = module.params.get('unified_job_template')
if unified_job_template: if unified_job_template:
ujt = module.get_one('unified_job_templates', name_or_id=unified_job_template, **{'data': search_fields}) new_fields['unified_job_template'] = module.get_one('unified_job_templates', name_or_id=unified_job_template, **{'data': search_fields})['id']
if ujt is None or 'id' not in ujt:
module.fail_json(msg=f'Could not get unified_job_template name_or_id={unified_job_template} search_fields={search_fields}, got {ujt}')
new_fields['unified_job_template'] = ujt['id']
inventory = module.params.get('inventory') inventory = module.params.get('inventory')
if inventory: if inventory:
new_fields['inventory'] = module.resolve_name_to_id('inventories', inventory) new_fields['inventory'] = module.resolve_name_to_id('inventories', inventory)

View File

@@ -116,91 +116,35 @@ def collection_import():
return rf return rf
def _process_request_data(kwargs_copy, kwargs):
"""Helper to process 'data' in request kwargs."""
if 'data' in kwargs:
if isinstance(kwargs['data'], dict):
kwargs_copy['data'] = kwargs['data']
elif kwargs['data'] is None:
pass
elif isinstance(kwargs['data'], str):
kwargs_copy['data'] = json.loads(kwargs['data'])
else:
raise RuntimeError('Expected data to be dict or str, got {0}, data: {1}'.format(type(kwargs['data']), kwargs['data']))
def _process_request_params(kwargs_copy, kwargs, method):
"""Helper to process 'params' in request kwargs."""
if 'params' in kwargs and method == 'GET':
if not kwargs_copy.get('data'):
kwargs_copy['data'] = {}
if isinstance(kwargs['params'], dict):
kwargs_copy['data'].update(kwargs['params'])
elif isinstance(kwargs['params'], list):
for k, v in kwargs['params']:
kwargs_copy['data'][k] = v
def _get_resource_class(resource_module):
"""Helper to determine the Ansible module resource class."""
if getattr(resource_module, 'ControllerAWXKitModule', None):
return resource_module.ControllerAWXKitModule
elif getattr(resource_module, 'ControllerAPIModule', None):
return resource_module.ControllerAPIModule
else:
raise RuntimeError("The module has neither a ControllerAWXKitModule or a ControllerAPIModule")
def _get_tower_cli_mgr(new_request):
"""Helper to get the appropriate tower_cli mock context manager."""
if HAS_TOWER_CLI:
return mock.patch('tower_cli.api.Session.request', new=new_request)
elif HAS_AWX_KIT:
return mock.patch('awxkit.api.client.requests.Session.request', new=new_request)
else:
return suppress()
def _run_and_capture_module_output(resource_module, stdout_buffer):
"""Helper to run the module and capture its stdout."""
try:
with redirect_stdout(stdout_buffer):
resource_module.main()
except SystemExit:
pass # A system exit indicates successful execution
except Exception:
# dump the stdout back to console for debugging
print(stdout_buffer.getvalue())
raise
def _parse_and_handle_module_result(module_stdout):
"""Helper to parse module output and handle exceptions."""
try:
result = json.loads(module_stdout)
except Exception as e:
raise_from(Exception('Module did not write valid JSON, error: {0}, stdout:\n{1}'.format(str(e), module_stdout)), e)
if 'exception' in result:
if "ModuleNotFoundError: No module named 'tower_cli'" in result['exception']:
pytest.skip('The tower-cli library is needed to run this test, module no longer supported.')
raise Exception('Module encountered error:\n{0}'.format(result['exception']))
return result
@pytest.fixture @pytest.fixture
def run_module(request, collection_import, mocker): def run_module(request, collection_import):
def rf(module_name, module_params, request_user): def rf(module_name, module_params, request_user):
def new_request(self, method, url, **kwargs): def new_request(self, method, url, **kwargs):
kwargs_copy = kwargs.copy() kwargs_copy = kwargs.copy()
_process_request_data(kwargs_copy, kwargs) if 'data' in kwargs:
_process_request_params(kwargs_copy, kwargs, method) if isinstance(kwargs['data'], dict):
kwargs_copy['data'] = kwargs['data']
elif kwargs['data'] is None:
pass
elif isinstance(kwargs['data'], str):
kwargs_copy['data'] = json.loads(kwargs['data'])
else:
raise RuntimeError('Expected data to be dict or str, got {0}, data: {1}'.format(type(kwargs['data']), kwargs['data']))
if 'params' in kwargs and method == 'GET':
# query params for GET are handled a bit differently by
# tower-cli and python requests as opposed to REST framework APIRequestFactory
if not kwargs_copy.get('data'):
kwargs_copy['data'] = {}
if isinstance(kwargs['params'], dict):
kwargs_copy['data'].update(kwargs['params'])
elif isinstance(kwargs['params'], list):
for k, v in kwargs['params']:
kwargs_copy['data'][k] = v
# make request # make request
with transaction.atomic(): with transaction.atomic():
rf_django = _request(method.lower()) # Renamed rf to avoid conflict with outer rf rf = _request(method.lower())
django_response = rf_django(url, user=request_user, expect=None, **kwargs_copy) django_response = rf(url, user=request_user, expect=None, **kwargs_copy)
# requests library response object is different from the Django response, but they are the same concept # requests library response object is different from the Django response, but they are the same concept
# this converts the Django response object into a requests response object for consumption # this converts the Django response object into a requests response object for consumption
@@ -224,25 +168,58 @@ def run_module(request, collection_import, mocker):
return m return m
stdout_buffer = io.StringIO() stdout_buffer = io.StringIO()
# Requies specific PYTHONPATH, see docs
# Note that a proper Ansiballz explosion of the modules will have an import path like:
# ansible_collections.awx.awx.plugins.modules.{}
# We should consider supporting that in the future
resource_module = collection_import('plugins.modules.{0}'.format(module_name)) resource_module = collection_import('plugins.modules.{0}'.format(module_name))
if not isinstance(module_params, dict): if not isinstance(module_params, dict):
raise RuntimeError('Module params must be dict, got {0}'.format(type(module_params))) raise RuntimeError('Module params must be dict, got {0}'.format(type(module_params)))
# Ansible params can be passed as an invocation argument or over stdin
# this short circuits within the AnsibleModule interface
def mock_load_params(self): def mock_load_params(self):
self.params = module_params self.params = module_params
resource_class = _get_resource_class(resource_module) if getattr(resource_module, 'ControllerAWXKitModule', None):
resource_class = resource_module.ControllerAWXKitModule
elif getattr(resource_module, 'ControllerAPIModule', None):
resource_class = resource_module.ControllerAPIModule
else:
raise RuntimeError("The module has neither a ControllerAWXKitModule or a ControllerAPIModule")
with mock.patch.object(resource_class, '_load_params', new=mock_load_params): with mock.patch.object(resource_class, '_load_params', new=mock_load_params):
mocker.patch('ansible.module_utils.basic._ANSIBLE_PROFILE', 'legacy') # Call the test utility (like a mock server) instead of issuing HTTP requests
with mock.patch('ansible.module_utils.urls.Request.open', new=new_open): with mock.patch('ansible.module_utils.urls.Request.open', new=new_open):
with _get_tower_cli_mgr(new_request): if HAS_TOWER_CLI:
_run_and_capture_module_output(resource_module, stdout_buffer) tower_cli_mgr = mock.patch('tower_cli.api.Session.request', new=new_request)
elif HAS_AWX_KIT:
tower_cli_mgr = mock.patch('awxkit.api.client.requests.Session.request', new=new_request)
else:
tower_cli_mgr = suppress()
with tower_cli_mgr:
try:
# Ansible modules return data to the mothership over stdout
with redirect_stdout(stdout_buffer):
resource_module.main()
except SystemExit:
pass # A system exit indicates successful execution
except Exception:
# dump the stdout back to console for debugging
print(stdout_buffer.getvalue())
raise
module_stdout = stdout_buffer.getvalue().strip() module_stdout = stdout_buffer.getvalue().strip()
result = _parse_and_handle_module_result(module_stdout) try:
result = json.loads(module_stdout)
except Exception as e:
raise_from(Exception('Module did not write valid JSON, error: {0}, stdout:\n{1}'.format(str(e), module_stdout)), e)
# A module exception should never be a test expectation
if 'exception' in result:
if "ModuleNotFoundError: No module named 'tower_cli'" in result['exception']:
pytest.skip('The tower-cli library is needed to run this test, module no longer supported.')
raise Exception('Module encountered error:\n{0}'.format(result['exception']))
return result return result
return rf return rf

View File

@@ -1,36 +0,0 @@
from __future__ import absolute_import, division, print_function
import os
from unittest import mock
__metaclass__ = type
import pytest
def mock_get_registered_page(prefix):
return mock.Mock(return_value=mock.Mock(get=mock.Mock(return_value={'prefix': prefix})))
@pytest.mark.parametrize(
"env_prefix, controller_host, expected",
[
# without CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX env variable
[None, "https://localhost", "/api/v2/"],
# with CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX env variable
["/api/controller/", "https://localhost", "/api/controller/v2/"],
["/api/controller", "https://localhost", "/api/controller/v2/"],
["api/controller", "https://localhost", "/api/controller/v2/"],
["/custom/path/", "https://localhost", "/custom/path/v2/"],
],
)
def test_controller_awxkit_get_api_v2_object(collection_import, env_prefix, controller_host, expected):
controller_awxkit_class = collection_import('plugins.module_utils.awxkit').ControllerAWXKitModule
controller_awxkit = controller_awxkit_class(argument_spec={}, direct_params=dict(controller_host=controller_host))
with mock.patch('plugins.module_utils.awxkit.get_registered_page', mock_get_registered_page):
if env_prefix:
with mock.patch.dict(os.environ, {"CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX": env_prefix}):
api_v2_object = controller_awxkit.get_api_v2_object()
else:
api_v2_object = controller_awxkit.get_api_v2_object()
assert getattr(api_v2_object, 'prefix') == expected

View File

@@ -65,7 +65,7 @@ def test_export(run_module, admin_user):
all_assets_except_users = {k: v for k, v in assets.items() if k != 'users'} all_assets_except_users = {k: v for k, v in assets.items() if k != 'users'}
for k, v in all_assets_except_users.items(): for k, v in all_assets_except_users.items():
assert v == [] or v is None, f"Expected resource {k} to be empty. Instead it is {v}" assert v == [], f"Expected resource {k} to be empty. Instead it is {v}"
@pytest.mark.django_db @pytest.mark.django_db

View File

@@ -1,32 +0,0 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
@pytest.mark.django_db
def test_license_invalid_subscription_id_should_fail(run_module, admin_user):
"""Test invalid subscription ID returns failure."""
result = run_module('license', {'subscription_id': 'invalid-test-12345', 'state': 'present'}, admin_user)
assert result.get('failed', False)
assert 'msg' in result
assert 'subscription' in result['msg'].lower()
@pytest.mark.django_db
def test_license_invalid_manifest_should_fail(run_module, admin_user):
"""Test invalid manifest returns failure."""
result = run_module('license', {'manifest': '/nonexistent/test.zip', 'state': 'present'}, admin_user)
assert result.get('failed', False)
assert 'msg' in result
@pytest.mark.django_db
def test_license_state_absent_works(run_module, admin_user):
"""Test license removal works."""
result = run_module('license', {'state': 'absent'}, admin_user)
assert not result.get('failed', False)

View File

@@ -20,7 +20,6 @@ def test_create_organization(run_module, admin_user):
'controller_username': None, 'controller_username': None,
'controller_password': None, 'controller_password': None,
'validate_certs': None, 'validate_certs': None,
'aap_token': None,
'controller_config_file': None, 'controller_config_file': None,
} }
@@ -53,7 +52,6 @@ def test_galaxy_credential_order(run_module, admin_user):
'controller_username': None, 'controller_username': None,
'controller_password': None, 'controller_password': None,
'validate_certs': None, 'validate_certs': None,
'aap_token': None,
'controller_config_file': None, 'controller_config_file': None,
'galaxy_credentials': cred_ids, 'galaxy_credentials': cred_ids,
} }
@@ -78,7 +76,6 @@ def test_galaxy_credential_order(run_module, admin_user):
'controller_username': None, 'controller_username': None,
'controller_password': None, 'controller_password': None,
'validate_certs': None, 'validate_certs': None,
'aap_token': None,
'controller_config_file': None, 'controller_config_file': None,
'galaxy_credentials': cred_ids, 'galaxy_credentials': cred_ids,
} }

View File

@@ -108,7 +108,7 @@
credential: "{{ ssh_cred_name }}" credential: "{{ ssh_cred_name }}"
module_name: "Does not exist" module_name: "Does not exist"
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -70,7 +70,7 @@
command_id: "{{ command.id }}" command_id: "{{ command.id }}"
fail_if_not_running: true fail_if_not_running: true
register: results register: results
ignore_errors: yes ignore_errors: true
- ansible.builtin.assert: - ansible.builtin.assert:
that: that:
@@ -81,7 +81,7 @@
command_id: "{{ command.id }}" command_id: "{{ command.id }}"
fail_if_not_running: true fail_if_not_running: true
register: results register: results
ignore_errors: yes ignore_errors: true
- ansible.builtin.assert: - ansible.builtin.assert:
that: that:
@@ -91,7 +91,7 @@
awx.awx.ad_hoc_command_cancel: awx.awx.ad_hoc_command_cancel:
command_id: 9999999999 command_id: 9999999999
register: result register: result
ignore_errors: yes ignore_errors: true
- ansible.builtin.assert: - ansible.builtin.assert:
that: that:

View File

@@ -38,7 +38,7 @@
ad_hoc_command_wait: ad_hoc_command_wait:
command_id: "99999999" command_id: "99999999"
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
@@ -85,13 +85,13 @@
ad_hoc_command_wait: ad_hoc_command_wait:
command_id: "{{ command.id }}" command_id: "{{ command.id }}"
timeout: 1 timeout: 1
ignore_errors: yes ignore_errors: true
register: wait_results register: wait_results
# Make sure that we failed and that we have some data in our results # Make sure that we failed and that we have some data in our results
- assert: - assert:
that: that:
- "('Monitoring of ad hoc command -' in wait_results.msg and 'aborted due to timeout' in wait_results.msg) or ('Timeout waiting for command to finish.' in wait_results.msg)" - "'Monitoring aborted due to timeout' or 'Timeout waiting for command to finish.' in wait_results.msg"
- "'id' in wait_results" - "'id' in wait_results"
- name: Async cancel the long-running command - name: Async cancel the long-running command
@@ -104,7 +104,7 @@
ad_hoc_command_wait: ad_hoc_command_wait:
command_id: "{{ command.id }}" command_id: "{{ command.id }}"
register: wait_results register: wait_results
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -93,7 +93,7 @@
organization: Default organization: Default
state: absent state: absent
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
@@ -306,7 +306,7 @@
inputs: inputs:
username: joe username: joe
ssh_key_data: "{{ ssh_key_data }}" ssh_key_data: "{{ ssh_key_data }}"
ignore_errors: yes ignore_errors: true
register: result register: result
- assert: - assert:
@@ -322,7 +322,7 @@
credential_type: Machine credential_type: Machine
inputs: inputs:
username: joe username: joe
ignore_errors: yes ignore_errors: true
register: result register: result
- assert: - assert:
@@ -811,7 +811,7 @@
organization: test-non-existing-org organization: test-non-existing-org
state: present state: present
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -70,7 +70,7 @@
organization: Some Org organization: Some Org
image: quay.io/ansible/awx-ee image: quay.io/ansible/awx-ee
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -161,10 +161,11 @@
- name: "Find number of hosts in {{ group_name1 }}" - name: "Find number of hosts in {{ group_name1 }}"
set_fact: set_fact:
group1_host_count: "{{ lookup('awx.awx.controller_api', 'groups/' + result.id | string + '/all_hosts/') | length }}" group1_host_count: "{{ lookup('awx.awx.controller_api', 'groups/{{result.id}}/all_hosts/') |length}}"
- assert: - assert:
that: that:
- group1_host_count == 3 - group1_host_count == "3"
- name: Delete Group 3 - name: Delete Group 3
group: group:
@@ -208,7 +209,7 @@
inventory: test-non-existing-inventory inventory: test-non-existing-inventory
state: present state: present
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -79,13 +79,13 @@
- "result is changed" - "result is changed"
- name: Use lookup to check that host was enabled - name: Use lookup to check that host was enabled
set_fact: ansible.builtin.set_fact:
host_enabled_test: "{{ lookup('awx.awx.controller_api', 'hosts/' + result.id | string + '/').enabled }}" host_enabled_test: "lookup('awx.awx.controller_api', 'hosts/{{result.id}}/').enabled"
- name: Newly created host should have API default value for enabled - name: Newly created host should have API default value for enabled
assert: assert:
that: that:
- host_enabled_test is true - host_enabled_test
- name: Delete a Host - name: Delete a Host
host: host:
@@ -105,7 +105,7 @@
inventory: test-non-existing-inventory inventory: test-non-existing-inventory
state: present state: present
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -49,7 +49,7 @@
name: "{{ org_name1 }}" name: "{{ org_name1 }}"
type: "organization" type: "organization"
register: import_output register: import_output
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -127,7 +127,7 @@
organization: Default organization: Default
kind: smart kind: smart
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
@@ -187,15 +187,13 @@
organization: test-non-existing-org organization: test-non-existing-org
state: present state: present
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
- "result is failed" - "result is failed"
- "result is not changed" - "result is not changed"
- >- - "'test-non-existing-org' in result.msg"
'test-non-existing-org' in result.msg and
'returned 0 items, expected 1' in result.msg
- "result.total_results == 0" - "result.total_results == 0"
always: always:

View File

@@ -23,7 +23,7 @@
job_id: "{{ job.id }}" job_id: "{{ job.id }}"
fail_if_not_running: true fail_if_not_running: true
register: results register: results
ignore_errors: yes ignore_errors: true
# This test can be flaky, so we retry it a few times # This test can be flaky, so we retry it a few times
until: results is failed and results.msg == 'Job is not running' until: results is failed and results.msg == 'Job is not running'
retries: 6 retries: 6
@@ -33,7 +33,7 @@
job_cancel: job_cancel:
job_id: 9999999999 job_id: 9999999999
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -37,7 +37,7 @@
job_template: "Non_Existing_Job_Template" job_template: "Non_Existing_Job_Template"
inventory: "Demo Inventory" inventory: "Demo Inventory"
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
@@ -124,7 +124,7 @@
extra_vars: extra_vars:
basic_name: My First Variable basic_name: My First Variable
option_true_false: 'no' option_true_false: 'no'
ignore_errors: yes ignore_errors: true
register: result register: result
- assert: - assert:
@@ -145,7 +145,7 @@
basic_name: My First Variable basic_name: My First Variable
var1: My First Variable var1: My First Variable
var2: My Second Variable var2: My Second Variable
ignore_errors: yes ignore_errors: true
register: result register: result
- assert: - assert:

View File

@@ -260,6 +260,7 @@
state: absent state: absent
register: result register: result
# This doesnt work if you include the credentials parameter
- name: Delete Job Template 1 - name: Delete Job Template 1
job_template: job_template:
name: "{{ jt1 }}" name: "{{ jt1 }}"
@@ -306,12 +307,11 @@
- label_bad - label_bad
state: present state: present
register: bad_label_results register: bad_label_results
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
- bad_label_results is defined - "bad_label_results.msg == 'Could not find label entry with name label_bad'"
- not (bad_label_results.failed | default(false)) or ('msg' in bad_label_results)
- name: Add survey to Job Template 2 - name: Add survey to Job Template 2
job_template: job_template:
@@ -442,6 +442,7 @@
that: that:
- "result is changed" - "result is changed"
- name: Delete Job Template 2 - name: Delete Job Template 2
job_template: job_template:
name: "{{ jt2 }}" name: "{{ jt2 }}"
@@ -489,6 +490,8 @@
credential_type: Machine credential_type: Machine
state: absent state: absent
# You can't delete a label directly so no cleanup needed
- name: Delete email notification - name: Delete email notification
notification_template: notification_template:
name: "{{ email_not }}" name: "{{ email_not }}"

View File

@@ -32,14 +32,13 @@
job_wait: job_wait:
job_id: "99999999" job_id: "99999999"
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
- result is failed - result is failed
- >- - "result.msg =='Unable to wait, no job_id 99999999 found: The requested object could not be found.' or
result.msg == 'Unable to wait, no job_id 99999999 found: The requested object could not be found.' or 'Unable to wait on job 99999999; that ID does not exist.'"
result.msg == 'Unable to wait on job 99999999; that ID does not exist.'
- name: Launch Demo Job Template (take happy path) - name: Launch Demo Job Template (take happy path)
job_launch: job_launch:
@@ -55,6 +54,7 @@
job_id: "{{ job.id }}" job_id: "{{ job.id }}"
register: wait_results register: wait_results
# Make sure it worked and that we have some data in our results
- assert: - assert:
that: that:
- wait_results is successful - wait_results is successful
@@ -74,12 +74,13 @@
job_wait: job_wait:
job_id: "{{ job.id }}" job_id: "{{ job.id }}"
timeout: 5 timeout: 5
ignore_errors: yes ignore_errors: true
register: wait_results register: wait_results
# Make sure that we failed and that we have some data in our results
- assert: - assert:
that: that:
- "'aborted due to timeout' in wait_results.msg" - "wait_results.msg == 'Monitoring aborted due to timeout' or 'Timeout waiting for job to finish.'"
- "'id' in wait_results" - "'id' in wait_results"
- name: Async cancel the long running job - name: Async cancel the long running job
@@ -91,16 +92,16 @@
- name: Wait for the job to exit on cancel - name: Wait for the job to exit on cancel
job_wait: job_wait:
job_id: "{{ job.id }}" job_id: "{{ job.id }}"
timeout: 60
register: wait_results register: wait_results
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
- wait_results is failed - wait_results is failed
- 'wait_results.status == "canceled"' - 'wait_results.status == "canceled"'
- "'Unable to find job with id' not in result.msg" - "'Job with id ~ job.id failed' or 'Job with id= ~ job.id failed, error: Job failed.' is in wait_results.msg"
# workflow wait test
- name: Generate a random string for test - name: Generate a random string for test
set_fact: set_fact:
test_id1: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id1: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
@@ -125,7 +126,7 @@
- name: Kick off a workflow - name: Kick off a workflow
workflow_launch: workflow_launch:
workflow_template: "{{ wfjt_name2 }}" workflow_template: "{{ wfjt_name2 }}"
ignore_errors: yes ignore_errors: true
register: workflow register: workflow
- name: Wait for the Workflow Job to finish - name: Wait for the Workflow Job to finish
@@ -134,6 +135,7 @@
job_type: "workflow_jobs" job_type: "workflow_jobs"
register: wait_workflow_results register: wait_workflow_results
# Make sure it worked and that we have some data in our results
- assert: - assert:
that: that:
- wait_workflow_results is successful - wait_workflow_results is successful
@@ -146,12 +148,6 @@
name: "{{ wfjt_name2 }}" name: "{{ wfjt_name2 }}"
state: absent state: absent
- name: Get all jobs for the template
awx.awx.job_list:
query:
job_template: "{{ jt_name }}"
register: job_list
- name: Delete the job template - name: Delete the job template
job_template: job_template:
name: "{{ jt_name }}" name: "{{ jt_name }}"

View File

@@ -36,7 +36,7 @@
organization: "Non_existing_org" organization: "Non_existing_org"
state: present state: present
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -36,7 +36,7 @@
debug: debug:
msg: "{{ query(plugin_name, 'ping', host='DNE://junk.com', username='john', password='not_legit', verify_ssl=True) }}" msg: "{{ query(plugin_name, 'ping', host='DNE://junk.com', username='john', password='not_legit', verify_ssl=True) }}"
register: results register: results
ignore_errors: yes ignore_errors: true
- ansible.builtin.assert: - ansible.builtin.assert:
that: that:
@@ -51,7 +51,7 @@
- name: Test too many params (failure from validation of terms) - name: Test too many params (failure from validation of terms)
ansible.builtin.set_fact: ansible.builtin.set_fact:
junk: "{{ query(plugin_name, 'users', 'teams', query_params={}, ) }}" junk: "{{ query(plugin_name, 'users', 'teams', query_params={}, ) }}"
ignore_errors: yes ignore_errors: true
register: result register: result
- ansible.builtin.assert: - ansible.builtin.assert:
@@ -62,7 +62,7 @@
- name: Try to load invalid endpoint - name: Try to load invalid endpoint
ansible.builtin.set_fact: ansible.builtin.set_fact:
junk: "{{ query(plugin_name, 'john', query_params={}, ) }}" junk: "{{ query(plugin_name, 'john', query_params={}, ) }}"
ignore_errors: yes ignore_errors: true
register: result register: result
- ansible.builtin.assert: - ansible.builtin.assert:
@@ -122,7 +122,7 @@
- name: Get all of the users created with a max_objects of 1 - name: Get all of the users created with a max_objects of 1
ansible.builtin.set_fact: ansible.builtin.set_fact:
users: "{{ lookup(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 1 }, return_all=true, max_objects=1 ) }}" users: "{{ lookup(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 1 }, return_all=true, max_objects=1 ) }}"
ignore_errors: yes ignore_errors: true
register: max_user_errors register: max_user_errors
- ansible.builtin.assert: - ansible.builtin.assert:
@@ -138,7 +138,7 @@
ansible.builtin.set_fact: ansible.builtin.set_fact:
failed_user_id: "{{ query(plugin_name, 'users', query_params={ 'username': 'john jacob jingleheimer schmidt' }, expect_one=True) }}" failed_user_id: "{{ query(plugin_name, 'users', query_params={ 'username': 'john jacob jingleheimer schmidt' }, expect_one=True) }}"
register: result register: result
ignore_errors: yes ignore_errors: true
- ansible.builtin.assert: - ansible.builtin.assert:
that: that:
@@ -149,7 +149,7 @@
ansible.builtin.set_fact: ansible.builtin.set_fact:
too_many_user_ids: " {{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id }, expect_one=True) }}" too_many_user_ids: " {{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id }, expect_one=True) }}"
register: results register: results
ignore_errors: yes ignore_errors: true
- ansible.builtin.assert: - ansible.builtin.assert:
that: that:
@@ -169,7 +169,7 @@
- name: "Make sure that expect_objects fails on an API page" - name: "Make sure that expect_objects fails on an API page"
ansible.builtin.set_fact: ansible.builtin.set_fact:
my_var: "{{ lookup(plugin_name, 'settings/ui', expect_objects=True) }}" my_var: "{{ lookup(plugin_name, 'settings/ui', expect_objects=True) }}"
ignore_errors: yes ignore_errors: true
register: results register: results
- ansible.builtin.assert: - ansible.builtin.assert:

View File

@@ -139,7 +139,7 @@
organization: organization:
name: Default name: Default
validate_certs: true validate_certs: true
ignore_errors: yes ignore_errors: true
register: check_ssl_is_used register: check_ssl_is_used
- name: Check that connection failed - name: Check that connection failed

View File

@@ -63,7 +63,7 @@
state: exists state: exists
request_timeout: .001 request_timeout: .001
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
@@ -106,7 +106,7 @@
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/ansible-tower-samples
wait: false wait: false
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
@@ -165,7 +165,7 @@
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/ansible-tower-samples
scm_credential: "{{ cred_name }}" scm_credential: "{{ cred_name }}"
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:
@@ -182,7 +182,7 @@
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/ansible-tower-samples
scm_credential: Non_Existing_Credential scm_credential: Non_Existing_Credential
register: result register: result
ignore_errors: yes ignore_errors: true
- assert: - assert:
that: that:

View File

@@ -1,11 +1,11 @@
--- ---
- name: Generate a test id - name: Generate a test id
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate names - name: Generate names
ansible.builtin.set_fact: set_fact:
username: "AWX-Collection-tests-role-user-{{ test_id }}" username: "AWX-Collection-tests-role-user-{{ test_id }}"
project_name: "AWX-Collection-tests-role-project-1-{{ test_id }}" project_name: "AWX-Collection-tests-role-project-1-{{ test_id }}"
jt1: "AWX-Collection-tests-role-jt1-{{ test_id }}" jt1: "AWX-Collection-tests-role-jt1-{{ test_id }}"
@@ -15,32 +15,34 @@
team2_name: "AWX-Collection-tests-team-team-{{ test_id }}2" team2_name: "AWX-Collection-tests-team-team-{{ test_id }}2"
org2_name: "AWX-Collection-tests-organization-{{ test_id }}2" org2_name: "AWX-Collection-tests-organization-{{ test_id }}2"
- name: Main block for user creation - block:
block: - name: Create a User
user:
- name: Create a user with a valid sanitized name first_name: Joe
awx.awx.user: last_name: User
username: "{{ username }}" username: "{{ username }}"
password: "{{ 65535 | random | to_uuid }}" password: "{{ 65535 | random | to_uuid }}"
email: joe@example.org
state: present state: present
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a 2nd User - name: Create a 2nd User
awx.awx.user: user:
first_name: Joe
last_name: User
username: "{{ username }}2" username: "{{ username }}2"
password: "{{ 65535 | random | to_uuid }}" password: "{{ 65535 | random | to_uuid }}"
email: joe@example.org
state: present state: present
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create teams - name: Create teams
team: team:
@@ -50,11 +52,9 @@
loop: loop:
- "{{ team_name }}" - "{{ team_name }}"
- "{{ team2_name }}" - "{{ team2_name }}"
- assert:
- name: Assert result changed
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a project - name: Create a project
project: project:
@@ -65,8 +65,7 @@
wait: true wait: true
register: project_info register: project_info
- name: Assert project_info is changed - assert:
ansible.builtin.assert:
that: that:
- project_info is changed - project_info is changed
@@ -81,10 +80,9 @@
- jt2 - jt2
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Add Joe and teams to the update role of the default Project with lookup Organization - name: Add Joe and teams to the update role of the default Project with lookup Organization
role: role:
@@ -103,10 +101,9 @@
- "present" - "present"
- "absent" - "absent"
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Add Joe to the new project by ID - name: Add Joe to the new project by ID
role: role:
@@ -124,10 +121,9 @@
- "present" - "present"
- "absent" - "absent"
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Add Joe as execution admin to Default Org. - name: Add Joe as execution admin to Default Org.
role: role:
@@ -142,10 +138,9 @@
- "present" - "present"
- "absent" - "absent"
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a workflow - name: Create a workflow
workflow_job_template: workflow_job_template:
@@ -166,25 +161,27 @@
state: present state: present
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Add Joe to nonexistent job template execute role - name: Add Joe to nonexistant job template execute role
awx.awx.role: role:
user: "{{ username }}" user: "{{ username }}"
users:
- "{{ username }}2"
role: execute role: execute
job_template: "non existant temp" workflow: test-role-workflow
job_templates:
- non existant temp
state: present state: present
register: results register: result
ignore_errors: true ignore_errors: true
- name: Assert that adding a role to a non-existent template failed correctly - assert:
ansible.builtin.assert:
that: that:
- results.failed - "'There were 1 missing items, missing items' in result.msg"
- "'missing items' in results.msg" - "'non existant temp' in result.msg"
- name: Add Joe to workflow execute role, no-op - name: Add Joe to workflow execute role, no-op
role: role:
@@ -196,8 +193,7 @@
state: present state: present
register: result register: result
- name: Assert result did not change - assert:
ansible.builtin.assert:
that: that:
- "result is not changed" - "result is not changed"
@@ -210,10 +206,9 @@
state: present state: present
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a 2nd organization - name: Create a 2nd organization
organization: organization:
@@ -245,21 +240,22 @@
- "present" - "present"
- "absent" - "absent"
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
always: always:
- name: Delete a User - name: Delete a User
ansible.builtin.user: user:
name: "{{ username }}" username: "{{ username }}"
email: joe@example.org
state: absent state: absent
register: result register: result
- name: Delete a 2nd User - name: Delete a 2nd User
ansible.builtin.user: user:
name: "{{ username }}2" username: "{{ username }}2"
email: joe@example.org
state: absent state: absent
register: result register: result
@@ -294,6 +290,16 @@
retries: 5 retries: 5
delay: 3 delay: 3
- name: Delete the 2nd project
project:
name: "{{ project_name }}"
organization: "{{ org2_name }}"
state: absent
register: del_res
until: del_res is succeeded
retries: 5
delay: 3
- name: Delete the 2nd organization - name: Delete the 2nd organization
organization: organization:
name: "{{ org2_name }}" name: "{{ org2_name }}"

View File

@@ -10,10 +10,9 @@
state: present state: present
register: result register: result
- name: Assert result is changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - result is changed
- name: Delete Role Definition - name: Delete Role Definition
role_definition: role_definition:
@@ -26,7 +25,6 @@
state: absent state: absent
register: result register: result
- name: Assert result is changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - result is changed

View File

@@ -29,10 +29,9 @@
object_id: "{{ job_template.id }}" object_id: "{{ job_template.id }}"
register: result register: result
- name: Assert result is changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - result is changed
- name: Delete Role Team Assigment - name: Delete Role Team Assigment
role_team_assignment: role_team_assignment:
@@ -42,10 +41,9 @@
state: absent state: absent
register: result register: result
- name: Assert result is changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - result is changed
- name: Create Role Definition - name: Create Role Definition
role_definition: role_definition:

View File

@@ -1,8 +1,10 @@
--- ---
- name: Create user - name: Create User
awx.awx.user: user:
username: testing_user username: testing_user
password: "{{ 65535 | random | to_uuid }}" first_name: testing
last_name: user
password: password
- name: Create Job Template - name: Create Job Template
job_template: job_template:
@@ -29,10 +31,9 @@
object_id: "{{ job_template.id }}" object_id: "{{ job_template.id }}"
register: result register: result
- name: Assert result is changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - result is changed
- name: Delete Role User Assigment - name: Delete Role User Assigment
role_user_assignment: role_user_assignment:
@@ -42,10 +43,9 @@
state: absent state: absent
register: result register: result
- name: Assert result is changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - result is changed
- name: Create Role Definition - name: Create Role Definition
role_definition: role_definition:
@@ -57,7 +57,7 @@
description: role definition to launch job description: role definition to launch job
state: absent state: absent
- name: Delete user - name: Delete User
ansible.builtin.user: user:
name: testing_user username: testing_user
state: absent state: absent

View File

@@ -1,11 +1,11 @@
--- ---
- name: Generate a random string for test - name: Generate a random string for test
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate random string for schedule - name: generate random string for schedule
ansible.builtin.set_fact: set_fact:
org_name: "AWX-Collection-tests-organization-org-{{ test_id }}" org_name: "AWX-Collection-tests-organization-org-{{ test_id }}"
sched1: "AWX-Collection-tests-schedule-sched1-{{ test_id }}" sched1: "AWX-Collection-tests-schedule-sched1-{{ test_id }}"
sched2: "AWX-Collection-tests-schedule-sched2-{{ test_id }}" sched2: "AWX-Collection-tests-schedule-sched2-{{ test_id }}"
@@ -23,8 +23,7 @@
host_name: "AWX-Collection-tests-schedule-host-{{ test_id }}" host_name: "AWX-Collection-tests-schedule-host-{{ test_id }}"
slice_num: 10 slice_num: 10
- name: Assert blocks - block:
block:
- name: Try to create without an rrule - name: Try to create without an rrule
schedule: schedule:
name: "{{ sched1 }}" name: "{{ sched1 }}"
@@ -34,8 +33,7 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result is failed - assert:
ansible.builtin.assert:
that: that:
- result is failed - result is failed
- "'Unable to create schedule '~ sched1 in result.msg" - "'Unable to create schedule '~ sched1 in result.msg"
@@ -61,8 +59,7 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Unable to create schedule - assert:
ansible.builtin.assert:
that: that:
- result is failed - result is failed
- "'Unable to create schedule '~ sched1 in result.msg" - "'Unable to create schedule '~ sched1 in result.msg"
@@ -75,17 +72,16 @@
rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1" rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1"
register: result register: result
- name: Assert result is changed - assert:
ansible.builtin.assert:
that: that:
- result is changed - result is changed
- name: Use lookup to check that schedules was enabled - name: Use lookup to check that schedules was enabled
ansible.builtin.set_fact: ansible.builtin.set_fact:
schedules_enabled_test: "{{lookup('awx.awx.controller_api', 'schedules/{{result.id}}/').enabled | bool}}" schedules_enabled_test: "lookup('awx.awx.controller_api', 'schedules/{{result.id}}/').enabled"
- name: Newly created schedules should have API default value for enabled - name: Newly created schedules should have API default value for enabled
ansible.builtin.assert: assert:
that: that:
- schedules_enabled_test - schedules_enabled_test
@@ -97,8 +93,7 @@
rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1" rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1"
register: result register: result
- name: Assert result did not change - assert:
ansible.builtin.assert:
that: that:
- result is not changed - result is not changed
@@ -110,8 +105,7 @@
rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1" rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result is changed - result is changed
@@ -123,8 +117,7 @@
rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1" rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result is changed - result is changed
@@ -136,8 +129,7 @@
rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1" rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result is not changed - result is not changed
@@ -197,8 +189,7 @@
state: present state: present
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- "result is changed" - "result is changed"
@@ -273,8 +264,7 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- "result is changed" - "result is changed"
@@ -291,8 +281,7 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- "result is changed" - "result is changed"
@@ -304,8 +293,7 @@
enabled: "false" enabled: "false"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result is changed - result is changed
@@ -334,8 +322,7 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result failed - assert:
ansible.builtin.assert:
that: that:
- result is failed - result is failed
@@ -346,8 +333,7 @@
unified_job_template: "{{ jt2 }}" unified_job_template: "{{ jt2 }}"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result is changed - result is changed
@@ -359,7 +345,7 @@
loop: loop:
- "{{ sched1 }}" - "{{ sched1 }}"
- "{{ sched2 }}" - "{{ sched2 }}"
failed_when: false ignore_errors: True
- name: Delete the jt1 - name: Delete the jt1
job_template: job_template:
@@ -394,7 +380,6 @@
until: del_res is succeeded until: del_res is succeeded
retries: 5 retries: 5
delay: 3 delay: 3
failed_when: false
- name: Delete the Project1 - name: Delete the Project1
project: project:
@@ -414,7 +399,7 @@
organization: Default organization: Default
credential_type: Red Hat Ansible Automation Platform credential_type: Red Hat Ansible Automation Platform
state: absent state: absent
failed_when: false ignore_errors: True
# Labels can not be deleted # Labels can not be deleted
@@ -423,7 +408,7 @@
name: "{{ ee1 }}" name: "{{ ee1 }}"
image: "junk" image: "junk"
state: absent state: absent
failed_when: false ignore_errors: True
- name: Delete instance groups - name: Delete instance groups
instance_group: instance_group:
@@ -432,20 +417,20 @@
loop: loop:
- "{{ ig1 }}" - "{{ ig1 }}"
- "{{ ig2 }}" - "{{ ig2 }}"
failed_when: false ignore_errors: True
- name: Remove the organization - name: "Remove the organization"
organization: organization:
name: "{{ org_name }}" name: "{{ org_name }}"
state: absent state: absent
failed_when: false ignore_errors: True
- name: Delete slice inventory - name: "Delete slice inventory"
inventory: inventory:
name: "{{ slice_inventory }}" name: "{{ slice_inventory }}"
organization: "{{ org_name }}" organization: "{{ org_name }}"
state: absent state: absent
failed_when: false ignore_errors: True
- name: Delete slice hosts - name: Delete slice hosts
host: host:
@@ -453,4 +438,4 @@
inventory: "{{ slice_inventory }}" inventory: "{{ slice_inventory }}"
state: absent state: absent
loop: "{{ range(slice_num)|list }}" loop: "{{ range(slice_num)|list }}"
failed_when: false ignore_errors: True

View File

@@ -7,48 +7,44 @@
ansible.builtin.set_fact: ansible.builtin.set_fact:
plugin_name: "{{ controller_meta.prefix }}.schedule_rrule" plugin_name: "{{ controller_meta.prefix }}.schedule_rrule"
- name: Lookup with too many parameters (should fail) - name: Test too many params (failure from validation of terms)
ansible.builtin.set_fact:
_rrule: "{{ query(plugin_name, days_of_week=[1, 2], days_of_month=[15]) }}"
register: result_too_many_params
ignore_errors: true
- name: Assert proper error is reported for too many parameters
ansible.builtin.assert:
that:
- result_too_many_params.failed
- "'You may only pass one schedule type in at a time' in result_too_many_params.msg"
- name: Attempt invalid schedule_rrule lookup with bad frequency
ansible.builtin.debug: ansible.builtin.debug:
msg: "{{ lookup(plugin_name, 'john', start_date='2020-04-16 03:45:07') }}" msg: "{{ lookup(plugin_name | string, 'none', 'weekly', start_date='2020-4-16 03:45:07') }}"
register: result_bad_freq
ignore_errors: true ignore_errors: true
register: result
- name: Assert proper error is reported for bad frequency - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- result_bad_freq.failed - result is failed
- "'Frequency of john is invalid' in result_bad_freq.msg | default('')" - "'You may only pass one schedule type in at a time' in result.msg"
- name: Test an invalid start date - name: Test invalid frequency (failure from validation of term)
ansible.builtin.debug:
msg: "{{ lookup(plugin_name, 'john', start_date='2020-4-16 03:45:07') }}"
ignore_errors: true
register: result
- ansible.builtin.assert:
that:
- result is failed
- "'Frequency of john is invalid' in result.msg"
- name: Test an invalid start date (generic failure case from get_rrule)
ansible.builtin.debug: ansible.builtin.debug:
msg: "{{ lookup(plugin_name, 'none', start_date='invalid') }}" msg: "{{ lookup(plugin_name, 'none', start_date='invalid') }}"
register: result_bad_date
ignore_errors: true ignore_errors: true
register: result
- name: Assert plugin error message for invalid start date - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- result_bad_date.failed - result is failed
- "'Parameter start_date must be in the format YYYY-MM-DD' in result_bad_date.msg | default('')" - "'Parameter start_date must be in the format YYYY-MM-DD' in result.msg"
- name: Test end_on as count (generic success case) - name: Test end_on as count (generic success case)
ansible.builtin.debug: ansible.builtin.debug:
msg: "{{ lookup(plugin_name, 'minute', start_date='2020-4-16 03:45:07', end_on='2') }}" msg: "{{ lookup(plugin_name, 'minute', start_date='2020-4-16 03:45:07', end_on='2') }}"
register: result_success register: result
- name: Assert successful rrule generation - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- result_success.msg == 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;COUNT=2;INTERVAL=1' - result.msg == 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;COUNT=2;INTERVAL=1'

View File

@@ -40,7 +40,7 @@
- name: Set the value of AWX_ISOLATION_SHOW_PATHS to a baseline - name: Set the value of AWX_ISOLATION_SHOW_PATHS to a baseline
awx.awx.settings: awx.awx.settings:
name: AWX_ISOLATION_SHOW_PATHS name: AWX_ISOLATION_SHOW_PATHS
value: ["/var/lib/awx/projects/"] value: '["/var/lib/awx/projects/"]'
- name: Set the value of AWX_ISOLATION_SHOW_PATHS to get an error back from the controller - name: Set the value of AWX_ISOLATION_SHOW_PATHS to get an error back from the controller
awx.awx.settings: awx.awx.settings:
@@ -51,11 +51,9 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result failed - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- result.failed - "result is failed"
- "'Unable to update settings' in result.msg | default('')"
- name: Set the value of AWX_ISOLATION_SHOW_PATHS - name: Set the value of AWX_ISOLATION_SHOW_PATHS
awx.awx.settings: awx.awx.settings:
@@ -63,10 +61,9 @@
value: '["/var/lib/awx/projects/", "/tmp"]' value: '["/var/lib/awx/projects/", "/tmp"]'
register: result register: result
- name: Assert result changed - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Attempt to set the value of AWX_ISOLATION_BASE_PATH to what it already is - name: Attempt to set the value of AWX_ISOLATION_BASE_PATH to what it already is
awx.awx.settings: awx.awx.settings:
@@ -74,14 +71,12 @@
value: /tmp value: /tmp
register: result register: result
- name: Debug result - ansible.builtin.debug:
ansible.builtin.debug:
msg: "{{ result }}" msg: "{{ result }}"
- name: Result is not changed - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- not (result.changed) - "result is not changed"
- name: Apply a single setting via settings - name: Apply a single setting via settings
awx.awx.settings: awx.awx.settings:
@@ -89,10 +84,9 @@
value: '["/var/lib/awx/projects/", "/var/tmp"]' value: '["/var/lib/awx/projects/", "/var/tmp"]'
register: result register: result
- name: Result is changed - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Apply multiple setting via settings with no change - name: Apply multiple setting via settings with no change
awx.awx.settings: awx.awx.settings:
@@ -101,14 +95,12 @@
AWX_ISOLATION_SHOW_PATHS: ["/var/lib/awx/projects/", "/var/tmp"] AWX_ISOLATION_SHOW_PATHS: ["/var/lib/awx/projects/", "/var/tmp"]
register: result register: result
- name: Debug - ansible.builtin.debug:
ansible.builtin.debug:
msg: "{{ result }}" msg: "{{ result }}"
- name: Assert result is not changed - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- not (result.changed) - "result is not changed"
- name: Apply multiple setting via settings with change - name: Apply multiple setting via settings with change
awx.awx.settings: awx.awx.settings:
@@ -117,10 +109,9 @@
AWX_ISOLATION_SHOW_PATHS: [] AWX_ISOLATION_SHOW_PATHS: []
register: result register: result
- name: Assert result changed - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Handle an omit value - name: Handle an omit value
awx.awx.settings: awx.awx.settings:
@@ -129,8 +120,6 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result failed - ansible.builtin.assert:
ansible.builtin.assert:
that: that:
- result.failed - "'Unable to update settings' in result.msg"
- "'Unable to update settings' in result.msg | default('')"

View File

@@ -1,11 +1,11 @@
--- ---
- name: Generate a test ID - name: Generate a test ID
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate names - name: Generate names
ansible.builtin.set_fact: set_fact:
team_name: "AWX-Collection-tests-team-team-{{ test_id }}" team_name: "AWX-Collection-tests-team-team-{{ test_id }}"
- name: Attempt to add a team to a non-existant Organization - name: Attempt to add a team to a non-existant Organization
@@ -17,11 +17,12 @@
ignore_errors: true ignore_errors: true
- name: Assert a meaningful error was provided for the failed team creation - name: Assert a meaningful error was provided for the failed team creation
ansible.builtin.assert: assert:
that: that:
- "result is failed" - "result is failed"
- >- - "result is not changed"
'Missing_Organization' in result.msg - "'Missing_Organization' in result.msg"
- "result.total_results == 0"
- name: Create a team - name: Create a team
team: team:
@@ -29,10 +30,9 @@
organization: Default organization: Default
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a team with exists - name: Create a team with exists
team: team:
@@ -41,10 +41,9 @@
state: exists state: exists
register: result register: result
- name: Assert result did not change - assert:
ansible.builtin.assert:
that: that:
- not result.changed - "result is not changed"
- name: Delete a team - name: Delete a team
team: team:
@@ -53,10 +52,9 @@
state: absent state: absent
register: result register: result
- name: Assert reesult changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a team with exists - name: Create a team with exists
team: team:
@@ -65,10 +63,9 @@
state: exists state: exists
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Delete a team - name: Delete a team
team: team:
@@ -77,10 +74,9 @@
state: absent state: absent
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Check module fails with correct msg - name: Check module fails with correct msg
team: team:
@@ -90,19 +86,10 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert module failed with expected message - name: Lookup of the related organization should cause a failure
ansible.builtin.assert: assert:
that: that:
- "result is failed" - "result is failed"
- >- - "result is not changed"
'returned 0 items, expected 1' in result.msg or
'returned 0 items, expected 1' in result.exception or
'returned 0 items, expected 1' in result.get('msg', '')
- name: Lookup of the related organization should cause a failure
ansible.builtin.assert:
that:
- result.failed
- not result.changed
- "'Non_Existing_Org' in result.msg" - "'Non_Existing_Org' in result.msg"
- "result.total_results == 0" - "result.total_results == 0"

View File

@@ -1,116 +1,108 @@
--- ---
- name: Generate a test ID - name: Generate a test ID
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate names - name: Generate names
ansible.builtin.set_fact: set_fact:
username: "AWX-Collection-tests-user-user-{{ test_id }}" username: "AWX-Collection-tests-user-user-{{ test_id }}"
- name: Create a User - name: Create a User
awx.awx.user: user:
username: "{{ username }}" username: "{{ username }}"
first_name: Joe first_name: Joe
password: "{{ 65535 | random | to_uuid }}" password: "{{ 65535 | random | to_uuid }}"
state: present state: present
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a User with exists - name: Create a User with exists
awx.awx.user: user:
username: "{{ username }}" username: "{{ username }}"
first_name: Joe first_name: Joe
password: "{{ 65535 | random | to_uuid }}" password: "{{ 65535 | random | to_uuid }}"
state: exists state: exists
register: result register: result
- name: Assert results did not change - assert:
ansible.builtin.assert:
that: that:
- not result.changed - "result is not changed"
- name: Delete a User - name: Delete a User
awx.awx.user: user:
username: "{{ username }}" username: "{{ username }}"
first_name: Joe first_name: Joe
password: "{{ 65535 | random | to_uuid }}" password: "{{ 65535 | random | to_uuid }}"
state: absent state: absent
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a User with exists - name: Create a User with exists
awx.awx.user: user:
username: "{{ username }}" username: "{{ username }}"
first_name: Joe first_name: Joe
password: "{{ 65535 | random | to_uuid }}" password: "{{ 65535 | random | to_uuid }}"
state: exists state: exists
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Change a User by ID - name: Change a User by ID
awx.awx.user: user:
username: "{{ result.id }}" username: "{{ result.id }}"
last_name: User last_name: User
email: joe@example.org email: joe@example.org
state: present state: present
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Check idempotency - name: Check idempotency
awx.awx.user: user:
username: "{{ username }}" username: "{{ username }}"
first_name: Joe first_name: Joe
last_name: User last_name: User
register: result register: result
- name: Assert result did not change - assert:
ansible.builtin.assert:
that: that:
- not (result.changed) - "result is not changed"
- name: Rename a User - name: Rename a User
awx.awx.user: user:
username: "{{ username }}" username: "{{ username }}"
new_username: "{{ username }}-renamed" new_username: "{{ username }}-renamed"
email: joe@example.org email: joe@example.org
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Delete a User - name: Delete a User
awx.awx.user: user:
username: "{{ username }}-renamed" username: "{{ username }}-renamed"
email: joe@example.org email: joe@example.org
state: absent state: absent
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create an Auditor - name: Create an Auditor
awx.awx.user: user:
first_name: Joe first_name: Joe
last_name: Auditor last_name: Auditor
username: "{{ username }}" username: "{{ username }}"
@@ -120,25 +112,23 @@
auditor: true auditor: true
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Delete an Auditor - name: Delete an Auditor
awx.awx.user: user:
username: "{{ username }}" username: "{{ username }}"
email: joe@example.org email: joe@example.org
state: absent state: absent
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Create a Superuser - name: Create a Superuser
awx.awx.user: user:
first_name: Joe first_name: Joe
last_name: Super last_name: Super
username: "{{ username }}" username: "{{ username }}"
@@ -148,25 +138,23 @@
superuser: true superuser: true
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Delete a Superuser - name: Delete a Superuser
awx.awx.user: user:
username: "{{ username }}" username: "{{ username }}"
email: joe@example.org email: joe@example.org
state: absent state: absent
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Test SSL parameter - name: Test SSL parameter
awx.awx.user: user:
first_name: Joe first_name: Joe
last_name: User last_name: User
username: "{{ username }}" username: "{{ username }}"
@@ -178,18 +166,17 @@
ignore_errors: true ignore_errors: true
register: result register: result
- name: Assert SSL parameter failure message is meaningful - assert:
ansible.builtin.assert:
that: that:
- result is failed or result.failed | default(false) - "'Unable to resolve controller_host' in result.msg or
'Can not verify ssl with non-https protocol' in result.exception"
- name: Org tasks - block:
block:
- name: Generate an org name - name: Generate an org name
ansible.builtin.set_fact: set_fact:
org_name: "AWX-Collection-tests-organization-org-{{ test_id }}" org_name: "AWX-Collection-tests-organization-org-{{ test_id }}"
- name: Make sure organization is absent - name: Make sure {{ org_name }} is not there
organization: organization:
name: "{{ org_name }}" name: "{{ org_name }}"
state: absent state: absent
@@ -202,38 +189,35 @@
- Ansible Galaxy - Ansible Galaxy
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert: that: "result is changed"
that: result.changed
- name: Create a User to become admin of an organization - name: Create a User to become admin of an organization {{ org_name }}
awx.awx.user: user:
username: "{{ username }}-orgadmin" username: "{{ username }}-orgadmin"
password: "{{ username }}-orgadmin" password: "{{ username }}-orgadmin"
state: present state: present
organization: "{{ org_name }}" organization: "{{ org_name }}"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Add the user -orgadmin as an admin of the organization - name: Add the user {{ username }}-orgadmin as an admin of the organization {{ org_name }}
awx.awx.role: role:
user: "{{ username }}-orgadmin" user: "{{ username }}-orgadmin"
role: admin role: admin
organization: "{{ org_name }}" organization: "{{ org_name }}"
state: present state: present
register: result register: result
- name: Assert that user was added as org admin - assert:
ansible.builtin.assert:
that: that:
- result.changed | default(false) - "result is changed"
- name: Create a User as -orgadmin without using an organization (must fail) - name: Create a User as {{ username }}-orgadmin without using an organization (must fail)
awx.awx.user: user:
controller_username: "{{ username }}-orgadmin" controller_username: "{{ username }}-orgadmin"
controller_password: "{{ username }}-orgadmin" controller_password: "{{ username }}-orgadmin"
username: "{{ username }}" username: "{{ username }}"
@@ -243,17 +227,12 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result failed - assert:
ansible.builtin.assert:
that: that:
- result is defined - "result is failed"
- result.failed is defined
- result.failed | bool
fail_msg: "The task did not fail as expected."
success_msg: "The task failed as expected."
- name: Create a User as -orgadmin using an organization - name: Create a User as {{ username }}-orgadmin using an organization
awx.awx.user: user:
controller_username: "{{ username }}-orgadmin" controller_username: "{{ username }}-orgadmin"
controller_password: "{{ username }}-orgadmin" controller_password: "{{ username }}-orgadmin"
username: "{{ username }}" username: "{{ username }}"
@@ -263,13 +242,12 @@
organization: "{{ org_name }}" organization: "{{ org_name }}"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Change a User as -orgadmin by ID using an organization - name: Change a User as {{ username }}-orgadmin by ID using an organization
awx.awx.user: user:
controller_username: "{{ username }}-orgadmin" controller_username: "{{ username }}-orgadmin"
controller_password: "{{ username }}-orgadmin" controller_password: "{{ username }}-orgadmin"
username: "{{ result.id }}" username: "{{ result.id }}"
@@ -279,13 +257,12 @@
organization: "{{ org_name }}" organization: "{{ org_name }}"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Check idempotency as -orgadmin using an organization - name: Check idempotency as {{ username }}-orgadmin using an organization
awx.awx.user: user:
controller_username: "{{ username }}-orgadmin" controller_username: "{{ username }}-orgadmin"
controller_password: "{{ username }}-orgadmin" controller_password: "{{ username }}-orgadmin"
username: "{{ username }}" username: "{{ username }}"
@@ -294,13 +271,12 @@
organization: "{{ org_name }}" organization: "{{ org_name }}"
register: result register: result
- name: Assert result did not change - assert:
ansible.builtin.assert:
that: that:
- not (result.changed) - "result is not changed"
- name: Rename a User as -orgadmin using an organization - name: Rename a User as {{ username }}-orgadmin using an organization
awx.awx.user: user:
controller_username: "{{ username }}-orgadmin" controller_username: "{{ username }}-orgadmin"
controller_password: "{{ username }}-orgadmin" controller_password: "{{ username }}-orgadmin"
username: "{{ username }}" username: "{{ username }}"
@@ -309,13 +285,12 @@
organization: "{{ org_name }}" organization: "{{ org_name }}"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Delete a User as -orgadmin using an organization - name: Delete a User as {{ username }}-orgadmin using an organization
awx.awx.user: user:
controller_username: "{{ username }}-orgadmin" controller_username: "{{ username }}-orgadmin"
controller_password: "{{ username }}-orgadmin" controller_password: "{{ username }}-orgadmin"
username: "{{ username }}-renamed" username: "{{ username }}-renamed"
@@ -324,12 +299,11 @@
organization: "{{ org_name }}" organization: "{{ org_name }}"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Remove the user -orgadmin as an admin of the organization - name: Remove the user {{ username }}-orgadmin as an admin of the organization {{ org_name }}
role: role:
user: "{{ username }}-orgadmin" user: "{{ username }}-orgadmin"
role: admin role: admin
@@ -337,23 +311,21 @@
state: absent state: absent
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Delete the User -orgadmin - name: Delete the User {{ username }}-orgadmin
awx.awx.user: user:
username: "{{ username }}-orgadmin" username: "{{ username }}-orgadmin"
password: "{{ username }}-orgadmin" password: "{{ username }}-orgadmin"
state: absent state: absent
organization: "{{ org_name }}" organization: "{{ org_name }}"
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- name: Delete the Organization {{ org_name }} - name: Delete the Organization {{ org_name }}
organization: organization:
@@ -361,7 +333,6 @@
state: absent state: absent
register: result register: result
- name: Assert result changed - assert:
ansible.builtin.assert: that: "result is changed"
that: result.changed
... ...

View File

@@ -1,19 +1,18 @@
--- ---
- name: Generate a random string for names - name: Generate a random string for names
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate random names for test objects - name: Generate random names for test objects
ansible.builtin.set_fact: set_fact:
org_name: "{{ test_prefix }}-org-{{ test_id }}" org_name: "{{ test_prefix }}-org-{{ test_id }}"
approval_node_name: "{{ test_prefix }}-node-{{ test_id }}" approval_node_name: "{{ test_prefix }}-node-{{ test_id }}"
wfjt_name: "{{ test_prefix }}-wfjt-{{ test_id }}" wfjt_name: "{{ test_prefix }}-wfjt-{{ test_id }}"
vars: vars:
test_prefix: AWX-Collection-tests-workflow_approval test_prefix: AWX-Collection-tests-workflow_approval
- name: Task block - block:
block:
- name: Create a new organization for test isolation - name: Create a new organization for test isolation
organization: organization:
name: "{{ org_name }}" name: "{{ org_name }}"
@@ -35,7 +34,7 @@
- name: Launch the workflow - name: Launch the workflow
workflow_launch: workflow_launch:
workflow_template: "{{ wfjt_name }}" workflow_template: "{{ wfjt_name }}"
wait: false wait: False
register: workflow_job register: workflow_job
- name: Wait for approval node to activate and approve - name: Wait for approval node to activate and approve
@@ -47,16 +46,14 @@
action: approve action: approve
register: result register: result
- name: Assert result changed and did not fail - assert:
ansible.builtin.assert:
that: that:
- result.changed - "result is changed"
- not (result.failed) - "result is not failed"
always: always:
- name: Delete the workflow job template - name: Delete the workflow job template
workflow_job_template: workflow_job_template:
name: "{{ wfjt_name }}" name: "{{ wfjt_name }}"
state: absent state: absent
register: delete_result ignore_errors: True
failed_when: delete_result.failed and "'not found' not in delete_result.msg"

View File

@@ -1,17 +1,16 @@
--- ---
- name: Generate a random string for test - name: Generate a random string for test
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate names - name: Generate names
ansible.builtin.set_fact: set_fact:
wfjt_name1: "AWX-Collection-tests-workflow_launch--wfjt1-{{ test_id }}" wfjt_name1: "AWX-Collection-tests-workflow_launch--wfjt1-{{ test_id }}"
wfjt_name2: "AWX-Collection-tests-workflow_launch--wfjt1-{{ test_id }}-2" wfjt_name2: "AWX-Collection-tests-workflow_launch--wfjt1-{{ test_id }}-2"
approval_node_name: "AWX-Collection-tests-workflow_launch_approval_node-{{ test_id }}" approval_node_name: "AWX-Collection-tests-workflow_launch_approval_node-{{ test_id }}"
- name: Create workflows - block:
block:
- name: Create our workflow - name: Create our workflow
workflow_job_template: workflow_job_template:
@@ -31,10 +30,9 @@
ignore_errors: true ignore_errors: true
register: result register: result
- name: Assert that workflow launch failed with expected error - assert:
ansible.builtin.assert:
that: that:
- result.failed | default(false) - result is failed
- "'Unable to find workflow job template' in result.msg" - "'Unable to find workflow job template' in result.msg"
- name: Run the workflow without waiting (this should just give us back a job ID) - name: Run the workflow without waiting (this should just give us back a job ID)
@@ -44,8 +42,7 @@
ignore_errors: true ignore_errors: true
register: result register: result
- name: Assert result not failed - assert:
ansible.builtin.assert:
that: that:
- result is not failed - result is not failed
- "'id' in result['job_info']" - "'id' in result['job_info']"
@@ -57,10 +54,9 @@
ignore_errors: true ignore_errors: true
register: result register: result
- name: Assert result failed - assert:
ansible.builtin.assert:
that: that:
- result.failed | default(false) - result is failed
- "'Monitoring of Workflow Job - '~ wfjt_name1 ~ ' aborted due to timeout' in result.msg" - "'Monitoring of Workflow Job - '~ wfjt_name1 ~ ' aborted due to timeout' in result.msg"
- name: Kick off a workflow and wait for it - name: Kick off a workflow and wait for it
@@ -69,10 +65,9 @@
ignore_errors: true ignore_errors: true
register: result register: result
- name: Assert result did not fail - assert:
ansible.builtin.assert:
that: that:
- not (result.failed | default(false)) - result is not failed
- "'id' in result['job_info']" - "'id' in result['job_info']"
- name: Kick off a workflow with extra_vars but not enabled - name: Kick off a workflow with extra_vars but not enabled
@@ -84,10 +79,9 @@
ignore_errors: true ignore_errors: true
register: result register: result
- name: Assert result failed - assert:
ansible.builtin.assert:
that: that:
- result.failed | default(false) - result is failed
- "'The field extra_vars was specified but the workflow job template does not allow for it to be overridden' in result.errors" - "'The field extra_vars was specified but the workflow job template does not allow for it to be overridden' in result.errors"
- name: Prompt the workflow's with survey - name: Prompt the workflow's with survey
@@ -132,10 +126,9 @@
ignore_errors: true ignore_errors: true
register: result register: result
- name: Assert result did not fail - assert:
ansible.builtin.assert:
that: that:
- not (result.failed | default(false)) - result is not failed
- name: Prompt the workflow's extra_vars on launch - name: Prompt the workflow's extra_vars on launch
workflow_job_template: workflow_job_template:
@@ -153,10 +146,9 @@
ignore_errors: true ignore_errors: true
register: result register: result
- name: Assert did not fail - assert:
ansible.builtin.assert:
that: that:
- not (result.failed | default(false)) - result is not failed
- name: Test waiting for an approval node that doesn't exit on the last workflow for failure. - name: Test waiting for an approval node that doesn't exit on the last workflow for failure.
workflow_approval: workflow_approval:
@@ -168,10 +160,9 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result failed - assert:
ansible.builtin.assert:
that: that:
- result.failed | default(false) - result is failed
- "'Monitoring of Workflow Approval - Test workflow approval aborted due to timeout' in result.msg" - "'Monitoring of Workflow Approval - Test workflow approval aborted due to timeout' in result.msg"
- name: Create new Workflow - name: Create new Workflow
@@ -217,10 +208,9 @@
register: result register: result
ignore_errors: true ignore_errors: true
- name: Assert result didn't fail - assert:
ansible.builtin.assert:
that: that:
- result.failed | default(false) - result is failed
- "'Monitoring of Workflow Node - Demo Job Template aborted due to timeout' in result.msg" - "'Monitoring of Workflow Node - Demo Job Template aborted due to timeout' in result.msg"
- name: Wait for approval node to activate and approve - name: Wait for approval node to activate and approve
@@ -232,11 +222,10 @@
action: deny action: deny
register: result register: result
- name: Assert did not fail - assert:
ansible.builtin.assert:
that: that:
- not (result.failed | default(false)) - result is not failed
- result.changed | default(false) - result is changed
- name: Wait for workflow job to finish max 120s - name: Wait for workflow job to finish max 120s
job_wait: job_wait:

View File

@@ -2,9 +2,9 @@
- name: Sanity assertions, that some variables have a non-blank value - name: Sanity assertions, that some variables have a non-blank value
assert: assert:
that: that:
- collection_version is defined and collection_version | length > 0 - collection_version
- collection_package is defined and collection_package | length > 0 - collection_package
- collection_path is defined and collection_path | length > 0 - collection_path
- name: Set the collection version in the controller_api.py file - name: Set the collection version in the controller_api.py file
replace: replace:

View File

@@ -18,7 +18,7 @@
# -- Project information ----------------------------------------------------- # -- Project information -----------------------------------------------------
project = 'AWX CLI' project = 'AWX CLI'
copyright = '2025, Ansible by Red Hat' copyright = '2024, Ansible by Red Hat'
author = 'Ansible by Red Hat' author = 'Ansible by Red Hat'
@@ -54,5 +54,5 @@ rst_epilog = '''
.. |prog| replace:: awx .. |prog| replace:: awx
.. |at| replace:: automation controller .. |at| replace:: automation controller
.. |At| replace:: Automation controller .. |At| replace:: Automation controller
.. |RHAT| replace:: Red Hat Ansible Automation Platform .. |RHAT| replace:: Red Hat Ansible Automation Platform controller
''' '''

View File

@@ -10,7 +10,7 @@ Installation
Synopsis Synopsis
-------- --------
CLI commands follow a simple format: |prog| commands follow a simple format:
.. code:: bash .. code:: bash
@@ -25,14 +25,7 @@ The ``action`` is the thing you want to do (a verb). Resources generally have a
Getting Started Getting Started
--------------- ---------------
Using |prog| requires some initial configuration. To execute AWX CLI on |RHAT| 2.5 and later, you must set your environment variable to: Using |prog| requires some initial configuration. Here is a simple example for interacting with an AWX or |RHAT| server:
.. code::
AWXKIT_API_BASE_PATH=/api/controller/
Here is a simple example for interacting with an AWX or |RHAT| server:
.. code:: bash .. code:: bash

View File

@@ -32,6 +32,5 @@ config.assume_untrusted = config.get('assume_untrusted', True)
config.client_connection_attempts = int(os.getenv('AWXKIT_CLIENT_CONNECTION_ATTEMPTS', 5)) config.client_connection_attempts = int(os.getenv('AWXKIT_CLIENT_CONNECTION_ATTEMPTS', 5))
config.prevent_teardown = to_bool(os.getenv('AWXKIT_PREVENT_TEARDOWN', False)) config.prevent_teardown = to_bool(os.getenv('AWXKIT_PREVENT_TEARDOWN', False))
config.use_sessions = to_bool(os.getenv('AWXKIT_SESSIONS', False)) config.use_sessions = to_bool(os.getenv('AWXKIT_SESSIONS', False))
config.api_base_path = os.getenv('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', '/api/') config.api_base_path = os.getenv('AWXKIT_API_BASE_PATH', '/api/')
config.api_base_path = os.getenv('AWXKIT_API_BASE_PATH', config.api_base_path)
config.gateway_base_path = os.getenv('AWXKIT_GATEWAY_BASE_PATH', '/api/gateway/') config.gateway_base_path = os.getenv('AWXKIT_GATEWAY_BASE_PATH', '/api/gateway/')

View File

@@ -106,42 +106,3 @@ def test_config_file():
assert config.credentials.default.username == 'mary' assert config.credentials.default.username == 'mary'
assert config.credentials.default.password == 'secret' assert config.credentials.default.password == 'secret'
def test_controller_optional_api_urlpattern_prefix():
"""Tests that CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX is honored when set."""
cli = CLI()
env = {'CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX': '/custom/api/'}
cli.parse_args(['awx'], env=env)
# Update config with environment variable since config.py reads from os.getenv at import time
config.api_base_path = env.get('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', '/api/')
config.api_base_path = env.get('AWXKIT_API_BASE_PATH', config.api_base_path)
assert config.api_base_path == '/custom/api/'
def test_awxkit_api_base_path_fallback():
"""Tests that AWXKIT_API_BASE_PATH overrides CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX."""
cli = CLI()
env = {'CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX': '/custom/api/', 'AWXKIT_API_BASE_PATH': '/override/api/'}
cli.parse_args(['awx'], env=env)
# Update config with environment variable since config.py reads from os.getenv at import time
config.api_base_path = env.get('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', '/api/')
config.api_base_path = env.get('AWXKIT_API_BASE_PATH', config.api_base_path)
assert config.api_base_path == '/override/api/'
def test_api_base_path_default():
"""Tests that api_base_path defaults to /api/ when no environment variables are set."""
cli = CLI()
env = {}
cli.parse_args(['awx'], env=env)
# Reset config to default when no environment variables are set
config.api_base_path = env.get('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', '/api/')
config.api_base_path = env.get('AWXKIT_API_BASE_PATH', config.api_base_path)
assert config.api_base_path == '/api/'

View File

@@ -29,7 +29,7 @@ filterwarnings =
once:module 'sre_constants' is deprecated:DeprecationWarning:_pytest.assertion.rewrite once:module 'sre_constants' is deprecated:DeprecationWarning:_pytest.assertion.rewrite
# FIXME: Delete this entry once `polymorphic` is updated. # FIXME: Delete this entry once `polymorphic` is updated.
once:pkg_resources is deprecated as an API. once:pkg_resources is deprecated as an API. See https.//setuptools.pypa.io/en/latest/pkg_resources.html:DeprecationWarning:_pytest.assertion.rewrite
# FIXME: Delete this entry once `zope` is updated. # FIXME: Delete this entry once `zope` is updated.
once:Deprecated call to `pkg_resources.declare_namespace.'zope'.`.\nImplementing implicit namespace packages .as specified in PEP 420. is preferred to `pkg_resources.declare_namespace`. See https.//setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages:DeprecationWarning: once:Deprecated call to `pkg_resources.declare_namespace.'zope'.`.\nImplementing implicit namespace packages .as specified in PEP 420. is preferred to `pkg_resources.declare_namespace`. See https.//setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages:DeprecationWarning:

Some files were not shown because too many files have changed in this diff Show More