Compare commits


3 Commits

Author        SHA1        Message                                             Date
Elijah DeLee  19e3cba35c  Merge branch 'devel' into x-request-id              2024-04-24 15:04:13 -05:00
Elijah DeLee  c11ff49a56  fixup syntax                                        2024-04-24 14:48:02 -05:00
Elijah DeLee  51bcf82cf4  include x-request-id header in perf log if exists   2024-04-24 13:51:42 -05:00
164 changed files with 2512 additions and 7356 deletions

.github/workflows/e2e_test.yml (vendored, new file, 75 lines)

@@ -0,0 +1,75 @@
---
name: E2E Tests
env:
  LC_ALL: "C.UTF-8"  # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
on:
  pull_request_target:
    types: [labeled]
jobs:
  e2e-test:
    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
    runs-on: ubuntu-latest
    timeout-minutes: 40
    permissions:
      packages: write
      contents: read
    strategy:
      matrix:
        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
    steps:
      - uses: actions/checkout@v3
      - uses: ./.github/actions/run_awx_devel
        id: awx
        with:
          build-ui: true
          github-token: ${{ secrets.GITHUB_TOKEN }}
      - name: Pull awx_cypress_base image
        run: |
          docker pull quay.io/awx/awx_cypress_base:latest
      - name: Checkout test project
        uses: actions/checkout@v3
        with:
          repository: ${{ github.repository_owner }}/tower-qa
          ssh-key: ${{ secrets.QA_REPO_KEY }}
          path: tower-qa
          ref: devel
      - name: Build cypress
        run: |
          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
          docker build -t awx-pf-tests .
      - name: Run E2E tests
        env:
          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
        run: |
          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
          export COMMIT_INFO_SHA=$GITHUB_SHA
          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
          AWX_IP=${{ steps.awx.outputs.ip }}
          printenv > .env
          echo "Executing tests:"
          docker run \
            --network '_sources_default' \
            --ipc=host \
            --env-file=.env \
            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
            -e CYPRESS_AWX_E2E_USERNAME=admin \
            -e CYPRESS_AWX_E2E_PASSWORD='password' \
            -e COMMAND="npm run cypress-concurrently-gha" \
            -v /dev/shm:/dev/shm \
            -v $PWD:/e2e \
            -w /e2e \
            awx-pf-tests run --project .
      - uses: ./.github/actions/upload_awx_devel_logs
        if: always()
        with:
          log-filename: e2e-${{ matrix.job }}.log


@@ -29,7 +29,7 @@ jobs:
      - name: Set GitHub Env vars if release event
        if: ${{ github.event_name == 'release' }}
        run: |
          echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
          echo "TAG_NAME=${{ env.TAG_NAME }}" >> $GITHUB_ENV
      - name: Checkout awx
        uses: actions/checkout@v3
@@ -60,18 +60,15 @@ jobs:
        env:
          COLLECTION_VERSION: ${{ env.TAG_NAME }}
          COLLECTION_TEMPLATE_VERSION: true
        run: |
          sudo apt-get install jq
          make build_collection
          count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
          if [[ "$count" == "1" ]]; then
          curl_with_redirects=$(curl --head -sLw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
          curl_without_redirects=$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
          if [[ "$curl_with_redirects" == "302" ]] || [[ "$curl_without_redirects" == "302" ]]; then
            echo "Galaxy release already done";
          elif [[ "$count" == "0" ]]; then
          else
            ansible-galaxy collection publish \
              --token=${{ secrets.GALAXY_TOKEN }} \
              awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
          else
            echo "Unexpected count from galaxy search: $count";
            exit 1;
            awx_collection_build/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz;
          fi
      - name: Set official pypi info
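
For reference, the existence check above can be sketched in Python; the endpoint shape is taken from the curl call in this hunk, while the namespace and version values below are hypothetical:

    import requests

    def galaxy_release_exists(namespace: str, version: str) -> bool:
        # Same collection-versions search endpoint the workflow queries with curl + jq
        url = "https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/"
        params = {"namespace": namespace, "name": "awx", "version": version}
        resp = requests.get(url, params=params, timeout=30)
        resp.raise_for_status()
        return resp.json()["meta"]["count"] == 1

    # e.g. skip `ansible-galaxy collection publish` when galaxy_release_exists("awx", "24.0.0") is True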


@@ -11,8 +11,6 @@ ignore: |
  # django template files
  awx/api/templates/instance_install_bundle/**
  .readthedocs.yaml
  tools/loki
  tools/otel
extends: default


@@ -47,14 +47,8 @@ VAULT ?= false
VAULT_TLS ?= false
# If set to true docker-compose will also start a tacacs+ instance
TACACS ?= false
# If set to true docker-compose will also start an OpenTelemetry Collector instance
OTEL ?= false
# If set to true docker-compose will also start a Loki instance
LOKI ?= false
# If set to true docker-compose will install editable dependencies
EDITABLE_DEPENDENCIES ?= false
# If set to true, use tls for postgres connection
PG_TLS ?= false
VENV_BASE ?= /var/lib/awx/venv
@@ -64,9 +58,6 @@ DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
# Common command to use for running ansible-playbook
ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
# Python packages to install only from source (not from binary wheels)
@@ -74,7 +65,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
# These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
NAME ?= awx
@@ -371,7 +362,7 @@ symlink_collection:
	ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)

awx_collection_build: $(shell find awx_collection -type f)
	$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
	ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
		-e collection_package=$(COLLECTION_PACKAGE) \
		-e collection_namespace=$(COLLECTION_NAMESPACE) \
		-e collection_version=$(COLLECTION_VERSION) \
@@ -525,10 +516,10 @@ endif
docker-compose-sources: .git/hooks/pre-commit
	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
	    $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
	    ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
	fi;
	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
	    -e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
	    -e awx_image_tag=$(COMPOSE_TAG) \
	    -e receptor_image=$(RECEPTOR_IMAGE) \
@@ -544,15 +535,12 @@ docker-compose-sources: .git/hooks/pre-commit
	    -e enable_vault=$(VAULT) \
	    -e vault_tls=$(VAULT_TLS) \
	    -e enable_tacacs=$(TACACS) \
	    -e enable_otel=$(OTEL) \
	    -e enable_loki=$(LOKI) \
	    -e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
	    -e pg_tls=$(PG_TLS) \
	    $(EXTRA_SOURCES_ANSIBLE_OPTS)

docker-compose: awx/projects docker-compose-sources
	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
	    -e enable_vault=$(VAULT) \
	    -e vault_tls=$(VAULT_TLS) \
	    -e enable_ldap=$(LDAP); \
@@ -595,7 +583,7 @@ docker-compose-container-group-clean:
.PHONY: Dockerfile.dev
## Generate Dockerfile.dev for awx_devel image
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
	ansible-playbook tools/ansible/dockerfile.yml \
	    -e dockerfile_name=Dockerfile.dev \
	    -e build_dev=True \
	    -e receptor_image=$(RECEPTOR_IMAGE)
@@ -670,7 +658,7 @@ version-for-buildyml:
.PHONY: Dockerfile
## Generate Dockerfile for awx image
Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
	ansible-playbook tools/ansible/dockerfile.yml \
	    -e receptor_image=$(RECEPTOR_IMAGE) \
	    -e headless=$(HEADLESS)
@@ -700,7 +688,7 @@ awx-kube-buildx: Dockerfile
.PHONY: Dockerfile.kube-dev
## Generate Dockerfile.kube-dev for awx_kube_devel image
Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
	ansible-playbook tools/ansible/dockerfile.yml \
	    -e dockerfile_name=Dockerfile.kube-dev \
	    -e kube_dev=True \
	    -e template_dest=_build_kube_dev \


@@ -33,10 +33,8 @@ from rest_framework.negotiation import DefaultContentNegotiation
# django-ansible-base
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
from ansible_base.lib.utils.models import get_all_field_names
from ansible_base.lib.utils.requests import get_remote_host
from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
from ansible_base.rbac.permission_registry import permission_registry
from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
# AWX
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
@@ -44,7 +42,6 @@ from awx.main.models.rbac import give_creator_permissions
from awx.main.access import optimize_queryset
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
from awx.main.utils.licensing import server_product_name
from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning
@@ -96,9 +93,8 @@ class LoggedLoginView(auth_views.LoginView):
    def post(self, request, *args, **kwargs):
        ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
        ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
        if request.user.is_authenticated:
            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
            ret.set_cookie(
                'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
            )
@@ -107,15 +103,12 @@ class LoggedLoginView(auth_views.LoginView):
            return ret
        else:
            if 'username' in self.request.POST:
                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
            ret.status_code = 401
        return ret

class LoggedLogoutView(auth_views.LogoutView):
    success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()

    def dispatch(self, request, *args, **kwargs):
        original_user = getattr(request, 'user', None)
        ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
@@ -155,23 +148,22 @@ class APIView(views.APIView):
        Store the Django REST Framework Request object as an attribute on the
        normal Django request, store time the request started.
        """
        remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
        self.time_started = time.time()
        if getattr(settings, 'SQL_DEBUG', False):
            self.queries_before = len(connection.queries)

        if 'HTTP_X_TRUSTED_PROXY' in request.environ:
            if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
                remote_headers = settings.REMOTE_HOST_HEADERS
            else:
                logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")

        # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
        # they respect the allowed proxy list
        if settings.PROXY_IP_ALLOWED_LIST:
            if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
                delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)
        if all(
            [
                settings.PROXY_IP_ALLOWED_LIST,
                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
            ]
        ):
            for custom_header in settings.REMOTE_HOST_HEADERS:
                if custom_header.startswith('HTTP_'):
                    request.environ.pop(custom_header, None)

        drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
        request.drf_request = drf_request
@@ -216,12 +208,11 @@ class APIView(views.APIView):
            return response

        if response.status_code >= 400:
            ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
            msg_data = {
                'status_code': response.status_code,
                'user_name': request.user,
                'url_path': request.path,
                'remote_addr': ip,
                'remote_addr': request.META.get('REMOTE_ADDR', None),
            }
            if type(response.data) is dict:


@@ -5381,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
    )

    def get_body(self, obj):
        if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
        if obj.notification_type in ('webhook', 'pagerduty'):
            if isinstance(obj.body, dict):
                if 'body' in obj.body:
                    return obj.body['body']
@@ -5403,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
    def to_representation(self, obj):
        ret = super(NotificationSerializer, self).to_representation(obj)
        if obj.notification_type in ('webhook', 'awssns'):
        if obj.notification_type == 'webhook':
            ret.pop('subject')
        if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
        if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
            ret.pop('body')
        return ret


@@ -61,9 +61,7 @@ import pytz
from wsgiref.util import FileWrapper
# django-ansible-base
from ansible_base.lib.utils.requests import get_remote_hosts
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
# AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
@@ -130,7 +128,6 @@ from awx.api.views.mixin import (
from awx.api.pagination import UnifiedJobEventPagination
from awx.main.utils import set_environ
logger = logging.getLogger('awx.api.views')
@@ -713,81 +710,16 @@ class AuthView(APIView):
        return Response(data)

def immutablesharedfields(cls):
    '''
    Class decorator to prevent modifying shared resources when the ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is False.
    Works by overriding these view methods:
    - create
    - delete
    - perform_update
    create and delete are overridden to raise a PermissionDenied exception.
    perform_update is overridden to check if any shared fields are being modified,
    and to raise a PermissionDenied exception if so.
    '''
    # create instead of perform_create because some of our views
    # override create instead of perform_create
    if hasattr(cls, 'create'):
        cls.original_create = cls.create

        @functools.wraps(cls.create)
        def create_wrapper(*args, **kwargs):
            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                return cls.original_create(*args, **kwargs)
            raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})

        cls.create = create_wrapper
    if hasattr(cls, 'delete'):
        cls.original_delete = cls.delete

        @functools.wraps(cls.delete)
        def delete_wrapper(*args, **kwargs):
            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                return cls.original_delete(*args, **kwargs)
            raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})

        cls.delete = delete_wrapper
    if hasattr(cls, 'perform_update'):
        cls.original_perform_update = cls.perform_update

        @functools.wraps(cls.perform_update)
        def update_wrapper(*args, **kwargs):
            if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                view, serializer = args
                instance = view.get_object()
                if instance:
                    if isinstance(instance, models.Organization):
                        shared_fields = OrganizationType._declared_fields.keys()
                    elif isinstance(instance, models.User):
                        shared_fields = UserType._declared_fields.keys()
                    elif isinstance(instance, models.Team):
                        shared_fields = TeamType._declared_fields.keys()
                    attrs = serializer.validated_data
                    for field in shared_fields:
                        if field in attrs and getattr(instance, field) != attrs[field]:
                            raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
            return cls.original_perform_update(*args, **kwargs)

        cls.perform_update = update_wrapper
    return cls
@immutablesharedfields
class TeamList(ListCreateAPIView):
    model = models.Team
    serializer_class = serializers.TeamSerializer

@immutablesharedfields
class TeamDetail(RetrieveUpdateDestroyAPIView):
    model = models.Team
    serializer_class = serializers.TeamSerializer

@immutablesharedfields
class TeamUsersList(BaseUsersList):
    model = models.User
    serializer_class = serializers.UserSerializer
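
# Illustrative sketch, not part of the change set: with ALLOW_LOCAL_RESOURCE_MANAGEMENT
# set to False, a view decorated with @immutablesharedfields (such as TeamList above)
# rejects local writes, e.g. (hypothetical request/response):
#   POST /api/v2/teams/ -> 403 {"detail": "Creation of this resource is not allowed. ..."}
#   PATCH on a shared field -> 403 {"<field>": "Cannot change shared field '<field>'. ..."}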
@@ -1169,7 +1101,6 @@ class ProjectCopy(CopyAPIView):
    copy_return_serializer_class = serializers.ProjectSerializer

@immutablesharedfields
class UserList(ListCreateAPIView):
    model = models.User
    serializer_class = serializers.UserSerializer
@@ -1340,16 +1271,7 @@ class UserRolesList(SubListAttachDetachAPIView):
        user = get_object_or_400(models.User, pk=self.kwargs['pk'])
        role = get_object_or_400(models.Role, pk=sub_id)
        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
        # Prevent a user from being associated with a team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
            for model in [models.Organization, models.Team]:
                ct = content_types[model]
                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
                    return Response(data, status=status.HTTP_403_FORBIDDEN)
        credential_content_type = content_types[models.Credential]
        credential_content_type = ContentType.objects.get_for_model(models.Credential)
        if role.content_type == credential_content_type:
            if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -1421,7 +1343,6 @@ class UserActivityStreamList(SubListAPIView):
        return qs.filter(Q(actor=parent) | Q(user__in=[parent]))

@immutablesharedfields
class UserDetail(RetrieveUpdateDestroyAPIView):
    model = models.User
    serializer_class = serializers.UserSerializer
@@ -2771,7 +2692,12 @@ class JobTemplateCallback(GenericAPIView):
        host for the current request.
        """
        # Find the list of remote host names/IPs to check.
        remote_hosts = set(get_remote_hosts(self.request))
        remote_hosts = set()
        for header in settings.REMOTE_HOST_HEADERS:
            for value in self.request.META.get(header, '').split(','):
                value = value.strip()
                if value:
                    remote_hosts.add(value)
        # Add the reverse lookup of IP addresses.
        for rh in list(remote_hosts):
            try:
@@ -4369,15 +4295,7 @@ class RoleUsersList(SubListAttachDetachAPIView):
        user = get_object_or_400(models.User, pk=sub_id)
        role = self.get_parent_object()
        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
            for model in [models.Organization, models.Team]:
                ct = content_types[model]
                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
                    return Response(data, status=status.HTTP_403_FORBIDDEN)
        credential_content_type = content_types[models.Credential]
        credential_content_type = ContentType.objects.get_for_model(models.Credential)
        if role.content_type == credential_content_type:
            if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))


@@ -53,18 +53,15 @@ from awx.api.serializers import (
    CredentialSerializer,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
from awx.api.views import immutablesharedfields

logger = logging.getLogger('awx.api.views.organization')

@immutablesharedfields
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
    model = Organization
    serializer_class = OrganizationSerializer

@immutablesharedfields
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
    model = Organization
    serializer_class = OrganizationSerializer
@@ -107,7 +104,6 @@ class OrganizationInventoriesList(SubListAPIView):
    relationship = 'inventories'

@immutablesharedfields
class OrganizationUsersList(BaseUsersList):
    model = User
    serializer_class = UserSerializer
@@ -116,7 +112,6 @@ class OrganizationUsersList(BaseUsersList):
    ordering = ('username',)

@immutablesharedfields
class OrganizationAdminsList(BaseUsersList):
    model = User
    serializer_class = UserSerializer
@@ -155,7 +150,6 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
    parent_key = 'organization'

@immutablesharedfields
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
    model = Team
    serializer_class = TeamSerializer


@@ -61,10 +61,6 @@ class StringListBooleanField(ListField):
    def to_representation(self, value):
        try:
            if isinstance(value, str):
                # https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
                # DRF changed truthy and falsy lists to be capitalized
                value = value.lower()
            if isinstance(value, (list, tuple)):
                return super(StringListBooleanField, self).to_representation(value)
            elif value in BooleanField.TRUE_VALUES:
@@ -82,8 +78,6 @@ class StringListBooleanField(ListField):
    def to_internal_value(self, data):
        try:
            if isinstance(data, str):
                data = data.lower()
            if isinstance(data, (list, tuple)):
                return super(StringListBooleanField, self).to_internal_value(data)
            elif data in BooleanField.TRUE_VALUES:


@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
        assert settings.AWX_SOME_SETTING == 'DEFAULT'
        assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
    settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
        assert settings.AWX_SOME_SETTING == 'DEFAULT'
        assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
def test_empty_setting(settings, mocker):
@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
    with pytest.raises(AttributeError):
        settings.AWX_SOME_SETTING
    assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        with pytest.raises(AttributeError):
            settings.AWX_SOME_SETTING
        assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
def test_setting_from_db(settings, mocker):
@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):
    setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
    assert settings.AWX_SOME_SETTING == 'FROM_DB'
    assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        assert settings.AWX_SOME_SETTING == 'FROM_DB'
        assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):
    existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
    settings.AWX_SOME_SETTING = 'NEW-VALUE'
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
        settings.AWX_SOME_SETTING = 'NEW-VALUE'
    assert existing_setting.value == 'NEW-VALUE'
    existing_setting.save.assert_called_with(update_fields=['value'])
@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
    settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
    existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
    del settings.AWX_SOME_SETTING
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
        del settings.AWX_SOME_SETTING
    assert existing_setting.delete.call_count == 1
@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
    # use its primary key as part of the encryption key
    setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
    mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
    cache.set('AWX_ENCRYPTED', 'SECRET!')
    assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
    assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        cache.set('AWX_ENCRYPTED', 'SECRET!')
        assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
        assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
def test_readonly_sensitive_cache_data_is_encrypted(settings):


@@ -598,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
    - a superuser
    - admin role on the Instance group
    I can add/delete Instance Groups:
    - a superuser (system administrator), because these are not org-scoped
    - a superuser (system administrator)
    I can use Instance Groups when I have:
    - use_role on the instance group
    """
@@ -627,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
    def can_delete(self, obj):
        if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
            return False
        return self.user.has_obj_perm(obj, 'delete')
        return self.user.is_superuser

class UserAccess(BaseAccess):
@@ -2628,7 +2628,7 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
class NotificationTemplateAccess(BaseAccess):
    """
    Run standard logic from DAB RBAC
    I can see/use a notification_template if I have permission to
    """

    model = NotificationTemplate
@@ -2649,7 +2649,10 @@ class NotificationTemplateAccess(BaseAccess):
    @check_superuser
    def can_change(self, obj, data):
        return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')
        if obj.organization is None:
            # only superusers are allowed to edit orphan notification templates
            return False
        return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)

    def can_admin(self, obj, data):
        return self.can_change(obj, data)
@@ -2659,7 +2662,9 @@ class NotificationTemplateAccess(BaseAccess):
    @check_superuser
    def can_start(self, obj, validate_license=True):
        return self.can_change(obj, None)
        if obj.organization is None:
            return False
        return self.user in obj.organization.notification_admin_role

class NotificationAccess(BaseAccess):


@@ -14,7 +14,7 @@ __all__ = [
    'STANDARD_INVENTORY_UPDATE_ENV',
]

CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
PRIVILEGE_ESCALATION_METHODS = [
    ('sudo', _('Sudo')),
    ('su', _('Su')),


@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
        kwargs.setdefault('related_name', '+')
        kwargs.setdefault('null', 'True')
        kwargs.setdefault('editable', False)
        kwargs.setdefault('on_delete', models.SET_NULL)
        kwargs.setdefault('on_delete', models.CASCADE)
        super(ImplicitRoleField, self).__init__(*args, **kwargs)

    def deconstruct(self):


@@ -1,12 +0,0 @@
from django.core.management.base import BaseCommand, CommandError

from awx.main.models.ha import Instance

class Command(BaseCommand):
    help = 'Check if the task manager instance is ready; throws an error if not. Can be used as a readiness probe for k8s.'

    def handle(self, *args, **options):
        if Instance.objects.me().node_state != Instance.States.READY:
            raise CommandError('Instance is not ready')  # so that return code is not 0
        return


@@ -101,9 +101,8 @@ class Command(BaseCommand):
            migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
            connection.close()  # Because of async nature, main loop will use new connection, so close this
        except Exception as exc:
            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service entering a FATAL state
            # sleeping before logging because logging relies on settings which require a database connection...
            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
            return

        # In containerized deployments, migrations happen in the task container,
@@ -122,14 +121,13 @@ class Command(BaseCommand):
            return

        try:
            my_hostname = Instance.objects.my_hostname()  # This relies on settings.CLUSTER_HOST_ID, which requires a database connection
            my_hostname = Instance.objects.my_hostname()
            logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
        except RuntimeError as e:
            # the CLUSTER_HOST_ID of the task and web instances must match; also
            # ensure network connectivity between the task and web instances
            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service entering a FATAL state
            # sleeping before logging because logging relies on settings which require a database connection...
            logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before returning.")
            logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
            time.sleep(5)
            return

        if options.get('status'):
@@ -168,14 +166,12 @@ class Command(BaseCommand):
        WebsocketsMetricsServer().start()

        try:
            logger.info('Starting Websocket Relayer...')
            websocket_relay_manager = WebSocketRelayManager()
            asyncio.run(websocket_relay_manager.run())
        except KeyboardInterrupt:
            logger.info('Terminating Websocket Relayer')
        except BaseException as e:  # BaseException is used to catch all exceptions including asyncio.CancelledError
            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service entering a FATAL state
            # sleeping before logging because logging relies on settings which require a database connection...
            logger.warning(f"Encountered error while running Websocket Relayer: {e}, slept for 10s...")
            return

        while True:
            try:
                asyncio.run(WebSocketRelayManager().run())
            except KeyboardInterrupt:
                logger.info('Shutting down Websocket Relayer')
                break
            except Exception as e:
                logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
                time.sleep(10)


@@ -6,7 +6,7 @@ import logging
import threading
import time
import urllib.parse
from pathlib import Path, PurePosixPath
from pathlib import Path
from django.conf import settings
from django.contrib.auth import logout
@@ -58,7 +58,7 @@ class TimingMiddleware(threading.local, MiddlewareMixin):
            response['X-API-Profile-File'] = self.prof.stop()

        perf_logger.debug(
            f'request: {request}, response_time: {response["X-API-Total-Time"]}',
            extra=dict(python_objects=dict(request=request, response=response, X_API_TOTAL_TIME=response["X-API-Total-Time"])),
            extra=dict(python_objects=dict(request=request, response=response, X_API_TOTAL_TIME=response["X-API-Total-Time"], x_request_id=request.headers.get('x-request-id', 'not-set'))),
        )
        return response
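
        # Illustrative sketch, not part of the change set: with the x_request_id extra in place,
        # a client-supplied header would surface in the perf log, e.g. (hypothetical values):
        #   requests.get(url, headers={"X-Request-ID": "abc-123"})
        #   -> perf_logger extra includes x_request_id="abc-123"; absent the header, "not-set"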
@@ -138,36 +138,14 @@ class URLModificationMiddleware(MiddlewareMixin):
    @classmethod
    def _convert_named_url(cls, url_path):
        default_prefix = PurePosixPath('/api/v2/')
        optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/')
        url_path_original = url_path
        url_path = PurePosixPath(url_path)

        if set(optional_prefix.parts).issubset(set(url_path.parts)):
            url_prefix = optional_prefix
        elif set(default_prefix.parts).issubset(set(url_path.parts)):
            url_prefix = default_prefix
        else:
            return url_path_original

        # Remove prefix
        url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts):])

        try:
            resource_path = PurePosixPath(url_path.parts[0])
            name = url_path.parts[1]
            url_suffix = PurePosixPath(*url_path.parts[2:])  # remove name and resource
        except IndexError:
            return url_path_original

        resource = resource_path.parts[0]
        url_units = url_path.split('/')
        # If the identifier is an empty string, it is always invalid.
        if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
            return url_path
        resource = url_units[3]
        if resource in settings.NAMED_URL_MAPPINGS:
            pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name))
        else:
            return url_path_original
        parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
        return PurePosixPath(*parts).as_posix() + '/'
            url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
        return '/'.join(url_units)
    def process_request(self, request):
        old_path = request.path_info
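
    # Illustrative sketch, not part of the change set: both implementations above rewrite a
    # named URL to its pk form, e.g. (hypothetical object):
    #   _convert_named_url('/api/v2/organizations/Default/') -> '/api/v2/organizations/1/'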


@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
model_name='organization',
name='execute_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='job_template_admin_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='credential_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='inventory_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='project_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='workflow_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='notification_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AlterField(
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
name='admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
related_name='+',
to='main.Role',
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
model_name='inventory',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
),
),
migrations.AlterField(
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
name='admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
related_name='+',
to='main.Role',
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
name='admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
related_name='+',
to='main.Role',
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
name='execute_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['admin_role', 'organization.execute_role'],
related_name='+',
to='main.Role',
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
related_name='+',
to='main.Role',
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
name='execute_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
related_name='+',
to='main.Role',
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=[
'admin_role',
'execute_role',


@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
model_name='organization',
name='member_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
),
),
migrations.AlterField(
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=[
'member_role',
'auditor_role',


@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
model_name='organization',
name='approval_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
preserve_default='True',
),
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['organization.approval_role', 'admin_role'],
related_name='+',
to='main.Role',
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=[
'member_role',
'auditor_role',
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
related_name='+',
to='main.Role',


@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['organization.job_template_admin_role'],
related_name='+',
to='main.Role',
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['admin_role', 'organization.execute_role'],
related_name='+',
to='main.Role',
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
related_name='+',
to='main.Role',


@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
model_name='organization',
name='execution_environment_admin_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
),
preserve_default='True',
),


@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=[
'member_role',
'auditor_role',


@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_administrator'],
related_name='+',
to='main.role',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.SET_NULL,
on_delete=django.db.models.deletion.CASCADE,
parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
related_name='+',
to='main.role',
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
model_name='instancegroup',
name='use_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
),
preserve_default='True',
),


@@ -1,51 +0,0 @@
# Generated by Django 4.2.6 on 2024-05-08 07:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0192_custom_roles'),
]
operations = [
migrations.AlterField(
model_name='notification',
name='notification_type',
field=models.CharField(
choices=[
('awssns', 'AWS SNS'),
('email', 'Email'),
('grafana', 'Grafana'),
('irc', 'IRC'),
('mattermost', 'Mattermost'),
('pagerduty', 'Pagerduty'),
('rocketchat', 'Rocket.Chat'),
('slack', 'Slack'),
('twilio', 'Twilio'),
('webhook', 'Webhook'),
],
max_length=32,
),
),
migrations.AlterField(
model_name='notificationtemplate',
name='notification_type',
field=models.CharField(
choices=[
('awssns', 'AWS SNS'),
('email', 'Email'),
('grafana', 'Grafana'),
('irc', 'IRC'),
('mattermost', 'Mattermost'),
('pagerduty', 'Pagerduty'),
('rocketchat', 'Rocket.Chat'),
('slack', 'Slack'),
('twilio', 'Twilio'),
('webhook', 'Webhook'),
],
max_length=32,
),
),
]


@@ -1,61 +0,0 @@
# Generated by Django 4.2.10 on 2024-06-12 19:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0193_alter_notification_notification_type_and_more'),
]
operations = [
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
('openshift_virtualization', 'OpenShift Virtualization'),
],
default=None,
max_length=32,
),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
('openshift_virtualization', 'OpenShift Virtualization'),
],
default=None,
max_length=32,
),
),
]


@@ -275,12 +275,7 @@ def setup_managed_role_definitions(apps, schema_editor):
"""
Idepotent method to create or sync the managed role definitions
"""
to_create = {
'object_admin': '{cls.__name__} Admin',
'org_admin': 'Organization Admin',
'org_children': 'Organization {cls.__name__} Admin',
'special': '{cls.__name__} {action}',
}
to_create = settings.ANSIBLE_BASE_ROLE_PRECREATE
ContentType = apps.get_model('contenttypes', 'ContentType')
Permission = apps.get_model('dab_rbac', 'DABPermission')
@@ -290,7 +285,7 @@ def setup_managed_role_definitions(apps, schema_editor):
    managed_role_definitions = []

    org_perms = set()
    for cls in permission_registry.all_registered_models:
    for cls in permission_registry._registry:
        ct = ContentType.objects.get_for_model(cls)
        object_perms = set(Permission.objects.filter(content_type=ct))
        # Special case for InstanceGroup, which has an organization field but is not an organization child object
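
    # Illustrative sketch, not part of the change set: entries such as '{cls.__name__} Admin'
    # are format templates expanded per registered model, e.g. (hypothetical model):
    #   '{cls.__name__} Admin'.format(cls=JobTemplate) -> 'JobTemplate Admin'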


@@ -4,12 +4,11 @@ import datetime
from datetime import timezone
import logging
from collections import defaultdict
import itertools
import time
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import models, DatabaseError, transaction
from django.db import models, DatabaseError
from django.db.models.functions import Cast
from django.utils.dateparse import parse_datetime
from django.utils.text import Truncator
@@ -606,23 +605,19 @@ class JobEvent(BasePlaybookEvent):
    def _update_host_metrics(updated_hosts_list):
        from awx.main.models import HostMetric  # circular import

        # bulk-create
        current_time = now()
        # FUTURE:
        # - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
        # - Ability to do ORM upserts *may* have been introduced in Django 5.0.
        #   See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
        #   Hopefully this will be fully ready for batch use by 5.2 LTS.
        args = [iter(updated_hosts_list)] * 500
        for hosts in itertools.zip_longest(*args):
            with transaction.atomic():
                HostMetric.objects.bulk_create(
                    [HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
                )
                HostMetric.objects.filter(hostname__in=hosts).update(
                    last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
                )
        HostMetric.objects.bulk_create(
            [HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
        )

        # bulk-update
        batch_start, batch_size = 0, 1000
        while batch_start <= len(updated_hosts_list):
            batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
            HostMetric.objects.filter(hostname__in=batched_host_list).update(
                last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
            )
            batch_start += batch_size
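
        # Illustrative sketch, not part of the change set: the zip_longest idiom above chunks an
        # iterable into fixed-size batches, padding the final batch with None, e.g.:
        #   args = [iter(range(5))] * 2
        #   list(itertools.zip_longest(*args)) -> [(0, 1), (2, 3), (4, None)]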
    @property
    def job_verbosity(self):


@@ -933,7 +933,6 @@ class InventorySourceOptions(BaseModel):
        ('controller', _('Red Hat Ansible Automation Platform')),
        ('insights', _('Red Hat Insights')),
        ('terraform', _('Terraform State')),
        ('openshift_virtualization', _('OpenShift Virtualization')),
    ]

    # From the options of the Django management base command
@@ -1043,7 +1042,7 @@ class InventorySourceOptions(BaseModel):
    def cloud_credential_validation(source, cred):
        if not source:
            return None
        if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
        if cred and source not in ('custom', 'scm'):
            # If a credential was provided, it's important that it matches
            # the actual inventory source being used (Amazon requires Amazon
            # credentials; Rackspace requires Rackspace credentials; etc...)
# credentials; Rackspace requires Rackspace credentials; etc...)
@@ -1052,14 +1051,12 @@ class InventorySourceOptions(BaseModel):
# Allow an EC2 source to omit the credential. If Tower is running on
# an EC2 instance with an IAM Role assigned, boto will use credentials
# from the instance metadata instead of those explicitly provided.
elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
elif source in CLOUD_PROVIDERS and source != 'ec2':
return _('Credential is required for a cloud source.')
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
return _('Credentials of type kubernetes is requred for openshift_virtualization inventory sources.')
return None
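
    # Illustrative sketch, not part of the change set: per the rules above (hypothetical calls),
    #   cloud_credential_validation('ec2', None) -> None  (EC2 may omit the credential)
    #   cloud_credential_validation('gce', None) -> 'Credential is required for a cloud source.'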
def get_cloud_credential(self):
@@ -1663,7 +1660,7 @@ class terraform(PluginFileInjector):
        credential = inventory_update.get_cloud_credential()
        private_data = {'credentials': {}}

        gce_cred = credential.get_input('gce_credentials', default=None)
        gce_cred = credential.get_input('gce_credentials')
        if gce_cred:
            private_data['credentials'][credential] = gce_cred
        return private_data
@@ -1672,7 +1669,7 @@ class terraform(PluginFileInjector):
        env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
        credential = inventory_update.get_cloud_credential()
        cred_data = private_data_files['credentials']

        if credential in cred_data:
        if cred_data[credential]:
            env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(cred_data[credential], private_data_dir)
        return env
@@ -1696,16 +1693,6 @@ class insights(PluginFileInjector):
    use_fqcn = True

class openshift_virtualization(PluginFileInjector):
    plugin_name = 'kubevirt'
    base_injector = 'template'
    namespace = 'kubevirt'
    collection = 'core'
    downstream_namespace = 'redhat'
    downstream_collection = 'openshift_virtualization'
    use_fqcn = True

class constructed(PluginFileInjector):
    plugin_name = 'constructed'
    namespace = 'ansible'


@@ -31,7 +31,6 @@ from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.notifications.awssns_backend import AWSSNSBackend
logger = logging.getLogger('awx.main.models.notifications')
@@ -41,7 +40,6 @@ __all__ = ['NotificationTemplate', 'Notification']
class NotificationTemplate(CommonModelNameNotUnique):
    NOTIFICATION_TYPES = [
        ('awssns', _('AWS SNS'), AWSSNSBackend),
        ('email', _('Email'), CustomEmailBackend),
        ('slack', _('Slack'), SlackBackend),
        ('twilio', _('Twilio'), TwilioBackend),


@@ -10,9 +10,6 @@ import re
# django-rest-framework
from rest_framework.serializers import ValidationError
# crum to impersonate users
from crum import impersonate
# Django
from django.db import models, transaction, connection
from django.db.models.signals import m2m_changed
@@ -556,22 +553,17 @@ def get_role_definition(role):
        return
    f = obj._meta.get_field(role.role_field)
    action_name = f.name.rsplit("_", 1)[0]
    model_print = type(obj).__name__
    rd_name = f'{model_print} {action_name.title()} Compat'
    rd_name = f'{type(obj).__name__} {action_name.title()} Compat'
    perm_list = get_role_codenames(role)
    defaults = {
        'content_type_id': role.content_type_id,
        'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
    }
    with impersonate(None):
        try:
            rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
        except ValidationError:
            # This is a tricky case - practically speaking, users should not be allowed to create team roles
            # or roles that include the team member permission.
            # If we need to create this for compatibility purposes then we will create it as a managed non-editable role
            defaults['managed'] = True
            rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
    defaults = {'content_type_id': role.content_type_id}
    try:
        rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
    except ValidationError:
        # This is a tricky case - practically speaking, users should not be allowed to create team roles
        # or roles that include the team member permission.
        # If we need to create this for compatibility purposes then we will create it as a managed non-editable role
        defaults['managed'] = True
        rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
    return rd


@@ -17,7 +17,7 @@ from collections import OrderedDict
# Django
from django.conf import settings
from django.db import models, connection, transaction
from django.db import models, connection
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import gettext_lazy as _
from django.utils.timezone import now
@@ -273,14 +273,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
if new_next_schedule:
if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
return # no-op, common for infrequent schedules
# If in a transaction, use select_for_update to lock the next schedule row, which
# prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
if transaction.get_autocommit():
self.next_schedule = related_schedules.first()
else:
self.next_schedule = related_schedules.select_for_update().first()
self.next_schedule = new_next_schedule
self.next_job_run = new_next_schedule.next_run
self.save(update_fields=['next_schedule', 'next_job_run'])
@@ -830,7 +823,7 @@ class UnifiedJob(
update_fields.append(key)
if parent_instance:
if self.status in ('pending', 'running'):
if self.status in ('pending', 'waiting', 'running'):
if parent_instance.current_job != self:
parent_instance_set('current_job', self)
# Update parent with all the 'good' states of its child
@@ -867,7 +860,7 @@ class UnifiedJob(
# If this job already exists in the database, retrieve a copy of
# the job in its prior state.
# If update_fields are given without status, then that indicates no change
if self.status != 'waiting' and self.pk and ((not update_fields) or ('status' in update_fields)):
if self.pk and ((not update_fields) or ('status' in update_fields)):
self_before = self.__class__.objects.get(pk=self.pk)
if self_before.status != self.status:
status_before = self_before.status
@@ -909,8 +902,7 @@ class UnifiedJob(
update_fields.append('elapsed')
# Ensure that the job template information is current.
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
if self.status != 'waiting' and self.unified_job_template != self._get_parent_instance():
if self.unified_job_template != self._get_parent_instance():
self.unified_job_template = self._get_parent_instance()
if 'unified_job_template' not in update_fields:
update_fields.append('unified_job_template')
@@ -923,9 +915,8 @@ class UnifiedJob(
# Okay; we're done. Perform the actual save.
result = super(UnifiedJob, self).save(*args, **kwargs)
# If status changed, update the parent instance
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
if self.status != status_before and self.status != 'waiting':
# If status changed, update the parent instance.
if self.status != status_before:
# Update parent outside of the transaction for Job w/ allow_simultaneous=True
# This dodges lock contention at the expense of the foreign key not being
# completely correct.
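Aside on the removed select_for_update branch: inside an open transaction, locking the row prevents a concurrent delete from invalidating the FK before save; outside a transaction the lock would be released immediately, so a plain read is equivalent. The guard in isolation, as a generic Django sketch:

from django.db import transaction

def pick_next_schedule(related_schedules):
    if transaction.get_autocommit():
        # no surrounding transaction: nothing to hold a lock for
        return related_schedules.first()
    # surrounding transaction: hold a row lock until commit so the chosen
    # schedule cannot be deleted before the FK is saved
    return related_schedules.select_for_update().first()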

View File

@@ -1,70 +0,0 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
import logging
import boto3
from botocore.exceptions import ClientError
from awx.main.notifications.base import AWXBaseEmailBackend
from awx.main.notifications.custom_notification_base import CustomNotificationBase
logger = logging.getLogger('awx.main.notifications.awssns_backend')
WEBSOCKET_TIMEOUT = 30
class AWSSNSBackend(AWXBaseEmailBackend, CustomNotificationBase):
init_parameters = {
"aws_region": {"label": "AWS Region", "type": "string", "default": ""},
"aws_access_key_id": {"label": "Access Key ID", "type": "string", "default": ""},
"aws_secret_access_key": {"label": "Secret Access Key", "type": "password", "default": ""},
"aws_session_token": {"label": "Session Token", "type": "password", "default": ""},
"sns_topic_arn": {"label": "SNS Topic ARN", "type": "string", "default": ""},
}
recipient_parameter = "sns_topic_arn"
sender_parameter = None
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = CustomNotificationBase.job_metadata_messages
def __init__(self, aws_region, aws_access_key_id, aws_secret_access_key, aws_session_token, fail_silently=False, **kwargs):
session = boto3.session.Session()
client_config = {"service_name": 'sns'}
if aws_region:
client_config["region_name"] = aws_region
if aws_secret_access_key:
client_config["aws_secret_access_key"] = aws_secret_access_key
if aws_access_key_id:
client_config["aws_access_key_id"] = aws_access_key_id
if aws_session_token:
client_config["aws_session_token"] = aws_session_token
self.client = session.client(**client_config)
super(AWSSNSBackend, self).__init__(fail_silently=fail_silently)
def _sns_publish(self, topic_arn, message):
self.client.publish(TopicArn=topic_arn, Message=message, MessageAttributes={})
def format_body(self, body):
if isinstance(body, str):
try:
body = json.loads(body)
except json.JSONDecodeError:
pass
if isinstance(body, dict):
# convert the dict body to a JSON string before publishing
body = json.dumps(body)
return body
def send_messages(self, messages):
sent_messages = 0
for message in messages:
sns_topic_arn = str(message.recipients()[0])
try:
self._sns_publish(topic_arn=sns_topic_arn, message=message.body)
sent_messages += 1
except ClientError as error:
if not self.fail_silently:
raise error
return sent_messages
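Aside: the backend's publish path is a thin wrapper over boto3; a standalone usage sketch with placeholder region and topic ARN:

import boto3

session = boto3.session.Session()
client = session.client('sns', region_name='us-east-1')  # region is illustrative
client.publish(
    TopicArn='arn:aws:sns:us-east-1:123456789012:awx-notifications',  # placeholder ARN
    Message='{"status": "success", "job_id": 42}',
    MessageAttributes={},
)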

View File

@@ -32,15 +32,3 @@ class CustomNotificationBase(object):
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
},
}
job_metadata_messages = {
"started": {"body": "{{ job_metadata }}"},
"success": {"body": "{{ job_metadata }}"},
"error": {"body": "{{ job_metadata }}"},
"workflow_approval": {
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
},
}
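Aside: the bodies above are template strings rendered with job context before sending; a Jinja2 stand-in shows the intended substitution (AWX's actual rendering pipeline is not reproduced here):

from jinja2 import Template

body = Template('{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}')
print(body.render(approval_node_name='review-step', workflow_url='https://awx.example.org/#/workflows/1'))
# {"body": "The approval node \"review-step\" was denied. https://awx.example.org/#/workflows/1"}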

View File

@@ -27,7 +27,17 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
sender_parameter = None
DEFAULT_BODY = "{{ job_metadata }}"
default_messages = CustomNotificationBase.job_metadata_messages
default_messages = {
"started": {"body": DEFAULT_BODY},
"success": {"body": DEFAULT_BODY},
"error": {"body": DEFAULT_BODY},
"workflow_approval": {
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
},
}
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
self.http_method = http_method

View File

@@ -63,10 +63,6 @@ websocket_urlpatterns = [
re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
]
if settings.OPTIONAL_API_URLPATTERN_PREFIX:
websocket_urlpatterns.append(re_path(r'api/{}/v2/websocket/$'.format(settings.OPTIONAL_API_URLPATTERN_PREFIX), consumers.EventConsumer.as_asgi()))
websocket_relay_urlpatterns = [
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
]
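Aside: the removed conditional registers one extra websocket route when the optional API prefix is set; concretely, with OPTIONAL_API_URLPATTERN_PREFIX = 'foo' (an assumed value) the appended pattern is:

prefix = 'foo'  # assumed setting value
pattern = r'api/{}/v2/websocket/$'.format(prefix)
assert pattern == 'api/foo/v2/websocket/$'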

View File

@@ -36,9 +36,6 @@ import ansible_runner.cleanup
# dateutil
from dateutil.parser import parse as parse_date
# django-ansible-base
from ansible_base.resource_registry.tasks.sync import SyncExecutor
# AWX
from awx import __version__ as awx_application_version
from awx.main.access import access_registry
@@ -967,17 +964,3 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
permission_check_func(creater, copy_mapping.values())
if isinstance(new_obj, Inventory):
update_inventory_computed_fields.delay(new_obj.id)
@task(queue=get_task_queuename)
def periodic_resource_sync():
if not getattr(settings, 'RESOURCE_SERVER', None):
logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
return
with advisory_lock('periodic_resource_sync', wait=False) as acquired:
if acquired is False:
logger.debug("Not running periodic_resource_sync, another task holds lock")
return
SyncExecutor().run()
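Aside: the removed task guards itself with a non-blocking advisory lock so only one node syncs at a time. The shape of that guard, with a thread-lock stand-in for the real Postgres advisory lock:

from contextlib import contextmanager
import threading

_locks = {}

@contextmanager
def advisory_lock(name, wait=False):
    # stand-in: the real helper takes a Postgres advisory lock
    lock = _locks.setdefault(name, threading.Lock())
    acquired = lock.acquire(blocking=wait)
    try:
        yield acquired
    finally:
        if acquired:
            lock.release()

def periodic_resource_sync():
    with advisory_lock('periodic_resource_sync', wait=False) as acquired:
        if acquired is False:
            return  # another worker holds the lock; skip this run
        print('running sync')

periodic_resource_sync()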

View File

@@ -1,5 +0,0 @@
{
"K8S_AUTH_HOST": "https://foo.invalid",
"K8S_AUTH_API_KEY": "fooo",
"K8S_AUTH_VERIFY_SSL": "False"
}

View File

@@ -9,8 +9,8 @@ def test_user_role_view_access(rando, inventory, mocker, post):
role_pk = inventory.admin_role.pk
data = {"id": role_pk}
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)
@@ -21,8 +21,8 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
role_pk = inventory.admin_role.pk
data = {"id": role_pk}
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
@@ -33,8 +33,8 @@ def test_role_team_view_access(rando, team, inventory, mocker, post):
role_pk = inventory.admin_role.pk
data = {"id": team.pk}
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access)
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
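Aside: for comparison with the with-form used above, the stdlib patcher supports the context-manager protocol directly — the patch is active only inside the block and undone on exit:

from unittest import mock

class RoleAccess:
    def can_attach(self, *args, **kwargs):
        return True

with mock.patch.object(RoleAccess, 'can_attach', return_value=False):
    assert RoleAccess().can_attach() is False
assert RoleAccess().can_attach() is True  # restored after the block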

View File

@@ -30,7 +30,7 @@ def test_idempotent_credential_type_setup():
@pytest.mark.django_db
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh):
params = {
'credential_type': 1,
'inputs': {'username': 'someusername'},
@@ -81,7 +81,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali
@pytest.mark.django_db
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh):
params = {
'credential_type': 1,
'inputs': {'username': 'someusername'},

View File

@@ -1,30 +1,22 @@
import pytest
from unittest import mock
from awx.api.versioning import reverse
from django.test.utils import override_settings
from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair # noqa: F401; pylint: disable=unused-import
class HeaderTrackingMiddleware(object):
def __init__(self):
self.environ = {}
def process_request(self, request):
pass
def process_response(self, request, response):
self.environ = request.environ
@pytest.mark.django_db
def test_proxy_ip_allowed(get, patch, admin):
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
class HeaderTrackingMiddleware(object):
environ = {}
def process_request(self, request):
pass
def process_response(self, request, response):
self.environ = request.environ
# By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
# should just pass through
middleware = HeaderTrackingMiddleware()
@@ -53,51 +45,6 @@ def test_proxy_ip_allowed(get, patch, admin):
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
@pytest.mark.django_db
class TestTrustedProxyAllowListIntegration:
@pytest.fixture
def url(self, patch, admin):
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']})
return url
@pytest.fixture
def middleware(self):
return HeaderTrackingMiddleware()
def test_x_trusted_proxy_valid_signature(self, get, admin, rsa_keypair, url, middleware): # noqa: F811
# Headers should NOT get deleted
headers = {
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
}
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public, PROXY_IP_ALLOWED_LIST=[]):
get(url, user=admin, middleware=middleware, **headers)
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
def test_x_trusted_proxy_invalid_signature(self, get, admin, url, patch, middleware):
# Headers should NOT get deleted
headers = {
'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
}
with override_settings(PROXY_IP_ALLOWED_LIST=[]):
get(url, user=admin, middleware=middleware, **headers)
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
def test_x_trusted_proxy_invalid_signature_valid_proxy(self, get, admin, url, middleware):
# A valid explicit proxy SHOULD result in sensitive headers NOT being deleted, regardless of the trusted proxy signature results
headers = {
'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
'REMOTE_ADDR': 'my.proxy.example.org',
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
}
get(url, user=admin, middleware=middleware, **headers)
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
@pytest.mark.django_db
class TestDeleteViews:
def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):

View File

@@ -1,66 +0,0 @@
import pytest
from awx.api.versioning import reverse
from awx.main.models import Organization
@pytest.mark.django_db
class TestImmutableSharedFields:
@pytest.fixture(autouse=True)
def configure_settings(self, settings):
settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT = False
def test_create_raises_permission_denied(self, admin_user, post):
orgA = Organization.objects.create(name='orgA')
resp = post(
url=reverse('api:team_list'),
data={'name': 'teamA', 'organization': orgA.id},
user=admin_user,
expect=403,
)
assert "Creation of this resource is not allowed" in resp.data['detail']
def test_perform_delete_raises_permission_denied(self, admin_user, delete):
orgA = Organization.objects.create(name='orgA')
team = orgA.teams.create(name='teamA')
resp = delete(
url=reverse('api:team_detail', kwargs={'pk': team.id}),
user=admin_user,
expect=403,
)
assert "Deletion of this resource is not allowed" in resp.data['detail']
def test_perform_update(self, admin_user, patch):
orgA = Organization.objects.create(name='orgA')
team = orgA.teams.create(name='teamA')
# allow patching non-shared fields
patch(
url=reverse('api:team_detail', kwargs={'pk': team.id}),
data={"description": "can change this field"},
user=admin_user,
expect=200,
)
orgB = Organization.objects.create(name='orgB')
# prevent patching shared fields
resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
assert "Cannot change shared field" in resp.data['organization']
@pytest.mark.parametrize(
'role',
['admin_role', 'member_role'],
)
@pytest.mark.parametrize('resource', ['organization', 'team'])
def test_prevent_assigning_member_to_organization_or_team(self, admin_user, post, resource, role):
orgA = Organization.objects.create(name='orgA')
if resource == 'organization':
role = getattr(orgA, role)
elif resource == 'team':
teamA = orgA.teams.create(name='teamA')
role = getattr(teamA, role)
resp = post(
url=reverse('api:user_roles_list', kwargs={'pk': admin_user.id}),
data={'id': role.id},
user=admin_user,
expect=403,
)
assert f"Cannot directly modify user membership to {resource}." in resp.data['msg']

View File

@@ -32,6 +32,13 @@ def node_type_instance():
return fn
@pytest.fixture
def instance_group(job_factory):
ig = InstanceGroup(name="east")
ig.save()
return ig
@pytest.fixture
def containerized_instance_group(instance_group, kube_credential):
ig = InstanceGroup(name="container")

View File

@@ -131,11 +131,11 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ()
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ()
# Check that job is serialized correctly
job_id = response.data['job']
@@ -167,12 +167,12 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
called_with = data_to_internal(runtime_data)
JobTemplate.create_unified_job.assert_called_with(**called_with)
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
called_with = data_to_internal(runtime_data)
JobTemplate.create_unified_job.assert_called_with(**called_with)
job_id = response.data['job']
assert job_id == 968
@@ -187,11 +187,11 @@ def test_job_accept_empty_tags(job_template_prompts, post, admin_user, mocker):
mock_job = mocker.MagicMock(spec=Job, id=968)
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
mock_job.signal_start.assert_called_once()
@@ -203,14 +203,14 @@ def test_slice_timeout_forks_need_int(job_template_prompts, post, admin_user, mo
mock_job = mocker.MagicMock(spec=Job, id=968)
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
response = post(
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
)
assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
)
assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
@pytest.mark.django_db
@@ -244,12 +244,12 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
assert JobTemplate.create_unified_job.called
expected_call = data_to_internal(runtime_data)
assert JobTemplate.create_unified_job.call_args == (expected_call,)
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
assert JobTemplate.create_unified_job.called
expected_call = data_to_internal(runtime_data)
assert JobTemplate.create_unified_job.call_args == (expected_call,)
job_id = response.data['job']
assert job_id == 968
@@ -641,18 +641,18 @@ def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job
job_template.survey_spec = survey_spec_factory('survey_var')
job_template.save()
mocker.patch('awx.main.access.BaseAccess.check_license')
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
response = post(
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
admin_user,
expect=201,
)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
with mocker.patch('awx.main.access.BaseAccess.check_license'):
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
response = post(
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
admin_user,
expect=201,
)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
job_id = response.data['job']
assert job_id == 968
@@ -670,22 +670,22 @@ def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_tem
job_template.survey_spec = survey_spec_factory('survey_var')
job_template.save()
mocker.patch('awx.main.access.BaseAccess.check_license')
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
post(
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
admin_user,
expect=201,
format='json',
)
assert UnifiedJobTemplate.create_unified_job.called
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
call_args.pop('_eager_fields', None) # internal purposes
assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
with mocker.patch('awx.main.access.BaseAccess.check_license'):
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
post(
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
admin_user,
expect=201,
format='json',
)
assert UnifiedJobTemplate.create_unified_job.called
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
call_args.pop('_eager_fields', None) # internal purposes
assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
mock_job.signal_start.assert_called_once()
@@ -697,22 +697,22 @@ def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_t
job_template.host_config_key = "foo"
job_template.save()
mocker.patch('awx.main.access.BaseAccess.check_license')
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
post(
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
admin_user,
expect=201,
format='json',
)
assert UnifiedJobTemplate.create_unified_job.called
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
call_args.pop('_eager_fields', None) # internal purposes
assert call_args == {'limit': 'single-host'}
with mocker.patch('awx.main.access.BaseAccess.check_license'):
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
post(
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
admin_user,
expect=201,
format='json',
)
assert UnifiedJobTemplate.create_unified_job.called
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
call_args.pop('_eager_fields', None) # internal purposes
assert call_args == {'limit': 'single-host'}
mock_job.signal_start.assert_called_once()
@@ -725,9 +725,9 @@ def test_callback_find_matching_hosts(mocker, get, job_template_prompts, admin_u
job_template.save()
host_with_alias = Host(name='localhost', inventory=job_template.inventory)
host_with_alias.save()
mocker.patch('awx.main.access.BaseAccess.check_license')
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
assert tuple(r.data['matching_hosts']) == ('localhost',)
with mocker.patch('awx.main.access.BaseAccess.check_license'):
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
assert tuple(r.data['matching_hosts']) == ('localhost',)
@pytest.mark.django_db
@@ -738,6 +738,6 @@ def test_callback_extra_var_takes_priority_over_host_name(mocker, get, job_templ
job_template.save()
host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
host_with_alias.save()
mocker.patch('awx.main.access.BaseAccess.check_license')
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
assert not r.data['matching_hosts']
with mocker.patch('awx.main.access.BaseAccess.check_license'):
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
assert not r.data['matching_hosts']

View File

@@ -1,5 +1,4 @@
import pytest
from unittest import mock
# AWX
from awx.api.serializers import JobTemplateSerializer
@@ -9,15 +8,10 @@ from awx.main.migrations import _save_password_keys as save_password_keys
# Django
from django.apps import apps
from django.test.utils import override_settings
# DRF
from rest_framework.exceptions import ValidationError
# DAB
from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair # noqa: F401; pylint: disable=unused-import
@pytest.mark.django_db
@pytest.mark.parametrize(
@@ -375,113 +369,3 @@ def test_job_template_missing_inventory(project, inventory, admin_user, post):
)
assert r.status_code == 400
assert "Cannot start automatically, an inventory is required." in str(r.data)
@pytest.mark.django_db
class TestJobTemplateCallbackProxyIntegration:
"""
Test the interaction of the provision job template callback feature with:
settings.PROXY_IP_ALLOWED_LIST
x-trusted-proxy http header
"""
@pytest.fixture
def job_template(self, inventory, project):
jt = JobTemplate.objects.create(name='test-jt', inventory=inventory, project=project, playbook='helloworld.yml', host_config_key='abcd')
return jt
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
def test_host_not_found(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
job_template.inventory.hosts.create(name='foobar')
headers = {
'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz',
'REMOTE_HOST': 'baz',
'REMOTE_ADDR': 'baz',
}
r = post(
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=400, **headers
)
assert r.data['msg'] == 'No matching host could be found!'
@pytest.mark.parametrize(
'headers, expected',
(
pytest.param(
{
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
'REMOTE_HOST': 'my.proxy.example.org',
},
201,
),
pytest.param(
{
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
'REMOTE_HOST': 'not-my-proxy.org',
},
400,
),
),
)
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
def test_proxy_ip_allowed_list(self, job_template, admin_user, post, headers, expected): # noqa: F811
job_template.inventory.hosts.create(name='my.proxy.example.org')
post(
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
data={'host_config_key': 'abcd'},
user=admin_user,
expect=expected,
**headers
)
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=[])
def test_no_proxy_trust_all_headers(self, job_template, admin_user, post):
job_template.inventory.hosts.create(name='foobar')
headers = {
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
'REMOTE_ADDR': 'bar',
'REMOTE_HOST': 'baz',
}
post(url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=201, **headers)
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
def test_trusted_proxy(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
job_template.inventory.hosts.create(name='foobar')
headers = {
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar, my.proxy.example.org',
}
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
post(
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
data={'host_config_key': 'abcd'},
user=admin_user,
expect=201,
**headers
)
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
def test_trusted_proxy_host_not_found(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
job_template.inventory.hosts.create(name='foobar')
headers = {
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz, my.proxy.example.org',
'REMOTE_ADDR': 'bar',
'REMOTE_HOST': 'baz',
}
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
post(
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
data={'host_config_key': 'abcd'},
user=admin_user,
expect=400,
**headers
)

View File

@@ -165,8 +165,8 @@ class TestAccessListCapabilities:
def test_access_list_direct_access_capability(self, inventory, rando, get, mocker, mock_access_method):
inventory.admin_role.members.add(rando)
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
mock_access_method.assert_called_once_with(inventory.admin_role, rando, 'members', **self.extra_kwargs)
self._assert_one_in_list(response.data)
@@ -174,8 +174,8 @@ class TestAccessListCapabilities:
assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'
def test_access_list_indirect_access_capability(self, inventory, organization, org_admin, get, mocker, mock_access_method):
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
mock_access_method.assert_called_once_with(organization.admin_role, org_admin, 'members', **self.extra_kwargs)
self._assert_one_in_list(response.data, sublist='indirect_access')
@@ -185,8 +185,8 @@ class TestAccessListCapabilities:
def test_access_list_team_direct_access_capability(self, inventory, team, team_member, get, mocker, mock_access_method):
team.member_role.children.add(inventory.admin_role)
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', **self.extra_kwargs)
self._assert_one_in_list(response.data)
@@ -198,8 +198,8 @@ class TestAccessListCapabilities:
def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
team.member_role.children.add(inventory.admin_role)
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
# Did we assess whether team_member can remove team's permission to the inventory?
mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
@@ -212,8 +212,8 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
organization.member_role.members.add(alice)
organization.member_role.members.add(bob)
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
# Did we assess whether bob can remove alice's permission to the inventory?
mock_access_method.assert_called_once_with(organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})

View File

@@ -43,9 +43,9 @@ def run_command(name, *args, **options):
],
)
def test_update_password_command(mocker, username, password, expected, changed):
mocker.patch.object(UpdatePassword, 'update_password', return_value=changed)
result, stdout, stderr = run_command('update_password', username=username, password=password)
if result is None:
assert stdout == expected
else:
assert str(result) == expected
with mocker.patch.object(UpdatePassword, 'update_password', return_value=changed):
result, stdout, stderr = run_command('update_password', username=username, password=password)
if result is None:
assert stdout == expected
else:
assert str(result) == expected

View File

@@ -16,11 +16,9 @@ from django.db.backends.sqlite3.base import SQLiteCursorWrapper
from django.db.models.signals import post_migrate
from awx.main.migrations._dab_rbac import setup_managed_role_definitions
# AWX
from awx.main.models.projects import Project
from awx.main.models.ha import Instance, InstanceGroup
from awx.main.models.ha import Instance
from rest_framework.test import (
APIRequestFactory,
@@ -92,12 +90,6 @@ def deploy_jobtemplate(project, inventory, credential):
return jt
@pytest.fixture
def setup_managed_roles():
"Run the migration script to pre-create managed role definitions"
setup_managed_role_definitions(apps, None)
@pytest.fixture
def team(organization):
return organization.teams.create(name='test-team')
@@ -730,11 +722,6 @@ def jt_linked(organization, project, inventory, machine_credential, credential,
return jt
@pytest.fixture
def instance_group():
return InstanceGroup.objects.create(name="east")
@pytest.fixture
def workflow_job_template(organization):
wjt = WorkflowJobTemplate.objects.create(name='test-workflow_job_template', organization=organization)

View File

@@ -0,0 +1,10 @@
import pytest
from django.apps import apps
from awx.main.migrations._dab_rbac import setup_managed_role_definitions
@pytest.fixture
def managed_roles():
"Run the migration script to pre-create managed role definitions"
setup_managed_role_definitions(apps, None)

View File

@@ -1,23 +0,0 @@
import pytest
from awx.main.access import InstanceGroupAccess, NotificationTemplateAccess
from ansible_base.rbac.models import RoleDefinition
@pytest.mark.django_db
def test_instance_group_object_role_delete(rando, instance_group, setup_managed_roles):
"""Basic functionality of IG object-level admin role function AAP-25506"""
rd = RoleDefinition.objects.get(name='InstanceGroup Admin')
rd.give_permission(rando, instance_group)
access = InstanceGroupAccess(rando)
assert access.can_delete(instance_group)
@pytest.mark.django_db
def test_notification_template_object_role_change(rando, notification_template, setup_managed_roles):
"""Basic functionality of NT object-level admin role function AAP-25493"""
rd = RoleDefinition.objects.get(name='NotificationTemplate Admin')
rd.give_permission(rando, notification_template)
access = NotificationTemplateAccess(rando)
assert access.can_change(notification_template, {'name': 'new name'})

View File

@@ -0,0 +1,45 @@
import pytest
from django.apps import apps
from django.test.utils import override_settings
from awx.main.migrations._dab_rbac import setup_managed_role_definitions
from ansible_base.rbac.models import RoleDefinition
INVENTORY_OBJ_PERMISSIONS = ['view_inventory', 'adhoc_inventory', 'use_inventory', 'change_inventory', 'delete_inventory', 'update_inventory']
@pytest.mark.django_db
def test_managed_definitions_precreate():
with override_settings(
ANSIBLE_BASE_ROLE_PRECREATE={
'object_admin': '{cls._meta.model_name}-admin',
'org_admin': 'organization-admin',
'org_children': 'organization-{cls._meta.model_name}-admin',
'special': '{cls._meta.model_name}-{action}',
}
):
setup_managed_role_definitions(apps, None)
rd = RoleDefinition.objects.get(name='inventory-admin')
assert rd.managed is True
# 'add' permissions do not go into the object-level admin role
assert set(rd.permissions.values_list('codename', flat=True)) == set(INVENTORY_OBJ_PERMISSIONS)
# test org-level object admin permissions
rd = RoleDefinition.objects.get(name='organization-inventory-admin')
assert rd.managed is True
assert set(rd.permissions.values_list('codename', flat=True)) == set(['add_inventory', 'view_organization'] + INVENTORY_OBJ_PERMISSIONS)
@pytest.mark.django_db
def test_managed_definitions_custom_obj_admin_name():
with override_settings(
ANSIBLE_BASE_ROLE_PRECREATE={
'object_admin': 'foo-{cls._meta.model_name}-foo',
}
):
setup_managed_role_definitions(apps, None)
rd = RoleDefinition.objects.get(name='foo-inventory-foo')
assert rd.managed is True
# 'add' permissions do not go into the object-level admin role
assert set(rd.permissions.values_list('codename', flat=True)) == set(INVENTORY_OBJ_PERMISSIONS)

View File

@@ -10,7 +10,7 @@ from ansible_base.rbac.models import RoleDefinition
@pytest.mark.django_db
def test_managed_roles_created(setup_managed_roles):
def test_managed_roles_created(managed_roles):
"Managed RoleDefinitions are created in post_migration signal, we expect to see them here"
for cls in (JobTemplate, Inventory):
ct = ContentType.objects.get_for_model(cls)
@@ -22,7 +22,7 @@ def test_managed_roles_created(setup_managed_roles):
@pytest.mark.django_db
def test_custom_read_role(admin_user, post, setup_managed_roles):
def test_custom_read_role(admin_user, post, managed_roles):
rd_url = django_reverse('roledefinition-list')
resp = post(
url=rd_url, data={"name": "read role made for test", "content_type": "awx.inventory", "permissions": ['view_inventory']}, user=admin_user, expect=201
@@ -40,7 +40,7 @@ def test_custom_system_roles_prohibited(admin_user, post):
@pytest.mark.django_db
def test_assignment_to_invisible_user(admin_user, alice, rando, inventory, post, setup_managed_roles):
def test_assignment_to_invisible_user(admin_user, alice, rando, inventory, post, managed_roles):
"Alice can not see rando, and so can not give them a role assignment"
rd = RoleDefinition.objects.get(name='Inventory Admin')
rd.give_permission(alice, inventory)
@@ -51,7 +51,7 @@ def test_assignment_to_invisible_user(admin_user, alice, rando, inventory, post,
@pytest.mark.django_db
def test_assign_managed_role(admin_user, alice, rando, inventory, post, setup_managed_roles, organization):
def test_assign_managed_role(admin_user, alice, rando, inventory, post, managed_roles, organization):
rd = RoleDefinition.objects.get(name='Inventory Admin')
rd.give_permission(alice, inventory)
# When alice and rando are members of the same org, they can see each other
@@ -78,7 +78,7 @@ def test_assign_custom_delete_role(admin_user, rando, inventory, delete, patch):
@pytest.mark.django_db
def test_assign_custom_add_role(admin_user, rando, organization, post, setup_managed_roles):
def test_assign_custom_add_role(admin_user, rando, organization, post, managed_roles):
rd, _ = RoleDefinition.objects.get_or_create(
name='inventory-add', permissions=['add_inventory', 'view_organization'], content_type=ContentType.objects.get_for_model(Organization)
)

View File

@@ -2,15 +2,11 @@ from unittest import mock
import pytest
from django.contrib.contenttypes.models import ContentType
from crum import impersonate
from awx.main.models.rbac import get_role_from_object_role, give_creator_permissions
from awx.main.models import User, Organization, WorkflowJobTemplate, WorkflowJobTemplateNode, Team
from awx.api.versioning import reverse
from ansible_base.rbac.models import RoleUserAssignment, RoleDefinition
from ansible_base.rbac.models import RoleUserAssignment
@pytest.mark.django_db
@@ -18,7 +14,7 @@ from ansible_base.rbac.models import RoleUserAssignment, RoleDefinition
'role_name',
['execution_environment_admin_role', 'project_admin_role', 'admin_role', 'auditor_role', 'read_role', 'execute_role', 'notification_admin_role'],
)
def test_round_trip_roles(organization, rando, role_name, setup_managed_roles):
def test_round_trip_roles(organization, rando, role_name, managed_roles):
"""
Make an assignment with the old-style role,
get the equivalent new role
@@ -32,39 +28,7 @@ def test_round_trip_roles(organization, rando, role_name, setup_managed_roles):
@pytest.mark.django_db
def test_role_naming(setup_managed_roles):
qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='dmin')
assert qs.count() == 1 # sanity
rd = qs.first()
assert rd.name == 'JobTemplate Admin'
assert rd.description
assert rd.created_by is None
@pytest.mark.django_db
def test_action_role_naming(setup_managed_roles):
qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='ecute')
assert qs.count() == 1 # sanity
rd = qs.first()
assert rd.name == 'JobTemplate Execute'
assert rd.description
assert rd.created_by is None
@pytest.mark.django_db
def test_compat_role_naming(setup_managed_roles, job_template, rando, alice):
with impersonate(alice):
job_template.read_role.members.add(rando)
qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='ompat')
assert qs.count() == 1 # sanity
rd = qs.first()
assert rd.name == 'JobTemplate Read Compat'
assert rd.description
assert rd.created_by is None
@pytest.mark.django_db
def test_organization_level_permissions(organization, inventory, setup_managed_roles):
def test_organization_level_permissions(organization, inventory, managed_roles):
u1 = User.objects.create(username='alice')
u2 = User.objects.create(username='bob')
@@ -94,14 +58,14 @@ def test_organization_level_permissions(organization, inventory, setup_managed_r
@pytest.mark.django_db
def test_organization_execute_role(organization, rando, setup_managed_roles):
def test_organization_execute_role(organization, rando, managed_roles):
organization.execute_role.members.add(rando)
assert rando in organization.execute_role
assert set(Organization.accessible_objects(rando, 'execute_role')) == set([organization])
@pytest.mark.django_db
def test_workflow_approval_list(get, post, admin_user, setup_managed_roles):
def test_workflow_approval_list(get, post, admin_user, managed_roles):
workflow_job_template = WorkflowJobTemplate.objects.create()
approval_node = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
@@ -115,14 +79,14 @@ def test_workflow_approval_list(get, post, admin_user, setup_managed_roles):
@pytest.mark.django_db
def test_creator_permission(rando, admin_user, inventory, setup_managed_roles):
def test_creator_permission(rando, admin_user, inventory, managed_roles):
give_creator_permissions(rando, inventory)
assert rando in inventory.admin_role
assert rando in inventory.admin_role.members.all()
@pytest.mark.django_db
def test_team_team_read_role(rando, team, admin_user, post, setup_managed_roles):
def test_team_team_read_role(rando, team, admin_user, post, managed_roles):
orgs = [Organization.objects.create(name=f'foo-{i}') for i in range(2)]
teams = [Team.objects.create(name=f'foo-{i}', organization=orgs[i]) for i in range(2)]
teams[1].member_role.members.add(rando)

View File

@@ -21,13 +21,13 @@ class TestComputedFields:
def test_computed_fields_normal_use(self, mocker, inventory):
job = Job.objects.create(name='fake-job', inventory=inventory)
with immediate_on_commit():
mocker.patch.object(update_inventory_computed_fields, 'delay')
job.delete()
update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
job.delete()
update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)
def test_disable_computed_fields(self, mocker, inventory):
job = Job.objects.create(name='fake-job', inventory=inventory)
with disable_computed_fields():
mocker.patch.object(update_inventory_computed_fields, 'delay')
job.delete()
update_inventory_computed_fields.delay.assert_not_called()
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
job.delete()
update_inventory_computed_fields.delay.assert_not_called()

View File

@@ -21,13 +21,13 @@ def test_multi_group_basic_job_launch(instance_factory, controlplane_instance_gr
j2 = create_job(objects2.job_template)
with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
mock_task_impact.return_value = 500
mocker.patch("awx.main.scheduler.TaskManager.start_task")
TaskManager().schedule()
TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])
@pytest.mark.django_db
def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group, instance_group_factory, job_template_factory):
def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group, mocker, instance_group_factory, job_template_factory):
i1 = instance_factory("i1")
i2 = instance_factory("i2")
ig1 = instance_group_factory("ig1", instances=[i1])
@@ -50,7 +50,7 @@ def test_multi_group_with_shared_dependency(instance_factory, controlplane_insta
objects2 = job_template_factory('jt2', organization=objects1.organization, project=p, inventory='inv2', credential='cred2')
objects2.job_template.instance_groups.add(ig2)
j2 = create_job(objects2.job_template, dependencies_processed=False)
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
DependencyManager().schedule()
TaskManager().schedule()
pu = p.project_updates.first()
@@ -73,10 +73,10 @@ def test_workflow_job_no_instancegroup(workflow_job_template_factory, controlpla
wfj = wfjt.create_unified_job()
wfj.status = "pending"
wfj.save()
mocker.patch("awx.main.scheduler.TaskManager.start_task")
TaskManager().schedule()
TaskManager.start_task.assert_called_once_with(wfj, None, None)
assert wfj.instance_group is None
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
TaskManager.start_task.assert_called_once_with(wfj, None, None)
assert wfj.instance_group is None
@pytest.mark.django_db

View File

@@ -16,9 +16,9 @@ def test_single_job_scheduler_launch(hybrid_instance, controlplane_instance_grou
instance = controlplane_instance_group.instances.all()[0]
objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred')
j = create_job(objects.job_template)
mocker.patch("awx.main.scheduler.TaskManager.start_task")
TaskManager().schedule()
TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
@pytest.mark.django_db

View File

@@ -46,8 +46,6 @@ def generate_fake_var(element):
def credential_kind(source):
"""Given the inventory source kind, return expected credential kind"""
if source == 'openshift_virtualization':
return 'kubernetes_bearer_token'
return source.replace('ec2', 'aws')
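Aside: with the openshift_virtualization branch removed, the mapping collapses to the ec2-to-aws rewrite; inlined for a quick check:

def credential_kind(source):
    if source == 'openshift_virtualization':
        return 'kubernetes_bearer_token'
    return source.replace('ec2', 'aws')

assert credential_kind('ec2') == 'aws'
assert credential_kind('gce') == 'gce'
assert credential_kind('openshift_virtualization') == 'kubernetes_bearer_token'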

View File

@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
import pytest
from django.conf import settings
from awx.api.versioning import reverse
from awx.main.middleware import URLModificationMiddleware
from awx.main.models import ( # noqa
@@ -119,7 +121,7 @@ def test_notification_template(get, admin_user):
@pytest.mark.django_db
def test_instance(get, admin_user, settings):
def test_instance(get, admin_user):
test_instance = Instance.objects.create(uuid=settings.SYSTEM_UUID, hostname="localhost", capacity=100)
url = reverse('api:instance_detail', kwargs={'pk': test_instance.pk})
response = get(url, user=admin_user, expect=200)
@@ -203,65 +205,3 @@ def test_403_vs_404(get):
get(f'/api/v2/users/{cindy.pk}/', expect=401)
get('/api/v2/users/cindy/', expect=404)
@pytest.mark.django_db
class TestConvertNamedUrl:
@pytest.mark.parametrize(
"url",
(
"/api/",
"/api/v2/",
"/api/v2/hosts/",
"/api/v2/hosts/1/",
"/api/v2/organizations/1/inventories/",
"/api/foo/",
"/api/foo/v2/",
"/api/foo/v2/organizations/",
"/api/foo/v2/organizations/1/",
"/api/foo/v2/organizations/1/inventories/",
"/api/foobar/",
"/api/foobar/v2/",
"/api/foobar/v2/organizations/",
"/api/foobar/v2/organizations/1/",
"/api/foobar/v2/organizations/1/inventories/",
"/api/foobar/v2/organizations/1/inventories/",
),
)
def test_noop(self, url, settings):
settings.OPTIONAL_API_URLPATTERN_PREFIX = ''
assert URLModificationMiddleware._convert_named_url(url) == url
settings.OPTIONAL_API_URLPATTERN_PREFIX = 'foo'
assert URLModificationMiddleware._convert_named_url(url) == url
def test_named_org(self):
test_org = Organization.objects.create(name='test_org')
assert URLModificationMiddleware._convert_named_url('/api/v2/organizations/test_org/') == f'/api/v2/organizations/{test_org.pk}/'
def test_named_org_optional_api_urlpattern_prefix_interaction(self, settings):
settings.OPTIONAL_API_URLPATTERN_PREFIX = 'bar'
test_org = Organization.objects.create(name='test_org')
assert URLModificationMiddleware._convert_named_url('/api/bar/v2/organizations/test_org/') == f'/api/bar/v2/organizations/{test_org.pk}/'
@pytest.mark.parametrize("prefix", ['', 'bar'])
def test_named_org_not_found(self, prefix, settings):
settings.OPTIONAL_API_URLPATTERN_PREFIX = prefix
if prefix:
prefix += '/'
assert URLModificationMiddleware._convert_named_url(f'/api/{prefix}v2/organizations/does-not-exist/') == f'/api/{prefix}v2/organizations/0/'
@pytest.mark.parametrize("prefix", ['', 'bar'])
def test_named_sub_resource(self, prefix, settings):
settings.OPTIONAL_API_URLPATTERN_PREFIX = prefix
test_org = Organization.objects.create(name='test_org')
if prefix:
prefix += '/'
assert (
URLModificationMiddleware._convert_named_url(f'/api/{prefix}v2/organizations/test_org/inventories/')
== f'/api/{prefix}v2/organizations/{test_org.pk}/inventories/'
)
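Aside: the removed tests pin down the conversion in miniature — a named segment under organizations/ rewrites to the object's pk, an unknown name maps to pk 0, and the optional prefix is honored. A regex-based sketch (the middleware's real implementation is assumed, not reproduced):

import re

ORGS = {'test_org': 5}  # name -> pk, stand-in for a DB lookup

def convert_named_url(path):
    m = re.match(r'^(/api/(?:[^/]+/)?v2/organizations/)([^/]+)(/.*)?$', path)
    if not m:
        return path
    head, name, tail = m.group(1), m.group(2), m.group(3) or ''
    if name.isdigit():
        return path  # already a pk
    return '{}{}{}'.format(head, ORGS.get(name, 0), tail)

assert convert_named_url('/api/v2/organizations/test_org/') == '/api/v2/organizations/5/'
assert convert_named_url('/api/bar/v2/organizations/nope/') == '/api/bar/v2/organizations/0/'
assert convert_named_url('/api/v2/hosts/1/') == '/api/v2/hosts/1/'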

View File

@@ -187,7 +187,7 @@ def test_remove_role_from_user(role, post, admin):
@pytest.mark.django_db
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN=True, ANSIBLE_BASE_ALLOW_TEAM_ORG_MEMBER=True)
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN=True)
def test_get_teams_roles_list(get, team, organization, admin):
team.member_role.children.add(organization.admin_role)
url = reverse('api:team_roles_list', kwargs={'pk': team.id})

View File

@@ -165,7 +165,7 @@ class TestOrphanJobTemplate:
@pytest.mark.django_db
@pytest.mark.job_permissions
def test_job_template_creator_access(project, organization, rando, post, setup_managed_roles):
def test_job_template_creator_access(project, organization, rando, post):
project.use_role.members.add(rando)
response = post(
url=reverse('api:job_template_list'),

View File

@@ -99,9 +99,7 @@ def test_notification_template_access_org_user(notification_template, user):
@pytest.mark.django_db
def test_notificaiton_template_orphan_access_org_admin(notification_template, organization, org_admin):
notification_template.organization = None
notification_template.save(update_fields=['organization'])
access = NotificationTemplateAccess(org_admin)
assert not org_admin.has_obj_perm(notification_template, 'change')
assert not access.can_change(notification_template, {'organization': organization.id})

View File

@@ -76,15 +76,15 @@ class TestJobTemplateSerializerGetRelated:
class TestJobTemplateSerializerGetSummaryFields:
def test_survey_spec_exists(self, test_get_summary_fields, mocker, job_template):
job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'}
mock_rj = mocker.patch.object(JobTemplateSerializer, '_recent_jobs')
mock_rj.return_value = []
test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')
with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
mock_rj.return_value = []
test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')
def test_survey_spec_absent(self, get_summary_fields_mock_and_run, mocker, job_template):
job_template.survey_spec = None
mock_rj = mocker.patch.object(JobTemplateSerializer, '_recent_jobs')
mock_rj.return_value = []
summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
mock_rj.return_value = []
summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
assert 'survey' not in summary
def test_copy_edit_standard(self, mocker, job_template_factory):
@@ -107,10 +107,10 @@ class TestJobTemplateSerializerGetSummaryFields:
view.kwargs = {}
serializer.context['view'] = view
mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie')
mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar')
mocker.patch("awx.main.access.JobTemplateAccess.can_copy", return_value='foo')
response = serializer.get_summary_fields(jt_obj)
with mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie'):
with mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar'):
with mocker.patch("awx.main.access.JobTemplateAccess.can_copy", return_value='foo'):
response = serializer.get_summary_fields(jt_obj)
assert response['user_capabilities']['copy'] == 'foo'
assert response['user_capabilities']['edit'] == 'foobar'

View File

@@ -189,8 +189,8 @@ class TestWorkflowJobTemplateNodeSerializerSurveyPasswords:
serializer = WorkflowJobTemplateNodeSerializer()
wfjt = WorkflowJobTemplate.objects.create(name='fake-wfjt')
serializer.instance = WorkflowJobTemplateNode(workflow_job_template=wfjt, unified_job_template=jt, extra_data={'var1': '$encrypted$foooooo'})
mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo')
attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
with mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo'):
attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
assert 'survey_passwords' in attrs
assert 'var1' in attrs['survey_passwords']
assert attrs['extra_data']['var1'] == '$encrypted$foooooo'

View File

@@ -191,16 +191,16 @@ class TestResourceAccessList:
def test_parent_access_check_failed(self, mocker, mock_organization):
mock_access = mocker.MagicMock(__name__='for logger', return_value=False)
mocker.patch('awx.main.access.BaseAccess.can_read', mock_access)
with pytest.raises(PermissionDenied):
self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
mock_access.assert_called_once_with(mock_organization)
with mocker.patch('awx.main.access.BaseAccess.can_read', mock_access):
with pytest.raises(PermissionDenied):
self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
mock_access.assert_called_once_with(mock_organization)
def test_parent_access_check_worked(self, mocker, mock_organization):
mock_access = mocker.MagicMock(__name__='for logger', return_value=True)
mocker.patch('awx.main.access.BaseAccess.can_read', mock_access)
self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
mock_access.assert_called_once_with(mock_organization)
with mocker.patch('awx.main.access.BaseAccess.can_read', mock_access):
self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
mock_access.assert_called_once_with(mock_organization)
def test_related_search_reverse_FK_field():

View File

@@ -66,7 +66,7 @@ class TestJobTemplateLabelList:
mock_request = mock.MagicMock()
super(JobTemplateLabelList, view).unattach(mock_request, None, None)
mixin_unattach.assert_called_with(mock_request, None, None)
assert mixin_unattach.called_with(mock_request, None, None)
class TestInventoryInventorySourcesUpdate:
@@ -108,16 +108,15 @@ class TestInventoryInventorySourcesUpdate:
mock_request = mocker.MagicMock()
mock_request.user.can_access.return_value = can_access
mocker.patch.object(InventoryInventorySourcesUpdate, 'get_object', return_value=obj)
mocker.patch.object(InventoryInventorySourcesUpdate, 'get_serializer_context', return_value=None)
serializer_class = mocker.patch('awx.api.serializers.InventoryUpdateDetailSerializer')
with mocker.patch.object(InventoryInventorySourcesUpdate, 'get_object', return_value=obj):
with mocker.patch.object(InventoryInventorySourcesUpdate, 'get_serializer_context', return_value=None):
with mocker.patch('awx.api.serializers.InventoryUpdateDetailSerializer') as serializer_class:
serializer = serializer_class.return_value
serializer.to_representation.return_value = {}
serializer = serializer_class.return_value
serializer.to_representation.return_value = {}
view = InventoryInventorySourcesUpdate()
response = view.post(mock_request)
assert response.data == expected
view = InventoryInventorySourcesUpdate()
response = view.post(mock_request)
assert response.data == expected
class TestSurveySpecValidation:

View File

@@ -155,35 +155,35 @@ def test_node_getter_and_setters():
class TestWorkflowJobCreate:
def test_create_no_prompts(self, wfjt_node_no_prompts, workflow_job_unit, mocker):
mock_create = mocker.MagicMock()
mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create)
wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
mock_create.assert_called_once_with(
all_parents_must_converge=False,
extra_data={},
survey_passwords={},
char_prompts=wfjt_node_no_prompts.char_prompts,
inventory=None,
unified_job_template=wfjt_node_no_prompts.unified_job_template,
workflow_job=workflow_job_unit,
identifier=mocker.ANY,
execution_environment=None,
)
with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
mock_create.assert_called_once_with(
all_parents_must_converge=False,
extra_data={},
survey_passwords={},
char_prompts=wfjt_node_no_prompts.char_prompts,
inventory=None,
unified_job_template=wfjt_node_no_prompts.unified_job_template,
workflow_job=workflow_job_unit,
identifier=mocker.ANY,
execution_environment=None,
)
def test_create_with_prompts(self, wfjt_node_with_prompts, workflow_job_unit, credential, mocker):
mock_create = mocker.MagicMock()
mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create)
wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
mock_create.assert_called_once_with(
all_parents_must_converge=False,
extra_data={},
survey_passwords={},
char_prompts=wfjt_node_with_prompts.char_prompts,
inventory=wfjt_node_with_prompts.inventory,
unified_job_template=wfjt_node_with_prompts.unified_job_template,
workflow_job=workflow_job_unit,
identifier=mocker.ANY,
execution_environment=None,
)
with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
mock_create.assert_called_once_with(
all_parents_must_converge=False,
extra_data={},
survey_passwords={},
char_prompts=wfjt_node_with_prompts.char_prompts,
inventory=wfjt_node_with_prompts.inventory,
unified_job_template=wfjt_node_with_prompts.unified_job_template,
workflow_job=workflow_job_unit,
identifier=mocker.ANY,
execution_environment=None,
)
@pytest.mark.django_db

View File

@@ -1,26 +0,0 @@
from unittest import mock
from django.core.mail.message import EmailMessage
import awx.main.notifications.awssns_backend as awssns_backend
def test_send_messages():
with mock.patch('awx.main.notifications.awssns_backend.AWSSNSBackend._sns_publish') as sns_publish_mock:
aws_region = 'us-east-1'
sns_topic = f"arn:aws:sns:{aws_region}:111111111111:topic-mock"
backend = awssns_backend.AWSSNSBackend(aws_region=aws_region, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None)
message = EmailMessage(
'test subject',
{'body': 'test body'},
[],
[
sns_topic,
],
)
sent_messages = backend.send_messages(
[
message,
]
)
sns_publish_mock.assert_called_once_with(topic_arn=sns_topic, message=message.body)
assert sent_messages == 1
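For context, the mocked _sns_publish above stands in for an SNS publish call; with boto3 that call looks roughly like the sketch below. This is an assumption about the underlying client call, not the backend's actual code; the region and topic ARN are placeholders mirroring the test fixture, and real AWS credentials would be required to run it.

import boto3

# Placeholder region and topic ARN, mirroring the test fixture above.
client = boto3.client('sns', region_name='us-east-1')
client.publish(
    TopicArn='arn:aws:sns:us-east-1:111111111111:topic-mock',
    Message='test body',
)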

View File

@@ -137,10 +137,10 @@ def test_send_notifications_not_list():
def test_send_notifications_job_id(mocker):
mocker.patch('awx.main.models.UnifiedJob.objects.get')
system.send_notifications([], job_id=1)
assert UnifiedJob.objects.get.called
assert UnifiedJob.objects.get.called_with(id=1)
with mocker.patch('awx.main.models.UnifiedJob.objects.get'):
system.send_notifications([], job_id=1)
assert UnifiedJob.objects.get.called
assert UnifiedJob.objects.get.called_with(id=1)
@mock.patch('awx.main.models.UnifiedJob.objects.get')

View File

@@ -7,15 +7,15 @@ def test_produce_supervisor_command(mocker):
mock_process = mocker.MagicMock()
mock_process.communicate = communicate_mock
Popen_mock = mocker.MagicMock(return_value=mock_process)
mocker.patch.object(reload.subprocess, 'Popen', Popen_mock)
reload.supervisor_service_command("restart")
reload.subprocess.Popen.assert_called_once_with(
[
'supervisorctl',
'restart',
'tower-processes:*',
],
stderr=-1,
stdin=-1,
stdout=-1,
)
with mocker.patch.object(reload.subprocess, 'Popen', Popen_mock):
reload.supervisor_service_command("restart")
reload.subprocess.Popen.assert_called_once_with(
[
'supervisorctl',
'restart',
'tower-processes:*',
],
stderr=-1,
stdin=-1,
stdout=-1,
)

View File

@@ -2,11 +2,9 @@
# All Rights Reserved.
# Python
import base64
import logging
import sys
import traceback
import os
from datetime import datetime
# Django
@@ -17,15 +15,6 @@ from django.utils.encoding import force_str
# AWX
from awx.main.exceptions import PostRunError
# OTEL
from opentelemetry._logs import set_logger_provider
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter as OTLPGrpcLogExporter
from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter as OTLPHttpLogExporter
from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.sdk.resources import Resource
class RSysLogHandler(logging.handlers.SysLogHandler):
append_nul = False
@@ -144,39 +133,3 @@ if settings.COLOR_LOGS is True:
pass
else:
ColorHandler = logging.StreamHandler
class OTLPHandler(LoggingHandler):
def __init__(self, endpoint=None, protocol='grpc', service_name=None, instance_id=None, auth=None, username=None, password=None):
if not endpoint:
raise ValueError("endpoint required")
if auth == 'basic' and (username is None or password is None):
raise ValueError("auth type basic requires username and passsword parameters")
self.endpoint = endpoint
self.service_name = service_name or (sys.argv[1] if len(sys.argv) > 1 else (sys.argv[0] or 'unknown_service'))
self.instance_id = instance_id or os.uname().nodename
logger_provider = LoggerProvider(
resource=Resource.create(
{
"service.name": self.service_name,
"service.instance.id": self.instance_id,
}
),
)
set_logger_provider(logger_provider)
headers = {}
if auth == 'basic':
secret = f'{username}:{password}'
headers['Authorization'] = "Basic " + base64.b64encode(secret.encode()).decode()
if protocol == 'grpc':
otlp_exporter = OTLPGrpcLogExporter(endpoint=self.endpoint, insecure=True, headers=headers)
elif protocol == 'http':
otlp_exporter = OTLPHttpLogExporter(endpoint=self.endpoint, headers=headers)
logger_provider.add_log_record_processor(BatchLogRecordProcessor(otlp_exporter))
super().__init__(level=logging.NOTSET, logger_provider=logger_provider)
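A minimal wiring sketch for the handler above, assuming an OTLP collector is reachable at the given endpoint; the endpoint and service name are placeholders, and OTLPHandler is the class defined in this file.

import logging

handler = OTLPHandler(
    endpoint='http://localhost:4317',  # placeholder collector address
    protocol='grpc',
    service_name='awx-example',  # placeholder service name
)
logging.getLogger('awx').addHandler(handler)
logging.getLogger('awx').info('this record is exported via OTLP')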

View File

@@ -1,48 +0,0 @@
# Copyright (c) 2024 Ansible, Inc.
# All Rights Reserved.
# DRF
from rest_framework.request import Request
"""
Note that these methods operate on request.environ. This data is from uwsgi.
It is the source data from which request.headers (read-only) is constructed.
"""
def is_proxy_in_headers(request: Request, proxy_list: list[str], headers: list[str]) -> bool:
"""
Determine if the request went through at least one proxy in the list.
Example:
request.environ = {
"HTTP_X_FOO": "8.8.8.8, 192.168.2.1",
"REMOTE_ADDR": "192.168.2.1",
"REMOTE_HOST": "foobar"
}
proxy_list = ["192.168.2.1"]
headers = ["HTTP_X_FOO", "REMOTE_ADDR", "REMOTE_HOST"]
The above would return True since 192.168.2.1 is a value for the header HTTP_X_FOO
request: The DRF/Django request; its request.environ dict is searched for proxies
proxy_list: A list of known and trusted proxies; entries may be IPs or hostnames
headers: A list of header keys whose values may contain a proxy
"""
remote_hosts = set()
for header in headers:
for value in request.environ.get(header, '').split(','):
value = value.strip()
if value:
remote_hosts.add(value)
return bool(remote_hosts.intersection(set(proxy_list)))
def delete_headers_starting_with_http(request: Request, headers: list[str]):
for header in headers:
if header.startswith('HTTP_'):
request.environ.pop(header, None)
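A quick usage sketch of the two helpers above; FakeRequest is a stand-in object, since only .environ is consulted:

class FakeRequest:
    """Stand-in for a DRF Request; only .environ is used by these helpers."""

    def __init__(self, environ):
        self.environ = environ

request = FakeRequest({'HTTP_X_FOO': '8.8.8.8, 192.168.2.1', 'REMOTE_ADDR': '192.168.2.1'})
assert is_proxy_in_headers(request, ['192.168.2.1'], ['HTTP_X_FOO', 'REMOTE_ADDR'])
delete_headers_starting_with_http(request, ['HTTP_X_FOO'])
assert 'HTTP_X_FOO' not in request.environ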

View File

@@ -285,6 +285,8 @@ class WebSocketRelayManager(object):
except asyncio.CancelledError:
# Handle the case where the task was already cancelled by the time we got here.
pass
except Exception as e:
logger.warning(f"Failed to cancel relay connection for {hostname}: {e}")
del self.relay_connections[hostname]
@@ -295,6 +297,8 @@ class WebSocketRelayManager(object):
self.stats_mgr.delete_remote_host_stats(hostname)
except KeyError:
pass
except Exception as e:
logger.warning(f"Failed to delete stats for {hostname}: {e}")
async def run(self):
event_loop = asyncio.get_running_loop()
@@ -302,6 +306,7 @@ class WebSocketRelayManager(object):
self.stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
self.stats_mgr.start()
# Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
database_conf = deepcopy(settings.DATABASES['default'])
database_conf['OPTIONS'] = deepcopy(database_conf.get('OPTIONS', {}))
@@ -313,54 +318,79 @@ class WebSocketRelayManager(object):
if 'PASSWORD' in database_conf:
database_conf['OPTIONS']['password'] = database_conf.pop('PASSWORD')
async_conn = await psycopg.AsyncConnection.connect(
dbname=database_conf['NAME'],
host=database_conf['HOST'],
user=database_conf['USER'],
port=database_conf['PORT'],
**database_conf.get("OPTIONS", {}),
)
task = None
await async_conn.set_autocommit(True)
on_ws_heartbeat_task = event_loop.create_task(self.on_ws_heartbeat(async_conn))
# Managing the async_conn here so that we can close it if we need to restart the connection
async_conn = None
# Establishes a websocket connection to /websocket/relay on all API servers
while True:
if on_ws_heartbeat_task.done():
raise Exception("on_ws_heartbeat_task has exited")
try:
while True:
if not task or task.done():
try:
# Try to close the connection if it's open
if async_conn:
try:
await async_conn.close()
except Exception as e:
logger.warning(f"Failed to close connection to database for pg_notify: {e}")
future_remote_hosts = self.known_hosts.keys()
current_remote_hosts = self.relay_connections.keys()
deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts)
# and re-establish the connection
async_conn = await psycopg.AsyncConnection.connect(
dbname=database_conf['NAME'],
host=database_conf['HOST'],
user=database_conf['USER'],
port=database_conf['PORT'],
**database_conf.get("OPTIONS", {}),
)
await async_conn.set_autocommit(True)
# This loop handles the case where we get an advertisement from a host we already know
# about but the advertisement has a different IP than we are currently connected to.
for hostname, address in self.known_hosts.items():
if hostname not in self.relay_connections:
# We've picked up a new hostname that we don't know about yet.
continue
# before creating the task that uses the connection
task = event_loop.create_task(self.on_ws_heartbeat(async_conn), name="on_ws_heartbeat")
logger.info("Creating `on_ws_heartbeat` task in event loop.")
if address != self.relay_connections[hostname].remote_host:
deleted_remote_hosts.add(hostname)
new_remote_hosts.add(hostname)
except Exception as e:
logger.warning(f"Failed to connect to database for pg_notify: {e}")
# Delete any hosts with closed connections
for hostname, relay_conn in self.relay_connections.items():
if not relay_conn.connected:
deleted_remote_hosts.add(hostname)
future_remote_hosts = self.known_hosts.keys()
current_remote_hosts = self.relay_connections.keys()
deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts)
if deleted_remote_hosts:
logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list")
await asyncio.gather(*[self.cleanup_offline_host(h) for h in deleted_remote_hosts])
# This loop handles the case where we get an advertisement from a host we already know
# about but the advertisement has a different IP than we are currently connected to.
for hostname, address in self.known_hosts.items():
if hostname not in self.relay_connections:
# We've picked up a new hostname that we don't know about yet.
continue
if new_remote_hosts:
logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")
if address != self.relay_connections[hostname].remote_host:
deleted_remote_hosts.add(hostname)
new_remote_hosts.add(hostname)
for h in new_remote_hosts:
stats = self.stats_mgr.new_remote_host_stats(h)
relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])
relay_connection.start()
self.relay_connections[h] = relay_connection
# Delete any hosts with closed connections
for hostname, relay_conn in self.relay_connections.items():
if not relay_conn.connected:
deleted_remote_hosts.add(hostname)
await asyncio.sleep(settings.BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS)
if deleted_remote_hosts:
logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list")
await asyncio.gather(*[self.cleanup_offline_host(h) for h in deleted_remote_hosts])
if new_remote_hosts:
logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")
for h in new_remote_hosts:
stats = self.stats_mgr.new_remote_host_stats(h)
relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])
relay_connection.start()
self.relay_connections[h] = relay_connection
await asyncio.sleep(settings.BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS)
finally:
if async_conn:
logger.info("Shutting down db connection for wsrelay.")
try:
await async_conn.close()
except Exception as e:
logger.info(f"Failed to close connection to database for pg_notify: {e}")

View File

@@ -114,7 +114,6 @@ MEDIA_ROOT = os.path.join(BASE_DIR, 'public', 'media')
MEDIA_URL = '/media/'
LOGIN_URL = '/api/login/'
LOGOUT_ALLOWED_HOSTS = None
# Absolute filesystem path to the directory to host projects (with playbooks).
# This directory should not be web-accessible.
@@ -492,7 +491,6 @@ CELERYBEAT_SCHEDULE = {
'cleanup_images': {'task': 'awx.main.tasks.system.cleanup_images_and_files', 'schedule': timedelta(hours=3)},
'cleanup_host_metrics': {'task': 'awx.main.tasks.host_metrics.cleanup_host_metrics', 'schedule': timedelta(hours=3, minutes=30)},
'host_metric_summary_monthly': {'task': 'awx.main.tasks.host_metrics.host_metric_summary_monthly', 'schedule': timedelta(hours=4)},
'periodic_resource_sync': {'task': 'awx.main.tasks.system.periodic_resource_sync', 'schedule': timedelta(minutes=15)},
}
# Django Caching Configuration
@@ -657,10 +655,6 @@ AWX_ANSIBLE_CALLBACK_PLUGINS = ""
# Automatically remove nodes that have missed their heartbeats after some time
AWX_AUTO_DEPROVISION_INSTANCES = False
# If False, do not allow creation of resources that are shared with the platform ingress
# e.g. organizations, teams, and users
ALLOW_LOCAL_RESOURCE_MANAGEMENT = True
# Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed'
# Note: This setting may be overridden by database settings.
PENDO_TRACKING_STATE = "off"
@@ -783,11 +777,6 @@ INSIGHTS_EXCLUDE_EMPTY_GROUPS = False
TERRAFORM_INSTANCE_ID_VAR = 'id'
TERRAFORM_EXCLUDE_EMPTY_GROUPS = True
# ------------------------
# OpenShift Virtualization
# ------------------------
OPENSHIFT_VIRTUALIZATION_EXCLUDE_EMPTY_GROUPS = True
# ---------------------
# ----- Custom -----
# ---------------------
@@ -890,7 +879,6 @@ LOGGING = {
'address': '/var/run/awx-rsyslog/rsyslog.sock',
'filters': ['external_log_enabled', 'dynamic_level_filter', 'guid'],
},
'otel': {'class': 'logging.NullHandler'},
},
'loggers': {
'django': {'handlers': ['console']},
@@ -1160,8 +1148,13 @@ ANSIBLE_BASE_CUSTOM_VIEW_PARENT = 'awx.api.generics.APIView'
# Settings for the ansible_base RBAC system
# This has been moved to data migration code
ANSIBLE_BASE_ROLE_PRECREATE = {}
# Only used internally, names of the managed RoleDefinitions to create
ANSIBLE_BASE_ROLE_PRECREATE = {
'object_admin': '{cls.__name__} Admin',
'org_admin': 'Organization Admin',
'org_children': 'Organization {cls.__name__} Admin',
'special': '{cls.__name__} {action}',
}
# Name for auto-created roles that give users permissions to what they create
ANSIBLE_BASE_ROLE_CREATOR_NAME = '{cls.__name__} Creator'
@@ -1172,6 +1165,9 @@ ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED = True
# Permissions a user will get when creating a new item
ANSIBLE_BASE_CREATOR_DEFAULTS = ['change', 'delete', 'execute', 'use', 'adhoc', 'approve', 'update', 'view']
# This is a stopgap, will delete after resource registry integration
ANSIBLE_BASE_SERVICE_PREFIX = "awx"
# Temporary, for old roles API compatibility, save child permissions at organization level
ANSIBLE_BASE_CACHE_PARENT_PERMISSIONS = True
@@ -1185,3 +1181,6 @@ ANSIBLE_BASE_ALLOW_SINGLETON_ROLES_API = False # Do not allow creating user-def
# system username for django-ansible-base
SYSTEM_USERNAME = None
# Use AWX base view, to give 401 on unauthenticated requests
ANSIBLE_BASE_CUSTOM_VIEW_PARENT = 'awx.api.generics.APIView'
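For reference, the templated names in ANSIBLE_BASE_ROLE_PRECREATE above expand with ordinary str.format; a tiny sketch, where the model class and action are stand-ins:

class JobTemplate:  # stand-in model class
    pass

print('{cls.__name__} Admin'.format(cls=JobTemplate))               # JobTemplate Admin
print('Organization {cls.__name__} Admin'.format(cls=JobTemplate))  # Organization JobTemplate Admin
print('{cls.__name__} {action}'.format(cls=JobTemplate, action='Execute'))  # JobTemplate Execute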

File diff suppressed because it is too large

View File

@@ -7,18 +7,18 @@ from django.core.cache import cache
def test_ldap_default_settings(mocker):
from_db = mocker.Mock(**{'order_by.return_value': []})
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db)
settings = LDAPSettings()
assert settings.ORGANIZATION_MAP == {}
assert settings.TEAM_MAP == {}
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db):
settings = LDAPSettings()
assert settings.ORGANIZATION_MAP == {}
assert settings.TEAM_MAP == {}
def test_ldap_default_network_timeout(mocker):
cache.clear() # clearing cache avoids picking up stray default for OPT_REFERRALS
from_db = mocker.Mock(**{'order_by.return_value': []})
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db)
settings = LDAPSettings()
assert settings.CONNECTION_OPTIONS[ldap.OPT_NETWORK_TIMEOUT] == 30
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db):
settings = LDAPSettings()
assert settings.CONNECTION_OPTIONS[ldap.OPT_NETWORK_TIMEOUT] == 30
def test_ldap_filter_validator():

View File

@@ -62,7 +62,7 @@ function CredentialLookup({
? { credential_type: credentialTypeId }
: {};
const typeKindParams = credentialTypeKind
? { credential_type__kind__in: credentialTypeKind }
? { credential_type__kind: credentialTypeKind }
: {};
const typeNamespaceParams = credentialTypeNamespace
? { credential_type__namespace: credentialTypeNamespace }
@@ -125,7 +125,7 @@ function CredentialLookup({
? { credential_type: credentialTypeId }
: {};
const typeKindParams = credentialTypeKind
? { credential_type__kind__in: credentialTypeKind }
? { credential_type__kind: credentialTypeKind }
: {};
const typeNamespaceParams = credentialTypeNamespace
? { credential_type__namespace: credentialTypeNamespace }

View File

@@ -190,7 +190,6 @@ function NotificationList({
name: t`Notification type`,
key: 'or__notification_type',
options: [
['awssns', t`AWS SNS`],
['email', t`Email`],
['grafana', t`Grafana`],
['hipchat', t`Hipchat`],

View File

@@ -12,7 +12,7 @@ const Inner = styled.div`
border-radius: 2px;
color: white;
left: 10px;
max-width: 500px;
max-width: 300px;
padding: 5px 10px;
position: absolute;
top: 10px;

View File

@@ -12,7 +12,6 @@ const GridDL = styled.dl`
column-gap: 15px;
display: grid;
grid-template-columns: max-content;
overflow-wrap: anywhere;
row-gap: 0px;
dt {
grid-column-start: 1;

View File

@@ -56,10 +56,6 @@ describe('<InventorySourceAdd />', () => {
['satellite6', 'Red Hat Satellite 6'],
['openstack', 'OpenStack'],
['rhv', 'Red Hat Virtualization'],
[
'openshift_virtualization',
'Red Hat OpenShift Virtualization',
],
['controller', 'Red Hat Ansible Automation Platform'],
],
},

View File

@@ -22,9 +22,7 @@ const ansibleDocUrls = {
constructed:
'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
terraform:
'https://github.com/ansible-collections/cloud.terraform/blob/main/docs/cloud.terraform.terraform_state_inventory.rst',
openshift_virtualization:
'https://kubevirt.io/kubevirt.core/latest/plugins/kubevirt.html',
'https://github.com/ansible-collections/cloud.terraform/blob/stable-statefile-inventory/plugins/inventory/terraform_state.py',
};
const getInventoryHelpTextStrings = () => ({
@@ -123,7 +121,7 @@ const getInventoryHelpTextStrings = () => ({
<br />
{value && (
<div>
{t`If you want the Inventory Source to update on launch, click on Update on Launch,
{t`If you want the Inventory Source to update on launch, click on Update on Launch,
and also go to `}
<Link to={`/projects/${value.id}/details`}> {value.name} </Link>
{t`and click on Update Revision on Launch.`}
@@ -142,7 +140,7 @@ const getInventoryHelpTextStrings = () => ({
<br />
{value && (
<div>
{t`If you want the Inventory Source to update on launch, click on Update on Launch,
{t`If you want the Inventory Source to update on launch, click on Update on Launch,
and also go to `}
<Link to={`/projects/${value.id}/details`}> {value.name} </Link>
{t`and click on Update Revision on Launch`}

View File

@@ -26,7 +26,6 @@ import {
TerraformSubForm,
VMwareSubForm,
VirtualizationSubForm,
OpenShiftVirtualizationSubForm,
} from './InventorySourceSubForms';
const buildSourceChoiceOptions = (options) => {
@@ -232,15 +231,6 @@ const InventorySourceFormFields = ({
sourceOptions={sourceOptions}
/>
),
openshift_virtualization: (
<OpenShiftVirtualizationSubForm
autoPopulateCredential={
!source?.id ||
source?.source !== 'openshift_virtualization'
}
sourceOptions={sourceOptions}
/>
),
}[sourceField.value]
}
</FormColumnLayout>

View File

@@ -1,64 +0,0 @@
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';
import { t } from '@lingui/macro';
import { useConfig } from 'contexts/Config';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
OptionsField,
VerbosityField,
EnabledVarField,
EnabledValueField,
HostFilterField,
SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';
const OpenShiftVirtualizationSubForm = ({ autoPopulateCredential }) => {
const helpText = getHelpText();
const { setFieldValue, setFieldTouched } = useFormikContext();
const [credentialField, credentialMeta, credentialHelpers] =
useField('credential');
const config = useConfig();
const handleCredentialUpdate = useCallback(
(value) => {
setFieldValue('credential', value);
setFieldTouched('credential', true, false);
},
[setFieldValue, setFieldTouched]
);
const docsBaseUrl = getDocsBaseUrl(config);
return (
<>
<CredentialLookup
credentialTypeNamespace="kubernetes_bearer_token"
label={t`Credential`}
helperTextInvalid={credentialMeta.error}
isValid={!credentialMeta.touched || !credentialMeta.error}
onBlur={() => credentialHelpers.setTouched()}
onChange={handleCredentialUpdate}
value={credentialField.value}
required
autoPopulate={autoPopulateCredential}
validate={required(t`Select a value for this field`)}
/>
<VerbosityField />
<HostFilterField />
<EnabledVarField />
<EnabledValueField />
<OptionsField />
<SourceVarsField
popoverContent={helpText.sourceVars(
docsBaseUrl,
'openshift_virtualization'
)}
/>
</>
);
};
export default OpenShiftVirtualizationSubForm;

View File

@@ -1,65 +0,0 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import VirtualizationSubForm from './VirtualizationSubForm';
jest.mock('../../../../api');
const initialValues = {
credential: null,
overwrite: false,
overwrite_vars: false,
source_path: '',
source_project: null,
source_script: null,
source_vars: '---\n',
update_cache_timeout: 0,
update_on_launch: true,
verbosity: 1,
};
describe('<VirtualizationSubForm />', () => {
let wrapper;
beforeEach(async () => {
CredentialsAPI.read.mockResolvedValue({
data: { count: 0, results: [] },
});
await act(async () => {
wrapper = mountWithContexts(
<Formik initialValues={initialValues}>
<VirtualizationSubForm />
</Formik>
);
});
});
afterAll(() => {
jest.clearAllMocks();
});
test('should render subform fields', () => {
expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
expect(
wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
).toHaveLength(1);
expect(
wrapper.find('VariablesField[label="Source variables"]')
).toHaveLength(1);
});
test('should make expected api calls', () => {
expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
expect(CredentialsAPI.read).toHaveBeenCalledWith({
credential_type__namespace: 'rhv',
order_by: 'name',
page: 1,
page_size: 5,
});
});
});

View File

@@ -87,7 +87,7 @@ const SCMSubForm = ({ autoPopulateProject }) => {
/>
)}
<CredentialLookup
credentialTypeKind="cloud,kubernetes"
credentialTypeKind="cloud"
label={t`Credential`}
value={credentialField.value}
onChange={handleCredentialUpdate}

View File

@@ -9,4 +9,3 @@ export { default as ControllerSubForm } from './ControllerSubForm';
export { default as TerraformSubForm } from './TerraformSubForm';
export { default as VMwareSubForm } from './VMwareSubForm';
export { default as VirtualizationSubForm } from './VirtualizationSubForm';
export { default as OpenShiftVirtualizationSubForm } from './OpenShiftVirtualizationSubForm';

View File

@@ -138,25 +138,6 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
}
dataCy="nt-detail-type"
/>
{template.notification_type === 'awssns' && (
<>
<Detail
label={t`AWS Region`}
value={configuration.aws_region}
dataCy="nt-detail-aws-region"
/>
<Detail
label={t`Access Key ID`}
value={configuration.aws_access_key_id}
dataCy="nt-detail-aws-access-key-id"
/>
<Detail
label={t`SNS Topic ARN`}
value={configuration.sns_topic_arn}
dataCy="nt-detail-sns-topic-arn"
/>
</>
)}
{template.notification_type === 'email' && (
<>
<Detail
@@ -474,8 +455,8 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
}
function CustomMessageDetails({ messages, defaults, type }) {
const showMessages = !['awssns', 'webhook'].includes(type);
const showBodies = ['email', 'pagerduty', 'webhook', 'awssns'].includes(type);
const showMessages = type !== 'webhook';
const showBodies = ['email', 'pagerduty', 'webhook'].includes(type);
return (
<>

View File

@@ -120,7 +120,7 @@ function NotificationTemplatesList() {
toolbarSearchColumns={[
{
name: t`Name`,
key: 'name__icontains',
key: 'name',
isDefault: true,
},
{
@@ -131,7 +131,6 @@ function NotificationTemplatesList() {
name: t`Notification type`,
key: 'or__notification_type',
options: [
['awssns', t`AWS SNS`],
['email', t`Email`],
['grafana', t`Grafana`],
['hipchat', t`Hipchat`],

View File

@@ -1,6 +1,5 @@
/* eslint-disable-next-line import/prefer-default-export */
export const NOTIFICATION_TYPES = {
awssns: 'AWS SNS',
email: 'Email',
grafana: 'Grafana',
irc: 'IRC',

View File

@@ -11,8 +11,8 @@ import getDocsBaseUrl from 'util/getDocsBaseUrl';
function CustomMessagesSubForm({ defaultMessages, type }) {
const [useCustomField, , useCustomHelpers] = useField('useCustomMessages');
const showMessages = !['webhook', 'awssns'].includes(type);
const showBodies = ['email', 'pagerduty', 'webhook', 'awssns'].includes(type);
const showMessages = type !== 'webhook';
const showBodies = ['email', 'pagerduty', 'webhook'].includes(type);
const { setFieldValue } = useFormikContext();
const config = useConfig();

View File

@@ -78,7 +78,6 @@ function NotificationTemplateFormFields({ defaultMessages, template }) {
label: t`Choose a Notification Type`,
isDisabled: true,
},
{ value: 'awssns', key: 'awssns', label: t`AWS SNS` },
{ value: 'email', key: 'email', label: t`E-mail` },
{ value: 'grafana', key: 'grafana', label: 'Grafana' },
{ value: 'irc', key: 'irc', label: 'IRC' },

View File

@@ -29,7 +29,6 @@ import Popover from '../../../components/Popover/Popover';
import getHelpText from './Notifications.helptext';
const TypeFields = {
awssns: AWSSNSFields,
email: EmailFields,
grafana: GrafanaFields,
irc: IRCFields,
@@ -59,44 +58,6 @@ TypeInputsSubForm.propTypes = {
export default TypeInputsSubForm;
function AWSSNSFields() {
return (
<>
<FormField
id="awssns-aws-region"
label={t`AWS Region`}
name="notification_configuration.aws_region"
type="text"
isRequired
/>
<FormField
id="awssns-aws-access-key-id"
label={t`Access Key ID`}
name="notification_configuration.aws_access_key_id"
type="text"
/>
<PasswordField
id="awssns-aws-secret-access-key"
label={t`Secret Access Key`}
name="notification_configuration.aws_secret_access_key"
/>
<PasswordField
id="awssns-aws-session-token"
label={t`Session Token`}
name="notification_configuration.aws_session_token"
/>
<FormField
id="awssns-sns-topic-arn"
label={t`SNS Topic ARN`}
name="notification_configuration.sns_topic_arn"
type="text"
validate={required(null)}
isRequired
/>
</>
);
}
function EmailFields() {
const helpText = getHelpText();
return (

View File

@@ -203,39 +203,6 @@
}
}
},
"awssns": {
"started": {
"body": "{{ job_metadata }}"
},
"success": {
"body": "{{ job_metadata }}"
},
"error": {
"body": "{{ job_metadata }}"
},
"workflow_approval": {
"running": {
"body": {
"body": "The approval node \"{{ approval_node_name }}\" needs review. This node can be viewed at: {{ workflow_url }}"
}
},
"approved": {
"body": {
"body": "The approval node \"{{ approval_node_name }}\" was approved. {{ workflow_url }}"
}
},
"timed_out": {
"body": {
"body": "The approval node \"{{ approval_node_name }}\" has timed out. {{ workflow_url }}"
}
},
"denied": {
"body": {
"body": "The approval node \"{{ approval_node_name }}\" was denied. {{ workflow_url }}"
}
}
}
},
"mattermost": {
"started": {
"message": "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}",

View File

@@ -1,11 +1,4 @@
const typeFieldNames = {
awssns: [
'aws_region',
'aws_access_key_id',
'aws_secret_access_key',
'aws_session_token',
'sns_topic_arn',
],
email: [
'username',
'password',

View File

@@ -374,7 +374,6 @@ export const CredentialType = shape({
});
export const NotificationType = oneOf([
'awssns',
'email',
'grafana',
'irc',

View File

@@ -17,7 +17,7 @@ import time
import re
from json import loads, dumps
from os.path import isfile, expanduser, split, join, exists, isdir
from os import access, R_OK, getcwd, environ, getenv
from os import access, R_OK, getcwd, environ
try:
@@ -107,7 +107,7 @@ class ControllerModule(AnsibleModule):
# Perform magic depending on whether controller_oauthtoken is a string or a dict
if self.params.get('controller_oauthtoken'):
token_param = self.params.get('controller_oauthtoken')
if isinstance(token_param, dict):
if type(token_param) is dict:
if 'token' in token_param:
self.oauth_token = self.params.get('controller_oauthtoken')['token']
else:
@@ -148,10 +148,9 @@ class ControllerModule(AnsibleModule):
# Make sure we start with /api/vX
if not endpoint.startswith("/"):
endpoint = "/{0}".format(endpoint)
hostname_prefix = self.url_prefix.rstrip("/")
api_path = self.api_path()
if not endpoint.startswith(hostname_prefix + api_path):
endpoint = hostname_prefix + f"{api_path}v2{endpoint}"
prefix = self.url_prefix.rstrip("/")
if not endpoint.startswith(prefix + "/api/"):
endpoint = prefix + "/api/v2{0}".format(endpoint)
if not endpoint.endswith('/') and '?' not in endpoint:
endpoint = "{0}/".format(endpoint)
@@ -216,7 +215,7 @@ class ControllerModule(AnsibleModule):
try:
config_data = yaml.load(config_string, Loader=yaml.SafeLoader)
# If this is an actual ini file, yaml will return the whole thing as a string instead of a dict
if not isinstance(config_data, dict):
if type(config_data) is not dict:
raise AssertionError("The yaml config file is not properly formatted as a dict.")
try_config_parsing = False
@@ -258,7 +257,7 @@ class ControllerModule(AnsibleModule):
if honorred_setting in config_data:
# Verify SSL must be a boolean
if honorred_setting == 'verify_ssl':
if isinstance(config_data[honorred_setting], str):
if type(config_data[honorred_setting]) is str:
setattr(self, honorred_setting, strtobool(config_data[honorred_setting]))
else:
setattr(self, honorred_setting, bool(config_data[honorred_setting]))
@@ -604,14 +603,6 @@ class ControllerAPIModule(ControllerModule):
status_code = response.status
return {'status_code': status_code, 'json': response_json}
def api_path(self):
default_api_path = "/api/"
if self._COLLECTION_TYPE != "awx":
default_api_path = "/api/controller/"
prefix = getenv('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', default_api_path)
return prefix
def authenticate(self, **kwargs):
if self.username and self.password:
# Attempt to get a token from /api/v2/tokens/ by giving it our username/password combo
@@ -622,7 +613,7 @@ class ControllerAPIModule(ControllerModule):
"scope": "write",
}
# Preserve URL prefix
endpoint = self.url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/'
endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/'
# Post to the tokens endpoint with basic auth to try and get a token
api_token_url = (self.url._replace(path=endpoint)).geturl()
@@ -1011,7 +1002,7 @@ class ControllerAPIModule(ControllerModule):
if self.authenticated and self.oauth_token_id:
# Attempt to delete our current token from /api/v2/tokens/
# Post to the tokens endpoint with basic auth to try and get a token
endpoint = self.url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/{self.oauth_token_id}/'
endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/{0}/'.format(self.oauth_token_id)
api_token_url = (self.url._replace(path=endpoint, query=None)).geturl() # in error cases, fail_json exists before exception handling
try:
@@ -1047,10 +1038,7 @@ class ControllerAPIModule(ControllerModule):
# Grab our start time to compare against for the timeout
start = time.time()
result = self.get_endpoint(url)
wait_on_field = 'event_processing_finished'
if wait_on_field not in result['json']:
wait_on_field = 'finished'
while not result['json'][wait_on_field]:
while not result['json']['finished']:
# If we are past our timeout, fail with a message
if timeout and timeout < time.time() - start:
# Account for Legacy messages
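The wait_on_field fallback above boils down to this polling pattern, sketched with a hypothetical get_status callable that returns the job's JSON as a flat dict:

import time

def wait_until_finished(get_status, timeout=None, poll=1.0):
    start = time.time()
    result = get_status()
    # Prefer the newer field when the API exposes it; fall back otherwise.
    field = 'event_processing_finished' if 'event_processing_finished' in result else 'finished'
    while not result[field]:
        if timeout and time.time() - start > timeout:
            raise TimeoutError(f'gave up waiting on {field}')
        time.sleep(poll)
        result = get_status()
    return result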

View File

@@ -163,7 +163,7 @@ def main():
for arg in ['job_type', 'limit', 'forks', 'verbosity', 'extra_vars', 'become_enabled', 'diff_mode']:
if module.params.get(arg):
# extra_vars can receive a dict or a string; if a dict, convert it to a string
if arg == 'extra_vars' and not isinstance(module.params.get(arg), str):
if arg == 'extra_vars' and type(module.params.get(arg)) is not str:
post_data[arg] = json.dumps(module.params.get(arg))
else:
post_data[arg] = module.params.get(arg)

View File

@@ -121,7 +121,6 @@ def main():
client_type = module.params.get('client_type')
organization = module.params.get('organization')
redirect_uris = module.params.get('redirect_uris')
skip_authorization = module.params.get('skip_authorization')
state = module.params.get('state')
# Attempt to look up the related items the user specified (these will fail the module if not found)
@@ -147,8 +146,6 @@ def main():
application_fields['description'] = description
if redirect_uris is not None:
application_fields['redirect_uris'] = ' '.join(redirect_uris)
if skip_authorization is not None:
application_fields['skip_authorization'] = skip_authorization
response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False)
if 'client_id' in response:

View File

@@ -56,7 +56,7 @@ import logging
# In this module we don't use EXPORTABLE_RESOURCES; we just want to validate that our installed awxkit has import/export
try:
from awxkit.api.pages.api import EXPORTABLE_RESOURCES # noqa: F401; pylint: disable=unused-import
from awxkit.api.pages.api import EXPORTABLE_RESOURCES # noqa
HAS_EXPORTABLE_RESOURCES = True
except ImportError:

View File

@@ -42,8 +42,7 @@ options:
source:
description:
- The source to use for this group.
choices: [ "scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights", "terraform",
"openshift_virtualization" ]
choices: [ "scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights" ]
type: str
source_path:
description:
@@ -171,22 +170,7 @@ def main():
#
# How do we handle manual and file? The controller does not seem to be able to activate them
#
source=dict(
choices=[
"scm",
"ec2",
"gce",
"azure_rm",
"vmware",
"satellite6",
"openstack",
"rhv",
"controller",
"insights",
"terraform",
"openshift_virtualization",
]
),
source=dict(choices=["scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights"]),
source_path=dict(),
source_vars=dict(type='dict'),
enabled_var=dict(),

View File

@@ -50,7 +50,6 @@ options:
description:
- The type of notification to be sent.
choices:
- 'awssns'
- 'email'
- 'grafana'
- 'irc'
@@ -220,7 +219,7 @@ def main():
copy_from=dict(),
description=dict(),
organization=dict(),
notification_type=dict(choices=['awssns', 'email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
notification_type=dict(choices=['email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
notification_configuration=dict(type='dict'),
messages=dict(type='dict'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),

Some files were not shown because too many files have changed in this diff