Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 19:44:43 -03:30)
Compare commits
61 Commits
| SHA1 |
|---|
| 4738c8333a |
| 13dcea0afd |
| bc2d339981 |
| bef9ef10bb |
| 8645fe5c57 |
| b93aa20362 |
| 4bbfc8a946 |
| 2c8eef413b |
| d5bad1a533 |
| f6c0effcb2 |
| 31a086b11a |
| d94f766fcb |
| a7113549eb |
| bfd811f408 |
| 030704a9e1 |
| c312d9bce3 |
| aadcc217eb |
| 345c1c11e9 |
| 2c3a7fafc5 |
| dbcd32a1d9 |
| d45e258a78 |
| d16b69a102 |
| 8b4efbc973 |
| 4cb061e7db |
| 31db6a1447 |
| ad9d5904d8 |
| b837d549ff |
| 9e22865d2e |
| ee3e3e1516 |
| 4a8f6e45f8 |
| 6a317cca1b |
| d67af79451 |
| fe77fda7b2 |
| f613b76baa |
| 054cbe69d7 |
| 87e9dcb6d7 |
| c8829b057e |
| a0b376a6ca |
| d675207f99 |
| 20504042c9 |
| 0e87e97820 |
| 1f154742df |
| 85fc81aab1 |
| 5cfeeb3e87 |
| a8c07b06d8 |
| 53c5feaf6b |
| 6f57aaa8f5 |
| bea74a401d |
| 54e85813c8 |
| b69ed08fe5 |
| de25408a23 |
| b17f0a188b |
| fb860d76ce |
| 451f20ce0f |
| c1dc0c7b86 |
| d65ea2a3d5 |
| 8827ae7554 |
| 4915262af1 |
| d43c91e1a5 |
| b470ca32af |
| 793777bec7 |
Makefile (22 changes)
```diff
@@ -53,6 +53,8 @@ OTEL ?= false
 LOKI ?= false
 # If set to true docker-compose will install editable dependencies
 EDITABLE_DEPENDENCIES ?= false
+# If set to true, use tls for postgres connection
+PG_TLS ?= false
 
 VENV_BASE ?= /var/lib/awx/venv
 
@@ -62,6 +64,9 @@ DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
 DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
 
+# Common command to use for running ansible-playbook
+ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)
+
 RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 
 # Python packages to install only from source (not from binary wheels)
@@ -69,7 +74,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37
 
 NAME ?= awx
 
@@ -366,7 +371,7 @@ symlink_collection:
 	ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)
 
 awx_collection_build: $(shell find awx_collection -type f)
-	ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
+	$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
 	  -e collection_package=$(COLLECTION_PACKAGE) \
 	  -e collection_namespace=$(COLLECTION_NAMESPACE) \
 	  -e collection_version=$(COLLECTION_VERSION) \
@@ -520,10 +525,10 @@ endif
 
 docker-compose-sources: .git/hooks/pre-commit
 	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-	    ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
+	    $(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
 	fi;
 
-	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
+	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
 	    -e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
 	    -e awx_image_tag=$(COMPOSE_TAG) \
 	    -e receptor_image=$(RECEPTOR_IMAGE) \
@@ -542,11 +547,12 @@ docker-compose-sources: .git/hooks/pre-commit
 	    -e enable_otel=$(OTEL) \
 	    -e enable_loki=$(LOKI) \
 	    -e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
+	    -e pg_tls=$(PG_TLS) \
 	    $(EXTRA_SOURCES_ANSIBLE_OPTS)
 
 docker-compose: awx/projects docker-compose-sources
 	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
-	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
+	$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
 	    -e enable_vault=$(VAULT) \
 	    -e vault_tls=$(VAULT_TLS) \
 	    -e enable_ldap=$(LDAP); \
@@ -589,7 +595,7 @@ docker-compose-container-group-clean:
 .PHONY: Dockerfile.dev
 ## Generate Dockerfile.dev for awx_devel image
 Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 	    -e dockerfile_name=Dockerfile.dev \
 	    -e build_dev=True \
 	    -e receptor_image=$(RECEPTOR_IMAGE)
@@ -664,7 +670,7 @@ version-for-buildyml:
 .PHONY: Dockerfile
 ## Generate Dockerfile for awx image
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 	    -e receptor_image=$(RECEPTOR_IMAGE) \
 	    -e headless=$(HEADLESS)
 
@@ -694,7 +700,7 @@ awx-kube-buildx: Dockerfile
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml \
+	$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
 	    -e dockerfile_name=Dockerfile.kube-dev \
 	    -e kube_dev=True \
 	    -e template_dest=_build_kube_dev \
```
```diff
@@ -33,8 +33,10 @@ from rest_framework.negotiation import DefaultContentNegotiation
 # django-ansible-base
 from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
 from ansible_base.lib.utils.models import get_all_field_names
+from ansible_base.lib.utils.requests import get_remote_host
 from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
 from ansible_base.rbac.permission_registry import permission_registry
+from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header
 
 # AWX
 from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
@@ -42,6 +44,7 @@ from awx.main.models.rbac import give_creator_permissions
 from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
 from awx.main.utils.licensing import server_product_name
+from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
 from awx.main.views import ApiErrorView
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
 from awx.api.versioning import URLPathVersioning
@@ -93,8 +96,9 @@ class LoggedLoginView(auth_views.LoginView):
 
     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
+        ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
         if request.user.is_authenticated:
-            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
+            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
             ret.set_cookie(
                 'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
             )
@@ -103,7 +107,7 @@ class LoggedLoginView(auth_views.LoginView):
             return ret
         else:
             if 'username' in self.request.POST:
-                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
+                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
             ret.status_code = 401
             return ret
 
@@ -151,22 +155,23 @@ class APIView(views.APIView):
         Store the Django REST Framework Request object as an attribute on the
         normal Django request, store time the request started.
         """
+        remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']
+
         self.time_started = time.time()
         if getattr(settings, 'SQL_DEBUG', False):
             self.queries_before = len(connection.queries)
 
+        if 'HTTP_X_TRUSTED_PROXY' in request.environ:
+            if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
+                remote_headers = settings.REMOTE_HOST_HEADERS
+            else:
+                logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")
+
-        # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
-        # they respect the allowed proxy list
-        if all(
-            [
-                settings.PROXY_IP_ALLOWED_LIST,
-                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
-                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
-            ]
-        ):
-            for custom_header in settings.REMOTE_HOST_HEADERS:
-                if custom_header.startswith('HTTP_'):
-                    request.environ.pop(custom_header, None)
+        if settings.PROXY_IP_ALLOWED_LIST:
+            if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
+                delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)
 
         drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
         request.drf_request = drf_request
@@ -211,11 +216,12 @@
             return response
 
         if response.status_code >= 400:
+            ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
             msg_data = {
                 'status_code': response.status_code,
                 'user_name': request.user,
                 'url_path': request.path,
-                'remote_addr': request.META.get('REMOTE_ADDR', None),
+                'remote_addr': ip,
             }
 
             if type(response.data) is dict:
```
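The `validate_x_trusted_proxy_header` call above comes from django-ansible-base; per the tests further down, the real scheme signs the header with an RSA keypair. Purely to illustrate the shape of a signed trusted-proxy header, here is a toy HMAC version — names and key handling are hypothetical, not the django-ansible-base implementation:

```python
import hashlib
import hmac
import time

SHARED_SECRET = b'proxy-shared-secret'  # hypothetical key material


def make_trusted_proxy_header(secret: bytes) -> str:
    # the proxy signs a timestamp so the header cannot be replayed indefinitely
    ts = str(int(time.time()))
    sig = hmac.new(secret, ts.encode(), hashlib.sha256).hexdigest()
    return f'{ts}:{sig}'


def validate_trusted_proxy_header(value: str, secret: bytes, max_age: int = 300) -> bool:
    try:
        ts, sig = value.split(':', 1)
    except ValueError:
        return False
    expected = hmac.new(secret, ts.encode(), hashlib.sha256).hexdigest()
    fresh = (int(time.time()) - int(ts)) <= max_age
    # constant-time comparison avoids leaking the signature byte-by-byte
    return hmac.compare_digest(sig, expected) and fresh


assert validate_trusted_proxy_header(make_trusted_proxy_header(SHARED_SECRET), SHARED_SECRET)
```

A request carrying a valid signature is allowed to supply the full `REMOTE_HOST_HEADERS` set; an invalid signature only logs a warning and falls back to the two direct headers.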
```diff
@@ -61,7 +61,9 @@ import pytz
 from wsgiref.util import FileWrapper
 
 # django-ansible-base
+from ansible_base.lib.utils.requests import get_remote_hosts
 from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
 
 # AWX
 from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
@@ -128,6 +130,7 @@ from awx.api.views.mixin import (
 from awx.api.pagination import UnifiedJobEventPagination
 from awx.main.utils import set_environ
 
+
 logger = logging.getLogger('awx.api.views')
 
 
@@ -710,16 +713,81 @@ class AuthView(APIView):
         return Response(data)
 
 
+def immutablesharedfields(cls):
+    '''
+    Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
+
+    Works by overriding these view methods:
+    - create
+    - delete
+    - perform_update
+    create and delete are overridden to raise a PermissionDenied exception.
+    perform_update is overridden to check if any shared fields are being modified,
+    and raise a PermissionDenied exception if so.
+    '''
+    # create instead of perform_create because some of our views
+    # override create instead of perform_create
+    if hasattr(cls, 'create'):
+        cls.original_create = cls.create
+
+        @functools.wraps(cls.create)
+        def create_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_create(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
+
+        cls.create = create_wrapper
+
+    if hasattr(cls, 'delete'):
+        cls.original_delete = cls.delete
+
+        @functools.wraps(cls.delete)
+        def delete_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_delete(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
+
+        cls.delete = delete_wrapper
+
+    if hasattr(cls, 'perform_update'):
+        cls.original_perform_update = cls.perform_update
+
+        @functools.wraps(cls.perform_update)
+        def update_wrapper(*args, **kwargs):
+            if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                view, serializer = args
+                instance = view.get_object()
+                if instance:
+                    if isinstance(instance, models.Organization):
+                        shared_fields = OrganizationType._declared_fields.keys()
+                    elif isinstance(instance, models.User):
+                        shared_fields = UserType._declared_fields.keys()
+                    elif isinstance(instance, models.Team):
+                        shared_fields = TeamType._declared_fields.keys()
+                    attrs = serializer.validated_data
+                    for field in shared_fields:
+                        if field in attrs and getattr(instance, field) != attrs[field]:
+                            raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
+            return cls.original_perform_update(*args, **kwargs)
+
+        cls.perform_update = update_wrapper
+
+    return cls
+
+
+@immutablesharedfields
 class TeamList(ListCreateAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer
 
 
+@immutablesharedfields
 class TeamDetail(RetrieveUpdateDestroyAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer
 
 
+@immutablesharedfields
 class TeamUsersList(BaseUsersList):
     model = models.User
     serializer_class = serializers.UserSerializer
```
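The decorator above mutates the view class in place: it stashes the original method on the class, substitutes a `functools.wraps`-preserving wrapper, and gates the write on a settings flag. A minimal, self-contained sketch of that same wrap-and-gate pattern — all names here are stand-ins, not AWX code:

```python
import functools

ALLOW_LOCAL_RESOURCE_MANAGEMENT = False  # stand-in for the Django setting


class PermissionDenied(Exception):
    pass


def immutable_create(cls):
    """Replace cls.create with a wrapper that refuses local writes."""
    if hasattr(cls, 'create'):
        cls.original_create = cls.create

        @functools.wraps(cls.create)
        def create_wrapper(*args, **kwargs):
            if ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                return cls.original_create(*args, **kwargs)
            raise PermissionDenied('create this resource via the platform ingress')

        cls.create = create_wrapper
    return cls


@immutable_create
class TeamView:
    def create(self):
        return 'created'


try:
    TeamView().create()
except PermissionDenied as exc:
    print(exc)  # -> create this resource via the platform ingress
```

Because the wrapper checks the flag at call time rather than decoration time, flipping the setting takes effect without re-importing the view module.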
```diff
@@ -1101,6 +1169,7 @@ class ProjectCopy(CopyAPIView):
     copy_return_serializer_class = serializers.ProjectSerializer
 
 
+@immutablesharedfields
 class UserList(ListCreateAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -1271,7 +1340,16 @@ class UserRolesList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=self.kwargs['pk'])
         role = get_object_or_400(models.Role, pk=sub_id)
 
-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        # Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -1343,6 +1421,7 @@ class UserActivityStreamList(SubListAPIView):
         return qs.filter(Q(actor=parent) | Q(user__in=[parent]))
 
 
+@immutablesharedfields
 class UserDetail(RetrieveUpdateDestroyAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer
@@ -2692,12 +2771,7 @@ class JobTemplateCallback(GenericAPIView):
         host for the current request.
         """
         # Find the list of remote host names/IPs to check.
-        remote_hosts = set()
-        for header in settings.REMOTE_HOST_HEADERS:
-            for value in self.request.META.get(header, '').split(','):
-                value = value.strip()
-                if value:
-                    remote_hosts.add(value)
+        remote_hosts = set(get_remote_hosts(self.request))
         # Add the reverse lookup of IP addresses.
         for rh in list(remote_hosts):
             try:
@@ -4295,7 +4369,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=sub_id)
         role = self.get_parent_object()
 
-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
```
```diff
@@ -53,15 +53,18 @@ from awx.api.serializers import (
     CredentialSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
+from awx.api.views import immutablesharedfields
 
 logger = logging.getLogger('awx.api.views.organization')
 
 
+@immutablesharedfields
 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     model = Organization
     serializer_class = OrganizationSerializer
 
 
+@immutablesharedfields
 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = Organization
     serializer_class = OrganizationSerializer
@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
     relationship = 'inventories'
 
 
+@immutablesharedfields
 class OrganizationUsersList(BaseUsersList):
     model = User
     serializer_class = UserSerializer
@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
     ordering = ('username',)
 
 
+@immutablesharedfields
 class OrganizationAdminsList(BaseUsersList):
     model = User
     serializer_class = UserSerializer
@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
     parent_key = 'organization'
 
 
+@immutablesharedfields
 class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
     model = Team
     serializer_class = TeamSerializer
```
```diff
@@ -598,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
     - a superuser
     - admin role on the Instance group
     I can add/delete Instance Groups:
-    - a superuser(system administrator)
+    - a superuser(system administrator), because these are not org-scoped
     I can use Instance Groups when I have:
     - use_role on the instance group
     """
@@ -627,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
     def can_delete(self, obj):
        if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
             return False
-        return self.user.is_superuser
+        return self.user.has_obj_perm(obj, 'delete')
 
 
 class UserAccess(BaseAccess):
@@ -2628,7 +2628,7 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):
 
 class NotificationTemplateAccess(BaseAccess):
     """
-    I can see/use a notification_template if I have permission to
+    Run standard logic from DAB RBAC
     """
 
     model = NotificationTemplate
@@ -2649,10 +2649,7 @@ class NotificationTemplateAccess(BaseAccess):
 
     @check_superuser
     def can_change(self, obj, data):
-        if obj.organization is None:
-            # only superusers are allowed to edit orphan notification templates
-            return False
-        return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
+        return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')
 
     def can_admin(self, obj, data):
         return self.can_change(obj, data)
@@ -2662,9 +2659,7 @@ class NotificationTemplateAccess(BaseAccess):
 
     @check_superuser
     def can_start(self, obj, validate_license=True):
-        if obj.organization is None:
-            return False
-        return self.user in obj.organization.notification_admin_role
+        return self.can_change(obj, None)
 
 
 class NotificationAccess(BaseAccess):
```
```diff
@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]
 
-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
```
```diff
@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
         kwargs.setdefault('related_name', '+')
         kwargs.setdefault('null', 'True')
         kwargs.setdefault('editable', False)
-        kwargs.setdefault('on_delete', models.CASCADE)
+        kwargs.setdefault('on_delete', models.SET_NULL)
         super(ImplicitRoleField, self).__init__(*args, **kwargs)
 
     def deconstruct(self):
```
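The migrations that follow flip the same flag in every historical `ImplicitRoleField` definition. The practical difference: with `CASCADE`, deleting the referenced `Role` row also deletes the row holding the foreign key; with `SET_NULL` (legal only because the field is nullable), deleting the `Role` merely clears the pointer. A minimal sketch of the `kwargs.setdefault` pattern in isolation, assuming Django is installed — the class name is illustrative:

```python
from django.db import models


class ImplicitRoleFieldSketch(models.ForeignKey):
    """FK whose defaults match the post-change ImplicitRoleField."""

    def __init__(self, *args, **kwargs):
        # setdefault lets individual call sites still override any of these
        kwargs.setdefault('related_name', '+')
        kwargs.setdefault('null', True)
        kwargs.setdefault('editable', False)
        kwargs.setdefault('on_delete', models.SET_NULL)  # previously models.CASCADE
        super().__init__(*args, **kwargs)
```

Because `on_delete` is serialized into every migration's `deconstruct()` output, changing the default in the field class alone is not enough; that is why each old migration's `AddField`/`AlterField` below is rewritten as well.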
```diff
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='job_template_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='credential_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='inventory_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='project_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='workflow_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AddField(
             model_name='organization',
             name='notification_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
             model_name='inventory',
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
+                null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
                 related_name='+',
                 to='main.Role',
@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
             name='admin_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
             name='execute_role',
             field=awx.main.fields.ImplicitRoleField(
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'admin_role',
                     'execute_role',

@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='member_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
             ),
         ),
         migrations.AlterField(
@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='approval_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
             preserve_default='True',
         ),
@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.approval_role', 'admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=[
                     'member_role',
                     'auditor_role',
@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
                 related_name='+',
                 to='main.Role',

@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.job_template_admin_role'],
                 related_name='+',
                 to='main.Role',
@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['admin_role', 'organization.execute_role'],
                 related_name='+',
                 to='main.Role',
@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
                 related_name='+',
                 to='main.Role',

@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
             model_name='organization',
             name='execution_environment_admin_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
             ),
             preserve_default='True',
         ),

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_administrator'],
                 related_name='+',
                 to='main.role',
@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
             field=awx.main.fields.ImplicitRoleField(
                 editable=False,
                 null='True',
-                on_delete=django.db.models.deletion.CASCADE,
+                on_delete=django.db.models.deletion.SET_NULL,
                 parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
                 related_name='+',
                 to='main.role',
@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
             model_name='instancegroup',
             name='use_role',
             field=awx.main.fields.ImplicitRoleField(
-                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
+                editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
             ),
             preserve_default='True',
         ),
```
```diff
@@ -0,0 +1,61 @@
+# Generated by Django 4.2.10 on 2024-06-12 19:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0193_alter_notification_notification_type_and_more'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='inventorysource',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                    ('openshift_virtualization', 'OpenShift Virtualization'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='inventoryupdate',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                    ('openshift_virtualization', 'OpenShift Virtualization'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+    ]
```
```diff
@@ -290,7 +290,7 @@ def setup_managed_role_definitions(apps, schema_editor):
     managed_role_definitions = []
 
     org_perms = set()
-    for cls in permission_registry._registry:
+    for cls in permission_registry.all_registered_models:
         ct = ContentType.objects.get_for_model(cls)
         object_perms = set(Permission.objects.filter(content_type=ct))
         # Special case for InstanceGroup which has an organiation field, but is not an organization child object
```
```diff
@@ -4,11 +4,12 @@ import datetime
 from datetime import timezone
 import logging
 from collections import defaultdict
+import itertools
 import time
 
 from django.conf import settings
 from django.core.exceptions import ObjectDoesNotExist
-from django.db import models, DatabaseError
+from django.db import models, DatabaseError, transaction
 from django.db.models.functions import Cast
 from django.utils.dateparse import parse_datetime
 from django.utils.text import Truncator
@@ -605,19 +606,23 @@ class JobEvent(BasePlaybookEvent):
     def _update_host_metrics(updated_hosts_list):
         from awx.main.models import HostMetric  # circular import
 
-        # bulk-create
         current_time = now()
-        HostMetric.objects.bulk_create(
-            [HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
-        )
-        # bulk-update
-        batch_start, batch_size = 0, 1000
-        while batch_start <= len(updated_hosts_list):
-            batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
-            HostMetric.objects.filter(hostname__in=batched_host_list).update(
-                last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
-            )
-            batch_start += batch_size
+
+        # FUTURE:
+        # - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
+        # - Ability to do ORM upserts *may* have been introduced in Django 5.0.
+        #   See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
+        #   Hopefully this will be fully ready for batch use by 5.2 LTS.
+        args = [iter(updated_hosts_list)] * 500
+        for hosts in itertools.zip_longest(*args):
+            with transaction.atomic():
+                HostMetric.objects.bulk_create(
+                    [HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
+                )
+                HostMetric.objects.filter(hostname__in=hosts).update(
+                    last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
+                )
 
     @property
     def job_verbosity(self):
```
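The replacement loop leans on a classic idiom: replicating a single iterator N times and zipping it against itself yields fixed-size chunks, with `None` padding on the final short chunk — which is exactly why the code filters `hostname is not None`. A standalone demonstration (batch size 3 here so the padding is visible; the real code uses 500):

```python
import itertools

updated_hosts = [f'host-{i}' for i in range(7)]

# the same iterator object three times: zip_longest pulls items round-robin
args = [iter(updated_hosts)] * 3
for batch in itertools.zip_longest(*args):
    print([h for h in batch if h is not None])
# ['host-0', 'host-1', 'host-2']
# ['host-3', 'host-4', 'host-5']
# ['host-6']
```

On Python 3.12+ the same loop becomes `for batch in itertools.batched(updated_hosts, 500)`, with no padding to filter — which is what the FUTURE note anticipates.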
```diff
@@ -933,6 +933,7 @@ class InventorySourceOptions(BaseModel):
         ('controller', _('Red Hat Ansible Automation Platform')),
         ('insights', _('Red Hat Insights')),
         ('terraform', _('Terraform State')),
+        ('openshift_virtualization', _('OpenShift Virtualization')),
     ]
 
     # From the options of the Django management base command
@@ -1042,7 +1043,7 @@ class InventorySourceOptions(BaseModel):
     def cloud_credential_validation(source, cred):
         if not source:
             return None
-        if cred and source not in ('custom', 'scm'):
+        if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
             # If a credential was provided, it's important that it matches
             # the actual inventory source being used (Amazon requires Amazon
             # credentials; Rackspace requires Rackspace credentials; etc...)
@@ -1051,12 +1052,14 @@ class InventorySourceOptions(BaseModel):
         # Allow an EC2 source to omit the credential. If Tower is running on
         # an EC2 instance with an IAM Role assigned, boto will use credentials
         # from the instance metadata instead of those explicitly provided.
-        elif source in CLOUD_PROVIDERS and source != 'ec2':
+        elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
             return _('Credential is required for a cloud source.')
         elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
             return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
         elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
             return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
+        elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
+            return _('Credentials of type kubernetes is requred for openshift_virtualization inventory sources.')
         return None
 
     def get_cloud_credential(self):
@@ -1693,6 +1696,16 @@ class insights(PluginFileInjector):
     use_fqcn = True
 
 
+class openshift_virtualization(PluginFileInjector):
+    plugin_name = 'kubevirt'
+    base_injector = 'template'
+    namespace = 'kubevirt'
+    collection = 'core'
+    downstream_namespace = 'redhat'
+    downstream_collection = 'openshift_virtualization'
+    use_fqcn = True
+
+
 class constructed(PluginFileInjector):
     plugin_name = 'constructed'
     namespace = 'ansible'
```
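For orientation, the injector attributes above compose a fully qualified collection name for the inventory plugin: `kubevirt.core.kubevirt` upstream, and `redhat.openshift_virtualization.kubevirt` when the downstream namespace/collection pair is used. A minimal sketch of that composition — the helper name is illustrative, not the actual `PluginFileInjector` API:

```python
def plugin_fqcn(namespace: str, collection: str, plugin_name: str) -> str:
    """Compose an Ansible FQCN from an injector's class attributes."""
    return f"{namespace}.{collection}.{plugin_name}"


print(plugin_fqcn('kubevirt', 'core', 'kubevirt'))                    # upstream
print(plugin_fqcn('redhat', 'openshift_virtualization', 'kubevirt'))  # downstream
```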
```diff
@@ -17,7 +17,7 @@ from collections import OrderedDict
 
 # Django
 from django.conf import settings
-from django.db import models, connection
+from django.db import models, connection, transaction
 from django.core.exceptions import NON_FIELD_ERRORS
 from django.utils.translation import gettext_lazy as _
 from django.utils.timezone import now
@@ -273,7 +273,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
         if new_next_schedule:
             if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
                 return  # no-op, common for infrequent schedules
-            self.next_schedule = new_next_schedule
+
+            # If in a transaction, use select_for_update to lock the next schedule row, which
+            # prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
+            if transaction.get_autocommit():
+                self.next_schedule = related_schedules.first()
+            else:
+                self.next_schedule = related_schedules.select_for_update().first()
+
             self.next_job_run = new_next_schedule.next_run
             self.save(update_fields=['next_schedule', 'next_job_run'])
```
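`select_for_update()` issues `SELECT ... FOR UPDATE`, which only holds a row lock inside a transaction; Django raises `TransactionManagementError` if such a queryset is evaluated in autocommit mode. That is why the new code branches on `transaction.get_autocommit()`. A sketch of the gate in isolation, assuming any Django queryset:

```python
from django.db import transaction


def first_with_optional_lock(queryset):
    if transaction.get_autocommit():
        # No enclosing transaction: a row lock is impossible, so take the
        # row as-is and accept the small race window.
        return queryset.first()
    # Inside a transaction: lock the row so a concurrent delete blocks
    # until this transaction commits or rolls back.
    return queryset.select_for_update().first()
```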
```diff
@@ -36,6 +36,9 @@ import ansible_runner.cleanup
 # dateutil
 from dateutil.parser import parse as parse_date
 
+# django-ansible-base
+from ansible_base.resource_registry.tasks.sync import SyncExecutor
+
 # AWX
 from awx import __version__ as awx_application_version
 from awx.main.access import access_registry
@@ -964,3 +967,17 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
         permission_check_func(creater, copy_mapping.values())
     if isinstance(new_obj, Inventory):
         update_inventory_computed_fields.delay(new_obj.id)
+
+
+@task(queue=get_task_queuename)
+def periodic_resource_sync():
+    if not getattr(settings, 'RESOURCE_SERVER', None):
+        logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
+        return
+
+    with advisory_lock('periodic_resource_sync', wait=False) as acquired:
+        if acquired is False:
+            logger.debug("Not running periodic_resource_sync, another task holds lock")
+            return
+
+        SyncExecutor().run()
```
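The `wait=False` advisory lock is what keeps this periodic task single-flight across the whole cluster: the first node to acquire the PostgreSQL advisory lock runs the sync, every other node sees `acquired is False` and skips the cycle. A conceptual, self-contained sketch of that pattern with a process-local stand-in lock (the real `advisory_lock` is a database-backed lock shared by all nodes):

```python
import threading
from contextlib import contextmanager

_locks: dict[str, threading.Lock] = {}


@contextmanager
def advisory_lock(name: str, wait: bool = True):
    """Stand-in for a PostgreSQL advisory lock; yields whether it was acquired."""
    lock = _locks.setdefault(name, threading.Lock())
    acquired = lock.acquire(blocking=wait)
    try:
        yield acquired
    finally:
        if acquired:
            lock.release()


with advisory_lock('periodic_resource_sync', wait=False) as acquired:
    if acquired is False:
        print('another task holds the lock; skipping')
    else:
        print('running sync')
```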
```diff
@@ -0,0 +1,5 @@
+{
+    "K8S_AUTH_HOST": "https://foo.invalid",
+    "K8S_AUTH_API_KEY": "fooo",
+    "K8S_AUTH_VERIFY_SSL": "False"
+}
```
```diff
@@ -1,22 +1,30 @@
 import pytest
+from unittest import mock
 
 from awx.api.versioning import reverse
 
+from django.test.utils import override_settings
+
+from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
+from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair  # noqa: F401; pylint: disable=unused-import
+
+
+class HeaderTrackingMiddleware(object):
+    def __init__(self):
+        self.environ = {}
+
+    def process_request(self, request):
+        pass
+
+    def process_response(self, request, response):
+        self.environ = request.environ
+
+
 @pytest.mark.django_db
 def test_proxy_ip_allowed(get, patch, admin):
     url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
     patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
 
-    class HeaderTrackingMiddleware(object):
-        environ = {}
-
-        def process_request(self, request):
-            pass
-
-        def process_response(self, request, response):
-            self.environ = request.environ
-
     # By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
     # should just pass through
     middleware = HeaderTrackingMiddleware()
@@ -45,6 +53,51 @@ def test_proxy_ip_allowed(get, patch, admin):
     assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
 
 
+@pytest.mark.django_db
+class TestTrustedProxyAllowListIntegration:
+    @pytest.fixture
+    def url(self, patch, admin):
+        url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
+        patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
+        patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']})
+        return url
+
+    @pytest.fixture
+    def middleware(self):
+        return HeaderTrackingMiddleware()
+
+    def test_x_trusted_proxy_valid_signature(self, get, admin, rsa_keypair, url, middleware):  # noqa: F811
+        # Headers should NOT get deleted
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
+        }
+        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
+            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public, PROXY_IP_ALLOWED_LIST=[]):
+                get(url, user=admin, middleware=middleware, **headers)
+        assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
+
+    def test_x_trusted_proxy_invalid_signature(self, get, admin, url, patch, middleware):
+        # Headers should NOT get deleted
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
+        }
+        with override_settings(PROXY_IP_ALLOWED_LIST=[]):
+            get(url, user=admin, middleware=middleware, **headers)
+        assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
+
+    def test_x_trusted_proxy_invalid_signature_valid_proxy(self, get, admin, url, middleware):
+        # A valid explicit proxy SHOULD result in sensitive headers NOT being deleted, regardless of the trusted proxy signature results
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
+            'REMOTE_ADDR': 'my.proxy.example.org',
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
+        }
+        get(url, user=admin, middleware=middleware, **headers)
+        assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
+
+
 @pytest.mark.django_db
 class TestDeleteViews:
     def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
```
awx/main/tests/functional/api/test_immutablesharedfields.py (new file, 66 lines)
```diff
@@ -0,0 +1,66 @@
+import pytest
+
+from awx.api.versioning import reverse
+from awx.main.models import Organization
+
+
+@pytest.mark.django_db
+class TestImmutableSharedFields:
+    @pytest.fixture(autouse=True)
+    def configure_settings(self, settings):
+        settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT = False
+
+    def test_create_raises_permission_denied(self, admin_user, post):
+        orgA = Organization.objects.create(name='orgA')
+        resp = post(
+            url=reverse('api:team_list'),
+            data={'name': 'teamA', 'organization': orgA.id},
+            user=admin_user,
+            expect=403,
+        )
+        assert "Creation of this resource is not allowed" in resp.data['detail']
+
+    def test_perform_delete_raises_permission_denied(self, admin_user, delete):
+        orgA = Organization.objects.create(name='orgA')
+        team = orgA.teams.create(name='teamA')
+        resp = delete(
+            url=reverse('api:team_detail', kwargs={'pk': team.id}),
+            user=admin_user,
+            expect=403,
+        )
+        assert "Deletion of this resource is not allowed" in resp.data['detail']
+
+    def test_perform_update(self, admin_user, patch):
+        orgA = Organization.objects.create(name='orgA')
+        team = orgA.teams.create(name='teamA')
+        # allow patching non-shared fields
+        patch(
+            url=reverse('api:team_detail', kwargs={'pk': team.id}),
+            data={"description": "can change this field"},
+            user=admin_user,
+            expect=200,
+        )
+        orgB = Organization.objects.create(name='orgB')
+        # prevent patching shared fields
+        resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
+        assert "Cannot change shared field" in resp.data['organization']
+
+    @pytest.mark.parametrize(
+        'role',
+        ['admin_role', 'member_role'],
+    )
+    @pytest.mark.parametrize('resource', ['organization', 'team'])
+    def test_prevent_assigning_member_to_organization_or_team(self, admin_user, post, resource, role):
+        orgA = Organization.objects.create(name='orgA')
+        if resource == 'organization':
+            role = getattr(orgA, role)
+        elif resource == 'team':
+            teamA = orgA.teams.create(name='teamA')
+            role = getattr(teamA, role)
+        resp = post(
+            url=reverse('api:user_roles_list', kwargs={'pk': admin_user.id}),
+            data={'id': role.id},
+            user=admin_user,
+            expect=403,
+        )
+        assert f"Cannot directly modify user membership to {resource}." in resp.data['msg']
```
```diff
@@ -32,13 +32,6 @@ def node_type_instance():
     return fn
 
 
-@pytest.fixture
-def instance_group(job_factory):
-    ig = InstanceGroup(name="east")
-    ig.save()
-    return ig
-
-
 @pytest.fixture
 def containerized_instance_group(instance_group, kube_credential):
     ig = InstanceGroup(name="container")
```
```diff
@@ -1,4 +1,5 @@
 import pytest
+from unittest import mock
 
 # AWX
 from awx.api.serializers import JobTemplateSerializer
@@ -8,10 +9,15 @@ from awx.main.migrations import _save_password_keys as save_password_keys
 
 # Django
 from django.apps import apps
+from django.test.utils import override_settings
 
 # DRF
 from rest_framework.exceptions import ValidationError
 
+# DAB
+from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
+from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair  # noqa: F401; pylint: disable=unused-import
+
 
 @pytest.mark.django_db
 @pytest.mark.parametrize(
@@ -369,3 +375,113 @@ def test_job_template_missing_inventory(project, inventory, admin_user, post):
     )
     assert r.status_code == 400
     assert "Cannot start automatically, an inventory is required." in str(r.data)
+
+
+@pytest.mark.django_db
+class TestJobTemplateCallbackProxyIntegration:
+    """
+    Test the interaction of provision job template callback feature and:
+        settings.PROXY_IP_ALLOWED_LIST
+        x-trusted-proxy http header
+    """
+
+    @pytest.fixture
+    def job_template(self, inventory, project):
+        jt = JobTemplate.objects.create(name='test-jt', inventory=inventory, project=project, playbook='helloworld.yml', host_config_key='abcd')
+        return jt
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_host_not_found(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz',
+            'REMOTE_HOST': 'baz',
+            'REMOTE_ADDR': 'baz',
+        }
+        r = post(
+            url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=400, **headers
+        )
+        assert r.data['msg'] == 'No matching host could be found!'
+
+    @pytest.mark.parametrize(
+        'headers, expected',
+        (
+            pytest.param(
+                {
+                    'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+                    'REMOTE_HOST': 'my.proxy.example.org',
+                },
+                201,
+            ),
+            pytest.param(
+                {
+                    'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+                    'REMOTE_HOST': 'not-my-proxy.org',
+                },
+                400,
+            ),
+        ),
+    )
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_proxy_ip_allowed_list(self, job_template, admin_user, post, headers, expected):  # noqa: F811
+        job_template.inventory.hosts.create(name='my.proxy.example.org')
+
+        post(
+            url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+            data={'host_config_key': 'abcd'},
+            user=admin_user,
+            expect=expected,
+            **headers
+        )
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=[])
+    def test_no_proxy_trust_all_headers(self, job_template, admin_user, post):
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
+            'REMOTE_ADDR': 'bar',
+            'REMOTE_HOST': 'baz',
+        }
+        post(url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=201, **headers)
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_trusted_proxy(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar, my.proxy.example.org',
+        }
+
+        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
+            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
+                post(
+                    url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+                    data={'host_config_key': 'abcd'},
+                    user=admin_user,
+                    expect=201,
+                    **headers
+                )
+
+    @override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
+    def test_trusted_proxy_host_not_found(self, job_template, admin_user, post, rsa_keypair):  # noqa: F811
+        job_template.inventory.hosts.create(name='foobar')
+
+        headers = {
+            'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
+            'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz, my.proxy.example.org',
+            'REMOTE_ADDR': 'bar',
+            'REMOTE_HOST': 'baz',
+        }
+
+        with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
+            with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
+                post(
+                    url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+                    data={'host_config_key': 'abcd'},
+                    user=admin_user,
+                    expect=400,
+                    **headers
+                )
```
@@ -20,7 +20,7 @@ from awx.main.migrations._dab_rbac import setup_managed_role_definitions

# AWX
from awx.main.models.projects import Project
from awx.main.models.ha import Instance
from awx.main.models.ha import Instance, InstanceGroup

from rest_framework.test import (
    APIRequestFactory,
@@ -730,6 +730,11 @@ def jt_linked(organization, project, inventory, machine_credential, credential,
    return jt


@pytest.fixture
def instance_group():
    return InstanceGroup.objects.create(name="east")


@pytest.fixture
def workflow_job_template(organization):
    wjt = WorkflowJobTemplate.objects.create(name='test-workflow_job_template', organization=organization)
@@ -0,0 +1,23 @@
import pytest

from awx.main.access import InstanceGroupAccess, NotificationTemplateAccess

from ansible_base.rbac.models import RoleDefinition


@pytest.mark.django_db
def test_instance_group_object_role_delete(rando, instance_group, setup_managed_roles):
    """Basic functionality of IG object-level admin role function AAP-25506"""
    rd = RoleDefinition.objects.get(name='InstanceGroup Admin')
    rd.give_permission(rando, instance_group)
    access = InstanceGroupAccess(rando)
    assert access.can_delete(instance_group)


@pytest.mark.django_db
def test_notification_template_object_role_change(rando, notification_template, setup_managed_roles):
    """Basic functionality of NT object-level admin role function AAP-25493"""
    rd = RoleDefinition.objects.get(name='NotificationTemplate Admin')
    rd.give_permission(rando, notification_template)
    access = NotificationTemplateAccess(rando)
    assert access.can_change(notification_template, {'name': 'new name'})
@@ -46,6 +46,8 @@ def generate_fake_var(element):

def credential_kind(source):
    """Given the inventory source kind, return expected credential kind"""
    if source == 'openshift_virtualization':
        return 'kubernetes_bearer_token'
    return source.replace('ec2', 'aws')

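
For orientation, a quick sketch (not part of the diff) of the mapping the helper above produces; the expected values follow directly from its two branches:

    # Hypothetical usage of credential_kind() from the test helper above.
    assert credential_kind('ec2') == 'aws'  # 'ec2' sources expect an 'aws' credential
    assert credential_kind('openshift_virtualization') == 'kubernetes_bearer_token'
    assert credential_kind('gce') == 'gce'  # unchanged when no mapping applies
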
@@ -99,7 +99,9 @@ def test_notification_template_access_org_user(notification_template, user):
@pytest.mark.django_db
def test_notificaiton_template_orphan_access_org_admin(notification_template, organization, org_admin):
    notification_template.organization = None
    notification_template.save(update_fields=['organization'])
    access = NotificationTemplateAccess(org_admin)
    assert not org_admin.has_obj_perm(notification_template, 'change')
    assert not access.can_change(notification_template, {'organization': organization.id})

awx/main/utils/proxy.py (new file, 48 lines)
@@ -0,0 +1,48 @@
# Copyright (c) 2024 Ansible, Inc.
# All Rights Reserved.


# DRF
from rest_framework.request import Request


"""
Note that these methods operate on request.environ. This data is from uwsgi.
It is the source data from which request.headers (read-only) is constructed.
"""


def is_proxy_in_headers(request: Request, proxy_list: list[str], headers: list[str]) -> bool:
    """
    Determine if the request went through at least one proxy in the list.

    Example:
    request.environ = {
        "HTTP_X_FOO": "8.8.8.8, 192.168.2.1",
        "REMOTE_ADDR": "192.168.2.1",
        "REMOTE_HOST": "foobar"
    }
    proxy_list = ["192.168.2.1"]
    headers = ["HTTP_X_FOO", "REMOTE_ADDR", "REMOTE_HOST"]

    The above would return True since 192.168.2.1 is a value for the header HTTP_X_FOO

    request: The DRF/Django request. request.environ dict will be used for searching for proxies
    proxy_list: A list of known and trusted proxies; entries may be IPs or hostnames
    headers: A list of keys for which to consider values that may contain a proxy
    """

    remote_hosts = set()

    for header in headers:
        for value in request.environ.get(header, '').split(','):
            value = value.strip()
            if value:
                remote_hosts.add(value)

    return bool(remote_hosts.intersection(set(proxy_list)))


def delete_headers_starting_with_http(request: Request, headers: list[str]):
    for header in headers:
        if header.startswith('HTTP_'):
            request.environ.pop(header, None)
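
A minimal usage sketch of is_proxy_in_headers (not part of the diff; the request stand-in below only carries the environ dict the helper reads, so SimpleNamespace suffices):

    from types import SimpleNamespace

    fake_request = SimpleNamespace(environ={'HTTP_X_FOO': '8.8.8.8, 192.168.2.1', 'REMOTE_ADDR': '192.168.2.1'})

    # True: 192.168.2.1 appears among the inspected header values
    is_proxy_in_headers(fake_request, ['192.168.2.1'], ['HTTP_X_FOO', 'REMOTE_ADDR'])
    # False: no trusted proxy appears in any inspected header
    is_proxy_in_headers(fake_request, ['10.0.0.1'], ['HTTP_X_FOO', 'REMOTE_ADDR'])
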
@@ -492,6 +492,7 @@ CELERYBEAT_SCHEDULE = {
    'cleanup_images': {'task': 'awx.main.tasks.system.cleanup_images_and_files', 'schedule': timedelta(hours=3)},
    'cleanup_host_metrics': {'task': 'awx.main.tasks.host_metrics.cleanup_host_metrics', 'schedule': timedelta(hours=3, minutes=30)},
    'host_metric_summary_monthly': {'task': 'awx.main.tasks.host_metrics.host_metric_summary_monthly', 'schedule': timedelta(hours=4)},
    'periodic_resource_sync': {'task': 'awx.main.tasks.system.periodic_resource_sync', 'schedule': timedelta(minutes=15)},
}

# Django Caching Configuration
@@ -656,6 +657,10 @@ AWX_ANSIBLE_CALLBACK_PLUGINS = ""
# Automatically remove nodes that have missed their heartbeats after some time
AWX_AUTO_DEPROVISION_INSTANCES = False

# If False, do not allow creation of resources that are shared with the platform ingress
# e.g. organizations, teams, and users
ALLOW_LOCAL_RESOURCE_MANAGEMENT = True

# Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed'
# Note: This setting may be overridden by database settings.
PENDO_TRACKING_STATE = "off"
@@ -778,6 +783,11 @@ INSIGHTS_EXCLUDE_EMPTY_GROUPS = False
TERRAFORM_INSTANCE_ID_VAR = 'id'
TERRAFORM_EXCLUDE_EMPTY_GROUPS = True

# ------------------------
# OpenShift Virtualization
# ------------------------
OPENSHIFT_VIRTUALIZATION_EXCLUDE_EMPTY_GROUPS = True

# ---------------------
# ----- Custom -----
# ---------------------

awx/sso/conf.py (3004 lines changed; file diff suppressed because it is too large)
@@ -56,6 +56,10 @@ describe('<InventorySourceAdd />', () => {
        ['satellite6', 'Red Hat Satellite 6'],
        ['openstack', 'OpenStack'],
        ['rhv', 'Red Hat Virtualization'],
        [
          'openshift_virtualization',
          'Red Hat OpenShift Virtualization',
        ],
        ['controller', 'Red Hat Ansible Automation Platform'],
      ],
    },

@@ -23,6 +23,8 @@ const ansibleDocUrls = {
    'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
  terraform:
    'https://github.com/ansible-collections/cloud.terraform/blob/main/docs/cloud.terraform.terraform_state_inventory.rst',
  openshift_virtualization:
    'https://kubevirt.io/kubevirt.core/latest/plugins/kubevirt.html',
};

const getInventoryHelpTextStrings = () => ({
@@ -121,7 +123,7 @@ const getInventoryHelpTextStrings = () => ({
      <br />
      {value && (
        <div>
          {t`If you want the Inventory Source to update on launch , click on Update on Launch,
          and also go to `}
          <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
          {t`and click on Update Revision on Launch.`}
@@ -140,7 +142,7 @@ const getInventoryHelpTextStrings = () => ({
      <br />
      {value && (
        <div>
          {t`If you want the Inventory Source to update on launch , click on Update on Launch,
          and also go to `}
          <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
          {t`and click on Update Revision on Launch`}

@@ -26,6 +26,7 @@ import {
  TerraformSubForm,
  VMwareSubForm,
  VirtualizationSubForm,
  OpenShiftVirtualizationSubForm,
} from './InventorySourceSubForms';

const buildSourceChoiceOptions = (options) => {
@@ -231,6 +232,15 @@ const InventorySourceFormFields = ({
              sourceOptions={sourceOptions}
            />
          ),
          openshift_virtualization: (
            <OpenShiftVirtualizationSubForm
              autoPopulateCredential={
                !source?.id ||
                source?.source !== 'openshift_virtualization'
              }
              sourceOptions={sourceOptions}
            />
          ),
        }[sourceField.value]
      }
    </FormColumnLayout>

@@ -0,0 +1,64 @@
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';

import { t } from '@lingui/macro';
import { useConfig } from 'contexts/Config';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
  OptionsField,
  VerbosityField,
  EnabledVarField,
  EnabledValueField,
  HostFilterField,
  SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';

const OpenShiftVirtualizationSubForm = ({ autoPopulateCredential }) => {
  const helpText = getHelpText();
  const { setFieldValue, setFieldTouched } = useFormikContext();
  const [credentialField, credentialMeta, credentialHelpers] =
    useField('credential');
  const config = useConfig();

  const handleCredentialUpdate = useCallback(
    (value) => {
      setFieldValue('credential', value);
      setFieldTouched('credential', true, false);
    },
    [setFieldValue, setFieldTouched]
  );

  const docsBaseUrl = getDocsBaseUrl(config);
  return (
    <>
      <CredentialLookup
        credentialTypeNamespace="kubernetes_bearer_token"
        label={t`Credential`}
        helperTextInvalid={credentialMeta.error}
        isValid={!credentialMeta.touched || !credentialMeta.error}
        onBlur={() => credentialHelpers.setTouched()}
        onChange={handleCredentialUpdate}
        value={credentialField.value}
        required
        autoPopulate={autoPopulateCredential}
        validate={required(t`Select a value for this field`)}
      />
      <VerbosityField />
      <HostFilterField />
      <EnabledVarField />
      <EnabledValueField />
      <OptionsField />
      <SourceVarsField
        popoverContent={helpText.sourceVars(
          docsBaseUrl,
          'openshift_virtualization'
        )}
      />
    </>
  );
};

export default OpenShiftVirtualizationSubForm;
@@ -0,0 +1,65 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import VirtualizationSubForm from './VirtualizationSubForm';

jest.mock('../../../../api');

const initialValues = {
  credential: null,
  overwrite: false,
  overwrite_vars: false,
  source_path: '',
  source_project: null,
  source_script: null,
  source_vars: '---\n',
  update_cache_timeout: 0,
  update_on_launch: true,
  verbosity: 1,
};

describe('<VirtualizationSubForm />', () => {
  let wrapper;

  beforeEach(async () => {
    CredentialsAPI.read.mockResolvedValue({
      data: { count: 0, results: [] },
    });

    await act(async () => {
      wrapper = mountWithContexts(
        <Formik initialValues={initialValues}>
          <VirtualizationSubForm />
        </Formik>
      );
    });
  });

  afterAll(() => {
    jest.clearAllMocks();
  });

  test('should render subform fields', () => {
    expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
    expect(
      wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
    ).toHaveLength(1);
    expect(
      wrapper.find('VariablesField[label="Source variables"]')
    ).toHaveLength(1);
  });

  test('should make expected api calls', () => {
    expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
    expect(CredentialsAPI.read).toHaveBeenCalledWith({
      credential_type__namespace: 'rhv',
      order_by: 'name',
      page: 1,
      page_size: 5,
    });
  });
});
@@ -9,3 +9,4 @@ export { default as ControllerSubForm } from './ControllerSubForm';
export { default as TerraformSubForm } from './TerraformSubForm';
export { default as VMwareSubForm } from './VMwareSubForm';
export { default as VirtualizationSubForm } from './VirtualizationSubForm';
export { default as OpenShiftVirtualizationSubForm } from './OpenShiftVirtualizationSubForm';

@@ -120,7 +120,7 @@ function NotificationTemplatesList() {
        toolbarSearchColumns={[
          {
            name: t`Name`,
            key: 'name',
            key: 'name__icontains',
            isDefault: true,
          },
          {

@@ -42,7 +42,8 @@ options:
    source:
      description:
        - The source to use for this group.
      choices: [ "scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights" ]
      choices: [ "scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights", "terraform",
                 "openshift_virtualization" ]
      type: str
    source_path:
      description:
@@ -170,7 +171,22 @@ def main():
        #
        # How do we handle manual and file? The controller does not seem to be able to activate them
        #
        source=dict(choices=["scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights"]),
        source=dict(
            choices=[
                "scm",
                "ec2",
                "gce",
                "azure_rm",
                "vmware",
                "satellite6",
                "openstack",
                "rhv",
                "controller",
                "insights",
                "terraform",
                "openshift_virtualization",
            ]
        ),
        source_path=dict(),
        source_vars=dict(type='dict'),
        enabled_var=dict(),

@@ -317,7 +317,10 @@ class ApiV2(base.Base):
                if asset['natural_key']['type'] == 'project' and 'local_path' in post_data and _page['scm_type'] == post_data['scm_type']:
                    del post_data['local_path']

                _page = _page.put(post_data)
                if asset['natural_key']['type'] == 'user':
                    _page = _page.patch(**post_data)
                else:
                    _page = _page.put(post_data)
                changed = True
            except (exc.Common, AssertionError) as e:
                identifier = asset.get("name", None) or asset.get("username", None) or asset.get("hostname", None)

@@ -1,4 +1,4 @@
aiohttp>=3.8.6  # CVE-2023-47627
aiohttp>=3.9.4  # CVE-2024-30251
ansiconv==1.0.0  # UPGRADE BLOCKER: from 2013, consider replacing instead of upgrading
asciichartpy
asn1

@@ -1,6 +1,6 @@
adal==1.2.7
    # via msrestazure
aiohttp==3.9.3
aiohttp==3.9.5
    # via
    #   -r /awx_devel/requirements/requirements.in
    #   aiohttp-retry

@@ -4,6 +4,7 @@ awx_image: 'ghcr.io/ansible/awx_devel'
pg_port: 5432
pg_username: 'awx'
pg_database: 'awx'
pg_tls: false
control_plane_node_count: 1
minikube_container_group: false
receptor_socket_file: /var/run/awx-receptor/receptor.sock

@@ -5,6 +5,9 @@ DATABASES = {
        'NAME': "{{ pg_database }}",
        'USER': "{{ pg_username }}",
        'PASSWORD': "{{ pg_password }}",
{% if pg_tls|bool %}
        'OPTIONS': {'sslmode': 'require'},
{% endif %}
{% if enable_pgbouncer|bool %}
        'HOST': "pgbouncer",
        'PORT': "{{ pgbouncer_port }}",
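
For reference, a sketch of what this template renders to when pg_tls is true and pgbouncer is disabled (not part of the diff; the ENGINE, HOST, and PORT keys come from surrounding template lines not shown in this hunk and are assumptions here, and the password is a placeholder):

    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql',  # assumption: engine key from the surrounding template
            'NAME': "awx",
            'USER': "awx",
            'PASSWORD': "<pg_password>",  # placeholder
            'OPTIONS': {'sslmode': 'require'},  # emitted by the pg_tls branch above
            'HOST': "postgres",  # assumption: non-pgbouncer default host
            'PORT': "5432",
        }
    }
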
@@ -237,13 +237,24 @@ services:
    image: quay.io/sclorg/postgresql-15-c9s
    container_name: tools_postgres_1
    # additional logging settings for postgres can be found at https://www.postgresql.org/docs/current/runtime-config-logging.html
    command: run-postgresql -c log_destination=stderr -c log_min_messages=info -c log_min_duration_statement={{ pg_log_min_duration_statement|default(1000) }} -c max_connections={{ pg_max_connections|default(1024) }}
    command: >
      bash -c "
      {% if pg_tls|bool %}
      mkdir -p /opt/app-root/src/certs
      && openssl genrsa -out /opt/app-root/src/certs/tls.key 2048
      && openssl req -new -x509 -key /opt/app-root/src/certs/tls.key -out /opt/app-root/src/certs/tls.crt -subj '/CN=postgres'
      && chmod 600 /opt/app-root/src/certs/tls.crt /opt/app-root/src/certs/tls.key &&
      {% endif %}
      run-postgresql -c log_destination=stderr -c log_min_messages=info -c log_min_duration_statement={{ pg_log_min_duration_statement|default(1000) }} -c max_connections={{ pg_max_connections|default(1024) }}"
    environment:
      POSTGRESQL_USER: {{ pg_username }}
      POSTGRESQL_DATABASE: {{ pg_database }}
      POSTGRESQL_PASSWORD: {{ pg_password }}
    volumes:
      - "awx_db_15:/var/lib/pgsql/data"
{% if pg_tls|bool %}
      - "../../docker-compose/pgssl.conf:/opt/app-root/src/postgresql-cfg/pgssl.conf"
{% endif %}
    networks:
      - awx
    ports:

tools/docker-compose/pgssl.conf (new file, 5 lines)
@@ -0,0 +1,5 @@
ssl = on
ssl_cert_file = '/opt/app-root/src/certs/tls.crt'   # server certificate
ssl_key_file = '/opt/app-root/src/certs/tls.key'    # server private key
#ssl_ca_file                                        # trusted certificate authorities
#ssl_crl_file                                       # certificates revoked by certificate authorities
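
A quick way to confirm the TLS path end to end once the stack is up (a sketch, assuming psycopg 3 is installed and the docker-compose defaults above apply; the password is a placeholder):

    import psycopg  # psycopg 3

    # sslmode=require refuses plaintext connections, so a successful connect
    # implies the pgssl.conf settings above took effect.
    with psycopg.connect("host=localhost port=5432 dbname=awx user=awx password=<pg_password> sslmode=require") as conn:
        with conn.cursor() as cur:
            cur.execute("SHOW ssl")
            print(cur.fetchone())  # expected: ('on',)
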
tools/scripts/ig-hotfix/.gitignore (new file, vendored, 7 lines)
@@ -0,0 +1,7 @@
*~
customer-backup.tar.*
*.db
*.log
*.dot
*.png
*.tar.*
tools/scripts/ig-hotfix/README.md (new file, 36 lines)
@@ -0,0 +1,36 @@
# Hotfix for Instance Groups and Roles after backup/restore corruption #

## role_check.py ##

`awx-manage shell < role_check.py 2> role_check.log > fix.py`

This checks the roles and resources on the system, and constructs a
fix.py file that will change the linkages of the roles that it finds
are incorrect. The command line above also redirects logging output to
a file. The fix.py file (and the log file) can then be examined (and
potentially modified) before performing the actual fix.

`awx-manage shell < fix.py > fix.log 2>&1`

This performs the fix, while redirecting all output to another log
file. Ideally, this file should wind up being empty after execution
completes.

`awx-manage shell < role_check.py 2> role_check2.log > fix2.py`

Re-run the check script in order to see that there are no remaining
problems. Ideally the log file will only consist of the equal-sign
lines.


## foreignkeys.sql ##

This script uses Postgres internals to determine all of the foreign
keys that cross the boundaries established by our (old) backup/restore
logic. Users have no need to run this.


## scenarios/test*.py ##

These files were used to set up corruption similar to that caused by
faulty backup/restore, for testing purposes. Do not use.
tools/scripts/ig-hotfix/foreignkeys.sql (new file, 38 lines)
@@ -0,0 +1,38 @@
DO $$
DECLARE
    -- add table names here when they get excluded from main / included in topology dump
    topology text[] := ARRAY['main_instance', 'main_instancegroup', 'main_instancegroup_instances'];

    -- add table names here when they are handled by the special-case mapping
    mapping text[] := ARRAY['main_organizationinstancegroupmembership', 'main_unifiedjobtemplateinstancegroupmembership', 'main_inventoryinstancegroupmembership'];
BEGIN
    CREATE TABLE tmp_fk_from AS (
        SELECT DISTINCT
            tc.table_name,
            ccu.table_name AS foreign_table_name
        FROM information_schema.table_constraints AS tc
        JOIN information_schema.constraint_column_usage AS ccu
            ON ccu.constraint_name = tc.constraint_name
        WHERE tc.constraint_type = 'FOREIGN KEY'
            AND tc.table_name = ANY (topology)
            AND NOT ccu.table_name = ANY (topology || mapping)
    );

    CREATE TABLE tmp_fk_into AS (
        SELECT DISTINCT
            tc.table_name,
            ccu.table_name AS foreign_table_name
        FROM information_schema.table_constraints AS tc
        JOIN information_schema.constraint_column_usage AS ccu
            ON ccu.constraint_name = tc.constraint_name
        WHERE tc.constraint_type = 'FOREIGN KEY'
            AND ccu.table_name = ANY (topology)
            AND NOT tc.table_name = ANY (topology || mapping)
    );
END $$;

SELECT * FROM tmp_fk_from;
SELECT * FROM tmp_fk_into;

DROP TABLE tmp_fk_from;
DROP TABLE tmp_fk_into;
tools/scripts/ig-hotfix/role_check.py (new file, 204 lines)
@@ -0,0 +1,204 @@
from collections import defaultdict
import json
import sys

from django.contrib.contenttypes.models import ContentType
from django.db.models.fields.related_descriptors import ManyToManyDescriptor

from awx.main.fields import ImplicitRoleField
from awx.main.models.rbac import Role


team_ct = ContentType.objects.get(app_label='main', model='team')

crosslinked = defaultdict(lambda: defaultdict(dict))
crosslinked_parents = defaultdict(list)
orphaned_roles = set()


def resolve(obj, path):
    fname, _, path = path.partition('.')
    new_obj = getattr(obj, fname, None)
    if new_obj is None:
        return set()
    if not path:
        return {
            new_obj,
        }

    if isinstance(new_obj, ManyToManyDescriptor):
        return {x for o in new_obj.all() for x in resolve(o, path)}

    return resolve(new_obj, path)


for ct in ContentType.objects.order_by('id'):
    cls = ct.model_class()
    if cls is None:
        sys.stderr.write(f"{ct!r} does not have a corresponding model class in the codebase. Skipping.\n")
        continue
    if not any(isinstance(f, ImplicitRoleField) for f in cls._meta.fields):
        continue
    for obj in cls.objects.all():
        for f in cls._meta.fields:
            if not isinstance(f, ImplicitRoleField):
                continue
            r_id = getattr(obj, f'{f.name}_id', None)
            try:
                r = getattr(obj, f.name, None)
            except Role.DoesNotExist:
                sys.stderr.write(f"{cls} id={obj.id} {f.name} points to Role id={r_id}, which is not in the database.\n")
                crosslinked[ct.id][obj.id][f'{f.name}_id'] = None
                continue
            if not r:
                sys.stderr.write(f"{cls} id={obj.id} {f.name} does not have a Role object\n")
                crosslinked[ct.id][obj.id][f'{f.name}_id'] = None
                continue
            if r.content_object != obj:
                sys.stderr.write(
                    f"{cls.__name__} id={obj.id} {f.name} is pointing to a Role that is assigned to a different object: role.id={r.id} {r.content_type!r} {r.object_id} {r.role_field}\n"
                )
                crosslinked[ct.id][obj.id][f'{f.name}_id'] = None
                continue


sys.stderr.write('===================================\n')
for r in Role.objects.exclude(role_field__startswith='system_').order_by('id'):

    # The ancestor list should be a superset of both parents and implicit_parents.
    # Also, parents should be a superset of implicit_parents.
    parents = set(r.parents.values_list('id', flat=True))
    ancestors = set(r.ancestors.values_list('id', flat=True))
    implicit = set(json.loads(r.implicit_parents))

    if not implicit:
        sys.stderr.write(f"Role id={r.id} has no implicit_parents\n")
    if not parents <= ancestors:
        sys.stderr.write(f"Role id={r.id} has parents that are not in the ancestor list: {parents - ancestors}\n")
        crosslinked[r.content_type_id][r.object_id][f'{r.role_field}_id'] = r.id
    if not implicit <= parents:
        sys.stderr.write(f"Role id={r.id} has implicit_parents that are not in the parents list: {implicit - parents}\n")
        crosslinked[r.content_type_id][r.object_id][f'{r.role_field}_id'] = r.id
    if not implicit <= ancestors:
        sys.stderr.write(f"Role id={r.id} has implicit_parents that are not in the ancestor list: {implicit - ancestors}\n")
        crosslinked[r.content_type_id][r.object_id][f'{r.role_field}_id'] = r.id

    # Check that the Role's generic foreign key points to a legitimate object
    if not r.content_object:
        sys.stderr.write(f"Role id={r.id} is missing a valid content_object: {r.content_type!r} {r.object_id} {r.role_field}\n")
        orphaned_roles.add(r.id)
        continue

    # Check the resource's role field parents for consistency with Role.parents.all().
    f = r.content_object._meta.get_field(r.role_field)
    f_parent = (
        set(f.parent_role)
        if isinstance(f.parent_role, list)
        else {
            f.parent_role,
        }
    )
    dotted = {x for p in f_parent if '.' in p for x in resolve(r.content_object, p)}
    plus = set()
    for p in r.parents.all():
        if p.singleton_name:
            if f'singleton:{p.singleton_name}' not in f_parent:
                plus.add(p)
        elif p.content_type == team_ct:
            # Team has been granted this role; probably legitimate.
            if p.role_field in ('admin_role', 'member_role'):
                continue
        elif (p.content_type, p.object_id) == (r.content_type, r.object_id):
            if p.role_field not in f_parent:
                plus.add(p)
        elif p in dotted:
            continue
        else:
            plus.add(p)

    if plus:
        plus_repr = [f"{x.content_type!r} {x.object_id} {x.role_field}" for x in plus]
        sys.stderr.write(f"Role id={r.id} has cross-linked parents: {plus_repr}\n")
        crosslinked_parents[r.id].extend(x.id for x in plus)

    try:
        rev = getattr(r.content_object, r.role_field, None)
    except Role.DoesNotExist:
        sys.stderr.write(f"Role id={r.id} {r.content_type!r} {r.object_id} {r.role_field} points at an object with a broken role.\n")
        crosslinked[r.content_type_id][r.object_id][f'{r.role_field}_id'] = r.id
        continue
    if rev is None or r.id != rev.id:
        if rev and (r.content_type_id, r.object_id, r.role_field) == (rev.content_type_id, rev.object_id, rev.role_field):
            sys.stderr.write(
                f"Role id={r.id} {r.content_type!r} {r.object_id} {r.role_field} is an orphaned duplicate of Role id={rev.id}, which is actually being used by the assigned resource\n"
            )
            orphaned_roles.add(r.id)
        elif not rev:
            sys.stderr.write(f"Role id={r.id} {r.content_type!r} {r.object_id} {r.role_field} is pointing to an object currently using no role\n")
            crosslinked[r.content_type_id][r.object_id][f'{r.role_field}_id'] = r.id
        else:
            sys.stderr.write(
                f"Role id={r.id} {r.content_type!r} {r.object_id} {r.role_field} is pointing to an object using a different role: id={rev.id} {rev.content_type!r} {rev.object_id} {rev.role_field}\n"
            )
            crosslinked[r.content_type_id][r.object_id][f'{r.role_field}_id'] = r.id
        continue


sys.stderr.write('===================================\n')


print(
    f"""\
from collections import Counter

from django.contrib.contenttypes.models import ContentType

from awx.main.fields import ImplicitRoleField
from awx.main.models.rbac import Role


delete_counts = Counter()
update_counts = Counter()

"""
)


print("# Resource objects that are pointing to the wrong Role. Some of these")
print("# do not have corresponding Roles anywhere, so delete the foreign key.")
print("# For those, new Roles will be constructed upon save.\n")
print("queue = set()\n")
for ct, objs in crosslinked.items():
    print(f"cls = ContentType.objects.get(id={ct}).model_class()\n")
    for obj, kv in objs.items():
        print(f"c = cls.objects.filter(id={obj}).update(**{kv!r})")
        print("update_counts.update({cls._meta.label: c})")
        print(f"queue.add((cls, {obj}))")

print("\n# Role objects that are assigned to objects that do not exist")
for r in orphaned_roles:
    print(f"c = Role.objects.filter(id={r}).update(object_id=None)")
    print("update_counts.update({'main.Role': c})")
    print(f"_, c = Role.objects.filter(id={r}).delete()")
    print("delete_counts.update(c)")

print('\n\n')
for child, parents in crosslinked_parents.items():
    print(f"r = Role.objects.get(id={child})")
    print(f"r.parents.remove(*Role.objects.filter(id__in={parents!r}))")
    print(f"queue.add((r.content_object.__class__, r.object_id))")

print('\n\n')
print('print("Objects deleted:", dict(delete_counts.most_common()))')
print('print("Objects updated:", dict(update_counts.most_common()))')

print("\n\nfor cls, obj_id in queue:")
print("    role_fields = [f for f in cls._meta.fields if isinstance(f, ImplicitRoleField)]")
print("    obj = cls.objects.get(id=obj_id)")
print("    for f in role_fields:")
print("        r = getattr(obj, f.name, None)")
print("        if r is not None:")
print("            print(f'updating implicit parents on Role {r.id}')")
print("            r.implicit_parents = '[]'")
print("            r.save()")
print("    obj.save()")
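
For orientation, the kind of fix.py the script emits (a sketch assembled from the print statements above; the ids and the 'use_role_id' key are made up for illustration):

    # hypothetical generated output for one cross-linked object
    cls = ContentType.objects.get(id=42).model_class()

    c = cls.objects.filter(id=7).update(**{'use_role_id': None})
    update_counts.update({cls._meta.label: c})
    queue.add((cls, 7))

Running that through awx-manage shell clears the bad foreign key; the trailing queue loop then re-saves each object so a fresh Role is constructed.
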
tools/scripts/ig-hotfix/scenarios/test.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from django.db import connection
from awx.main.models import InstanceGroup

InstanceGroup.objects.filter(name__in=('green', 'yellow', 'red')).delete()

green = InstanceGroup.objects.create(name='green')
red = InstanceGroup.objects.create(name='red')
yellow = InstanceGroup.objects.create(name='yellow')

for ig in InstanceGroup.objects.all():
    print((ig.id, ig.name, ig.use_role_id))

with connection.cursor() as cursor:
    cursor.execute("UPDATE main_instancegroup SET use_role_id = NULL WHERE name = 'red'")
    cursor.execute(f"UPDATE main_instancegroup SET use_role_id = {green.use_role_id} WHERE name = 'yellow'")

print("=====================================")
for ig in InstanceGroup.objects.all():
    print((ig.id, ig.name, ig.use_role_id))
tools/scripts/ig-hotfix/scenarios/test2.py (new file, 20 lines)
@@ -0,0 +1,20 @@
from django.db import connection
from awx.main.models import InstanceGroup

InstanceGroup.objects.filter(name__in=('green', 'yellow', 'red')).delete()

green = InstanceGroup.objects.create(name='green')
red = InstanceGroup.objects.create(name='red')
yellow = InstanceGroup.objects.create(name='yellow')

for ig in InstanceGroup.objects.all():
    print((ig.id, ig.name, ig.use_role_id))

with connection.cursor() as cursor:
    cursor.execute(f"UPDATE main_rbac_roles SET object_id = NULL WHERE id = {red.use_role_id}")
    cursor.execute("UPDATE main_instancegroup SET use_role_id = NULL WHERE name = 'red'")
    cursor.execute(f"UPDATE main_instancegroup SET use_role_id = {green.use_role_id} WHERE name = 'yellow'")

print("=====================================")
for ig in InstanceGroup.objects.all():
    print((ig.id, ig.name, ig.use_role_id))
tools/scripts/ig-hotfix/scenarios/test3.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from django.db import connection
from awx.main.models import InstanceGroup

InstanceGroup.objects.filter(name__in=('green', 'yellow', 'red', 'blue')).delete()

green = InstanceGroup.objects.create(name='green')
red = InstanceGroup.objects.create(name='red')
yellow = InstanceGroup.objects.create(name='yellow')
blue = InstanceGroup.objects.create(name='blue')

for ig in InstanceGroup.objects.all():
    print((ig.id, ig.name, ig.use_role_id))

with connection.cursor() as cursor:
    cursor.execute("ALTER TABLE main_instancegroup DROP CONSTRAINT main_instancegroup_use_role_id_48ea7ecc_fk_main_rbac_roles_id")

    cursor.execute(f"UPDATE main_rbac_roles SET object_id = NULL WHERE id = {red.use_role_id}")
    cursor.execute(f"DELETE FROM main_rbac_roles_parents WHERE from_role_id = {blue.use_role_id} OR to_role_id = {blue.use_role_id}")
    cursor.execute(f"DELETE FROM main_rbac_role_ancestors WHERE ancestor_id = {blue.use_role_id} OR descendent_id = {blue.use_role_id}")
    cursor.execute(f"DELETE FROM main_rbac_roles WHERE id = {blue.use_role_id}")
    cursor.execute("UPDATE main_instancegroup SET use_role_id = NULL WHERE name = 'red'")
    cursor.execute(f"UPDATE main_instancegroup SET use_role_id = {green.use_role_id} WHERE name = 'yellow'")

    cursor.execute(
        "ALTER TABLE main_instancegroup ADD CONSTRAINT main_instancegroup_use_role_id_48ea7ecc_fk_main_rbac_roles_id FOREIGN KEY (use_role_id) REFERENCES public.main_rbac_roles(id) DEFERRABLE INITIALLY DEFERRED NOT VALID"
    )

print("=====================================")
for ig in InstanceGroup.objects.all():
    print((ig.id, ig.name, ig.use_role_id))
tools/scripts/ig-hotfix/scenarios/test4.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from django.db import connection
from awx.main.models import InstanceGroup

InstanceGroup.objects.filter(name__in=('green', 'yellow', 'red')).delete()

green = InstanceGroup.objects.create(name='green')
red = InstanceGroup.objects.create(name='red')
yellow = InstanceGroup.objects.create(name='yellow')

for ig in InstanceGroup.objects.all():
    print((ig.id, ig.name, ig.use_role_id))

with connection.cursor() as cursor:
    cursor.execute("UPDATE main_instancegroup SET use_role_id = NULL WHERE name = 'red'")
    cursor.execute(f"UPDATE main_instancegroup SET use_role_id = {green.use_role_id} WHERE name = 'yellow'")

green.refresh_from_db()
red.refresh_from_db()
yellow.refresh_from_db()
green.save()
red.save()
yellow.save()

print("=====================================")
for ig in InstanceGroup.objects.all():
    print((ig.id, ig.name, ig.use_role_id))
@@ -25,6 +25,7 @@ SOSREPORT_CONTROLLER_COMMANDS = [
    "ls -ll /var/run/awx-receptor",  # list contents of directory where receptor socket should be
    "ls -ll /etc/receptor",
    "receptorctl --socket /var/run/awx-receptor/receptor.sock status",  # Get information about the status of the mesh
    "receptorctl --socket /var/run/awx-receptor/receptor.sock work list",  # Get list of receptor work units
    "umask -p",  # check current umask
]
