Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 03:54:44 -03:30)
Compare commits
96 Commits
| SHA1 |
|---|
| 4738c8333a |
| 13dcea0afd |
| bc2d339981 |
| bef9ef10bb |
| 8645fe5c57 |
| b93aa20362 |
| 4bbfc8a946 |
| 2c8eef413b |
| d5bad1a533 |
| f6c0effcb2 |
| 31a086b11a |
| d94f766fcb |
| a7113549eb |
| bfd811f408 |
| 030704a9e1 |
| c312d9bce3 |
| aadcc217eb |
| 345c1c11e9 |
| 2c3a7fafc5 |
| dbcd32a1d9 |
| d45e258a78 |
| d16b69a102 |
| 8b4efbc973 |
| 4cb061e7db |
| 31db6a1447 |
| ad9d5904d8 |
| b837d549ff |
| 9e22865d2e |
| ee3e3e1516 |
| 4a8f6e45f8 |
| 6a317cca1b |
| d67af79451 |
| fe77fda7b2 |
| f613b76baa |
| 054cbe69d7 |
| 87e9dcb6d7 |
| c8829b057e |
| a0b376a6ca |
| d675207f99 |
| 20504042c9 |
| 0e87e97820 |
| 1f154742df |
| 85fc81aab1 |
| 5cfeeb3e87 |
| a8c07b06d8 |
| 53c5feaf6b |
| 6f57aaa8f5 |
| bea74a401d |
| 54e85813c8 |
| b69ed08fe5 |
| de25408a23 |
| b17f0a188b |
| fb860d76ce |
| 451f20ce0f |
| c1dc0c7b86 |
| d65ea2a3d5 |
| 8827ae7554 |
| 4915262af1 |
| d43c91e1a5 |
| b470ca32af |
| 793777bec7 |
| 6dc4a4508d |
| cf09a4220d |
| 659c3b64de |
| 37ad690d09 |
| 7845ec7e01 |
| a15bcf1d55 |
| 7b3fb2c2a8 |
| 6df47c8449 |
| cae42653bf |
| da46a29f40 |
| 0eb465531c |
| d0fe0ed796 |
| ceafa14c9d |
| 08e1454098 |
| 776b661fb3 |
| af6ccdbde5 |
| 559ab3564b |
| 208ef0ce25 |
| c3d9aa54d8 |
| 66efe7198a |
| adf930ee42 |
| 892410477a |
| 0d4f653794 |
| 8de8f6dce2 |
| fc9064e27f |
| 7de350dc3e |
| d4bdaad4d8 |
| a9b2ffa3e9 |
| 1b8d409043 |
| da2bccf5a8 |
| a2f083bd8e |
| 4d641b6cf5 |
| 439c3f0c23 |
| 946bbe3560 |
| 20f054d600 |
.github/workflows/promote.yml (vendored, 15 changes)
@@ -29,7 +29,7 @@ jobs:
- name: Set GitHub Env vars if release event
if: ${{ github.event_name == 'release' }}
run: |
echo "TAG_NAME=${{ env.TAG_NAME }}" >> $GITHUB_ENV
echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV

- name: Checkout awx
uses: actions/checkout@v3

@@ -60,15 +60,18 @@ jobs:
COLLECTION_VERSION: ${{ env.TAG_NAME }}
COLLECTION_TEMPLATE_VERSION: true
run: |
sudo apt-get install jq
make build_collection
curl_with_redirects=$(curl --head -sLw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
curl_without_redirects=$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
if [[ "$curl_with_redirects" == "302" ]] || [[ "$curl_without_redirects" == "302" ]]; then
count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
if [[ "$count" == "1" ]]; then
echo "Galaxy release already done";
else
elif [[ "$count" == "0" ]]; then
ansible-galaxy collection publish \
--token=${{ secrets.GALAXY_TOKEN }} \
awx_collection_build/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz;
awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
else
echo "Unexpected count from galaxy search: $count";
exit 1;
fi

- name: Set official pypi info
@@ -11,6 +11,8 @@ ignore: |
# django template files
awx/api/templates/instance_install_bundle/**
.readthedocs.yaml
tools/loki
tools/otel

extends: default
Makefile (28 changes)
@@ -47,8 +47,14 @@ VAULT ?= false
VAULT_TLS ?= false
# If set to true docker-compose will also start a tacacs+ instance
TACACS ?= false
# If set to true docker-compose will also start an OpenTelemetry Collector instance
OTEL ?= false
# If set to true docker-compose will also start a Loki instance
LOKI ?= false
# If set to true docker-compose will install editable dependencies
EDITABLE_DEPENDENCIES ?= false
# If set to true, use tls for postgres connection
PG_TLS ?= false

VENV_BASE ?= /var/lib/awx/venv

@@ -58,6 +64,9 @@ DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)

# Common command to use for running ansible-playbook
ANSIBLE_PLAYBOOK ?= ansible-playbook -e ansible_python_interpreter=$(PYTHON)

RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel

# Python packages to install only from source (not from binary wheels)

@@ -65,7 +74,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
# These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37

NAME ?= awx

@@ -362,7 +371,7 @@ symlink_collection:
ln -s $(shell pwd)/awx_collection $(COLLECTION_INSTALL)

awx_collection_build: $(shell find awx_collection -type f)
ansible-playbook -i localhost, awx_collection/tools/template_galaxy.yml \
$(ANSIBLE_PLAYBOOK) -i localhost, awx_collection/tools/template_galaxy.yml \
-e collection_package=$(COLLECTION_PACKAGE) \
-e collection_namespace=$(COLLECTION_NAMESPACE) \
-e collection_version=$(COLLECTION_VERSION) \

@@ -516,10 +525,10 @@ endif

docker-compose-sources: .git/hooks/pre-commit
@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
fi;

ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
-e awx_image=$(DEV_DOCKER_TAG_BASE)/awx_devel \
-e awx_image_tag=$(COMPOSE_TAG) \
-e receptor_image=$(RECEPTOR_IMAGE) \

@@ -535,12 +544,15 @@ docker-compose-sources: .git/hooks/pre-commit
-e enable_vault=$(VAULT) \
-e vault_tls=$(VAULT_TLS) \
-e enable_tacacs=$(TACACS) \
-e enable_otel=$(OTEL) \
-e enable_loki=$(LOKI) \
-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
-e pg_tls=$(PG_TLS) \
$(EXTRA_SOURCES_ANSIBLE_OPTS)

docker-compose: awx/projects docker-compose-sources
ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
$(ANSIBLE_PLAYBOOK) -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
-e enable_vault=$(VAULT) \
-e vault_tls=$(VAULT_TLS) \
-e enable_ldap=$(LDAP); \

@@ -583,7 +595,7 @@ docker-compose-container-group-clean:
.PHONY: Dockerfile.dev
## Generate Dockerfile.dev for awx_devel image
Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
ansible-playbook tools/ansible/dockerfile.yml \
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
-e dockerfile_name=Dockerfile.dev \
-e build_dev=True \
-e receptor_image=$(RECEPTOR_IMAGE)

@@ -658,7 +670,7 @@ version-for-buildyml:
.PHONY: Dockerfile
## Generate Dockerfile for awx image
Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
ansible-playbook tools/ansible/dockerfile.yml \
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
-e receptor_image=$(RECEPTOR_IMAGE) \
-e headless=$(HEADLESS)

@@ -688,7 +700,7 @@ awx-kube-buildx: Dockerfile
.PHONY: Dockerfile.kube-dev
## Generate Docker.kube-dev for awx_kube_devel image
Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
ansible-playbook tools/ansible/dockerfile.yml \
$(ANSIBLE_PLAYBOOK) tools/ansible/dockerfile.yml \
-e dockerfile_name=Dockerfile.kube-dev \
-e kube_dev=True \
-e template_dest=_build_kube_dev \
@@ -33,8 +33,10 @@ from rest_framework.negotiation import DefaultContentNegotiation
# django-ansible-base
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
from ansible_base.lib.utils.models import get_all_field_names
from ansible_base.lib.utils.requests import get_remote_host
from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
from ansible_base.rbac.permission_registry import permission_registry
from ansible_base.jwt_consumer.common.util import validate_x_trusted_proxy_header

# AWX
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate

@@ -42,6 +44,7 @@ from awx.main.models.rbac import give_creator_permissions
from awx.main.access import optimize_queryset
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
from awx.main.utils.licensing import server_product_name
from awx.main.utils.proxy import is_proxy_in_headers, delete_headers_starting_with_http
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning

@@ -93,8 +96,9 @@ class LoggedLoginView(auth_views.LoginView):
def post(self, request, *args, **kwargs):
ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
if request.user.is_authenticated:
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
ret.set_cookie(
'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
)

@@ -103,12 +107,15 @@ class LoggedLoginView(auth_views.LoginView):
return ret
else:
if 'username' in self.request.POST:
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
ret.status_code = 401
return ret

class LoggedLogoutView(auth_views.LogoutView):

success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()

def dispatch(self, request, *args, **kwargs):
original_user = getattr(request, 'user', None)
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)

@@ -148,22 +155,23 @@ class APIView(views.APIView):
Store the Django REST Framework Request object as an attribute on the
normal Django request, store time the request started.
"""
remote_headers = ['REMOTE_ADDR', 'REMOTE_HOST']

self.time_started = time.time()
if getattr(settings, 'SQL_DEBUG', False):
self.queries_before = len(connection.queries)

if 'HTTP_X_TRUSTED_PROXY' in request.environ:
if validate_x_trusted_proxy_header(request.environ['HTTP_X_TRUSTED_PROXY']):
remote_headers = settings.REMOTE_HOST_HEADERS
else:
logger.warning("Request appeared to be a trusted upstream proxy but failed to provide a matching shared secret.")

# If there are any custom headers in REMOTE_HOST_HEADERS, make sure
# they respect the allowed proxy list
if all(
[
settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
]
):
for custom_header in settings.REMOTE_HOST_HEADERS:
if custom_header.startswith('HTTP_'):
request.environ.pop(custom_header, None)
if settings.PROXY_IP_ALLOWED_LIST:
if not is_proxy_in_headers(self.request, settings.PROXY_IP_ALLOWED_LIST, remote_headers):
delete_headers_starting_with_http(request, settings.REMOTE_HOST_HEADERS)

drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
request.drf_request = drf_request

@@ -208,11 +216,12 @@ class APIView(views.APIView):
return response

if response.status_code >= 400:
ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
msg_data = {
'status_code': response.status_code,
'user_name': request.user,
'url_path': request.path,
'remote_addr': request.META.get('REMOTE_ADDR', None),
'remote_addr': ip,
}

if type(response.data) is dict:
@@ -5381,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
)

def get_body(self, obj):
if obj.notification_type in ('webhook', 'pagerduty'):
if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
if isinstance(obj.body, dict):
if 'body' in obj.body:
return obj.body['body']

@@ -5403,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
def to_representation(self, obj):
ret = super(NotificationSerializer, self).to_representation(obj)

if obj.notification_type == 'webhook':
if obj.notification_type in ('webhook', 'awssns'):
ret.pop('subject')
if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
ret.pop('body')
return ret
@@ -61,7 +61,9 @@ import pytz
from wsgiref.util import FileWrapper

# django-ansible-base
from ansible_base.lib.utils.requests import get_remote_hosts
from ansible_base.rbac.models import RoleEvaluation, ObjectRole
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

# AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields

@@ -128,6 +130,7 @@ from awx.api.views.mixin import (
from awx.api.pagination import UnifiedJobEventPagination
from awx.main.utils import set_environ

logger = logging.getLogger('awx.api.views')

@@ -710,16 +713,81 @@ class AuthView(APIView):
return Response(data)

def immutablesharedfields(cls):
'''
Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.

Works by overriding these view methods:
- create
- delete
- perform_update
create and delete are overridden to raise a PermissionDenied exception.
perform_update is overridden to check if any shared fields are being modified,
and raise a PermissionDenied exception if so.
'''
# create instead of perform_create because some of our views
# override create instead of perform_create
if hasattr(cls, 'create'):
cls.original_create = cls.create

@functools.wraps(cls.create)
def create_wrapper(*args, **kwargs):
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
return cls.original_create(*args, **kwargs)
raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})

cls.create = create_wrapper

if hasattr(cls, 'delete'):
cls.original_delete = cls.delete

@functools.wraps(cls.delete)
def delete_wrapper(*args, **kwargs):
if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
return cls.original_delete(*args, **kwargs)
raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})

cls.delete = delete_wrapper

if hasattr(cls, 'perform_update'):
cls.original_perform_update = cls.perform_update

@functools.wraps(cls.perform_update)
def update_wrapper(*args, **kwargs):
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
view, serializer = args
instance = view.get_object()
if instance:
if isinstance(instance, models.Organization):
shared_fields = OrganizationType._declared_fields.keys()
elif isinstance(instance, models.User):
shared_fields = UserType._declared_fields.keys()
elif isinstance(instance, models.Team):
shared_fields = TeamType._declared_fields.keys()
attrs = serializer.validated_data
for field in shared_fields:
if field in attrs and getattr(instance, field) != attrs[field]:
raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
return cls.original_perform_update(*args, **kwargs)

cls.perform_update = update_wrapper

return cls

@immutablesharedfields
class TeamList(ListCreateAPIView):
model = models.Team
serializer_class = serializers.TeamSerializer

@immutablesharedfields
class TeamDetail(RetrieveUpdateDestroyAPIView):
model = models.Team
serializer_class = serializers.TeamSerializer

@immutablesharedfields
class TeamUsersList(BaseUsersList):
model = models.User
serializer_class = serializers.UserSerializer

@@ -1101,6 +1169,7 @@ class ProjectCopy(CopyAPIView):
copy_return_serializer_class = serializers.ProjectSerializer

@immutablesharedfields
class UserList(ListCreateAPIView):
model = models.User
serializer_class = serializers.UserSerializer

@@ -1271,7 +1340,16 @@ class UserRolesList(SubListAttachDetachAPIView):
user = get_object_or_400(models.User, pk=self.kwargs['pk'])
role = get_object_or_400(models.Role, pk=sub_id)

credential_content_type = ContentType.objects.get_for_model(models.Credential)
content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
# Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
for model in [models.Organization, models.Team]:
ct = content_types[model]
if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
return Response(data, status=status.HTTP_403_FORBIDDEN)

credential_content_type = content_types[models.Credential]
if role.content_type == credential_content_type:
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))

@@ -1343,6 +1421,7 @@ class UserActivityStreamList(SubListAPIView):
return qs.filter(Q(actor=parent) | Q(user__in=[parent]))

@immutablesharedfields
class UserDetail(RetrieveUpdateDestroyAPIView):
model = models.User
serializer_class = serializers.UserSerializer

@@ -2692,12 +2771,7 @@ class JobTemplateCallback(GenericAPIView):
host for the current request.
"""
# Find the list of remote host names/IPs to check.
remote_hosts = set()
for header in settings.REMOTE_HOST_HEADERS:
for value in self.request.META.get(header, '').split(','):
value = value.strip()
if value:
remote_hosts.add(value)
remote_hosts = set(get_remote_hosts(self.request))
# Add the reverse lookup of IP addresses.
for rh in list(remote_hosts):
try:

@@ -4295,7 +4369,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
user = get_object_or_400(models.User, pk=sub_id)
role = self.get_parent_object()

credential_content_type = ContentType.objects.get_for_model(models.Credential)
content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
for model in [models.Organization, models.Team]:
ct = content_types[model]
if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
return Response(data, status=status.HTTP_403_FORBIDDEN)

credential_content_type = content_types[models.Credential]
if role.content_type == credential_content_type:
if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -53,15 +53,18 @@ from awx.api.serializers import (
CredentialSerializer,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
from awx.api.views import immutablesharedfields

logger = logging.getLogger('awx.api.views.organization')

@immutablesharedfields
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
model = Organization
serializer_class = OrganizationSerializer

@immutablesharedfields
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
model = Organization
serializer_class = OrganizationSerializer

@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
relationship = 'inventories'

@immutablesharedfields
class OrganizationUsersList(BaseUsersList):
model = User
serializer_class = UserSerializer

@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
ordering = ('username',)

@immutablesharedfields
class OrganizationAdminsList(BaseUsersList):
model = User
serializer_class = UserSerializer

@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
parent_key = 'organization'

@immutablesharedfields
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
model = Team
serializer_class = TeamSerializer
@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')

@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False

def test_empty_setting(settings, mocker):

@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
with pytest.raises(AttributeError):
settings.AWX_SOME_SETTING
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
with pytest.raises(AttributeError):
settings.AWX_SOME_SETTING
assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET

def test_setting_from_db(settings, mocker):

@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
assert settings.AWX_SOME_SETTING == 'FROM_DB'
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
assert settings.AWX_SOME_SETTING == 'FROM_DB'
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')

@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
settings.AWX_SOME_SETTING = 'NEW-VALUE'
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
settings.AWX_SOME_SETTING = 'NEW-VALUE'

assert existing_setting.value == 'NEW-VALUE'
existing_setting.save.assert_called_with(update_fields=['value'])

@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
del settings.AWX_SOME_SETTING
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
del settings.AWX_SOME_SETTING

assert existing_setting.delete.call_count == 1

@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
# use its primary key as part of the encryption key
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'

def test_readonly_sensitive_cache_data_is_encrypted(settings):
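The test hunks above all follow one pattern: `with mocker.patch(...)` blocks become plain `mocker.patch(...)` calls. A minimal sketch of why that is sufficient is below; the test function and the patched target (`os.path.exists`) are made up for illustration and are not AWX code. With pytest-mock, `mocker.patch` takes effect immediately and is undone automatically at test teardown, so no `with` block is needed.

```python
# Hypothetical pytest-mock sketch (not from the diff): the patch is active as
# soon as mocker.patch returns and is reverted automatically at teardown.
def test_mocker_patch_needs_no_with(mocker):
    mocked_exists = mocker.patch('os.path.exists', return_value=True)

    import os.path

    # No context manager involved; the patch is already in place here.
    assert os.path.exists('/definitely/not/there') is True
    assert mocked_exists.call_count == 1
```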
@@ -598,7 +598,7 @@ class InstanceGroupAccess(BaseAccess):
- a superuser
- admin role on the Instance group
I can add/delete Instance Groups:
- a superuser(system administrator)
- a superuser(system administrator), because these are not org-scoped
I can use Instance Groups when I have:
- use_role on the instance group
"""

@@ -627,7 +627,7 @@ class InstanceGroupAccess(BaseAccess):
def can_delete(self, obj):
if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
return False
return self.user.is_superuser
return self.user.has_obj_perm(obj, 'delete')

class UserAccess(BaseAccess):

@@ -2628,7 +2628,7 @@ class ScheduleAccess(UnifiedCredentialsMixin, BaseAccess):

class NotificationTemplateAccess(BaseAccess):
"""
I can see/use a notification_template if I have permission to
Run standard logic from DAB RBAC
"""

model = NotificationTemplate

@@ -2649,10 +2649,7 @@ class NotificationTemplateAccess(BaseAccess):
@check_superuser
def can_change(self, obj, data):
if obj.organization is None:
# only superusers are allowed to edit orphan notification templates
return False
return self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role', mandatory=True)
return self.user.has_obj_perm(obj, 'change') and self.check_related('organization', Organization, data, obj=obj, role_field='notification_admin_role')

def can_admin(self, obj, data):
return self.can_change(obj, data)

@@ -2662,9 +2659,7 @@ class NotificationTemplateAccess(BaseAccess):
@check_superuser
def can_start(self, obj, validate_license=True):
if obj.organization is None:
return False
return self.user in obj.organization.notification_admin_role
return self.can_change(obj, None)

class NotificationAccess(BaseAccess):
@@ -14,7 +14,7 @@ __all__ = [
'STANDARD_INVENTORY_UPDATE_ENV',
]

CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
PRIVILEGE_ESCALATION_METHODS = [
('sudo', _('Sudo')),
('su', _('Su')),

@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
kwargs.setdefault('related_name', '+')
kwargs.setdefault('null', 'True')
kwargs.setdefault('editable', False)
kwargs.setdefault('on_delete', models.CASCADE)
kwargs.setdefault('on_delete', models.SET_NULL)
super(ImplicitRoleField, self).__init__(*args, **kwargs)

def deconstruct(self):
awx/main/management/commands/check_instance_ready.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from django.core.management.base import BaseCommand, CommandError
from awx.main.models.ha import Instance

class Command(BaseCommand):
help = 'Check if the task manager instance is ready throw error if not ready, can be use as readiness probe for k8s.'

def handle(self, *args, **options):
if Instance.objects.me().node_state != Instance.States.READY:
raise CommandError('Instance is not ready')  # so that return code is not 0

return
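The new `check_instance_ready` command signals readiness purely through its exit code: a `CommandError` makes the management command exit non-zero, which is what lets it back a Kubernetes readiness probe. A small, hypothetical sketch of calling it programmatically is below; the wrapper function is an assumption for illustration only and is not part of the diff (in a container it would normally be run as a shell probe command instead, e.g. `awx-manage check_instance_ready`).

```python
# Hypothetical sketch (not part of the diff): the command's only contract is
# "raise CommandError when the local Instance is not READY", so a caller can
# translate that into a boolean.
from django.core.management import call_command
from django.core.management.base import CommandError


def instance_is_ready() -> bool:
    try:
        call_command('check_instance_ready')
    except CommandError:
        return False
    return True
```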
@@ -101,8 +101,9 @@ class Command(BaseCommand):
migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
connection.close()  # Because of async nature, main loop will use new connection, so close this
except Exception as exc:
logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
time.sleep(10)
time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
# sleeping before logging because logging rely on setting which require database connection...
logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
return

# In containerized deployments, migrations happen in the task container,

@@ -121,13 +122,14 @@ class Command(BaseCommand):
return

try:
my_hostname = Instance.objects.my_hostname()
my_hostname = Instance.objects.my_hostname()  # This relies on settings.CLUSTER_HOST_ID which requires database connection
logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
except RuntimeError as e:
# the CLUSTER_HOST_ID in the task, and web instance must match and
# ensure network connectivity between the task and web instance
logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
time.sleep(5)
time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
# sleeping before logging because logging rely on setting which require database connection...
logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before return.")
return

if options.get('status'):

@@ -166,12 +168,14 @@ class Command(BaseCommand):
WebsocketsMetricsServer().start()

while True:
try:
asyncio.run(WebSocketRelayManager().run())
except KeyboardInterrupt:
logger.info('Shutting down Websocket Relayer')
break
except Exception as e:
logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
time.sleep(10)
try:
logger.info('Starting Websocket Relayer...')
websocket_relay_manager = WebSocketRelayManager()
asyncio.run(websocket_relay_manager.run())
except KeyboardInterrupt:
logger.info('Terminating Websocket Relayer')
except BaseException as e:  # BaseException is used to catch all exceptions including asyncio.CancelledError
time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
# sleeping before logging because logging rely on setting which require database connection...
logger.warning(f"Encounter error while running Websocket Relayer {e}, slept for 10s...")
return
@@ -6,7 +6,7 @@ import logging
import threading
import time
import urllib.parse
from pathlib import Path
from pathlib import Path, PurePosixPath

from django.conf import settings
from django.contrib.auth import logout

@@ -138,14 +138,36 @@ class URLModificationMiddleware(MiddlewareMixin):
@classmethod
def _convert_named_url(cls, url_path):
url_units = url_path.split('/')
# If the identifier is an empty string, it is always invalid.
if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
return url_path
resource = url_units[3]
default_prefix = PurePosixPath('/api/v2/')
optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/')

url_path_original = url_path
url_path = PurePosixPath(url_path)

if set(optional_prefix.parts).issubset(set(url_path.parts)):
url_prefix = optional_prefix
elif set(default_prefix.parts).issubset(set(url_path.parts)):
url_prefix = default_prefix
else:
return url_path_original

# Remove prefix
url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts) :])
try:
resource_path = PurePosixPath(url_path.parts[0])
name = url_path.parts[1]
url_suffix = PurePosixPath(*url_path.parts[2:])  # remove name and resource
except IndexError:
return url_path_original

resource = resource_path.parts[0]
if resource in settings.NAMED_URL_MAPPINGS:
url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
return '/'.join(url_units)
pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name))
else:
return url_path_original

parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
return PurePosixPath(*parts).as_posix() + '/'

def process_request(self, request):
old_path = request.path_info
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
model_name='organization',
name='execute_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='job_template_admin_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='credential_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='inventory_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='project_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='workflow_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AddField(
model_name='organization',
name='notification_admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
),
migrations.AlterField(

@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
name='admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
related_name='+',
to='main.Role',

@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
model_name='inventory',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
),
),
migrations.AlterField(

@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
name='admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
related_name='+',
to='main.Role',

@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
name='admin_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
related_name='+',
to='main.Role',

@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
name='execute_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['admin_role', 'organization.execute_role'],
related_name='+',
to='main.Role',

@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
related_name='+',
to='main.Role',

@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
name='execute_role',
field=awx.main.fields.ImplicitRoleField(
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
related_name='+',
to='main.Role',

@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=[
'admin_role',
'execute_role',

@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
model_name='organization',
name='member_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
),
),
migrations.AlterField(

@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=[
'member_role',
'auditor_role',

@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
model_name='organization',
name='approval_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
preserve_default='True',
),

@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['organization.approval_role', 'admin_role'],
related_name='+',
to='main.Role',

@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=[
'member_role',
'auditor_role',

@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
related_name='+',
to='main.Role',

@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['organization.job_template_admin_role'],
related_name='+',
to='main.Role',

@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['admin_role', 'organization.execute_role'],
related_name='+',
to='main.Role',

@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
related_name='+',
to='main.Role',

@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
model_name='organization',
name='execution_environment_admin_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
),
preserve_default='True',
),

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=[
'member_role',
'auditor_role',

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['singleton:system_administrator'],
related_name='+',
to='main.role',

@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
field=awx.main.fields.ImplicitRoleField(
editable=False,
null='True',
on_delete=django.db.models.deletion.CASCADE,
on_delete=django.db.models.deletion.SET_NULL,
parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
related_name='+',
to='main.role',

@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
model_name='instancegroup',
name='use_role',
field=awx.main.fields.ImplicitRoleField(
editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
),
preserve_default='True',
),
@@ -0,0 +1,51 @@
# Generated by Django 4.2.6 on 2024-05-08 07:29

from django.db import migrations, models

class Migration(migrations.Migration):

dependencies = [
('main', '0192_custom_roles'),
]

operations = [
migrations.AlterField(
model_name='notification',
name='notification_type',
field=models.CharField(
choices=[
('awssns', 'AWS SNS'),
('email', 'Email'),
('grafana', 'Grafana'),
('irc', 'IRC'),
('mattermost', 'Mattermost'),
('pagerduty', 'Pagerduty'),
('rocketchat', 'Rocket.Chat'),
('slack', 'Slack'),
('twilio', 'Twilio'),
('webhook', 'Webhook'),
],
max_length=32,
),
),
migrations.AlterField(
model_name='notificationtemplate',
name='notification_type',
field=models.CharField(
choices=[
('awssns', 'AWS SNS'),
('email', 'Email'),
('grafana', 'Grafana'),
('irc', 'IRC'),
('mattermost', 'Mattermost'),
('pagerduty', 'Pagerduty'),
('rocketchat', 'Rocket.Chat'),
('slack', 'Slack'),
('twilio', 'Twilio'),
('webhook', 'Webhook'),
],
max_length=32,
),
),
]

@@ -0,0 +1,61 @@
# Generated by Django 4.2.10 on 2024-06-12 19:59

from django.db import migrations, models

class Migration(migrations.Migration):

dependencies = [
('main', '0193_alter_notification_notification_type_and_more'),
]

operations = [
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
('openshift_virtualization', 'OpenShift Virtualization'),
],
default=None,
max_length=32,
),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
('openshift_virtualization', 'OpenShift Virtualization'),
],
default=None,
max_length=32,
),
),
]
@@ -290,7 +290,7 @@ def setup_managed_role_definitions(apps, schema_editor):
managed_role_definitions = []

org_perms = set()
for cls in permission_registry._registry:
for cls in permission_registry.all_registered_models:
ct = ContentType.objects.get_for_model(cls)
object_perms = set(Permission.objects.filter(content_type=ct))
# Special case for InstanceGroup which has an organiation field, but is not an organization child object
@@ -4,11 +4,12 @@ import datetime
from datetime import timezone
import logging
from collections import defaultdict
import itertools
import time

from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import models, DatabaseError
from django.db import models, DatabaseError, transaction
from django.db.models.functions import Cast
from django.utils.dateparse import parse_datetime
from django.utils.text import Truncator

@@ -605,19 +606,23 @@ class JobEvent(BasePlaybookEvent):
def _update_host_metrics(updated_hosts_list):
from awx.main.models import HostMetric  # circular import

# bulk-create
current_time = now()
HostMetric.objects.bulk_create(
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
)
# bulk-update
batch_start, batch_size = 0, 1000
while batch_start <= len(updated_hosts_list):
batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
HostMetric.objects.filter(hostname__in=batched_host_list).update(
last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
)
batch_start += batch_size

# FUTURE:
# - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
# - Ability to do ORM upserts *may* have been introduced in Django 5.0.
# See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
# Hopefully this will be fully ready for batch use by 5.2 LTS.

args = [iter(updated_hosts_list)] * 500
for hosts in itertools.zip_longest(*args):
with transaction.atomic():
HostMetric.objects.bulk_create(
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
)
HostMetric.objects.filter(hostname__in=hosts).update(
last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
)

@property
def job_verbosity(self):
@@ -933,6 +933,7 @@ class InventorySourceOptions(BaseModel):
|
||||
('controller', _('Red Hat Ansible Automation Platform')),
|
||||
('insights', _('Red Hat Insights')),
|
||||
('terraform', _('Terraform State')),
|
||||
('openshift_virtualization', _('OpenShift Virtualization')),
|
||||
]
|
||||
|
||||
# From the options of the Django management base command
|
||||
@@ -1042,7 +1043,7 @@ class InventorySourceOptions(BaseModel):
|
||||
def cloud_credential_validation(source, cred):
|
||||
if not source:
|
||||
return None
|
||||
if cred and source not in ('custom', 'scm'):
|
||||
if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
|
||||
# If a credential was provided, it's important that it matches
|
||||
# the actual inventory source being used (Amazon requires Amazon
|
||||
# credentials; Rackspace requires Rackspace credentials; etc...)
|
||||
@@ -1051,12 +1052,14 @@ class InventorySourceOptions(BaseModel):
|
||||
# Allow an EC2 source to omit the credential. If Tower is running on
|
||||
# an EC2 instance with an IAM Role assigned, boto will use credentials
|
||||
# from the instance metadata instead of those explicitly provided.
|
||||
elif source in CLOUD_PROVIDERS and source != 'ec2':
|
||||
elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
|
||||
return _('Credential is required for a cloud source.')
|
||||
elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
|
||||
return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
|
||||
elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
|
||||
return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
|
||||
elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
|
||||
return _('Credentials of type kubernetes is requred for openshift_virtualization inventory sources.')
|
||||
return None
|
||||
|
||||
def get_cloud_credential(self):
|
||||
@@ -1660,7 +1663,7 @@ class terraform(PluginFileInjector):
|
||||
credential = inventory_update.get_cloud_credential()
|
||||
|
||||
private_data = {'credentials': {}}
|
||||
gce_cred = credential.get_input('gce_credentials')
|
||||
gce_cred = credential.get_input('gce_credentials', default=None)
|
||||
if gce_cred:
|
||||
private_data['credentials'][credential] = gce_cred
|
||||
return private_data
|
||||
@@ -1669,7 +1672,7 @@ class terraform(PluginFileInjector):
|
||||
env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
|
||||
credential = inventory_update.get_cloud_credential()
|
||||
cred_data = private_data_files['credentials']
|
||||
if cred_data[credential]:
|
||||
if credential in cred_data:
|
||||
env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(cred_data[credential], private_data_dir)
|
||||
return env
|
||||
|
||||
@@ -1693,6 +1696,16 @@ class insights(PluginFileInjector):
|
||||
use_fqcn = True
|
||||
|
||||
|
||||
class openshift_virtualization(PluginFileInjector):
|
||||
plugin_name = 'kubevirt'
|
||||
base_injector = 'template'
|
||||
namespace = 'kubevirt'
|
||||
collection = 'core'
|
||||
downstream_namespace = 'redhat'
|
||||
downstream_collection = 'openshift_virtualization'
|
||||
use_fqcn = True
|
||||
|
||||
|
||||
class constructed(PluginFileInjector):
|
||||
plugin_name = 'constructed'
|
||||
namespace = 'ansible'
|
||||
|
||||
@@ -31,6 +31,7 @@ from awx.main.notifications.mattermost_backend import MattermostBackend
|
||||
from awx.main.notifications.grafana_backend import GrafanaBackend
|
||||
from awx.main.notifications.rocketchat_backend import RocketChatBackend
|
||||
from awx.main.notifications.irc_backend import IrcBackend
|
||||
from awx.main.notifications.awssns_backend import AWSSNSBackend
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.models.notifications')
|
||||
@@ -40,6 +41,7 @@ __all__ = ['NotificationTemplate', 'Notification']
|
||||
|
||||
class NotificationTemplate(CommonModelNameNotUnique):
|
||||
NOTIFICATION_TYPES = [
|
||||
('awssns', _('AWS SNS'), AWSSNSBackend),
|
||||
('email', _('Email'), CustomEmailBackend),
|
||||
('slack', _('Slack'), SlackBackend),
|
||||
('twilio', _('Twilio'), TwilioBackend),
|
||||
|
||||
@@ -17,7 +17,7 @@ from collections import OrderedDict
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.db import models, connection
|
||||
from django.db import models, connection, transaction
|
||||
from django.core.exceptions import NON_FIELD_ERRORS
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.utils.timezone import now
|
||||
@@ -273,7 +273,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
|
||||
if new_next_schedule:
|
||||
if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
|
||||
return # no-op, common for infrequent schedules
|
||||
self.next_schedule = new_next_schedule
|
||||
|
||||
# If in a transaction, use select_for_update to lock the next schedule row, which
|
||||
# prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
|
||||
if transaction.get_autocommit():
|
||||
self.next_schedule = related_schedules.first()
|
||||
else:
|
||||
self.next_schedule = related_schedules.select_for_update().first()
|
||||
|
||||
self.next_job_run = new_next_schedule.next_run
|
||||
self.save(update_fields=['next_schedule', 'next_job_run'])
|
||||
|
||||
@@ -823,7 +830,7 @@ class UnifiedJob(
|
||||
update_fields.append(key)
|
||||
|
||||
if parent_instance:
|
||||
if self.status in ('pending', 'waiting', 'running'):
|
||||
if self.status in ('pending', 'running'):
|
||||
if parent_instance.current_job != self:
|
||||
parent_instance_set('current_job', self)
|
||||
# Update parent with all the 'good' states of it's child
|
||||
@@ -860,7 +867,7 @@ class UnifiedJob(
|
||||
# If this job already exists in the database, retrieve a copy of
|
||||
# the job in its prior state.
|
||||
# If update_fields are given without status, then that indicates no change
|
||||
if self.pk and ((not update_fields) or ('status' in update_fields)):
|
||||
if self.status != 'waiting' and self.pk and ((not update_fields) or ('status' in update_fields)):
|
||||
self_before = self.__class__.objects.get(pk=self.pk)
|
||||
if self_before.status != self.status:
|
||||
status_before = self_before.status
|
||||
@@ -902,7 +909,8 @@ class UnifiedJob(
|
||||
update_fields.append('elapsed')
|
||||
|
||||
# Ensure that the job template information is current.
|
||||
if self.unified_job_template != self._get_parent_instance():
|
||||
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
|
||||
if self.status != 'waiting' and self.unified_job_template != self._get_parent_instance():
|
||||
self.unified_job_template = self._get_parent_instance()
|
||||
if 'unified_job_template' not in update_fields:
|
||||
update_fields.append('unified_job_template')
|
||||
@@ -915,8 +923,9 @@ class UnifiedJob(
|
||||
# Okay; we're done. Perform the actual save.
|
||||
result = super(UnifiedJob, self).save(*args, **kwargs)
|
||||
|
||||
# If status changed, update the parent instance.
|
||||
if self.status != status_before:
|
||||
# If status changed, update the parent instance
|
||||
# unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
|
||||
if self.status != status_before and self.status != 'waiting':
|
||||
# Update parent outside of the transaction for Job w/ allow_simultaneous=True
|
||||
# This dodges lock contention at the expense of the foreign key not being
|
||||
# completely correct.
|
||||
|
||||
70
awx/main/notifications/awssns_backend.py
Normal file
70
awx/main/notifications/awssns_backend.py
Normal file
@@ -0,0 +1,70 @@
|
||||
# Copyright (c) 2016 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
import json
|
||||
import logging
|
||||
|
||||
import boto3
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from awx.main.notifications.base import AWXBaseEmailBackend
|
||||
from awx.main.notifications.custom_notification_base import CustomNotificationBase
|
||||
|
||||
logger = logging.getLogger('awx.main.notifications.awssns_backend')
|
||||
WEBSOCKET_TIMEOUT = 30
|
||||
|
||||
|
||||
class AWSSNSBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
init_parameters = {
|
||||
"aws_region": {"label": "AWS Region", "type": "string", "default": ""},
|
||||
"aws_access_key_id": {"label": "Access Key ID", "type": "string", "default": ""},
|
||||
"aws_secret_access_key": {"label": "Secret Access Key", "type": "password", "default": ""},
|
||||
"aws_session_token": {"label": "Session Token", "type": "password", "default": ""},
|
||||
"sns_topic_arn": {"label": "SNS Topic ARN", "type": "string", "default": ""},
|
||||
}
|
||||
recipient_parameter = "sns_topic_arn"
|
||||
sender_parameter = None
|
||||
|
||||
DEFAULT_BODY = "{{ job_metadata }}"
|
||||
default_messages = CustomNotificationBase.job_metadata_messages
|
||||
|
||||
def __init__(self, aws_region, aws_access_key_id, aws_secret_access_key, aws_session_token, fail_silently=False, **kwargs):
|
||||
session = boto3.session.Session()
|
||||
client_config = {"service_name": 'sns'}
|
||||
if aws_region:
|
||||
client_config["region_name"] = aws_region
|
||||
if aws_secret_access_key:
|
||||
client_config["aws_secret_access_key"] = aws_secret_access_key
|
||||
if aws_access_key_id:
|
||||
client_config["aws_access_key_id"] = aws_access_key_id
|
||||
if aws_session_token:
|
||||
client_config["aws_session_token"] = aws_session_token
|
||||
self.client = session.client(**client_config)
|
||||
super(AWSSNSBackend, self).__init__(fail_silently=fail_silently)
|
||||
|
||||
def _sns_publish(self, topic_arn, message):
|
||||
self.client.publish(TopicArn=topic_arn, Message=message, MessageAttributes={})
|
||||
|
||||
def format_body(self, body):
|
||||
if isinstance(body, str):
|
||||
try:
|
||||
body = json.loads(body)
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
|
||||
if isinstance(body, dict):
|
||||
body = json.dumps(body)
|
||||
# convert dict body to json string
|
||||
return body
|
||||
|
||||
def send_messages(self, messages):
|
||||
sent_messages = 0
|
||||
for message in messages:
|
||||
sns_topic_arn = str(message.recipients()[0])
|
||||
try:
|
||||
self._sns_publish(topic_arn=sns_topic_arn, message=message.body)
|
||||
sent_messages += 1
|
||||
except ClientError as error:
|
||||
if not self.fail_silently:
|
||||
raise error
|
||||
|
||||
return sent_messages
|
||||
@@ -32,3 +32,15 @@ class CustomNotificationBase(object):
|
||||
"denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
|
||||
},
|
||||
}
|
||||
|
||||
job_metadata_messages = {
|
||||
"started": {"body": "{{ job_metadata }}"},
|
||||
"success": {"body": "{{ job_metadata }}"},
|
||||
"error": {"body": "{{ job_metadata }}"},
|
||||
"workflow_approval": {
|
||||
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
|
||||
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
|
||||
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
|
||||
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -27,17 +27,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
sender_parameter = None
|
||||
|
||||
DEFAULT_BODY = "{{ job_metadata }}"
|
||||
default_messages = {
|
||||
"started": {"body": DEFAULT_BODY},
|
||||
"success": {"body": DEFAULT_BODY},
|
||||
"error": {"body": DEFAULT_BODY},
|
||||
"workflow_approval": {
|
||||
"running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
|
||||
"approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
|
||||
"timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
|
||||
"denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
|
||||
},
|
||||
}
|
||||
default_messages = CustomNotificationBase.job_metadata_messages
|
||||
|
||||
def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
|
||||
self.http_method = http_method
|
||||
|
||||
@@ -63,6 +63,10 @@ websocket_urlpatterns = [
|
||||
re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
|
||||
re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
|
||||
]
|
||||
|
||||
if settings.OPTIONAL_API_URLPATTERN_PREFIX:
|
||||
websocket_urlpatterns.append(re_path(r'api/{}/v2/websocket/$'.format(settings.OPTIONAL_API_URLPATTERN_PREFIX), consumers.EventConsumer.as_asgi()))
|
||||
|
||||
websocket_relay_urlpatterns = [
|
||||
re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
|
||||
]
|
||||
|
||||
@@ -36,6 +36,9 @@ import ansible_runner.cleanup
|
||||
# dateutil
|
||||
from dateutil.parser import parse as parse_date
|
||||
|
||||
# django-ansible-base
|
||||
from ansible_base.resource_registry.tasks.sync import SyncExecutor
|
||||
|
||||
# AWX
|
||||
from awx import __version__ as awx_application_version
|
||||
from awx.main.access import access_registry
|
||||
@@ -964,3 +967,17 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
|
||||
permission_check_func(creater, copy_mapping.values())
|
||||
if isinstance(new_obj, Inventory):
|
||||
update_inventory_computed_fields.delay(new_obj.id)
|
||||
|
||||
|
||||
@task(queue=get_task_queuename)
|
||||
def periodic_resource_sync():
|
||||
if not getattr(settings, 'RESOURCE_SERVER', None):
|
||||
logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
|
||||
return
|
||||
|
||||
with advisory_lock('periodic_resource_sync', wait=False) as acquired:
|
||||
if acquired is False:
|
||||
logger.debug("Not running periodic_resource_sync, another task holds lock")
|
||||
return
|
||||
|
||||
SyncExecutor().run()
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"K8S_AUTH_HOST": "https://foo.invalid",
|
||||
"K8S_AUTH_API_KEY": "fooo",
|
||||
"K8S_AUTH_VERIFY_SSL": "False"
|
||||
}
|
||||
@@ -9,8 +9,8 @@ def test_user_role_view_access(rando, inventory, mocker, post):
|
||||
role_pk = inventory.admin_role.pk
|
||||
data = {"id": role_pk}
|
||||
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
||||
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
|
||||
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
|
||||
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
|
||||
post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
|
||||
mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)
|
||||
|
||||
|
||||
@@ -21,8 +21,8 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
|
||||
role_pk = inventory.admin_role.pk
|
||||
data = {"id": role_pk}
|
||||
mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
|
||||
with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
|
||||
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
|
||||
mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
|
||||
post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
|
||||
mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
||||
|
||||
|
||||
@@ -33,8 +33,8 @@ def test_role_team_view_access(rando, team, inventory, mocker, post):
|
||||
role_pk = inventory.admin_role.pk
|
||||
data = {"id": team.pk}
|
||||
mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
|
||||
with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
|
||||
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
|
||||
mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access)
|
||||
post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
|
||||
mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)
|
||||
|
||||
|
||||
|
||||
@@ -1,22 +1,30 @@
|
||||
import pytest
|
||||
from unittest import mock
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
|
||||
from django.test.utils import override_settings
|
||||
|
||||
from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
|
||||
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair # noqa: F401; pylint: disable=unused-import
|
||||
|
||||
|
||||
class HeaderTrackingMiddleware(object):
|
||||
def __init__(self):
|
||||
self.environ = {}
|
||||
|
||||
def process_request(self, request):
|
||||
pass
|
||||
|
||||
def process_response(self, request, response):
|
||||
self.environ = request.environ
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_proxy_ip_allowed(get, patch, admin):
|
||||
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
|
||||
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
|
||||
|
||||
class HeaderTrackingMiddleware(object):
|
||||
environ = {}
|
||||
|
||||
def process_request(self, request):
|
||||
pass
|
||||
|
||||
def process_response(self, request, response):
|
||||
self.environ = request.environ
|
||||
|
||||
# By default, `PROXY_IP_ALLOWED_LIST` is disabled, so custom `REMOTE_HOST_HEADERS`
|
||||
# should just pass through
|
||||
middleware = HeaderTrackingMiddleware()
|
||||
@@ -45,6 +53,51 @@ def test_proxy_ip_allowed(get, patch, admin):
|
||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestTrustedProxyAllowListIntegration:
|
||||
@pytest.fixture
|
||||
def url(self, patch, admin):
|
||||
url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'system'})
|
||||
patch(url, user=admin, data={'REMOTE_HOST_HEADERS': ['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST']})
|
||||
patch(url, user=admin, data={'PROXY_IP_ALLOWED_LIST': ['my.proxy.example.org']})
|
||||
return url
|
||||
|
||||
@pytest.fixture
|
||||
def middleware(self):
|
||||
return HeaderTrackingMiddleware()
|
||||
|
||||
def test_x_trusted_proxy_valid_signature(self, get, admin, rsa_keypair, url, middleware): # noqa: F811
|
||||
# Headers should NOT get deleted
|
||||
headers = {
|
||||
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||
}
|
||||
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
|
||||
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public, PROXY_IP_ALLOWED_LIST=[]):
|
||||
get(url, user=admin, middleware=middleware, **headers)
|
||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||
|
||||
def test_x_trusted_proxy_invalid_signature(self, get, admin, url, patch, middleware):
|
||||
# Headers should NOT get deleted
|
||||
headers = {
|
||||
'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||
}
|
||||
with override_settings(PROXY_IP_ALLOWED_LIST=[]):
|
||||
get(url, user=admin, middleware=middleware, **headers)
|
||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||
|
||||
def test_x_trusted_proxy_invalid_signature_valid_proxy(self, get, admin, url, middleware):
|
||||
# A valid explicit proxy SHOULD result in sensitive headers NOT being deleted, regardless of the trusted proxy signature results
|
||||
headers = {
|
||||
'HTTP_X_TRUSTED_PROXY': 'DEAD-BEEF',
|
||||
'REMOTE_ADDR': 'my.proxy.example.org',
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'some-actual-ip',
|
||||
}
|
||||
get(url, user=admin, middleware=middleware, **headers)
|
||||
assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestDeleteViews:
|
||||
def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
|
||||
|
||||
66
awx/main/tests/functional/api/test_immutablesharedfields.py
Normal file
66
awx/main/tests/functional/api/test_immutablesharedfields.py
Normal file
@@ -0,0 +1,66 @@
|
||||
import pytest
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.models import Organization
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestImmutableSharedFields:
|
||||
@pytest.fixture(autouse=True)
|
||||
def configure_settings(self, settings):
|
||||
settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT = False
|
||||
|
||||
def test_create_raises_permission_denied(self, admin_user, post):
|
||||
orgA = Organization.objects.create(name='orgA')
|
||||
resp = post(
|
||||
url=reverse('api:team_list'),
|
||||
data={'name': 'teamA', 'organization': orgA.id},
|
||||
user=admin_user,
|
||||
expect=403,
|
||||
)
|
||||
assert "Creation of this resource is not allowed" in resp.data['detail']
|
||||
|
||||
def test_perform_delete_raises_permission_denied(self, admin_user, delete):
|
||||
orgA = Organization.objects.create(name='orgA')
|
||||
team = orgA.teams.create(name='teamA')
|
||||
resp = delete(
|
||||
url=reverse('api:team_detail', kwargs={'pk': team.id}),
|
||||
user=admin_user,
|
||||
expect=403,
|
||||
)
|
||||
assert "Deletion of this resource is not allowed" in resp.data['detail']
|
||||
|
||||
def test_perform_update(self, admin_user, patch):
|
||||
orgA = Organization.objects.create(name='orgA')
|
||||
team = orgA.teams.create(name='teamA')
|
||||
# allow patching non-shared fields
|
||||
patch(
|
||||
url=reverse('api:team_detail', kwargs={'pk': team.id}),
|
||||
data={"description": "can change this field"},
|
||||
user=admin_user,
|
||||
expect=200,
|
||||
)
|
||||
orgB = Organization.objects.create(name='orgB')
|
||||
# prevent patching shared fields
|
||||
resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
|
||||
assert "Cannot change shared field" in resp.data['organization']
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'role',
|
||||
['admin_role', 'member_role'],
|
||||
)
|
||||
@pytest.mark.parametrize('resource', ['organization', 'team'])
|
||||
def test_prevent_assigning_member_to_organization_or_team(self, admin_user, post, resource, role):
|
||||
orgA = Organization.objects.create(name='orgA')
|
||||
if resource == 'organization':
|
||||
role = getattr(orgA, role)
|
||||
elif resource == 'team':
|
||||
teamA = orgA.teams.create(name='teamA')
|
||||
role = getattr(teamA, role)
|
||||
resp = post(
|
||||
url=reverse('api:user_roles_list', kwargs={'pk': admin_user.id}),
|
||||
data={'id': role.id},
|
||||
user=admin_user,
|
||||
expect=403,
|
||||
)
|
||||
assert f"Cannot directly modify user membership to {resource}." in resp.data['msg']
|
||||
@@ -32,13 +32,6 @@ def node_type_instance():
|
||||
return fn
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def instance_group(job_factory):
|
||||
ig = InstanceGroup(name="east")
|
||||
ig.save()
|
||||
return ig
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def containerized_instance_group(instance_group, kube_credential):
|
||||
ig = InstanceGroup(name="container")
|
||||
|
||||
@@ -131,11 +131,11 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ()
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ()
|
||||
|
||||
# Check that job is serialized correctly
|
||||
job_id = response.data['job']
|
||||
@@ -167,12 +167,12 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
called_with = data_to_internal(runtime_data)
|
||||
JobTemplate.create_unified_job.assert_called_with(**called_with)
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
called_with = data_to_internal(runtime_data)
|
||||
JobTemplate.create_unified_job.assert_called_with(**called_with)
|
||||
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
@@ -187,11 +187,11 @@ def test_job_accept_empty_tags(job_template_prompts, post, admin_user, mocker):
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
|
||||
|
||||
mock_job.signal_start.assert_called_once()
|
||||
|
||||
@@ -203,14 +203,14 @@ def test_slice_timeout_forks_need_int(job_template_prompts, post, admin_user, mo
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(
|
||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
|
||||
)
|
||||
assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
|
||||
assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
|
||||
assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
response = post(
|
||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
|
||||
)
|
||||
assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
|
||||
assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
|
||||
assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -244,12 +244,12 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
expected_call = data_to_internal(runtime_data)
|
||||
assert JobTemplate.create_unified_job.call_args == (expected_call,)
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation')
|
||||
response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
expected_call = data_to_internal(runtime_data)
|
||||
assert JobTemplate.create_unified_job.call_args == (expected_call,)
|
||||
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
@@ -641,18 +641,18 @@ def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job
|
||||
job_template.survey_spec = survey_spec_factory('survey_var')
|
||||
job_template.save()
|
||||
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
|
||||
response = post(
|
||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
|
||||
admin_user,
|
||||
expect=201,
|
||||
)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
|
||||
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
|
||||
response = post(
|
||||
reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
|
||||
admin_user,
|
||||
expect=201,
|
||||
)
|
||||
assert JobTemplate.create_unified_job.called
|
||||
assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
|
||||
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
@@ -670,22 +670,22 @@ def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_tem
|
||||
job_template.survey_spec = survey_spec_factory('survey_var')
|
||||
job_template.save()
|
||||
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
|
||||
with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
|
||||
post(
|
||||
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
|
||||
admin_user,
|
||||
expect=201,
|
||||
format='json',
|
||||
)
|
||||
assert UnifiedJobTemplate.create_unified_job.called
|
||||
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
|
||||
call_args.pop('_eager_fields', None) # internal purposes
|
||||
assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
|
||||
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
|
||||
mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
|
||||
post(
|
||||
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
|
||||
admin_user,
|
||||
expect=201,
|
||||
format='json',
|
||||
)
|
||||
assert UnifiedJobTemplate.create_unified_job.called
|
||||
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
|
||||
call_args.pop('_eager_fields', None) # internal purposes
|
||||
assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
|
||||
|
||||
mock_job.signal_start.assert_called_once()
|
||||
|
||||
@@ -697,22 +697,22 @@ def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_t
|
||||
job_template.host_config_key = "foo"
|
||||
job_template.save()
|
||||
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
|
||||
with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
|
||||
post(
|
||||
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
|
||||
admin_user,
|
||||
expect=201,
|
||||
format='json',
|
||||
)
|
||||
assert UnifiedJobTemplate.create_unified_job.called
|
||||
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
|
||||
call_args.pop('_eager_fields', None) # internal purposes
|
||||
assert call_args == {'limit': 'single-host'}
|
||||
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
|
||||
mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
|
||||
mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
|
||||
post(
|
||||
reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
|
||||
admin_user,
|
||||
expect=201,
|
||||
format='json',
|
||||
)
|
||||
assert UnifiedJobTemplate.create_unified_job.called
|
||||
call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
|
||||
call_args.pop('_eager_fields', None) # internal purposes
|
||||
assert call_args == {'limit': 'single-host'}
|
||||
|
||||
mock_job.signal_start.assert_called_once()
|
||||
|
||||
@@ -725,9 +725,9 @@ def test_callback_find_matching_hosts(mocker, get, job_template_prompts, admin_u
|
||||
job_template.save()
|
||||
host_with_alias = Host(name='localhost', inventory=job_template.inventory)
|
||||
host_with_alias.save()
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
||||
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
|
||||
assert tuple(r.data['matching_hosts']) == ('localhost',)
|
||||
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
|
||||
assert tuple(r.data['matching_hosts']) == ('localhost',)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -738,6 +738,6 @@ def test_callback_extra_var_takes_priority_over_host_name(mocker, get, job_templ
|
||||
job_template.save()
|
||||
host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
|
||||
host_with_alias.save()
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
||||
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
|
||||
assert not r.data['matching_hosts']
|
||||
mocker.patch('awx.main.access.BaseAccess.check_license')
|
||||
r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
|
||||
assert not r.data['matching_hosts']
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import pytest
|
||||
from unittest import mock
|
||||
|
||||
# AWX
|
||||
from awx.api.serializers import JobTemplateSerializer
|
||||
@@ -8,10 +9,15 @@ from awx.main.migrations import _save_password_keys as save_password_keys
|
||||
|
||||
# Django
|
||||
from django.apps import apps
|
||||
from django.test.utils import override_settings
|
||||
|
||||
# DRF
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
# DAB
|
||||
from ansible_base.jwt_consumer.common.util import generate_x_trusted_proxy_header
|
||||
from ansible_base.lib.testing.fixtures import rsa_keypair_factory, rsa_keypair # noqa: F401; pylint: disable=unused-import
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
@@ -369,3 +375,113 @@ def test_job_template_missing_inventory(project, inventory, admin_user, post):
|
||||
)
|
||||
assert r.status_code == 400
|
||||
assert "Cannot start automatically, an inventory is required." in str(r.data)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestJobTemplateCallbackProxyIntegration:
|
||||
"""
|
||||
Test the interaction of provision job template callback feature and:
|
||||
settings.PROXY_IP_ALLOWED_LIST
|
||||
x-trusted-proxy http header
|
||||
"""
|
||||
|
||||
@pytest.fixture
|
||||
def job_template(self, inventory, project):
|
||||
jt = JobTemplate.objects.create(name='test-jt', inventory=inventory, project=project, playbook='helloworld.yml', host_config_key='abcd')
|
||||
return jt
|
||||
|
||||
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
|
||||
def test_host_not_found(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
|
||||
job_template.inventory.hosts.create(name='foobar')
|
||||
|
||||
headers = {
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz',
|
||||
'REMOTE_HOST': 'baz',
|
||||
'REMOTE_ADDR': 'baz',
|
||||
}
|
||||
r = post(
|
||||
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=400, **headers
|
||||
)
|
||||
assert r.data['msg'] == 'No matching host could be found!'
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'headers, expected',
|
||||
(
|
||||
pytest.param(
|
||||
{
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
|
||||
'REMOTE_HOST': 'my.proxy.example.org',
|
||||
},
|
||||
201,
|
||||
),
|
||||
pytest.param(
|
||||
{
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
|
||||
'REMOTE_HOST': 'not-my-proxy.org',
|
||||
},
|
||||
400,
|
||||
),
|
||||
),
|
||||
)
|
||||
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
|
||||
def test_proxy_ip_allowed_list(self, job_template, admin_user, post, headers, expected): # noqa: F811
|
||||
job_template.inventory.hosts.create(name='my.proxy.example.org')
|
||||
|
||||
post(
|
||||
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||
data={'host_config_key': 'abcd'},
|
||||
user=admin_user,
|
||||
expect=expected,
|
||||
**headers
|
||||
)
|
||||
|
||||
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=[])
|
||||
def test_no_proxy_trust_all_headers(self, job_template, admin_user, post):
|
||||
job_template.inventory.hosts.create(name='foobar')
|
||||
|
||||
headers = {
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar',
|
||||
'REMOTE_ADDR': 'bar',
|
||||
'REMOTE_HOST': 'baz',
|
||||
}
|
||||
post(url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), data={'host_config_key': 'abcd'}, user=admin_user, expect=201, **headers)
|
||||
|
||||
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
|
||||
def test_trusted_proxy(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
|
||||
job_template.inventory.hosts.create(name='foobar')
|
||||
|
||||
headers = {
|
||||
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'foobar, my.proxy.example.org',
|
||||
}
|
||||
|
||||
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
|
||||
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
|
||||
post(
|
||||
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||
data={'host_config_key': 'abcd'},
|
||||
user=admin_user,
|
||||
expect=201,
|
||||
**headers
|
||||
)
|
||||
|
||||
@override_settings(REMOTE_HOST_HEADERS=['HTTP_X_FROM_THE_LOAD_BALANCER', 'REMOTE_ADDR', 'REMOTE_HOST'], PROXY_IP_ALLOWED_LIST=['my.proxy.example.org'])
|
||||
def test_trusted_proxy_host_not_found(self, job_template, admin_user, post, rsa_keypair): # noqa: F811
|
||||
job_template.inventory.hosts.create(name='foobar')
|
||||
|
||||
headers = {
|
||||
'HTTP_X_TRUSTED_PROXY': generate_x_trusted_proxy_header(rsa_keypair.private),
|
||||
'HTTP_X_FROM_THE_LOAD_BALANCER': 'baz, my.proxy.example.org',
|
||||
'REMOTE_ADDR': 'bar',
|
||||
'REMOTE_HOST': 'baz',
|
||||
}
|
||||
|
||||
with mock.patch('ansible_base.jwt_consumer.common.cache.JWTCache.get_key_from_cache', lambda self: None):
|
||||
with override_settings(ANSIBLE_BASE_JWT_KEY=rsa_keypair.public):
|
||||
post(
|
||||
url=reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
|
||||
data={'host_config_key': 'abcd'},
|
||||
user=admin_user,
|
||||
expect=400,
|
||||
**headers
|
||||
)
|
||||
|
||||
@@ -165,8 +165,8 @@ class TestAccessListCapabilities:
|
||||
def test_access_list_direct_access_capability(self, inventory, rando, get, mocker, mock_access_method):
|
||||
inventory.admin_role.members.add(rando)
|
||||
|
||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
|
||||
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
|
||||
|
||||
mock_access_method.assert_called_once_with(inventory.admin_role, rando, 'members', **self.extra_kwargs)
|
||||
self._assert_one_in_list(response.data)
|
||||
@@ -174,8 +174,8 @@ class TestAccessListCapabilities:
|
||||
assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'
|
||||
|
||||
def test_access_list_indirect_access_capability(self, inventory, organization, org_admin, get, mocker, mock_access_method):
|
||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
|
||||
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
|
||||
|
||||
mock_access_method.assert_called_once_with(organization.admin_role, org_admin, 'members', **self.extra_kwargs)
|
||||
self._assert_one_in_list(response.data, sublist='indirect_access')
|
||||
@@ -185,8 +185,8 @@ class TestAccessListCapabilities:
|
||||
def test_access_list_team_direct_access_capability(self, inventory, team, team_member, get, mocker, mock_access_method):
|
||||
team.member_role.children.add(inventory.admin_role)
|
||||
|
||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
|
||||
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||
response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
|
||||
|
||||
mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', **self.extra_kwargs)
|
||||
self._assert_one_in_list(response.data)
|
||||
@@ -198,8 +198,8 @@ class TestAccessListCapabilities:
|
||||
def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
|
||||
team.member_role.children.add(inventory.admin_role)
|
||||
|
||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
||||
response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
|
||||
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||
response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
|
||||
|
||||
# Did we assess whether team_member can remove team's permission to the inventory?
|
||||
mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
|
||||
@@ -212,8 +212,8 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
|
||||
organization.member_role.members.add(alice)
|
||||
organization.member_role.members.add(bob)
|
||||
|
||||
with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
|
||||
response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
|
||||
mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
|
||||
response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
|
||||
|
||||
# Did we assess whether bob can remove alice's permission to the inventory?
|
||||
mock_access_method.assert_called_once_with(organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})
|
||||
|
||||
@@ -43,9 +43,9 @@ def run_command(name, *args, **options):
|
||||
],
|
||||
)
|
||||
def test_update_password_command(mocker, username, password, expected, changed):
|
||||
with mocker.patch.object(UpdatePassword, 'update_password', return_value=changed):
|
||||
result, stdout, stderr = run_command('update_password', username=username, password=password)
|
||||
if result is None:
|
||||
assert stdout == expected
|
||||
else:
|
||||
assert str(result) == expected
|
||||
mocker.patch.object(UpdatePassword, 'update_password', return_value=changed)
|
||||
result, stdout, stderr = run_command('update_password', username=username, password=password)
|
||||
if result is None:
|
||||
assert stdout == expected
|
||||
else:
|
||||
assert str(result) == expected
|
||||
|
||||
@@ -20,7 +20,7 @@ from awx.main.migrations._dab_rbac import setup_managed_role_definitions
|
||||
|
||||
# AWX
|
||||
from awx.main.models.projects import Project
|
||||
from awx.main.models.ha import Instance
|
||||
from awx.main.models.ha import Instance, InstanceGroup
|
||||
|
||||
from rest_framework.test import (
|
||||
APIRequestFactory,
|
||||
@@ -730,6 +730,11 @@ def jt_linked(organization, project, inventory, machine_credential, credential,
|
||||
return jt
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def instance_group():
|
||||
return InstanceGroup.objects.create(name="east")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def workflow_job_template(organization):
|
||||
wjt = WorkflowJobTemplate.objects.create(name='test-workflow_job_template', organization=organization)
|
||||
|
||||
@@ -0,0 +1,23 @@
|
||||
import pytest
|
||||
|
||||
from awx.main.access import InstanceGroupAccess, NotificationTemplateAccess
|
||||
|
||||
from ansible_base.rbac.models import RoleDefinition
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_instance_group_object_role_delete(rando, instance_group, setup_managed_roles):
|
||||
"""Basic functionality of IG object-level admin role function AAP-25506"""
|
||||
rd = RoleDefinition.objects.get(name='InstanceGroup Admin')
|
||||
rd.give_permission(rando, instance_group)
|
||||
access = InstanceGroupAccess(rando)
|
||||
assert access.can_delete(instance_group)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notification_template_object_role_change(rando, notification_template, setup_managed_roles):
|
||||
"""Basic functionality of NT object-level admin role function AAP-25493"""
|
||||
rd = RoleDefinition.objects.get(name='NotificationTemplate Admin')
|
||||
rd.give_permission(rando, notification_template)
|
||||
access = NotificationTemplateAccess(rando)
|
||||
assert access.can_change(notification_template, {'name': 'new name'})
|
||||
@@ -21,13 +21,13 @@ class TestComputedFields:
|
||||
def test_computed_fields_normal_use(self, mocker, inventory):
|
||||
job = Job.objects.create(name='fake-job', inventory=inventory)
|
||||
with immediate_on_commit():
|
||||
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
|
||||
job.delete()
|
||||
update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)
|
||||
mocker.patch.object(update_inventory_computed_fields, 'delay')
|
||||
job.delete()
|
||||
update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)
|
||||
|
||||
def test_disable_computed_fields(self, mocker, inventory):
|
||||
job = Job.objects.create(name='fake-job', inventory=inventory)
|
||||
with disable_computed_fields():
|
||||
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
|
||||
job.delete()
|
||||
update_inventory_computed_fields.delay.assert_not_called()
|
||||
mocker.patch.object(update_inventory_computed_fields, 'delay')
|
||||
job.delete()
|
||||
update_inventory_computed_fields.delay.assert_not_called()
|
||||
|
||||
@@ -21,13 +21,13 @@ def test_multi_group_basic_job_launch(instance_factory, controlplane_instance_gr
|
||||
j2 = create_job(objects2.job_template)
|
||||
with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
|
||||
mock_task_impact.return_value = 500
|
||||
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])
|
||||
mocker.patch("awx.main.scheduler.TaskManager.start_task")
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group, mocker, instance_group_factory, job_template_factory):
|
||||
def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group, instance_group_factory, job_template_factory):
|
||||
i1 = instance_factory("i1")
|
||||
i2 = instance_factory("i2")
|
||||
ig1 = instance_group_factory("ig1", instances=[i1])
|
||||
@@ -50,7 +50,7 @@ def test_multi_group_with_shared_dependency(instance_factory, controlplane_insta
|
||||
objects2 = job_template_factory('jt2', organization=objects1.organization, project=p, inventory='inv2', credential='cred2')
|
||||
objects2.job_template.instance_groups.add(ig2)
|
||||
j2 = create_job(objects2.job_template, dependencies_processed=False)
|
||||
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
DependencyManager().schedule()
|
||||
TaskManager().schedule()
|
||||
pu = p.project_updates.first()
|
||||
@@ -73,10 +73,10 @@ def test_workflow_job_no_instancegroup(workflow_job_template_factory, controlpla
|
||||
wfj = wfjt.create_unified_job()
|
||||
wfj.status = "pending"
|
||||
wfj.save()
|
||||
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_called_once_with(wfj, None, None)
|
||||
assert wfj.instance_group is None
|
||||
mocker.patch("awx.main.scheduler.TaskManager.start_task")
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_called_once_with(wfj, None, None)
|
||||
assert wfj.instance_group is None
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@@ -16,9 +16,9 @@ def test_single_job_scheduler_launch(hybrid_instance, controlplane_instance_grou
|
||||
instance = controlplane_instance_group.instances.all()[0]
|
||||
objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred')
|
||||
j = create_job(objects.job_template)
|
||||
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
|
||||
mocker.patch("awx.main.scheduler.TaskManager.start_task")
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@@ -46,6 +46,8 @@ def generate_fake_var(element):
|
||||
|
||||
def credential_kind(source):
|
||||
"""Given the inventory source kind, return expected credential kind"""
|
||||
if source == 'openshift_virtualization':
|
||||
return 'kubernetes_bearer_token'
|
||||
return source.replace('ec2', 'aws')
|
||||
|
||||
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import pytest
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.middleware import URLModificationMiddleware
|
||||
from awx.main.models import ( # noqa
|
||||
@@ -121,7 +119,7 @@ def test_notification_template(get, admin_user):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_instance(get, admin_user):
|
||||
def test_instance(get, admin_user, settings):
|
||||
test_instance = Instance.objects.create(uuid=settings.SYSTEM_UUID, hostname="localhost", capacity=100)
|
||||
url = reverse('api:instance_detail', kwargs={'pk': test_instance.pk})
|
||||
response = get(url, user=admin_user, expect=200)
|
||||
@@ -205,3 +203,65 @@ def test_403_vs_404(get):
|
||||
|
||||
get(f'/api/v2/users/{cindy.pk}/', expect=401)
|
||||
get('/api/v2/users/cindy/', expect=404)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestConvertNamedUrl:
|
||||
@pytest.mark.parametrize(
|
||||
"url",
|
||||
(
|
||||
"/api/",
|
||||
"/api/v2/",
|
||||
"/api/v2/hosts/",
|
||||
"/api/v2/hosts/1/",
|
||||
"/api/v2/organizations/1/inventories/",
|
||||
"/api/foo/",
|
||||
"/api/foo/v2/",
|
||||
"/api/foo/v2/organizations/",
|
||||
"/api/foo/v2/organizations/1/",
|
||||
"/api/foo/v2/organizations/1/inventories/",
|
||||
"/api/foobar/",
|
||||
"/api/foobar/v2/",
|
||||
"/api/foobar/v2/organizations/",
|
||||
"/api/foobar/v2/organizations/1/",
|
||||
"/api/foobar/v2/organizations/1/inventories/",
|
||||
"/api/foobar/v2/organizations/1/inventories/",
|
||||
),
|
||||
)
|
||||
def test_noop(self, url, settings):
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = ''
|
||||
assert URLModificationMiddleware._convert_named_url(url) == url
|
||||
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = 'foo'
|
||||
assert URLModificationMiddleware._convert_named_url(url) == url
|
||||
|
||||
def test_named_org(self):
|
||||
test_org = Organization.objects.create(name='test_org')
|
||||
|
||||
assert URLModificationMiddleware._convert_named_url('/api/v2/organizations/test_org/') == f'/api/v2/organizations/{test_org.pk}/'
|
||||
|
||||
def test_named_org_optional_api_urlpattern_prefix_interaction(self, settings):
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = 'bar'
|
||||
test_org = Organization.objects.create(name='test_org')
|
||||
|
||||
assert URLModificationMiddleware._convert_named_url('/api/bar/v2/organizations/test_org/') == f'/api/bar/v2/organizations/{test_org.pk}/'
|
||||
|
||||
@pytest.mark.parametrize("prefix", ['', 'bar'])
|
||||
def test_named_org_not_found(self, prefix, settings):
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = prefix
|
||||
if prefix:
|
||||
prefix += '/'
|
||||
|
||||
assert URLModificationMiddleware._convert_named_url(f'/api/{prefix}v2/organizations/does-not-exist/') == f'/api/{prefix}v2/organizations/0/'
|
||||
|
||||
@pytest.mark.parametrize("prefix", ['', 'bar'])
|
||||
def test_named_sub_resource(self, prefix, settings):
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = prefix
|
||||
test_org = Organization.objects.create(name='test_org')
|
||||
if prefix:
|
||||
prefix += '/'
|
||||
|
||||
assert (
|
||||
URLModificationMiddleware._convert_named_url(f'/api/{prefix}v2/organizations/test_org/inventories/')
|
||||
== f'/api/{prefix}v2/organizations/{test_org.pk}/inventories/'
|
||||
)
|
||||
|
||||
@@ -187,7 +187,7 @@ def test_remove_role_from_user(role, post, admin):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN=True)
|
||||
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN=True, ANSIBLE_BASE_ALLOW_TEAM_ORG_MEMBER=True)
|
||||
def test_get_teams_roles_list(get, team, organization, admin):
|
||||
team.member_role.children.add(organization.admin_role)
|
||||
url = reverse('api:team_roles_list', kwargs={'pk': team.id})
|
||||
|
||||
@@ -99,7 +99,9 @@ def test_notification_template_access_org_user(notification_template, user):
|
||||
@pytest.mark.django_db
|
||||
def test_notificaiton_template_orphan_access_org_admin(notification_template, organization, org_admin):
|
||||
notification_template.organization = None
|
||||
notification_template.save(update_fields=['organization'])
|
||||
access = NotificationTemplateAccess(org_admin)
|
||||
assert not org_admin.has_obj_perm(notification_template, 'change')
|
||||
assert not access.can_change(notification_template, {'organization': organization.id})
|
||||
|
||||
|
||||
|
||||
@@ -76,15 +76,15 @@ class TestJobTemplateSerializerGetRelated:
|
||||
class TestJobTemplateSerializerGetSummaryFields:
|
||||
def test_survey_spec_exists(self, test_get_summary_fields, mocker, job_template):
|
||||
job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'}
|
||||
with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
|
||||
mock_rj.return_value = []
|
||||
test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')
|
||||
mock_rj = mocker.patch.object(JobTemplateSerializer, '_recent_jobs')
|
||||
mock_rj.return_value = []
|
||||
test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')
|
||||
|
||||
def test_survey_spec_absent(self, get_summary_fields_mock_and_run, mocker, job_template):
|
||||
job_template.survey_spec = None
|
||||
with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
|
||||
mock_rj.return_value = []
|
||||
summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
|
||||
mock_rj = mocker.patch.object(JobTemplateSerializer, '_recent_jobs')
|
||||
mock_rj.return_value = []
|
||||
summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
|
||||
assert 'survey' not in summary
|
||||
|
||||
def test_copy_edit_standard(self, mocker, job_template_factory):
|
||||
@@ -107,10 +107,10 @@ class TestJobTemplateSerializerGetSummaryFields:
|
||||
view.kwargs = {}
|
||||
serializer.context['view'] = view
|
||||
|
||||
with mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie'):
|
||||
with mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar'):
|
||||
with mocker.patch("awx.main.access.JobTemplateAccess.can_copy", return_value='foo'):
|
||||
response = serializer.get_summary_fields(jt_obj)
|
||||
mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie')
|
||||
mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar')
|
||||
mocker.patch("awx.main.access.JobTemplateAccess.can_copy", return_value='foo')
|
||||
response = serializer.get_summary_fields(jt_obj)
|
||||
|
||||
assert response['user_capabilities']['copy'] == 'foo'
|
||||
assert response['user_capabilities']['edit'] == 'foobar'
|
||||
|
||||
@@ -189,8 +189,8 @@ class TestWorkflowJobTemplateNodeSerializerSurveyPasswords:
|
||||
serializer = WorkflowJobTemplateNodeSerializer()
|
||||
wfjt = WorkflowJobTemplate.objects.create(name='fake-wfjt')
|
||||
serializer.instance = WorkflowJobTemplateNode(workflow_job_template=wfjt, unified_job_template=jt, extra_data={'var1': '$encrypted$foooooo'})
|
||||
with mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo'):
|
||||
attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
|
||||
mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo')
|
||||
attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
|
||||
assert 'survey_passwords' in attrs
|
||||
assert 'var1' in attrs['survey_passwords']
|
||||
assert attrs['extra_data']['var1'] == '$encrypted$foooooo'
|
||||
|
||||
@@ -191,16 +191,16 @@ class TestResourceAccessList:
|
||||
|
||||
def test_parent_access_check_failed(self, mocker, mock_organization):
|
||||
mock_access = mocker.MagicMock(__name__='for logger', return_value=False)
|
||||
with mocker.patch('awx.main.access.BaseAccess.can_read', mock_access):
|
||||
with pytest.raises(PermissionDenied):
|
||||
self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
|
||||
mock_access.assert_called_once_with(mock_organization)
|
||||
mocker.patch('awx.main.access.BaseAccess.can_read', mock_access)
|
||||
with pytest.raises(PermissionDenied):
|
||||
self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
|
||||
mock_access.assert_called_once_with(mock_organization)
|
||||
|
||||
def test_parent_access_check_worked(self, mocker, mock_organization):
|
||||
mock_access = mocker.MagicMock(__name__='for logger', return_value=True)
|
||||
with mocker.patch('awx.main.access.BaseAccess.can_read', mock_access):
|
||||
self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
|
||||
mock_access.assert_called_once_with(mock_organization)
|
||||
mocker.patch('awx.main.access.BaseAccess.can_read', mock_access)
|
||||
self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
|
||||
mock_access.assert_called_once_with(mock_organization)
|
||||
|
||||
|
||||
def test_related_search_reverse_FK_field():
|
||||
|
||||
@@ -66,7 +66,7 @@ class TestJobTemplateLabelList:
|
||||
mock_request = mock.MagicMock()
|
||||
|
||||
super(JobTemplateLabelList, view).unattach(mock_request, None, None)
|
||||
assert mixin_unattach.called_with(mock_request, None, None)
|
||||
mixin_unattach.assert_called_with(mock_request, None, None)
|
||||
|
||||
|
||||
class TestInventoryInventorySourcesUpdate:
|
||||
@@ -108,15 +108,16 @@ class TestInventoryInventorySourcesUpdate:
|
||||
mock_request = mocker.MagicMock()
|
||||
mock_request.user.can_access.return_value = can_access
|
||||
|
||||
with mocker.patch.object(InventoryInventorySourcesUpdate, 'get_object', return_value=obj):
|
||||
with mocker.patch.object(InventoryInventorySourcesUpdate, 'get_serializer_context', return_value=None):
|
||||
with mocker.patch('awx.api.serializers.InventoryUpdateDetailSerializer') as serializer_class:
|
||||
serializer = serializer_class.return_value
|
||||
serializer.to_representation.return_value = {}
|
||||
mocker.patch.object(InventoryInventorySourcesUpdate, 'get_object', return_value=obj)
|
||||
mocker.patch.object(InventoryInventorySourcesUpdate, 'get_serializer_context', return_value=None)
|
||||
serializer_class = mocker.patch('awx.api.serializers.InventoryUpdateDetailSerializer')
|
||||
|
||||
view = InventoryInventorySourcesUpdate()
|
||||
response = view.post(mock_request)
|
||||
assert response.data == expected
|
||||
serializer = serializer_class.return_value
|
||||
serializer.to_representation.return_value = {}
|
||||
|
||||
view = InventoryInventorySourcesUpdate()
|
||||
response = view.post(mock_request)
|
||||
assert response.data == expected
|
||||
|
||||
|
||||
class TestSurveySpecValidation:
|
||||
|
||||
@@ -155,35 +155,35 @@ def test_node_getter_and_setters():
|
||||
class TestWorkflowJobCreate:
|
||||
def test_create_no_prompts(self, wfjt_node_no_prompts, workflow_job_unit, mocker):
|
||||
mock_create = mocker.MagicMock()
|
||||
with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
|
||||
wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
|
||||
mock_create.assert_called_once_with(
|
||||
all_parents_must_converge=False,
|
||||
extra_data={},
|
||||
survey_passwords={},
|
||||
char_prompts=wfjt_node_no_prompts.char_prompts,
|
||||
inventory=None,
|
||||
unified_job_template=wfjt_node_no_prompts.unified_job_template,
|
||||
workflow_job=workflow_job_unit,
|
||||
identifier=mocker.ANY,
|
||||
execution_environment=None,
|
||||
)
|
||||
mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create)
|
||||
wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
|
||||
mock_create.assert_called_once_with(
|
||||
all_parents_must_converge=False,
|
||||
extra_data={},
|
||||
survey_passwords={},
|
||||
char_prompts=wfjt_node_no_prompts.char_prompts,
|
||||
inventory=None,
|
||||
unified_job_template=wfjt_node_no_prompts.unified_job_template,
|
||||
workflow_job=workflow_job_unit,
|
||||
identifier=mocker.ANY,
|
||||
execution_environment=None,
|
||||
)
|
||||
|
||||
def test_create_with_prompts(self, wfjt_node_with_prompts, workflow_job_unit, credential, mocker):
|
||||
mock_create = mocker.MagicMock()
|
||||
with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
|
||||
wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
|
||||
mock_create.assert_called_once_with(
|
||||
all_parents_must_converge=False,
|
||||
extra_data={},
|
||||
survey_passwords={},
|
||||
char_prompts=wfjt_node_with_prompts.char_prompts,
|
||||
inventory=wfjt_node_with_prompts.inventory,
|
||||
unified_job_template=wfjt_node_with_prompts.unified_job_template,
|
||||
workflow_job=workflow_job_unit,
|
||||
identifier=mocker.ANY,
|
||||
execution_environment=None,
|
||||
)
|
||||
mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create)
|
||||
wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
|
||||
mock_create.assert_called_once_with(
|
||||
all_parents_must_converge=False,
|
||||
extra_data={},
|
||||
survey_passwords={},
|
||||
char_prompts=wfjt_node_with_prompts.char_prompts,
|
||||
inventory=wfjt_node_with_prompts.inventory,
|
||||
unified_job_template=wfjt_node_with_prompts.unified_job_template,
|
||||
workflow_job=workflow_job_unit,
|
||||
identifier=mocker.ANY,
|
||||
execution_environment=None,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
26  awx/main/tests/unit/notifications/test_awssns.py  (new file)
@@ -0,0 +1,26 @@
|
||||
from unittest import mock
|
||||
from django.core.mail.message import EmailMessage
|
||||
|
||||
import awx.main.notifications.awssns_backend as awssns_backend
|
||||
|
||||
|
||||
def test_send_messages():
|
||||
with mock.patch('awx.main.notifications.awssns_backend.AWSSNSBackend._sns_publish') as sns_publish_mock:
|
||||
aws_region = 'us-east-1'
|
||||
sns_topic = f"arn:aws:sns:{aws_region}:111111111111:topic-mock"
|
||||
backend = awssns_backend.AWSSNSBackend(aws_region=aws_region, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None)
|
||||
message = EmailMessage(
|
||||
'test subject',
|
||||
{'body': 'test body'},
|
||||
[],
|
||||
[
|
||||
sns_topic,
|
||||
],
|
||||
)
|
||||
sent_messages = backend.send_messages(
|
||||
[
|
||||
message,
|
||||
]
|
||||
)
|
||||
sns_publish_mock.assert_called_once_with(topic_arn=sns_topic, message=message.body)
|
||||
assert sent_messages == 1
|
||||
@@ -137,10 +137,10 @@ def test_send_notifications_not_list():
|
||||
|
||||
|
||||
def test_send_notifications_job_id(mocker):
|
||||
with mocker.patch('awx.main.models.UnifiedJob.objects.get'):
|
||||
system.send_notifications([], job_id=1)
|
||||
assert UnifiedJob.objects.get.called
|
||||
assert UnifiedJob.objects.get.called_with(id=1)
|
||||
mocker.patch('awx.main.models.UnifiedJob.objects.get')
|
||||
system.send_notifications([], job_id=1)
|
||||
assert UnifiedJob.objects.get.called
|
||||
assert UnifiedJob.objects.get.called_with(id=1)
|
||||
|
||||
|
||||
@mock.patch('awx.main.models.UnifiedJob.objects.get')
|
||||
|
||||
@@ -7,15 +7,15 @@ def test_produce_supervisor_command(mocker):
|
||||
mock_process = mocker.MagicMock()
|
||||
mock_process.communicate = communicate_mock
|
||||
Popen_mock = mocker.MagicMock(return_value=mock_process)
|
||||
with mocker.patch.object(reload.subprocess, 'Popen', Popen_mock):
|
||||
reload.supervisor_service_command("restart")
|
||||
reload.subprocess.Popen.assert_called_once_with(
|
||||
[
|
||||
'supervisorctl',
|
||||
'restart',
|
||||
'tower-processes:*',
|
||||
],
|
||||
stderr=-1,
|
||||
stdin=-1,
|
||||
stdout=-1,
|
||||
)
|
||||
mocker.patch.object(reload.subprocess, 'Popen', Popen_mock)
|
||||
reload.supervisor_service_command("restart")
|
||||
reload.subprocess.Popen.assert_called_once_with(
|
||||
[
|
||||
'supervisorctl',
|
||||
'restart',
|
||||
'tower-processes:*',
|
||||
],
|
||||
stderr=-1,
|
||||
stdin=-1,
|
||||
stdout=-1,
|
||||
)
|
||||
|
||||
@@ -2,9 +2,11 @@
|
||||
# All Rights Reserved.
|
||||
|
||||
# Python
|
||||
import base64
|
||||
import logging
|
||||
import sys
|
||||
import traceback
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
# Django
|
||||
@@ -15,6 +17,15 @@ from django.utils.encoding import force_str
|
||||
# AWX
|
||||
from awx.main.exceptions import PostRunError
|
||||
|
||||
# OTEL
|
||||
from opentelemetry._logs import set_logger_provider
|
||||
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter as OTLPGrpcLogExporter
|
||||
from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter as OTLPHttpLogExporter
|
||||
|
||||
from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
|
||||
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
|
||||
from opentelemetry.sdk.resources import Resource
|
||||
|
||||
|
||||
class RSysLogHandler(logging.handlers.SysLogHandler):
|
||||
append_nul = False
|
||||
@@ -133,3 +144,39 @@ if settings.COLOR_LOGS is True:
|
||||
pass
|
||||
else:
|
||||
ColorHandler = logging.StreamHandler
|
||||
|
||||
|
||||
class OTLPHandler(LoggingHandler):
|
||||
def __init__(self, endpoint=None, protocol='grpc', service_name=None, instance_id=None, auth=None, username=None, password=None):
|
||||
if not endpoint:
|
||||
raise ValueError("endpoint required")
|
||||
|
||||
if auth == 'basic' and (username is None or password is None):
|
||||
raise ValueError("auth type basic requires username and passsword parameters")
|
||||
|
||||
self.endpoint = endpoint
|
||||
self.service_name = service_name or (sys.argv[1] if len(sys.argv) > 1 else (sys.argv[0] or 'unknown_service'))
|
||||
self.instance_id = instance_id or os.uname().nodename
|
||||
|
||||
logger_provider = LoggerProvider(
|
||||
resource=Resource.create(
|
||||
{
|
||||
"service.name": self.service_name,
|
||||
"service.instance.id": self.instance_id,
|
||||
}
|
||||
),
|
||||
)
|
||||
set_logger_provider(logger_provider)
|
||||
|
||||
headers = {}
|
||||
if auth == 'basic':
|
||||
secret = f'{username}:{password}'
|
||||
headers['Authorization'] = "Basic " + base64.b64encode(secret.encode()).decode()
|
||||
|
||||
if protocol == 'grpc':
|
||||
otlp_exporter = OTLPGrpcLogExporter(endpoint=self.endpoint, insecure=True, headers=headers)
|
||||
elif protocol == 'http':
|
||||
otlp_exporter = OTLPHttpLogExporter(endpoint=self.endpoint, headers=headers)
|
||||
logger_provider.add_log_record_processor(BatchLogRecordProcessor(otlp_exporter))
|
||||
|
||||
super().__init__(level=logging.NOTSET, logger_provider=logger_provider)
|
||||
|
||||
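A hedged sketch of how this handler could be wired in through a Django logging override; the module path, endpoint, and credentials below are assumptions for illustration, not values shipped by AWX. dictConfig passes the extra keys straight to OTLPHandler.__init__ as keyword arguments.

# Hypothetical local settings override (illustrative only):
LOGGING['handlers']['otel'] = {
    'class': 'awx.main.utils.handlers.OTLPHandler',  # assumed import path for the class above
    'endpoint': 'http://otel-collector:4317',        # placeholder collector endpoint
    'protocol': 'grpc',
    'service_name': 'awx',
    'auth': 'basic',                                 # basic auth requires username and password
    'username': 'collector-user',
    'password': 'collector-pass',
}
# Route an existing logger through the handler (replaces its handler list in this sketch).
LOGGING['loggers']['awx'] = {'handlers': ['otel'], 'level': 'INFO'}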
48  awx/main/utils/proxy.py  (new file)
@@ -0,0 +1,48 @@
|
||||
# Copyright (c) 2024 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
|
||||
# DRF
|
||||
from rest_framework.request import Request
|
||||
|
||||
|
||||
"""
|
||||
Note that these methods operate on request.environ. This data is from uwsgi.
|
||||
It is the source data from which request.headers (read-only) is constructed.
|
||||
"""
|
||||
|
||||
|
||||
def is_proxy_in_headers(request: Request, proxy_list: list[str], headers: list[str]) -> bool:
|
||||
"""
|
||||
Determine if the request went through at least one proxy in the list.
|
||||
Example:
|
||||
request.environ = {
|
||||
"HTTP_X_FOO": "8.8.8.8, 192.168.2.1",
|
||||
"REMOTE_ADDR": "192.168.2.1",
|
||||
"REMOTE_HOST": "foobar"
|
||||
}
|
||||
proxy_list = ["192.168.2.1"]
|
||||
headers = ["HTTP_X_FOO", "REMOTE_ADDR", "REMOTE_HOST"]
|
||||
|
||||
The above would return True since 192.168.2.1 is a value for the header HTTP_X_FOO
|
||||
|
||||
request: The DRF/Django request. request.environ dict will be used for searching for proxies
|
||||
proxy_list: A list of known and trusted proxies; entries may be IPs or hostnames
|
||||
headers: A list of request.environ keys whose values may contain a proxy
|
||||
"""
|
||||
|
||||
remote_hosts = set()
|
||||
|
||||
for header in headers:
|
||||
for value in request.environ.get(header, '').split(','):
|
||||
value = value.strip()
|
||||
if value:
|
||||
remote_hosts.add(value)
|
||||
|
||||
return bool(remote_hosts.intersection(set(proxy_list)))
|
||||
|
||||
|
||||
def delete_headers_starting_with_http(request: Request, headers: list[str]):
|
||||
for header in headers:
|
||||
if header.startswith('HTTP_'):
|
||||
request.environ.pop(header, None)
|
||||
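The docstring for is_proxy_in_headers() already walks through one scenario; a minimal usage sketch follows, assuming a hypothetical FakeRequest stand-in that only exposes the environ dict the helper reads.

# Illustrative sketch only -- FakeRequest is not the real DRF Request.
class FakeRequest:
    def __init__(self, environ):
        self.environ = environ

req = FakeRequest({"HTTP_X_FOO": "8.8.8.8, 192.168.2.1", "REMOTE_ADDR": "192.168.2.1"})
# 192.168.2.1 appears in one of the inspected headers, so the trusted proxy is detected.
assert is_proxy_in_headers(req, ["192.168.2.1"], ["HTTP_X_FOO", "REMOTE_ADDR"]) is True
# An unrelated proxy list yields False.
assert is_proxy_in_headers(req, ["10.0.0.1"], ["HTTP_X_FOO", "REMOTE_ADDR"]) is False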
@@ -285,8 +285,6 @@ class WebSocketRelayManager(object):
|
||||
except asyncio.CancelledError:
|
||||
# Handle the case where the task was already cancelled by the time we got here.
|
||||
pass
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to cancel relay connection for {hostname}: {e}")
|
||||
|
||||
del self.relay_connections[hostname]
|
||||
|
||||
@@ -297,8 +295,6 @@ class WebSocketRelayManager(object):
|
||||
self.stats_mgr.delete_remote_host_stats(hostname)
|
||||
except KeyError:
|
||||
pass
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to delete stats for {hostname}: {e}")
|
||||
|
||||
async def run(self):
|
||||
event_loop = asyncio.get_running_loop()
|
||||
@@ -306,7 +302,6 @@ class WebSocketRelayManager(object):
|
||||
self.stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
|
||||
self.stats_mgr.start()
|
||||
|
||||
# Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
|
||||
database_conf = deepcopy(settings.DATABASES['default'])
|
||||
database_conf['OPTIONS'] = deepcopy(database_conf.get('OPTIONS', {}))
|
||||
|
||||
@@ -318,79 +313,54 @@ class WebSocketRelayManager(object):
|
||||
if 'PASSWORD' in database_conf:
|
||||
database_conf['OPTIONS']['password'] = database_conf.pop('PASSWORD')
|
||||
|
||||
task = None
|
||||
async_conn = await psycopg.AsyncConnection.connect(
|
||||
dbname=database_conf['NAME'],
|
||||
host=database_conf['HOST'],
|
||||
user=database_conf['USER'],
|
||||
port=database_conf['PORT'],
|
||||
**database_conf.get("OPTIONS", {}),
|
||||
)
|
||||
|
||||
# Managing the async_conn here so that we can close it if we need to restart the connection
|
||||
async_conn = None
|
||||
await async_conn.set_autocommit(True)
|
||||
on_ws_heartbeat_task = event_loop.create_task(self.on_ws_heartbeat(async_conn))
|
||||
|
||||
# Establishes a websocket connection to /websocket/relay on all API servers
|
||||
try:
|
||||
while True:
|
||||
if not task or task.done():
|
||||
try:
|
||||
# Try to close the connection if it's open
|
||||
if async_conn:
|
||||
try:
|
||||
await async_conn.close()
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to close connection to database for pg_notify: {e}")
|
||||
while True:
|
||||
if on_ws_heartbeat_task.done():
|
||||
raise Exception("on_ws_heartbeat_task has exited")
|
||||
|
||||
# and re-establish the connection
|
||||
async_conn = await psycopg.AsyncConnection.connect(
|
||||
dbname=database_conf['NAME'],
|
||||
host=database_conf['HOST'],
|
||||
user=database_conf['USER'],
|
||||
port=database_conf['PORT'],
|
||||
**database_conf.get("OPTIONS", {}),
|
||||
)
|
||||
await async_conn.set_autocommit(True)
|
||||
future_remote_hosts = self.known_hosts.keys()
|
||||
current_remote_hosts = self.relay_connections.keys()
|
||||
deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
|
||||
new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts)
|
||||
|
||||
# before creating the task that uses the connection
|
||||
task = event_loop.create_task(self.on_ws_heartbeat(async_conn), name="on_ws_heartbeat")
|
||||
logger.info("Creating `on_ws_heartbeat` task in event loop.")
|
||||
# This loop handles if we get an advertisement from a host we already know about but
|
||||
# the advertisement has a different IP than we are currently connected to.
|
||||
for hostname, address in self.known_hosts.items():
|
||||
if hostname not in self.relay_connections:
|
||||
# We've picked up a new hostname that we don't know about yet.
|
||||
continue
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to connect to database for pg_notify: {e}")
|
||||
if address != self.relay_connections[hostname].remote_host:
|
||||
deleted_remote_hosts.add(hostname)
|
||||
new_remote_hosts.add(hostname)
|
||||
|
||||
future_remote_hosts = self.known_hosts.keys()
|
||||
current_remote_hosts = self.relay_connections.keys()
|
||||
deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
|
||||
new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts)
|
||||
# Delete any hosts with closed connections
|
||||
for hostname, relay_conn in self.relay_connections.items():
|
||||
if not relay_conn.connected:
|
||||
deleted_remote_hosts.add(hostname)
|
||||
|
||||
# This loop handles if we get an advertisement from a host we already know about but
|
||||
# the advertisement has a different IP than we are currently connected to.
|
||||
for hostname, address in self.known_hosts.items():
|
||||
if hostname not in self.relay_connections:
|
||||
# We've picked up a new hostname that we don't know about yet.
|
||||
continue
|
||||
if deleted_remote_hosts:
|
||||
logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list")
|
||||
await asyncio.gather(*[self.cleanup_offline_host(h) for h in deleted_remote_hosts])
|
||||
|
||||
if address != self.relay_connections[hostname].remote_host:
|
||||
deleted_remote_hosts.add(hostname)
|
||||
new_remote_hosts.add(hostname)
|
||||
if new_remote_hosts:
|
||||
logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")
|
||||
|
||||
# Delete any hosts with closed connections
|
||||
for hostname, relay_conn in self.relay_connections.items():
|
||||
if not relay_conn.connected:
|
||||
deleted_remote_hosts.add(hostname)
|
||||
for h in new_remote_hosts:
|
||||
stats = self.stats_mgr.new_remote_host_stats(h)
|
||||
relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])
|
||||
relay_connection.start()
|
||||
self.relay_connections[h] = relay_connection
|
||||
|
||||
if deleted_remote_hosts:
|
||||
logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list")
|
||||
await asyncio.gather(*[self.cleanup_offline_host(h) for h in deleted_remote_hosts])
|
||||
|
||||
if new_remote_hosts:
|
||||
logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")
|
||||
|
||||
for h in new_remote_hosts:
|
||||
stats = self.stats_mgr.new_remote_host_stats(h)
|
||||
relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])
|
||||
relay_connection.start()
|
||||
self.relay_connections[h] = relay_connection
|
||||
|
||||
await asyncio.sleep(settings.BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS)
|
||||
finally:
|
||||
if async_conn:
|
||||
logger.info("Shutting down db connection for wsrelay.")
|
||||
try:
|
||||
await async_conn.close()
|
||||
except Exception as e:
|
||||
logger.info(f"Failed to close connection to database for pg_notify: {e}")
|
||||
await asyncio.sleep(settings.BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS)
|
||||
|
||||
@@ -114,6 +114,7 @@ MEDIA_ROOT = os.path.join(BASE_DIR, 'public', 'media')
|
||||
MEDIA_URL = '/media/'
|
||||
|
||||
LOGIN_URL = '/api/login/'
|
||||
LOGOUT_ALLOWED_HOSTS = None
|
||||
|
||||
# Absolute filesystem path to the directory to host projects (with playbooks).
|
||||
# This directory should not be web-accessible.
|
||||
@@ -491,6 +492,7 @@ CELERYBEAT_SCHEDULE = {
|
||||
'cleanup_images': {'task': 'awx.main.tasks.system.cleanup_images_and_files', 'schedule': timedelta(hours=3)},
|
||||
'cleanup_host_metrics': {'task': 'awx.main.tasks.host_metrics.cleanup_host_metrics', 'schedule': timedelta(hours=3, minutes=30)},
|
||||
'host_metric_summary_monthly': {'task': 'awx.main.tasks.host_metrics.host_metric_summary_monthly', 'schedule': timedelta(hours=4)},
|
||||
'periodic_resource_sync': {'task': 'awx.main.tasks.system.periodic_resource_sync', 'schedule': timedelta(minutes=15)},
|
||||
}
|
||||
|
||||
# Django Caching Configuration
|
||||
@@ -655,6 +657,10 @@ AWX_ANSIBLE_CALLBACK_PLUGINS = ""
|
||||
# Automatically remove nodes that have missed their heartbeats after some time
|
||||
AWX_AUTO_DEPROVISION_INSTANCES = False
|
||||
|
||||
# If False, do not allow creation of resources that are shared with the platform ingress
|
||||
# e.g. organizations, teams, and users
|
||||
ALLOW_LOCAL_RESOURCE_MANAGEMENT = True
|
||||
|
||||
# Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed'
|
||||
# Note: This setting may be overridden by database settings.
|
||||
PENDO_TRACKING_STATE = "off"
|
||||
@@ -777,6 +783,11 @@ INSIGHTS_EXCLUDE_EMPTY_GROUPS = False
|
||||
TERRAFORM_INSTANCE_ID_VAR = 'id'
|
||||
TERRAFORM_EXCLUDE_EMPTY_GROUPS = True
|
||||
|
||||
# ------------------------
|
||||
# OpenShift Virtualization
|
||||
# ------------------------
|
||||
OPENSHIFT_VIRTUALIZATION_EXCLUDE_EMPTY_GROUPS = True
|
||||
|
||||
# ---------------------
|
||||
# ----- Custom -----
|
||||
# ---------------------
|
||||
@@ -879,6 +890,7 @@ LOGGING = {
|
||||
'address': '/var/run/awx-rsyslog/rsyslog.sock',
|
||||
'filters': ['external_log_enabled', 'dynamic_level_filter', 'guid'],
|
||||
},
|
||||
'otel': {'class': 'logging.NullHandler'},
|
||||
},
|
||||
'loggers': {
|
||||
'django': {'handlers': ['console']},
|
||||
@@ -1160,9 +1172,6 @@ ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED = True
|
||||
# Permissions a user will get when creating a new item
|
||||
ANSIBLE_BASE_CREATOR_DEFAULTS = ['change', 'delete', 'execute', 'use', 'adhoc', 'approve', 'update', 'view']
|
||||
|
||||
# This is a stopgap, will delete after resource registry integration
|
||||
ANSIBLE_BASE_SERVICE_PREFIX = "awx"
|
||||
|
||||
# Temporary, for old roles API compatibility, save child permissions at organization level
|
||||
ANSIBLE_BASE_CACHE_PARENT_PERMISSIONS = True
|
||||
|
||||
@@ -1176,6 +1185,3 @@ ANSIBLE_BASE_ALLOW_SINGLETON_ROLES_API = False # Do not allow creating user-def
|
||||
|
||||
# system username for django-ansible-base
|
||||
SYSTEM_USERNAME = None
|
||||
|
||||
# Use AWX base view, to give 401 on unauthenticated requests
|
||||
ANSIBLE_BASE_CUSTOM_VIEW_PARENT = 'awx.api.generics.APIView'
|
||||
|
||||
3004  awx/sso/conf.py  (file diff suppressed because it is too large)
@@ -7,18 +7,18 @@ from django.core.cache import cache
|
||||
|
||||
def test_ldap_default_settings(mocker):
|
||||
from_db = mocker.Mock(**{'order_by.return_value': []})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db):
|
||||
settings = LDAPSettings()
|
||||
assert settings.ORGANIZATION_MAP == {}
|
||||
assert settings.TEAM_MAP == {}
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db)
|
||||
settings = LDAPSettings()
|
||||
assert settings.ORGANIZATION_MAP == {}
|
||||
assert settings.TEAM_MAP == {}
|
||||
|
||||
|
||||
def test_ldap_default_network_timeout(mocker):
|
||||
cache.clear() # clearing cache avoids picking up stray default for OPT_REFERRALS
|
||||
from_db = mocker.Mock(**{'order_by.return_value': []})
|
||||
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db):
|
||||
settings = LDAPSettings()
|
||||
assert settings.CONNECTION_OPTIONS[ldap.OPT_NETWORK_TIMEOUT] == 30
|
||||
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db)
|
||||
settings = LDAPSettings()
|
||||
assert settings.CONNECTION_OPTIONS[ldap.OPT_NETWORK_TIMEOUT] == 30
|
||||
|
||||
|
||||
def test_ldap_filter_validator():
|
||||
|
||||
@@ -62,7 +62,7 @@ function CredentialLookup({
|
||||
? { credential_type: credentialTypeId }
|
||||
: {};
|
||||
const typeKindParams = credentialTypeKind
|
||||
? { credential_type__kind: credentialTypeKind }
|
||||
? { credential_type__kind__in: credentialTypeKind }
|
||||
: {};
|
||||
const typeNamespaceParams = credentialTypeNamespace
|
||||
? { credential_type__namespace: credentialTypeNamespace }
|
||||
@@ -125,7 +125,7 @@ function CredentialLookup({
|
||||
? { credential_type: credentialTypeId }
|
||||
: {};
|
||||
const typeKindParams = credentialTypeKind
|
||||
? { credential_type__kind: credentialTypeKind }
|
||||
? { credential_type__kind__in: credentialTypeKind }
|
||||
: {};
|
||||
const typeNamespaceParams = credentialTypeNamespace
|
||||
? { credential_type__namespace: credentialTypeNamespace }
|
||||
|
||||
@@ -190,6 +190,7 @@ function NotificationList({
|
||||
name: t`Notification type`,
|
||||
key: 'or__notification_type',
|
||||
options: [
|
||||
['awssns', t`AWS SNS`],
|
||||
['email', t`Email`],
|
||||
['grafana', t`Grafana`],
|
||||
['hipchat', t`Hipchat`],
|
||||
|
||||
@@ -12,7 +12,7 @@ const Inner = styled.div`
|
||||
border-radius: 2px;
|
||||
color: white;
|
||||
left: 10px;
|
||||
max-width: 300px;
|
||||
max-width: 500px;
|
||||
padding: 5px 10px;
|
||||
position: absolute;
|
||||
top: 10px;
|
||||
|
||||
@@ -12,6 +12,7 @@ const GridDL = styled.dl`
|
||||
column-gap: 15px;
|
||||
display: grid;
|
||||
grid-template-columns: max-content;
|
||||
overflow-wrap: anywhere;
|
||||
row-gap: 0px;
|
||||
dt {
|
||||
grid-column-start: 1;
|
||||
|
||||
@@ -56,6 +56,10 @@ describe('<InventorySourceAdd />', () => {
|
||||
['satellite6', 'Red Hat Satellite 6'],
|
||||
['openstack', 'OpenStack'],
|
||||
['rhv', 'Red Hat Virtualization'],
|
||||
[
|
||||
'openshift_virtualization',
|
||||
'Red Hat OpenShift Virtualization',
|
||||
],
|
||||
['controller', 'Red Hat Ansible Automation Platform'],
|
||||
],
|
||||
},
|
||||
|
||||
@@ -22,7 +22,9 @@ const ansibleDocUrls = {
|
||||
constructed:
|
||||
'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
|
||||
terraform:
|
||||
'https://github.com/ansible-collections/cloud.terraform/blob/stable-statefile-inventory/plugins/inventory/terraform_state.py',
|
||||
'https://github.com/ansible-collections/cloud.terraform/blob/main/docs/cloud.terraform.terraform_state_inventory.rst',
|
||||
openshift_virtualization:
|
||||
'https://kubevirt.io/kubevirt.core/latest/plugins/kubevirt.html',
|
||||
};
|
||||
|
||||
const getInventoryHelpTextStrings = () => ({
|
||||
@@ -121,7 +123,7 @@ const getInventoryHelpTextStrings = () => ({
|
||||
<br />
|
||||
{value && (
|
||||
<div>
|
||||
{t`If you want the Inventory Source to update on launch , click on Update on Launch,
|
||||
{t`If you want the Inventory Source to update on launch , click on Update on Launch,
|
||||
and also go to `}
|
||||
<Link to={`/projects/${value.id}/details`}> {value.name} </Link>
|
||||
{t`and click on Update Revision on Launch.`}
|
||||
@@ -140,7 +142,7 @@ const getInventoryHelpTextStrings = () => ({
|
||||
<br />
|
||||
{value && (
|
||||
<div>
|
||||
{t`If you want the Inventory Source to update on launch , click on Update on Launch,
|
||||
{t`If you want the Inventory Source to update on launch , click on Update on Launch,
|
||||
and also go to `}
|
||||
<Link to={`/projects/${value.id}/details`}> {value.name} </Link>
|
||||
{t`and click on Update Revision on Launch`}
|
||||
|
||||
@@ -26,6 +26,7 @@ import {
|
||||
TerraformSubForm,
|
||||
VMwareSubForm,
|
||||
VirtualizationSubForm,
|
||||
OpenShiftVirtualizationSubForm,
|
||||
} from './InventorySourceSubForms';
|
||||
|
||||
const buildSourceChoiceOptions = (options) => {
|
||||
@@ -231,6 +232,15 @@ const InventorySourceFormFields = ({
|
||||
sourceOptions={sourceOptions}
|
||||
/>
|
||||
),
|
||||
openshift_virtualization: (
|
||||
<OpenShiftVirtualizationSubForm
|
||||
autoPopulateCredential={
|
||||
!source?.id ||
|
||||
source?.source !== 'openshift_virtualization'
|
||||
}
|
||||
sourceOptions={sourceOptions}
|
||||
/>
|
||||
),
|
||||
}[sourceField.value]
|
||||
}
|
||||
</FormColumnLayout>
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
import React, { useCallback } from 'react';
|
||||
import { useField, useFormikContext } from 'formik';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { useConfig } from 'contexts/Config';
|
||||
import getDocsBaseUrl from 'util/getDocsBaseUrl';
|
||||
import CredentialLookup from 'components/Lookup/CredentialLookup';
|
||||
import { required } from 'util/validators';
|
||||
import {
|
||||
OptionsField,
|
||||
VerbosityField,
|
||||
EnabledVarField,
|
||||
EnabledValueField,
|
||||
HostFilterField,
|
||||
SourceVarsField,
|
||||
} from './SharedFields';
|
||||
import getHelpText from '../Inventory.helptext';
|
||||
|
||||
const OpenShiftVirtualizationSubForm = ({ autoPopulateCredential }) => {
|
||||
const helpText = getHelpText();
|
||||
const { setFieldValue, setFieldTouched } = useFormikContext();
|
||||
const [credentialField, credentialMeta, credentialHelpers] =
|
||||
useField('credential');
|
||||
const config = useConfig();
|
||||
|
||||
const handleCredentialUpdate = useCallback(
|
||||
(value) => {
|
||||
setFieldValue('credential', value);
|
||||
setFieldTouched('credential', true, false);
|
||||
},
|
||||
[setFieldValue, setFieldTouched]
|
||||
);
|
||||
|
||||
const docsBaseUrl = getDocsBaseUrl(config);
|
||||
return (
|
||||
<>
|
||||
<CredentialLookup
|
||||
credentialTypeNamespace="kubernetes_bearer_token"
|
||||
label={t`Credential`}
|
||||
helperTextInvalid={credentialMeta.error}
|
||||
isValid={!credentialMeta.touched || !credentialMeta.error}
|
||||
onBlur={() => credentialHelpers.setTouched()}
|
||||
onChange={handleCredentialUpdate}
|
||||
value={credentialField.value}
|
||||
required
|
||||
autoPopulate={autoPopulateCredential}
|
||||
validate={required(t`Select a value for this field`)}
|
||||
/>
|
||||
<VerbosityField />
|
||||
<HostFilterField />
|
||||
<EnabledVarField />
|
||||
<EnabledValueField />
|
||||
<OptionsField />
|
||||
<SourceVarsField
|
||||
popoverContent={helpText.sourceVars(
|
||||
docsBaseUrl,
|
||||
'openshift_virtualization'
|
||||
)}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
export default OpenShiftVirtualizationSubForm;
|
||||
@@ -0,0 +1,65 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { Formik } from 'formik';
|
||||
import { CredentialsAPI } from 'api';
|
||||
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
|
||||
import VirtualizationSubForm from './VirtualizationSubForm';
|
||||
|
||||
jest.mock('../../../../api');
|
||||
|
||||
const initialValues = {
|
||||
credential: null,
|
||||
overwrite: false,
|
||||
overwrite_vars: false,
|
||||
source_path: '',
|
||||
source_project: null,
|
||||
source_script: null,
|
||||
source_vars: '---\n',
|
||||
update_cache_timeout: 0,
|
||||
update_on_launch: true,
|
||||
verbosity: 1,
|
||||
};
|
||||
|
||||
describe('<VirtualizationSubForm />', () => {
|
||||
let wrapper;
|
||||
|
||||
beforeEach(async () => {
|
||||
CredentialsAPI.read.mockResolvedValue({
|
||||
data: { count: 0, results: [] },
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={initialValues}>
|
||||
<VirtualizationSubForm />
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
test('should render subform fields', () => {
|
||||
expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
|
||||
expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
|
||||
expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
|
||||
).toHaveLength(1);
|
||||
expect(
|
||||
wrapper.find('VariablesField[label="Source variables"]')
|
||||
).toHaveLength(1);
|
||||
});
|
||||
|
||||
test('should make expected api calls', () => {
|
||||
expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
|
||||
expect(CredentialsAPI.read).toHaveBeenCalledWith({
|
||||
credential_type__namespace: 'rhv',
|
||||
order_by: 'name',
|
||||
page: 1,
|
||||
page_size: 5,
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -87,7 +87,7 @@ const SCMSubForm = ({ autoPopulateProject }) => {
|
||||
/>
|
||||
)}
|
||||
<CredentialLookup
|
||||
credentialTypeKind="cloud"
|
||||
credentialTypeKind="cloud,kubernetes"
|
||||
label={t`Credential`}
|
||||
value={credentialField.value}
|
||||
onChange={handleCredentialUpdate}
|
||||
|
||||
@@ -9,3 +9,4 @@ export { default as ControllerSubForm } from './ControllerSubForm';
|
||||
export { default as TerraformSubForm } from './TerraformSubForm';
|
||||
export { default as VMwareSubForm } from './VMwareSubForm';
|
||||
export { default as VirtualizationSubForm } from './VirtualizationSubForm';
|
||||
export { default as OpenShiftVirtualizationSubForm } from './OpenShiftVirtualizationSubForm';
|
||||
|
||||
@@ -138,6 +138,25 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
|
||||
}
|
||||
dataCy="nt-detail-type"
|
||||
/>
|
||||
{template.notification_type === 'awssns' && (
|
||||
<>
|
||||
<Detail
|
||||
label={t`AWS Region`}
|
||||
value={configuration.aws_region}
|
||||
dataCy="nt-detail-aws-region"
|
||||
/>
|
||||
<Detail
|
||||
label={t`Access Key ID`}
|
||||
value={configuration.aws_access_key_id}
|
||||
dataCy="nt-detail-aws-access-key-id"
|
||||
/>
|
||||
<Detail
|
||||
label={t`SNS Topic ARN`}
|
||||
value={configuration.sns_topic_arn}
|
||||
dataCy="nt-detail-sns-topic-arn"
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{template.notification_type === 'email' && (
|
||||
<>
|
||||
<Detail
|
||||
@@ -455,8 +474,8 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
|
||||
}
|
||||
|
||||
function CustomMessageDetails({ messages, defaults, type }) {
|
||||
const showMessages = type !== 'webhook';
|
||||
const showBodies = ['email', 'pagerduty', 'webhook'].includes(type);
|
||||
const showMessages = !['awssns', 'webhook'].includes(type);
|
||||
const showBodies = ['email', 'pagerduty', 'webhook', 'awssns'].includes(type);
|
||||
|
||||
return (
|
||||
<>
|
||||
|
||||
@@ -120,7 +120,7 @@ function NotificationTemplatesList() {
|
||||
toolbarSearchColumns={[
|
||||
{
|
||||
name: t`Name`,
|
||||
key: 'name',
|
||||
key: 'name__icontains',
|
||||
isDefault: true,
|
||||
},
|
||||
{
|
||||
@@ -131,6 +131,7 @@ function NotificationTemplatesList() {
|
||||
name: t`Notification type`,
|
||||
key: 'or__notification_type',
|
||||
options: [
|
||||
['awssns', t`AWS SNS`],
|
||||
['email', t`Email`],
|
||||
['grafana', t`Grafana`],
|
||||
['hipchat', t`Hipchat`],
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
/* eslint-disable-next-line import/prefer-default-export */
|
||||
export const NOTIFICATION_TYPES = {
|
||||
awssns: 'AWS SNS',
|
||||
email: 'Email',
|
||||
grafana: 'Grafana',
|
||||
irc: 'IRC',
|
||||
|
||||
@@ -11,8 +11,8 @@ import getDocsBaseUrl from 'util/getDocsBaseUrl';
|
||||
|
||||
function CustomMessagesSubForm({ defaultMessages, type }) {
|
||||
const [useCustomField, , useCustomHelpers] = useField('useCustomMessages');
|
||||
const showMessages = type !== 'webhook';
|
||||
const showBodies = ['email', 'pagerduty', 'webhook'].includes(type);
|
||||
const showMessages = !['webhook', 'awssns'].includes(type);
|
||||
const showBodies = ['email', 'pagerduty', 'webhook', 'awssns'].includes(type);
|
||||
|
||||
const { setFieldValue } = useFormikContext();
|
||||
const config = useConfig();
|
||||
|
||||
@@ -78,6 +78,7 @@ function NotificationTemplateFormFields({ defaultMessages, template }) {
|
||||
label: t`Choose a Notification Type`,
|
||||
isDisabled: true,
|
||||
},
|
||||
{ value: 'awssns', key: 'awssns', label: t`AWS SNS` },
|
||||
{ value: 'email', key: 'email', label: t`E-mail` },
|
||||
{ value: 'grafana', key: 'grafana', label: 'Grafana' },
|
||||
{ value: 'irc', key: 'irc', label: 'IRC' },
|
||||
|
||||
@@ -29,6 +29,7 @@ import Popover from '../../../components/Popover/Popover';
|
||||
import getHelpText from './Notifications.helptext';
|
||||
|
||||
const TypeFields = {
|
||||
awssns: AWSSNSFields,
|
||||
email: EmailFields,
|
||||
grafana: GrafanaFields,
|
||||
irc: IRCFields,
|
||||
@@ -58,6 +59,44 @@ TypeInputsSubForm.propTypes = {
|
||||
|
||||
export default TypeInputsSubForm;
|
||||
|
||||
function AWSSNSFields() {
|
||||
return (
|
||||
<>
|
||||
<FormField
|
||||
id="awssns-aws-region"
|
||||
label={t`AWS Region`}
|
||||
name="notification_configuration.aws_region"
|
||||
type="text"
|
||||
isRequired
|
||||
/>
|
||||
<FormField
|
||||
id="awssns-aws-access-key-id"
|
||||
label={t`Access Key ID`}
|
||||
name="notification_configuration.aws_access_key_id"
|
||||
type="text"
|
||||
/>
|
||||
<PasswordField
|
||||
id="awssns-aws-secret-access-key"
|
||||
label={t`Secret Access Key`}
|
||||
name="notification_configuration.aws_secret_access_key"
|
||||
/>
|
||||
<PasswordField
|
||||
id="awssns-aws-session-token"
|
||||
label={t`Session Token`}
|
||||
name="notification_configuration.aws_session_token"
|
||||
/>
|
||||
<FormField
|
||||
id="awssns-sns-topic-arn"
|
||||
label={t`SNS Topic ARN`}
|
||||
name="notification_configuration.sns_topic_arn"
|
||||
type="text"
|
||||
validate={required(null)}
|
||||
isRequired
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
function EmailFields() {
|
||||
const helpText = getHelpText();
|
||||
return (
|
||||
|
||||
@@ -203,6 +203,39 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"awssns": {
|
||||
"started": {
|
||||
"body": "{{ job_metadata }}"
|
||||
},
|
||||
"success": {
|
||||
"body": "{{ job_metadata }}"
|
||||
},
|
||||
"error": {
|
||||
"body": "{{ job_metadata }}"
|
||||
},
|
||||
"workflow_approval": {
|
||||
"running": {
|
||||
"body": {
|
||||
"body": "The approval node \"{{ approval_node_name }}\" needs review. This node can be viewed at: {{ workflow_url }}"
|
||||
}
|
||||
},
|
||||
"approved": {
|
||||
"body": {
|
||||
"body": "The approval node \"{{ approval_node_name }}\" was approved. {{ workflow_url }}"
|
||||
}
|
||||
},
|
||||
"timed_out": {
|
||||
"body": {
|
||||
"body": "The approval node \"{{ approval_node_name }}\" has timed out. {{ workflow_url }}"
|
||||
}
|
||||
},
|
||||
"denied": {
|
||||
"body": {
|
||||
"body": "The approval node \"{{ approval_node_name }}\" was denied. {{ workflow_url }}"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mattermost": {
|
||||
"started": {
|
||||
"message": "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}",
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
const typeFieldNames = {
|
||||
awssns: [
|
||||
'aws_region',
|
||||
'aws_access_key_id',
|
||||
'aws_secret_access_key',
|
||||
'aws_session_token',
|
||||
'sns_topic_arn',
|
||||
],
|
||||
email: [
|
||||
'username',
|
||||
'password',
|
||||
|
||||
@@ -374,6 +374,7 @@ export const CredentialType = shape({
|
||||
});
|
||||
|
||||
export const NotificationType = oneOf([
|
||||
'awssns',
|
||||
'email',
|
||||
'grafana',
|
||||
'irc',
|
||||
|
||||
@@ -17,7 +17,7 @@ import time
|
||||
import re
|
||||
from json import loads, dumps
|
||||
from os.path import isfile, expanduser, split, join, exists, isdir
|
||||
from os import access, R_OK, getcwd, environ
|
||||
from os import access, R_OK, getcwd, environ, getenv
|
||||
|
||||
|
||||
try:
|
||||
@@ -107,7 +107,7 @@ class ControllerModule(AnsibleModule):
|
||||
# Perform magic depending on whether controller_oauthtoken is a string or a dict
|
||||
if self.params.get('controller_oauthtoken'):
|
||||
token_param = self.params.get('controller_oauthtoken')
|
||||
if type(token_param) is dict:
|
||||
if isinstance(token_param, dict):
|
||||
if 'token' in token_param:
|
||||
self.oauth_token = self.params.get('controller_oauthtoken')['token']
|
||||
else:
|
||||
@@ -148,9 +148,10 @@ class ControllerModule(AnsibleModule):
|
||||
# Make sure we start with /api/vX
|
||||
if not endpoint.startswith("/"):
|
||||
endpoint = "/{0}".format(endpoint)
|
||||
prefix = self.url_prefix.rstrip("/")
|
||||
if not endpoint.startswith(prefix + "/api/"):
|
||||
endpoint = prefix + "/api/v2{0}".format(endpoint)
|
||||
hostname_prefix = self.url_prefix.rstrip("/")
|
||||
api_path = self.api_path()
|
||||
if not endpoint.startswith(hostname_prefix + api_path):
|
||||
endpoint = hostname_prefix + f"{api_path}v2{endpoint}"
|
||||
if not endpoint.endswith('/') and '?' not in endpoint:
|
||||
endpoint = "{0}/".format(endpoint)
|
||||
|
||||
@@ -215,7 +216,7 @@ class ControllerModule(AnsibleModule):
|
||||
try:
|
||||
config_data = yaml.load(config_string, Loader=yaml.SafeLoader)
|
||||
# If this is an actual ini file, yaml will return the whole thing as a string instead of a dict
|
||||
if type(config_data) is not dict:
|
||||
if not isinstance(config_data, dict):
|
||||
raise AssertionError("The yaml config file is not properly formatted as a dict.")
|
||||
try_config_parsing = False
|
||||
|
||||
@@ -257,7 +258,7 @@ class ControllerModule(AnsibleModule):
|
||||
if honorred_setting in config_data:
|
||||
# Verify SSL must be a boolean
|
||||
if honorred_setting == 'verify_ssl':
|
||||
if type(config_data[honorred_setting]) is str:
|
||||
if isinstance(config_data[honorred_setting], str):
|
||||
setattr(self, honorred_setting, strtobool(config_data[honorred_setting]))
|
||||
else:
|
||||
setattr(self, honorred_setting, bool(config_data[honorred_setting]))
|
||||
@@ -603,6 +604,14 @@ class ControllerAPIModule(ControllerModule):
|
||||
status_code = response.status
|
||||
return {'status_code': status_code, 'json': response_json}
|
||||
|
||||
def api_path(self):
|
||||
|
||||
default_api_path = "/api/"
|
||||
if self._COLLECTION_TYPE != "awx":
|
||||
default_api_path = "/api/controller/"
|
||||
prefix = getenv('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', default_api_path)
|
||||
return prefix
|
||||
|
||||
def authenticate(self, **kwargs):
|
||||
if self.username and self.password:
|
||||
# Attempt to get a token from /api/v2/tokens/ by giving it our username/password combo
|
||||
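For reference, a minimal standalone sketch of the prefix resolution performed by api_path() above; the function name and example values are illustrative only.

import os

def example_api_path(collection_type="awx"):
    """Illustrative mirror of api_path(): the env var wins, else a per-collection default."""
    default_api_path = "/api/" if collection_type == "awx" else "/api/controller/"
    return os.getenv('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', default_api_path)

# example_api_path()              -> "/api/"            (awx collection, no override)
# example_api_path("controller")  -> "/api/controller/" (non-awx collection default)
# With CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX=/api/foo/ set, both calls return "/api/foo/".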
@@ -613,7 +622,7 @@ class ControllerAPIModule(ControllerModule):
|
||||
"scope": "write",
|
||||
}
|
||||
# Preserve URL prefix
|
||||
endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/'
|
||||
endpoint = self.url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/'
|
||||
# Post to the tokens endpoint with basic auth to try and get a token
|
||||
api_token_url = (self.url._replace(path=endpoint)).geturl()
|
||||
|
||||
@@ -1002,7 +1011,7 @@ class ControllerAPIModule(ControllerModule):
|
||||
if self.authenticated and self.oauth_token_id:
|
||||
# Attempt to delete our current token from /api/v2/tokens/
|
||||
# Post to the tokens endpoint with basic auth to try and get a token
|
||||
endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/{0}/'.format(self.oauth_token_id)
|
||||
endpoint = self.url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/{self.oauth_token_id}/'
|
||||
api_token_url = (self.url._replace(path=endpoint, query=None)).geturl() # in error cases, fail_json exists before exception handling
|
||||
|
||||
try:
|
||||
|
||||
@@ -163,7 +163,7 @@ def main():
|
||||
for arg in ['job_type', 'limit', 'forks', 'verbosity', 'extra_vars', 'become_enabled', 'diff_mode']:
|
||||
if module.params.get(arg):
|
||||
# extra_vars can receive a dict or a string; if a dict, convert it to a string
|
||||
if arg == 'extra_vars' and type(module.params.get(arg)) is not str:
|
||||
if arg == 'extra_vars' and not isinstance(module.params.get(arg), str):
|
||||
post_data[arg] = json.dumps(module.params.get(arg))
|
||||
else:
|
||||
post_data[arg] = module.params.get(arg)
|
||||
|
||||
@@ -121,6 +121,7 @@ def main():
|
||||
client_type = module.params.get('client_type')
|
||||
organization = module.params.get('organization')
|
||||
redirect_uris = module.params.get('redirect_uris')
|
||||
skip_authorization = module.params.get('skip_authorization')
|
||||
state = module.params.get('state')
|
||||
|
||||
# Attempt to look up the related items the user specified (these will fail the module if not found)
|
||||
@@ -146,6 +147,8 @@ def main():
|
||||
application_fields['description'] = description
|
||||
if redirect_uris is not None:
|
||||
application_fields['redirect_uris'] = ' '.join(redirect_uris)
|
||||
if skip_authorization is not None:
|
||||
application_fields['skip_authorization'] = skip_authorization
|
||||
|
||||
response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False)
|
||||
if 'client_id' in response:
|
||||
|
||||
@@ -56,7 +56,7 @@ import logging
|
||||
|
||||
# In this module we don't use EXPORTABLE_RESOURCES, we just want to validate that our installed awxkit has import/export
|
||||
try:
|
||||
from awxkit.api.pages.api import EXPORTABLE_RESOURCES # noqa
|
||||
from awxkit.api.pages.api import EXPORTABLE_RESOURCES # noqa: F401; pylint: disable=unused-import
|
||||
|
||||
HAS_EXPORTABLE_RESOURCES = True
|
||||
except ImportError:
|
||||
|
||||
@@ -42,7 +42,8 @@ options:
|
||||
source:
|
||||
description:
|
||||
- The source to use for this group.
|
||||
choices: [ "scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights" ]
|
||||
choices: [ "scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights", "terraform",
|
||||
"openshift_virtualization" ]
|
||||
type: str
|
||||
source_path:
|
||||
description:
|
||||
@@ -170,7 +171,22 @@ def main():
|
||||
#
|
||||
# How do we handle manual and file? The controller does not seem to be able to activate them
|
||||
#
|
||||
source=dict(choices=["scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights"]),
|
||||
source=dict(
|
||||
choices=[
|
||||
"scm",
|
||||
"ec2",
|
||||
"gce",
|
||||
"azure_rm",
|
||||
"vmware",
|
||||
"satellite6",
|
||||
"openstack",
|
||||
"rhv",
|
||||
"controller",
|
||||
"insights",
|
||||
"terraform",
|
||||
"openshift_virtualization",
|
||||
]
|
||||
),
|
||||
source_path=dict(),
|
||||
source_vars=dict(type='dict'),
|
||||
enabled_var=dict(),
|
||||
|
||||
@@ -50,6 +50,7 @@ options:
|
||||
description:
|
||||
- The type of notification to be sent.
|
||||
choices:
|
||||
- 'awssns'
|
||||
- 'email'
|
||||
- 'grafana'
|
||||
- 'irc'
|
||||
@@ -219,7 +220,7 @@ def main():
|
||||
copy_from=dict(),
|
||||
description=dict(),
|
||||
organization=dict(),
|
||||
notification_type=dict(choices=['email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
|
||||
notification_type=dict(choices=['awssns', 'email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
|
||||
notification_configuration=dict(type='dict'),
|
||||
messages=dict(type='dict'),
|
||||
state=dict(choices=['present', 'absent', 'exists'], default='present'),
|
||||
|
||||
@@ -19,7 +19,7 @@ from ansible.module_utils.six import raise_from
|
||||
|
||||
from ansible_base.rbac.models import RoleDefinition, DABPermission
|
||||
from awx.main.tests.functional.conftest import _request
|
||||
from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh # noqa: F401; pylint: disable=unused-variable
|
||||
from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh # noqa: F401; pylint: disable=unused-import
|
||||
from awx.main.models import (
|
||||
Organization,
|
||||
Project,
|
||||
|
||||
1  awx_collection/tests/sanity/ignore-2.17.txt  (new file)
@@ -0,0 +1 @@
|
||||
plugins/modules/export.py validate-modules:nonexistent-parameter-documented # needs awxkit to construct argspec
|
||||
@@ -317,7 +317,10 @@ class ApiV2(base.Base):
|
||||
if asset['natural_key']['type'] == 'project' and 'local_path' in post_data and _page['scm_type'] == post_data['scm_type']:
|
||||
del post_data['local_path']
|
||||
|
||||
_page = _page.put(post_data)
|
||||
if asset['natural_key']['type'] == 'user':
|
||||
_page = _page.patch(**post_data)
|
||||
else:
|
||||
_page = _page.put(post_data)
|
||||
changed = True
|
||||
except (exc.Common, AssertionError) as e:
|
||||
identifier = asset.get("name", None) or asset.get("username", None) or asset.get("hostname", None)
|
||||
|
||||
@@ -11,7 +11,7 @@ from . import page
|
||||
|
||||
|
||||
job_results = ('any', 'error', 'success')
|
||||
notification_types = ('email', 'irc', 'pagerduty', 'slack', 'twilio', 'webhook', 'mattermost', 'grafana', 'rocketchat')
|
||||
notification_types = ('awssns', 'email', 'irc', 'pagerduty', 'slack', 'twilio', 'webhook', 'mattermost', 'grafana', 'rocketchat')
|
||||
|
||||
|
||||
class NotificationTemplate(HasCopy, HasCreate, base.Base):
|
||||
@@ -58,7 +58,10 @@ class NotificationTemplate(HasCopy, HasCreate, base.Base):
|
||||
if payload.notification_configuration == {}:
|
||||
services = config.credentials.notification_services
|
||||
|
||||
if notification_type == 'email':
|
||||
if notification_type == 'awssns':
|
||||
fields = ('aws_region', 'aws_access_key_id', 'aws_secret_access_key', 'aws_session_token', 'sns_topic_arn')
|
||||
cred = services.awssns
|
||||
elif notification_type == 'email':
|
||||
fields = ('host', 'username', 'password', 'port', 'use_ssl', 'use_tls', 'sender', 'recipients')
|
||||
cred = services.email
|
||||
elif notification_type == 'irc':
|
||||
|
||||
@@ -185,7 +185,7 @@ def format_human(output, fmt):
|
||||
|
||||
def format_num(v):
|
||||
try:
|
||||
return locale.format("%.*f", (0, int(v)), True)
|
||||
return locale.format_string("%.*f", (0, int(v)), True)
|
||||
except (ValueError, TypeError):
|
||||
if isinstance(v, (list, dict)):
|
||||
return json.dumps(v)
|
||||
|
||||
@@ -23,7 +23,7 @@ idna==3.4
|
||||
# via requests
|
||||
imagesize==1.4.1
|
||||
# via sphinx
|
||||
jinja2==3.1.3
|
||||
jinja2==3.1.4
|
||||
# via
|
||||
# -r requirements.in
|
||||
# sphinx
|
||||
|
||||
BIN  docs/docsite/rst/common/images/rbac_jt_team_access.png  (new binary file, 51 KiB, not shown)
BIN  docs/docsite/rst/common/images/rbac_jt_user_access.png  (new binary file, 52 KiB, not shown)
BIN  (new binary file, 61 KiB, not shown)
Some files were not shown because too many files have changed in this diff.