mirror of https://github.com/ansible/awx.git
synced 2026-02-04 19:18:13 -03:30

Compare commits: 24.3.0 ... awxkit-sup
99 Commits (SHA1):

6d0a3149f1, 31a086b11a, d94f766fcb, a7113549eb, bfd811f408, 030704a9e1, c312d9bce3,
aadcc217eb, 345c1c11e9, 2c3a7fafc5, dbcd32a1d9, d45e258a78, d16b69a102, 8b4efbc973,
4cb061e7db, 31db6a1447, ad9d5904d8, b837d549ff, 9e22865d2e, ee3e3e1516, 4a8f6e45f8,
6a317cca1b, d67af79451, fe77fda7b2, f613b76baa, 054cbe69d7, 87e9dcb6d7, c8829b057e,
a0b376a6ca, d675207f99, 20504042c9, 0e87e97820, 1f154742df, 85fc81aab1, 5cfeeb3e87,
a8c07b06d8, 53c5feaf6b, 6f57aaa8f5, bea74a401d, 54e85813c8, b69ed08fe5, de25408a23,
b17f0a188b, fb860d76ce, 451f20ce0f, c1dc0c7b86, d65ea2a3d5, 8827ae7554, 4915262af1,
d43c91e1a5, b470ca32af, 793777bec7, 6dc4a4508d, cf09a4220d, 659c3b64de, 37ad690d09,
7845ec7e01, a15bcf1d55, 7b3fb2c2a8, 6df47c8449, cae42653bf, da46a29f40, 0eb465531c,
d0fe0ed796, ceafa14c9d, 08e1454098, 776b661fb3, af6ccdbde5, 559ab3564b, 208ef0ce25,
c3d9aa54d8, 66efe7198a, adf930ee42, 892410477a, 0d4f653794, 8de8f6dce2, fc9064e27f,
7de350dc3e, d4bdaad4d8, a9b2ffa3e9, 1b8d409043, da2bccf5a8, a2f083bd8e, 4d641b6cf5,
439c3f0c23, 946bbe3560, 20f054d600, 918d5b3565, 158314af50, 4754819a09, 78fc23138a,
014534bfa5, 2502e7c7d8, fb237e3834, e4646ae611, 7dc77546f4, f5f85666c8, 47a061eb39,
c760577855
.github/workflows/e2e_test.yml (vendored) — 75 deletions, file removed

@@ -1,75 +0,0 @@
----
-name: E2E Tests
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-
-on:
-  pull_request_target:
-    types: [labeled]
-jobs:
-  e2e-test:
-    if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
-    runs-on: ubuntu-latest
-    timeout-minutes: 40
-    permissions:
-      packages: write
-      contents: read
-    strategy:
-      matrix:
-        job: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24]
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - uses: ./.github/actions/run_awx_devel
-        id: awx
-        with:
-          build-ui: true
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-
-      - name: Pull awx_cypress_base image
-        run: |
-          docker pull quay.io/awx/awx_cypress_base:latest
-
-      - name: Checkout test project
-        uses: actions/checkout@v3
-        with:
-          repository: ${{ github.repository_owner }}/tower-qa
-          ssh-key: ${{ secrets.QA_REPO_KEY }}
-          path: tower-qa
-          ref: devel
-
-      - name: Build cypress
-        run: |
-          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
-          docker build -t awx-pf-tests .
-
-      - name: Run E2E tests
-        env:
-          CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
-        run: |
-          export COMMIT_INFO_BRANCH=$GITHUB_HEAD_REF
-          export COMMIT_INFO_AUTHOR=$GITHUB_ACTOR
-          export COMMIT_INFO_SHA=$GITHUB_SHA
-          export COMMIT_INFO_REMOTE=$GITHUB_REPOSITORY_OWNER
-          cd ${{ secrets.E2E_PROJECT }}/ui-tests/awx-pf-tests
-          AWX_IP=${{ steps.awx.outputs.ip }}
-          printenv > .env
-          echo "Executing tests:"
-          docker run \
-            --network '_sources_default' \
-            --ipc=host \
-            --env-file=.env \
-            -e CYPRESS_baseUrl="https://$AWX_IP:8043" \
-            -e CYPRESS_AWX_E2E_USERNAME=admin \
-            -e CYPRESS_AWX_E2E_PASSWORD='password' \
-            -e COMMAND="npm run cypress-concurrently-gha" \
-            -v /dev/shm:/dev/shm \
-            -v $PWD:/e2e \
-            -w /e2e \
-            awx-pf-tests run --project .
-
-      - uses: ./.github/actions/upload_awx_devel_logs
-        if: always()
-        with:
-          log-filename: e2e-${{ matrix.job }}.log
.github/workflows/promote.yml (vendored) — 15 changed lines

@@ -29,7 +29,7 @@ jobs:
       - name: Set GitHub Env vars if release event
         if: ${{ github.event_name == 'release' }}
         run: |
-          echo "TAG_NAME=${{ env.TAG_NAME }}" >> $GITHUB_ENV
+          echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV

       - name: Checkout awx
         uses: actions/checkout@v3

@@ -60,15 +60,18 @@ jobs:
           COLLECTION_VERSION: ${{ env.TAG_NAME }}
           COLLECTION_TEMPLATE_VERSION: true
         run: |
+          sudo apt-get install jq
           make build_collection
-          curl_with_redirects=$(curl --head -sLw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
-          curl_without_redirects=$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz | tail -1)
-          if [[ "$curl_with_redirects" == "302" ]] || [[ "$curl_without_redirects" == "302" ]]; then
+          count=$(curl -s https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/\?namespace\=${COLLECTION_NAMESPACE}\&name\=awx\&version\=${COLLECTION_VERSION} | jq .meta.count)
+          if [[ "$count" == "1" ]]; then
             echo "Galaxy release already done";
-          else
+          elif [[ "$count" == "0" ]]; then
             ansible-galaxy collection publish \
               --token=${{ secrets.GALAXY_TOKEN }} \
-              awx_collection_build/${{ env.collection_namespace }}-awx-${{ env.TAG_NAME }}.tar.gz;
+              awx_collection_build/${COLLECTION_NAMESPACE}-awx-${COLLECTION_VERSION}.tar.gz;
+          else
+            echo "Unexpected count from galaxy search: $count";
+            exit 1;
           fi

       - name: Set official pypi info
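
The rewritten step above stops inferring publish state from HTTP redirect codes and instead asks Galaxy's collection-versions search API how many matching versions exist. A minimal standalone sketch of that check in Python (the `is_published` helper is illustrative, not part of the workflow; the endpoint and `meta.count` semantics are as used in the step above):

```python
# Sketch only: re-implements the workflow's Galaxy check with `requests`.
import requests

SEARCH_URL = "https://galaxy.ansible.com/api/v3/plugin/ansible/search/collection-versions/"

def is_published(namespace: str, name: str, version: str) -> bool:
    resp = requests.get(
        SEARCH_URL,
        params={"namespace": namespace, "name": name, "version": version},
        timeout=30,
    )
    resp.raise_for_status()
    count = resp.json()["meta"]["count"]
    if count == 1:
        return True   # already on Galaxy -> skip `ansible-galaxy collection publish`
    if count == 0:
        return False  # not published yet -> safe to publish
    raise RuntimeError(f"Unexpected count from galaxy search: {count}")
```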
@@ -11,6 +11,8 @@ ignore: |
   # django template files
   awx/api/templates/instance_install_bundle/**
   .readthedocs.yaml
+  tools/loki
+  tools/otel

 extends: default
Makefile — 11 changed lines

@@ -47,8 +47,14 @@ VAULT ?= false
 VAULT_TLS ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
+# If set to true docker-compose will also start an OpenTelemetry Collector instance
+OTEL ?= false
+# If set to true docker-compose will also start a Loki instance
+LOKI ?= false
 # If set to true docker-compose will install editable dependencies
 EDITABLE_DEPENDENCIES ?= false
+# If set to true, use tls for postgres connection
+PG_TLS ?= false

 VENV_BASE ?= /var/lib/awx/venv

@@ -65,7 +71,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 cython==0.29.37

 NAME ?= awx

@@ -535,7 +541,10 @@ docker-compose-sources: .git/hooks/pre-commit
 		-e enable_vault=$(VAULT) \
 		-e vault_tls=$(VAULT_TLS) \
 		-e enable_tacacs=$(TACACS) \
+		-e enable_otel=$(OTEL) \
+		-e enable_loki=$(LOKI) \
 		-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
+		-e pg_tls=$(PG_TLS) \
 		$(EXTRA_SOURCES_ANSIBLE_OPTS)

 docker-compose: awx/projects docker-compose-sources
@@ -33,6 +33,7 @@ from rest_framework.negotiation import DefaultContentNegotiation
 # django-ansible-base
 from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
 from ansible_base.lib.utils.models import get_all_field_names
+from ansible_base.lib.utils.requests import get_remote_host
 from ansible_base.rbac.models import RoleEvaluation, RoleDefinition
 from ansible_base.rbac.permission_registry import permission_registry

@@ -93,20 +94,26 @@ class LoggedLoginView(auth_views.LoginView):

     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
+        ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
         if request.user.is_authenticated:
-            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
-            ret.set_cookie('userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
+            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, ip)))
+            ret.set_cookie(
+                'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
+            )
+            ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))

             return ret
         else:
             if 'username' in self.request.POST:
-                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
+                logger.warning(smart_str(u"Login failed for user {} from {}".format(self.request.POST.get('username'), ip)))
             ret.status_code = 401
         return ret


 class LoggedLogoutView(auth_views.LogoutView):

+    success_url_allowed_hosts = set(settings.LOGOUT_ALLOWED_HOSTS.split(",")) if settings.LOGOUT_ALLOWED_HOSTS else set()

     def dispatch(self, request, *args, **kwargs):
         original_user = getattr(request, 'user', None)
         ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)

@@ -206,11 +213,12 @@ class APIView(views.APIView):
             return response

         if response.status_code >= 400:
+            ip = get_remote_host(request)  # request.META.get('REMOTE_ADDR', None)
             msg_data = {
                 'status_code': response.status_code,
                 'user_name': request.user,
                 'url_path': request.path,
-                'remote_addr': request.META.get('REMOTE_ADDR', None),
+                'remote_addr': ip,
             }

             if type(response.data) is dict:
@@ -5381,7 +5381,7 @@ class NotificationSerializer(BaseSerializer):
     )

     def get_body(self, obj):
-        if obj.notification_type in ('webhook', 'pagerduty'):
+        if obj.notification_type in ('webhook', 'pagerduty', 'awssns'):
             if isinstance(obj.body, dict):
                 if 'body' in obj.body:
                     return obj.body['body']

@@ -5403,9 +5403,9 @@ class NotificationSerializer(BaseSerializer):
     def to_representation(self, obj):
         ret = super(NotificationSerializer, self).to_representation(obj)

-        if obj.notification_type == 'webhook':
+        if obj.notification_type in ('webhook', 'awssns'):
             ret.pop('subject')
-        if obj.notification_type not in ('email', 'webhook', 'pagerduty'):
+        if obj.notification_type not in ('email', 'webhook', 'pagerduty', 'awssns'):
             ret.pop('body')
         return ret
@@ -62,6 +62,7 @@ from wsgiref.util import FileWrapper

 # django-ansible-base
 from ansible_base.rbac.models import RoleEvaluation, ObjectRole
+from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

 # AWX
 from awx.main.tasks.system import send_notifications, update_inventory_computed_fields

@@ -128,6 +129,7 @@ from awx.api.views.mixin import (
 from awx.api.pagination import UnifiedJobEventPagination
+from awx.main.utils import set_environ


 logger = logging.getLogger('awx.api.views')


@@ -710,16 +712,81 @@ class AuthView(APIView):
         return Response(data)


+def immutablesharedfields(cls):
+    '''
+    Class decorator to prevent modifying shared resources when ALLOW_LOCAL_RESOURCE_MANAGEMENT setting is set to False.
+
+    Works by overriding these view methods:
+    - create
+    - delete
+    - perform_update
+    create and delete are overridden to raise a PermissionDenied exception.
+    perform_update is overridden to check if any shared fields are being modified,
+    and raise a PermissionDenied exception if so.
+    '''
+    # create instead of perform_create because some of our views
+    # override create instead of perform_create
+    if hasattr(cls, 'create'):
+        cls.original_create = cls.create
+
+        @functools.wraps(cls.create)
+        def create_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_create(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Creation of this resource is not allowed. Create this resource via the platform ingress.')})
+
+        cls.create = create_wrapper
+
+    if hasattr(cls, 'delete'):
+        cls.original_delete = cls.delete
+
+        @functools.wraps(cls.delete)
+        def delete_wrapper(*args, **kwargs):
+            if settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                return cls.original_delete(*args, **kwargs)
+            raise PermissionDenied({'detail': _('Deletion of this resource is not allowed. Delete this resource via the platform ingress.')})
+
+        cls.delete = delete_wrapper
+
+    if hasattr(cls, 'perform_update'):
+        cls.original_perform_update = cls.perform_update
+
+        @functools.wraps(cls.perform_update)
+        def update_wrapper(*args, **kwargs):
+            if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+                view, serializer = args
+                instance = view.get_object()
+                if instance:
+                    if isinstance(instance, models.Organization):
+                        shared_fields = OrganizationType._declared_fields.keys()
+                    elif isinstance(instance, models.User):
+                        shared_fields = UserType._declared_fields.keys()
+                    elif isinstance(instance, models.Team):
+                        shared_fields = TeamType._declared_fields.keys()
+                    attrs = serializer.validated_data
+                    for field in shared_fields:
+                        if field in attrs and getattr(instance, field) != attrs[field]:
+                            raise PermissionDenied({field: _(f"Cannot change shared field '{field}'. Alter this field via the platform ingress.")})
+            return cls.original_perform_update(*args, **kwargs)
+
+        cls.perform_update = update_wrapper
+
+    return cls
+
+
+@immutablesharedfields
 class TeamList(ListCreateAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer


+@immutablesharedfields
 class TeamDetail(RetrieveUpdateDestroyAPIView):
     model = models.Team
     serializer_class = serializers.TeamSerializer


+@immutablesharedfields
 class TeamUsersList(BaseUsersList):
     model = models.User
     serializer_class = serializers.UserSerializer

@@ -1101,6 +1168,7 @@ class ProjectCopy(CopyAPIView):
     copy_return_serializer_class = serializers.ProjectSerializer


+@immutablesharedfields
 class UserList(ListCreateAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer

@@ -1271,7 +1339,16 @@ class UserRolesList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=self.kwargs['pk'])
         role = get_object_or_400(models.Role, pk=sub_id)

-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        # Prevent user to be associated with team/org when ALLOW_LOCAL_RESOURCE_MANAGEMENT is False
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))

@@ -1343,6 +1420,7 @@ class UserActivityStreamList(SubListAPIView):
         return qs.filter(Q(actor=parent) | Q(user__in=[parent]))


+@immutablesharedfields
 class UserDetail(RetrieveUpdateDestroyAPIView):
     model = models.User
     serializer_class = serializers.UserSerializer

@@ -4295,7 +4373,15 @@ class RoleUsersList(SubListAttachDetachAPIView):
         user = get_object_or_400(models.User, pk=sub_id)
         role = self.get_parent_object()

-        credential_content_type = ContentType.objects.get_for_model(models.Credential)
+        content_types = ContentType.objects.get_for_models(models.Organization, models.Team, models.Credential)  # dict of {model: content_type}
+        if not settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT:
+            for model in [models.Organization, models.Team]:
+                ct = content_types[model]
+                if role.content_type == ct and role.role_field in ['member_role', 'admin_role']:
+                    data = dict(msg=_(f"Cannot directly modify user membership to {ct.model}. Direct shared resource management disabled"))
+                    return Response(data, status=status.HTTP_403_FORBIDDEN)
+
+        credential_content_type = content_types[models.Credential]
         if role.content_type == credential_content_type:
             if 'disassociate' not in request.data and role.content_object.organization and user not in role.content_object.organization.member_role:
                 data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
@@ -53,15 +53,18 @@ from awx.api.serializers import (
     CredentialSerializer,
 )
 from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
+from awx.api.views import immutablesharedfields

 logger = logging.getLogger('awx.api.views.organization')


+@immutablesharedfields
 class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     model = Organization
     serializer_class = OrganizationSerializer


+@immutablesharedfields
 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = Organization
     serializer_class = OrganizationSerializer

@@ -104,6 +107,7 @@ class OrganizationInventoriesList(SubListAPIView):
     relationship = 'inventories'


+@immutablesharedfields
 class OrganizationUsersList(BaseUsersList):
     model = User
     serializer_class = UserSerializer

@@ -112,6 +116,7 @@ class OrganizationUsersList(BaseUsersList):
     ordering = ('username',)


+@immutablesharedfields
 class OrganizationAdminsList(BaseUsersList):
     model = User
     serializer_class = UserSerializer

@@ -150,6 +155,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
     parent_key = 'organization'


+@immutablesharedfields
 class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
     model = Team
     serializer_class = TeamSerializer
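
For orientation, the decorator applied throughout the two files above follows a plain class-wrapping pattern: capture the original method, substitute a guard that consults a flag, and raise before the write ever reaches the original implementation. A stripped-down sketch outside Django (the flag, exception, and view class here are stand-ins, not AWX code):

```python
import functools

ALLOW_LOCAL_RESOURCE_MANAGEMENT = False  # stand-in for the Django setting


class PermissionDenied(Exception):
    pass


def immutable_shared(cls):
    """Replace cls.create with a guard that refuses writes when the flag is off."""
    if hasattr(cls, 'create'):
        original_create = cls.create

        @functools.wraps(original_create)
        def create_wrapper(*args, **kwargs):
            if ALLOW_LOCAL_RESOURCE_MANAGEMENT:
                return original_create(*args, **kwargs)
            raise PermissionDenied('Create this resource via the platform ingress.')

        cls.create = create_wrapper
    return cls


@immutable_shared
class TeamView:
    def create(self, name):
        return f'created {name}'

# TeamView().create('ops') raises PermissionDenied until the flag is flipped to True.
```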
@@ -61,6 +61,10 @@ class StringListBooleanField(ListField):

     def to_representation(self, value):
         try:
+            if isinstance(value, str):
+                # https://github.com/encode/django-rest-framework/commit/a180bde0fd965915718b070932418cabc831cee1
+                # DRF changed truthy and falsy lists to be capitalized
+                value = value.lower()
             if isinstance(value, (list, tuple)):
                 return super(StringListBooleanField, self).to_representation(value)
             elif value in BooleanField.TRUE_VALUES:

@@ -78,6 +82,8 @@ class StringListBooleanField(ListField):

     def to_internal_value(self, data):
         try:
+            if isinstance(data, str):
+                data = data.lower()
             if isinstance(data, (list, tuple)):
                 return super(StringListBooleanField, self).to_internal_value(data)
             elif data in BooleanField.TRUE_VALUES:
@@ -130,9 +130,9 @@ def test_default_setting(settings, mocker):
     settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

     settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
-        assert settings.AWX_SOME_SETTING == 'DEFAULT'
-        assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
+    assert settings.AWX_SOME_SETTING == 'DEFAULT'
+    assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'


 @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')

@@ -146,9 +146,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
     settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

     settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
-        assert settings.AWX_SOME_SETTING == 'DEFAULT'
-        assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache)
+    assert settings.AWX_SOME_SETTING == 'DEFAULT'
+    assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is False


 def test_empty_setting(settings, mocker):

@@ -156,10 +156,10 @@ def test_empty_setting(settings, mocker):
     settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

     mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
-        with pytest.raises(AttributeError):
-            settings.AWX_SOME_SETTING
-        assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+    with pytest.raises(AttributeError):
+        settings.AWX_SOME_SETTING
+    assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET


 def test_setting_from_db(settings, mocker):

@@ -168,9 +168,9 @@ def test_setting_from_db(settings, mocker):

     setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
     mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
-        assert settings.AWX_SOME_SETTING == 'FROM_DB'
-        assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+    assert settings.AWX_SOME_SETTING == 'FROM_DB'
+    assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'


 @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')

@@ -205,8 +205,8 @@ def test_db_setting_update(settings, mocker):

     existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
     setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
-        settings.AWX_SOME_SETTING = 'NEW-VALUE'
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list)
+    settings.AWX_SOME_SETTING = 'NEW-VALUE'

     assert existing_setting.value == 'NEW-VALUE'
     existing_setting.save.assert_called_with(update_fields=['value'])

@@ -217,8 +217,8 @@ def test_db_setting_deletion(settings, mocker):
     settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

     existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
-        del settings.AWX_SOME_SETTING
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting])
+    del settings.AWX_SOME_SETTING

     assert existing_setting.delete.call_count == 1

@@ -283,10 +283,10 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
     # use its primary key as part of the encryption key
     setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
     mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
-    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
-        cache.set('AWX_ENCRYPTED', 'SECRET!')
-        assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
-        assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
+    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks)
+    cache.set('AWX_ENCRYPTED', 'SECRET!')
+    assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
+    assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'


 def test_readonly_sensitive_cache_data_is_encrypted(settings):
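
All of the test edits above follow one pattern: drop the `with` statement around `mocker.patch(...)` and dedent the body. pytest-mock applies the patch as soon as `mocker.patch` is called and undoes it automatically at test teardown, and recent pytest-mock versions reject use of its return value as a context manager. The corrected idiom, sketched against a hypothetical `app.db` module:

```python
# Sketch of the pytest-mock idiom used above (hypothetical target `app.db.run_query`).
def test_query_is_stubbed(mocker):
    fake_rows = ['row1', 'row2']
    # Active for the rest of the test, reverted automatically at teardown;
    # wrapping this call in a `with` block is unnecessary and now an error.
    mocker.patch('app.db.run_query', return_value=fake_rows)

    import app.db

    assert app.db.run_query('SELECT 1') == fake_rows
```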
@@ -2,6 +2,7 @@
 import logging

 # Django
+from django.core.checks import Error
 from django.utils.translation import gettext_lazy as _

 # Django REST Framework

@@ -954,3 +955,27 @@ def logging_validate(serializer, attrs):


 register_validate('logging', logging_validate)
+
+
+def csrf_trusted_origins_validate(serializer, attrs):
+    if not serializer.instance or not hasattr(serializer.instance, 'CSRF_TRUSTED_ORIGINS'):
+        return attrs
+    if 'CSRF_TRUSTED_ORIGINS' not in attrs:
+        return attrs
+    errors = []
+    for origin in attrs['CSRF_TRUSTED_ORIGINS']:
+        if "://" not in origin:
+            errors.append(
+                Error(
+                    "As of Django 4.0, the values in the CSRF_TRUSTED_ORIGINS "
+                    "setting must start with a scheme (usually http:// or "
+                    "https://) but found %s. See the release notes for details." % origin,
+                )
+            )
+    if errors:
+        error_messages = [error.msg for error in errors]
+        raise serializers.ValidationError(_('\n'.join(error_messages)))
+    return attrs
+
+
+register_validate('system', csrf_trusted_origins_validate)
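
Django 4.0 began requiring every CSRF_TRUSTED_ORIGINS entry to carry a scheme; the validator above surfaces that requirement when the setting is saved through the API rather than letting it fail later at Django startup. Illustrative values (not from the source):

```python
# Values the validator above would accept or reject (illustrative only):
ACCEPTED = ["https://awx.example.com", "http://tower.internal:8043"]
REJECTED = ["awx.example.com"]  # no "://" -> ValidationError under Django 4.0+
```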
@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform', 'openshift_virtualization')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
@@ -252,7 +252,7 @@ class ImplicitRoleField(models.ForeignKey):
         kwargs.setdefault('related_name', '+')
         kwargs.setdefault('null', 'True')
         kwargs.setdefault('editable', False)
-        kwargs.setdefault('on_delete', models.CASCADE)
+        kwargs.setdefault('on_delete', models.SET_NULL)
         super(ImplicitRoleField, self).__init__(*args, **kwargs)

     def deconstruct(self):
awx/main/management/commands/check_instance_ready.py (new file) — 12 additions

@@ -0,0 +1,12 @@
+from django.core.management.base import BaseCommand, CommandError
+from awx.main.models.ha import Instance
+
+
+class Command(BaseCommand):
+    help = 'Check if the task manager instance is ready throw error if not ready, can be use as readiness probe for k8s.'
+
+    def handle(self, *args, **options):
+        if Instance.objects.me().node_state != Instance.States.READY:
+            raise CommandError('Instance is not ready')  # so that return code is not 0
+
+        return
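
Because the command exits non-zero via CommandError whenever the local instance is not in the READY state, it can back a Kubernetes readiness probe. A sketch of invoking it in-process (assumes AWX's Django settings are already configured; from a container the equivalent shell probe is `awx-manage check_instance_ready`):

```python
# Sketch: run the management command in-process and map it to a boolean.
from django.core.management import call_command
from django.core.management.base import CommandError


def instance_is_ready() -> bool:
    try:
        call_command('check_instance_ready')  # raises CommandError when not READY
    except CommandError:
        return False
    return True
```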
@@ -101,8 +101,9 @@ class Command(BaseCommand):
             migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
             connection.close()  # Because of async nature, main loop will use new connection, so close this
         except Exception as exc:
-            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
-            time.sleep(10)
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
+            # sleeping before logging because logging rely on setting which require database connection...
+            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), slept for 10s...')
             return

         # In containerized deployments, migrations happen in the task container,

@@ -121,13 +122,14 @@ class Command(BaseCommand):
             return

         try:
-            my_hostname = Instance.objects.my_hostname()
+            my_hostname = Instance.objects.my_hostname()  # This relies on settings.CLUSTER_HOST_ID which requires database connection
             logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
         except RuntimeError as e:
             # the CLUSTER_HOST_ID in the task, and web instance must match and
             # ensure network connectivity between the task and web instance
-            logger.info('Unable to return currently active instance: {}, retry in 5s...'.format(e))
-            time.sleep(5)
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
+            # sleeping before logging because logging rely on setting which require database connection...
+            logger.warning(f"Unable to return currently active instance: {e}, slept for 10s before return.")
             return

         if options.get('status'):

@@ -166,12 +168,14 @@ class Command(BaseCommand):

         WebsocketsMetricsServer().start()

-        while True:
-            try:
-                asyncio.run(WebSocketRelayManager().run())
-            except KeyboardInterrupt:
-                logger.info('Shutting down Websocket Relayer')
-                break
-            except Exception as e:
-                logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
-                time.sleep(10)
+        try:
+            logger.info('Starting Websocket Relayer...')
+            websocket_relay_manager = WebSocketRelayManager()
+            asyncio.run(websocket_relay_manager.run())
+        except KeyboardInterrupt:
+            logger.info('Terminating Websocket Relayer')
+        except BaseException as e:  # BaseException is used to catch all exceptions including asyncio.CancelledError
+            time.sleep(10)  # Prevent supervisor from restarting the service too quickly and the service to enter FATAL state
+            # sleeping before logging because logging rely on setting which require database connection...
+            logger.warning(f"Encounter error while running Websocket Relayer {e}, slept for 10s...")
+        return
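
The recurring shape in these hunks is deliberate: on a fatal startup error the service sleeps before logging and then returns, because logging goes through DB-backed settings that may be the very thing that failed, and an immediate exit would trip supervisord's rapid-restart detection into the FATAL state. The pattern in isolation (a sketch; `service` is a placeholder callable):

```python
# Sketch of the sleep-before-log-and-exit startup pattern used above.
import logging
import sys
import time

logger = logging.getLogger('startup')


def run_guarded(service):
    try:
        service()  # placeholder for the real startup work
    except Exception as exc:
        time.sleep(10)  # throttle supervisor restarts so the unit never goes FATAL
        # log only after sleeping: the logger may depend on DB-backed settings
        logger.warning('Error on startup (%s), slept for 10s...', exc)
        sys.exit(1)
```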
@@ -6,7 +6,7 @@ import logging
 import threading
 import time
 import urllib.parse
-from pathlib import Path
+from pathlib import Path, PurePosixPath

 from django.conf import settings
 from django.contrib.auth import logout

@@ -138,14 +138,36 @@ class URLModificationMiddleware(MiddlewareMixin):

     @classmethod
     def _convert_named_url(cls, url_path):
-        url_units = url_path.split('/')
-        # If the identifier is an empty string, it is always invalid.
-        if len(url_units) < 6 or url_units[1] != 'api' or url_units[2] not in ['v2'] or not url_units[4]:
-            return url_path
-        resource = url_units[3]
+        default_prefix = PurePosixPath('/api/v2/')
+        optional_prefix = PurePosixPath(f'/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/v2/')
+
+        url_path_original = url_path
+        url_path = PurePosixPath(url_path)
+
+        if set(optional_prefix.parts).issubset(set(url_path.parts)):
+            url_prefix = optional_prefix
+        elif set(default_prefix.parts).issubset(set(url_path.parts)):
+            url_prefix = default_prefix
+        else:
+            return url_path_original
+
+        # Remove prefix
+        url_path = PurePosixPath(*url_path.parts[len(url_prefix.parts) :])
+        try:
+            resource_path = PurePosixPath(url_path.parts[0])
+            name = url_path.parts[1]
+            url_suffix = PurePosixPath(*url_path.parts[2:])  # remove name and resource
+        except IndexError:
+            return url_path_original
+
+        resource = resource_path.parts[0]
         if resource in settings.NAMED_URL_MAPPINGS:
-            url_units[4] = cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, url_units[4])
-        return '/'.join(url_units)
+            pk = PurePosixPath(cls._named_url_to_pk(settings.NAMED_URL_GRAPH[settings.NAMED_URL_MAPPINGS[resource]], resource, name))
+        else:
+            return url_path_original
+
+        parts = url_prefix.parts + resource_path.parts + pk.parts + url_suffix.parts
+        return PurePosixPath(*parts).as_posix() + '/'

     def process_request(self, request):
         old_path = request.path_info
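
The rewrite trades raw `split('/')` indexing for `PurePosixPath.parts`, so the optional API prefix and the default `/api/v2/` prefix are handled by the same strip-and-rebuild step. A condensed sketch of that step (the `named_url_to_pk` lookup below is a hypothetical stand-in for the real named-URL graph resolution):

```python
# Sketch of the prefix-strip-and-rebuild technique used above.
from pathlib import PurePosixPath


def named_url_to_pk(resource: str, name: str) -> str:
    # Hypothetical lookup standing in for cls._named_url_to_pk(...).
    return {'organizations': {'Default': '1'}}.get(resource, {}).get(name, name)


def convert_named_url(url_path: str, prefix: str = '/api/v2/') -> str:
    """Rewrite /api/v2/<resource>/<name>/... into /api/v2/<resource>/<pk>/..."""
    prefix_path = PurePosixPath(prefix)
    path = PurePosixPath(url_path)
    if not set(prefix_path.parts).issubset(set(path.parts)):
        return url_path  # not under the API prefix; leave untouched
    rest = PurePosixPath(*path.parts[len(prefix_path.parts):])
    try:
        resource, name = rest.parts[0], rest.parts[1]
    except IndexError:
        return url_path
    suffix = PurePosixPath(*rest.parts[2:])
    pk = PurePosixPath(named_url_to_pk(resource, name))
    return PurePosixPath(*(prefix_path.parts + (resource,) + pk.parts + suffix.parts)).as_posix() + '/'


print(convert_named_url('/api/v2/organizations/Default/'))  # -> /api/v2/organizations/1/
```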
@@ -17,49 +17,49 @@ class Migration(migrations.Migration):
         model_name='organization',
         name='execute_role',
         field=awx.main.fields.ImplicitRoleField(
-            null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
     ),
     migrations.AddField(
         model_name='organization',
         name='job_template_admin_role',
         field=awx.main.fields.ImplicitRoleField(
-            editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
     ),
     migrations.AddField(
         model_name='organization',
         name='credential_admin_role',
         field=awx.main.fields.ImplicitRoleField(
-            null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
     ),
     migrations.AddField(
         model_name='organization',
         name='inventory_admin_role',
         field=awx.main.fields.ImplicitRoleField(
-            null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
     ),
     migrations.AddField(
         model_name='organization',
         name='project_admin_role',
         field=awx.main.fields.ImplicitRoleField(
-            null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
     ),
     migrations.AddField(
         model_name='organization',
         name='workflow_admin_role',
         field=awx.main.fields.ImplicitRoleField(
-            null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
     ),
     migrations.AddField(
         model_name='organization',
         name='notification_admin_role',
         field=awx.main.fields.ImplicitRoleField(
-            null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
     ),
     migrations.AlterField(

@@ -67,7 +67,7 @@ class Migration(migrations.Migration):
         name='admin_role',
         field=awx.main.fields.ImplicitRoleField(
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['singleton:system_administrator', 'organization.credential_admin_role'],
             related_name='+',
             to='main.Role',

@@ -77,7 +77,7 @@ class Migration(migrations.Migration):
         model_name='inventory',
         name='admin_role',
         field=awx.main.fields.ImplicitRoleField(
-            null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
+            null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='organization.inventory_admin_role', related_name='+', to='main.Role'
         ),
     ),
     migrations.AlterField(

@@ -85,7 +85,7 @@ class Migration(migrations.Migration):
         name='admin_role',
         field=awx.main.fields.ImplicitRoleField(
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['organization.project_admin_role', 'singleton:system_administrator'],
             related_name='+',
             to='main.Role',

@@ -96,7 +96,7 @@ class Migration(migrations.Migration):
         name='admin_role',
         field=awx.main.fields.ImplicitRoleField(
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['singleton:system_administrator', 'organization.workflow_admin_role'],
             related_name='+',
             to='main.Role',

@@ -107,7 +107,7 @@ class Migration(migrations.Migration):
         name='execute_role',
         field=awx.main.fields.ImplicitRoleField(
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['admin_role', 'organization.execute_role'],
             related_name='+',
             to='main.Role',

@@ -119,7 +119,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'],
             related_name='+',
             to='main.Role',

@@ -130,7 +130,7 @@ class Migration(migrations.Migration):
         name='execute_role',
         field=awx.main.fields.ImplicitRoleField(
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['admin_role', 'project.organization.execute_role', 'inventory.organization.execute_role'],
             related_name='+',
             to='main.Role',

@@ -142,7 +142,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=[
                 'admin_role',
                 'execute_role',

@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
         model_name='organization',
         name='member_role',
         field=awx.main.fields.ImplicitRoleField(
-            editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.Role'
+            editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.Role'
         ),
     ),
     migrations.AlterField(

@@ -27,7 +27,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=[
                 'member_role',
                 'auditor_role',

@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
         model_name='organization',
         name='approval_role',
         field=awx.main.fields.ImplicitRoleField(
-            editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
         preserve_default='True',
     ),

@@ -46,7 +46,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['organization.approval_role', 'admin_role'],
             related_name='+',
             to='main.Role',

@@ -116,7 +116,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=[
                 'member_role',
                 'auditor_role',

@@ -139,7 +139,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['singleton:system_auditor', 'organization.auditor_role', 'execute_role', 'admin_role', 'approval_role'],
             related_name='+',
             to='main.Role',

@@ -80,7 +80,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['organization.job_template_admin_role'],
             related_name='+',
             to='main.Role',

@@ -92,7 +92,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['admin_role', 'organization.execute_role'],
             related_name='+',
             to='main.Role',

@@ -104,7 +104,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
             related_name='+',
             to='main.Role',

@@ -26,7 +26,7 @@ class Migration(migrations.Migration):
         model_name='organization',
         name='execution_environment_admin_role',
         field=awx.main.fields.ImplicitRoleField(
-            editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'
+            editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role='admin_role', related_name='+', to='main.Role'
         ),
         preserve_default='True',
     ),

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=[
                 'member_role',
                 'auditor_role',

@@ -17,7 +17,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['singleton:system_administrator'],
             related_name='+',
             to='main.role',

@@ -30,7 +30,7 @@ class Migration(migrations.Migration):
         field=awx.main.fields.ImplicitRoleField(
             editable=False,
             null='True',
-            on_delete=django.db.models.deletion.CASCADE,
+            on_delete=django.db.models.deletion.SET_NULL,
             parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
             related_name='+',
             to='main.role',

@@ -41,7 +41,7 @@ class Migration(migrations.Migration):
         model_name='instancegroup',
         name='use_role',
         field=awx.main.fields.ImplicitRoleField(
-            editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
+            editable=False, null='True', on_delete=django.db.models.deletion.SET_NULL, parent_role=['admin_role'], related_name='+', to='main.role'
         ),
         preserve_default='True',
     ),
@@ -4,7 +4,6 @@ from django.db import migrations, models


 class Migration(migrations.Migration):

     dependencies = [
         ('main', '0189_inbound_hop_nodes'),
     ]
@@ -0,0 +1,51 @@
+# Generated by Django 4.2.6 on 2024-05-08 07:29
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0192_custom_roles'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='notification',
+            name='notification_type',
+            field=models.CharField(
+                choices=[
+                    ('awssns', 'AWS SNS'),
+                    ('email', 'Email'),
+                    ('grafana', 'Grafana'),
+                    ('irc', 'IRC'),
+                    ('mattermost', 'Mattermost'),
+                    ('pagerduty', 'Pagerduty'),
+                    ('rocketchat', 'Rocket.Chat'),
+                    ('slack', 'Slack'),
+                    ('twilio', 'Twilio'),
+                    ('webhook', 'Webhook'),
+                ],
+                max_length=32,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='notificationtemplate',
+            name='notification_type',
+            field=models.CharField(
+                choices=[
+                    ('awssns', 'AWS SNS'),
+                    ('email', 'Email'),
+                    ('grafana', 'Grafana'),
+                    ('irc', 'IRC'),
+                    ('mattermost', 'Mattermost'),
+                    ('pagerduty', 'Pagerduty'),
+                    ('rocketchat', 'Rocket.Chat'),
+                    ('slack', 'Slack'),
+                    ('twilio', 'Twilio'),
+                    ('webhook', 'Webhook'),
+                ],
+                max_length=32,
+            ),
+        ),
+    ]
@@ -0,0 +1,61 @@
+# Generated by Django 4.2.10 on 2024-06-12 19:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0193_alter_notification_notification_type_and_more'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='inventorysource',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                    ('openshift_virtualization', 'OpenShift Virtualization'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='inventoryupdate',
+            name='source',
+            field=models.CharField(
+                choices=[
+                    ('file', 'File, Directory or Script'),
+                    ('constructed', 'Template additional groups and hostvars at runtime'),
+                    ('scm', 'Sourced from a Project'),
+                    ('ec2', 'Amazon EC2'),
+                    ('gce', 'Google Compute Engine'),
+                    ('azure_rm', 'Microsoft Azure Resource Manager'),
+                    ('vmware', 'VMware vCenter'),
+                    ('satellite6', 'Red Hat Satellite 6'),
+                    ('openstack', 'OpenStack'),
+                    ('rhv', 'Red Hat Virtualization'),
+                    ('controller', 'Red Hat Ansible Automation Platform'),
+                    ('insights', 'Red Hat Insights'),
+                    ('terraform', 'Terraform State'),
+                    ('openshift_virtualization', 'OpenShift Virtualization'),
+                ],
+                default=None,
+                max_length=32,
+            ),
+        ),
+    ]
@@ -140,6 +140,17 @@ def get_permissions_for_role(role_field, children_map, apps):
     return perm_list


+def model_class(ct, apps):
+    """
+    You can not use model methods in migrations, so this duplicates
+    what ContentType.model_class does, using current apps
+    """
+    try:
+        return apps.get_model(ct.app_label, ct.model)
+    except LookupError:
+        return None
+
+
 def migrate_to_new_rbac(apps, schema_editor):
     """
     This method moves the assigned permissions from the old rbac.py models

@@ -197,7 +208,7 @@ def migrate_to_new_rbac(apps, schema_editor):
             role_definition = managed_definitions[permissions]
         else:
             action = role.role_field.rsplit('_', 1)[0]  # remove the _field ending of the name
-            role_definition_name = f'{role.content_type.model_class().__name__} {action.title()}'
+            role_definition_name = f'{model_class(role.content_type, apps).__name__} {action.title()}'

             description = role_descriptions[role.role_field]
             if type(description) == dict:

@@ -264,7 +275,12 @@ def setup_managed_role_definitions(apps, schema_editor):
     """
     Idepotent method to create or sync the managed role definitions
     """
-    to_create = settings.ANSIBLE_BASE_ROLE_PRECREATE
+    to_create = {
+        'object_admin': '{cls.__name__} Admin',
+        'org_admin': 'Organization Admin',
+        'org_children': 'Organization {cls.__name__} Admin',
+        'special': '{cls.__name__} {action}',
+    }

     ContentType = apps.get_model('contenttypes', 'ContentType')
     Permission = apps.get_model('dab_rbac', 'DABPermission')
@@ -4,11 +4,12 @@ import datetime
 from datetime import timezone
 import logging
 from collections import defaultdict
+import itertools
 import time

 from django.conf import settings
 from django.core.exceptions import ObjectDoesNotExist
-from django.db import models, DatabaseError
+from django.db import models, DatabaseError, transaction
 from django.db.models.functions import Cast
 from django.utils.dateparse import parse_datetime
 from django.utils.text import Truncator

@@ -605,19 +606,23 @@ class JobEvent(BasePlaybookEvent):
     def _update_host_metrics(updated_hosts_list):
         from awx.main.models import HostMetric  # circular import

-        # bulk-create
         current_time = now()
-        HostMetric.objects.bulk_create(
-            [HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
-        )
-        # bulk-update
-        batch_start, batch_size = 0, 1000
-        while batch_start <= len(updated_hosts_list):
-            batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
-            HostMetric.objects.filter(hostname__in=batched_host_list).update(
-                last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
-            )
-            batch_start += batch_size
+        # FUTURE:
+        # - Hand-rolled implementation of itertools.batched(), introduced in Python 3.12. Replace.
+        # - Ability to do ORM upserts *may* have been introduced in Django 5.0.
+        #   See the entry about `create_defaults` in https://docs.djangoproject.com/en/5.0/releases/5.0/#models.
+        #   Hopefully this will be fully ready for batch use by 5.2 LTS.

+        args = [iter(updated_hosts_list)] * 500
+        for hosts in itertools.zip_longest(*args):
+            with transaction.atomic():
+                HostMetric.objects.bulk_create(
+                    [HostMetric(hostname=hostname, last_automation=current_time) for hostname in hosts if hostname is not None], ignore_conflicts=True
+                )
+                HostMetric.objects.filter(hostname__in=hosts).update(
+                    last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
+                )

     @property
     def job_verbosity(self):
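
The `zip_longest` trick above is a stand-in for `itertools.batched()` (Python 3.12): repeating one iterator N times and zipping it yields fixed-size chunks, with the final chunk padded by `None` — which is exactly why the bulk-create filters `hostname is not None`. The trick in isolation:

```python
# Self-contained sketch of the chunking trick used by _update_host_metrics above.
import itertools


def chunks(seq, size):
    """Yield lists of up to `size` items; mimics itertools.batched() pre-3.12."""
    args = [iter(seq)] * size  # `size` references to ONE shared iterator
    for group in itertools.zip_longest(*args):
        # zip_longest pads the last chunk with None; strip the padding
        # (assumes, like the code above, that None is never a real item)
        yield [item for item in group if item is not None]


hosts = [f'host-{i}' for i in range(7)]
print(list(chunks(hosts, 3)))
# [['host-0', 'host-1', 'host-2'], ['host-3', 'host-4', 'host-5'], ['host-6']]
```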
@@ -933,6 +933,7 @@ class InventorySourceOptions(BaseModel):
         ('controller', _('Red Hat Ansible Automation Platform')),
         ('insights', _('Red Hat Insights')),
         ('terraform', _('Terraform State')),
+        ('openshift_virtualization', _('OpenShift Virtualization')),
     ]

     # From the options of the Django management base command
@@ -1042,7 +1043,7 @@ class InventorySourceOptions(BaseModel):
     def cloud_credential_validation(source, cred):
         if not source:
             return None
-        if cred and source not in ('custom', 'scm'):
+        if cred and source not in ('custom', 'scm', 'openshift_virtualization'):
             # If a credential was provided, it's important that it matches
             # the actual inventory source being used (Amazon requires Amazon
             # credentials; Rackspace requires Rackspace credentials; etc...)

@@ -1051,12 +1052,14 @@ class InventorySourceOptions(BaseModel):
         # Allow an EC2 source to omit the credential. If Tower is running on
         # an EC2 instance with an IAM Role assigned, boto will use credentials
         # from the instance metadata instead of those explicitly provided.
-        elif source in CLOUD_PROVIDERS and source != 'ec2':
+        elif source in CLOUD_PROVIDERS and source not in ['ec2', 'openshift_virtualization']:
             return _('Credential is required for a cloud source.')
         elif source == 'custom' and cred and cred.credential_type.kind in ('scm', 'ssh', 'insights', 'vault'):
             return _('Credentials of type machine, source control, insights and vault are disallowed for custom inventory sources.')
         elif source == 'scm' and cred and cred.credential_type.kind in ('insights', 'vault'):
             return _('Credentials of type insights and vault are disallowed for scm inventory sources.')
+        elif source == 'openshift_virtualization' and cred and cred.credential_type.kind != 'kubernetes':
+            return _('Credentials of type kubernetes is requred for openshift_virtualization inventory sources.')
         return None

     def get_cloud_credential(self):
@@ -1660,7 +1663,7 @@ class terraform(PluginFileInjector):
         credential = inventory_update.get_cloud_credential()

         private_data = {'credentials': {}}
-        gce_cred = credential.get_input('gce_credentials')
+        gce_cred = credential.get_input('gce_credentials', default=None)
         if gce_cred:
             private_data['credentials'][credential] = gce_cred
         return private_data
@@ -1669,7 +1672,7 @@ class terraform(PluginFileInjector):
         env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, private_data_files)
         credential = inventory_update.get_cloud_credential()
         cred_data = private_data_files['credentials']
-        if cred_data[credential]:
+        if credential in cred_data:
             env['GOOGLE_BACKEND_CREDENTIALS'] = to_container_path(cred_data[credential], private_data_dir)
         return env

@@ -1693,6 +1696,16 @@ class insights(PluginFileInjector):
     use_fqcn = True


+class openshift_virtualization(PluginFileInjector):
+    plugin_name = 'kubevirt'
+    base_injector = 'template'
+    namespace = 'kubevirt'
+    collection = 'core'
+    downstream_namespace = 'redhat'
+    downstream_collection = 'openshift_virtualization'
+    use_fqcn = True
+
+
 class constructed(PluginFileInjector):
     plugin_name = 'constructed'
     namespace = 'ansible'
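For context on the injector attributes: with use_fqcn = True, the namespace, collection, and plugin_name fields appear to be composed into the fully qualified plugin name written into the generated inventory file. A small illustrative sketch of that composition (fqcn() is a hypothetical helper, not AWX code):

def fqcn(namespace, collection, plugin_name):
    # Compose the fully qualified collection name used when use_fqcn = True.
    return '.'.join([namespace, collection, plugin_name])


assert fqcn('kubevirt', 'core', 'kubevirt') == 'kubevirt.core.kubevirt'
assert fqcn('redhat', 'openshift_virtualization', 'kubevirt') == 'redhat.openshift_virtualization.kubevirt'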
@@ -31,6 +31,7 @@ from awx.main.notifications.mattermost_backend import MattermostBackend
 from awx.main.notifications.grafana_backend import GrafanaBackend
 from awx.main.notifications.rocketchat_backend import RocketChatBackend
 from awx.main.notifications.irc_backend import IrcBackend
+from awx.main.notifications.awssns_backend import AWSSNSBackend


 logger = logging.getLogger('awx.main.models.notifications')
@@ -40,6 +41,7 @@ __all__ = ['NotificationTemplate', 'Notification']

 class NotificationTemplate(CommonModelNameNotUnique):
     NOTIFICATION_TYPES = [
+        ('awssns', _('AWS SNS'), AWSSNSBackend),
         ('email', _('Email'), CustomEmailBackend),
         ('slack', _('Slack'), SlackBackend),
         ('twilio', _('Twilio'), TwilioBackend),
@@ -10,6 +10,9 @@ import re
 # django-rest-framework
 from rest_framework.serializers import ValidationError

+# crum to impersonate users
+from crum import impersonate
+
 # Django
 from django.db import models, transaction, connection
 from django.db.models.signals import m2m_changed
@@ -553,17 +556,22 @@ def get_role_definition(role):
         return
     f = obj._meta.get_field(role.role_field)
     action_name = f.name.rsplit("_", 1)[0]
-    rd_name = f'{type(obj).__name__} {action_name.title()} Compat'
+    model_print = type(obj).__name__
+    rd_name = f'{model_print} {action_name.title()} Compat'
     perm_list = get_role_codenames(role)
-    defaults = {'content_type_id': role.content_type_id}
-    try:
-        rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
-    except ValidationError:
-        # This is a tricky case - practically speaking, users should not be allowed to create team roles
-        # or roles that include the team member permission.
-        # If we need to create this for compatibility purposes then we will create it as a managed non-editable role
-        defaults['managed'] = True
-        rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
+    defaults = {
+        'content_type_id': role.content_type_id,
+        'description': f'Has {action_name.title()} permission to {model_print} for backwards API compatibility',
+    }
+    with impersonate(None):
+        try:
+            rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
+        except ValidationError:
+            # This is a tricky case - practically speaking, users should not be allowed to create team roles
+            # or roles that include the team member permission.
+            # If we need to create this for compatibility purposes then we will create it as a managed non-editable role
+            defaults['managed'] = True
+            rd, created = RoleDefinition.objects.get_or_create(name=rd_name, permissions=perm_list, defaults=defaults)
     return rd

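The impersonate(None) wrapper comes from django-crum: audit fields such as created_by are filled from crum's thread-local "current user" on save, so creating the compat role definitions under impersonate(None) leaves them system-owned (the naming tests later in this diff assert created_by is None). A minimal sketch of the contract, assuming django-crum's documented API:

from crum import get_current_user, impersonate

# Inside the block, get_current_user() returns the impersonated value, so any
# created_by/modified_by auto-population sees None instead of the request user.
with impersonate(None):
    assert get_current_user() is None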
@@ -17,7 +17,7 @@ from collections import OrderedDict

 # Django
 from django.conf import settings
-from django.db import models, connection
+from django.db import models, connection, transaction
 from django.core.exceptions import NON_FIELD_ERRORS
 from django.utils.translation import gettext_lazy as _
 from django.utils.timezone import now
@@ -273,7 +273,14 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEn
         if new_next_schedule:
             if new_next_schedule.pk == self.next_schedule_id and new_next_schedule.next_run == self.next_job_run:
                 return  # no-op, common for infrequent schedules
-            self.next_schedule = new_next_schedule
+
+            # If in a transaction, use select_for_update to lock the next schedule row, which
+            # prevents a race condition if new_next_schedule is deleted elsewhere during this transaction
+            if transaction.get_autocommit():
+                self.next_schedule = related_schedules.first()
+            else:
+                self.next_schedule = related_schedules.select_for_update().first()
+
             self.next_job_run = new_next_schedule.next_run
             self.save(update_fields=['next_schedule', 'next_job_run'])

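The autocommit check above is the standard way to ask "is a transaction open?": select_for_update() is only valid inside one, and Django's transaction.get_autocommit() returns False while an atomic() block is active. A generic sketch of the same pattern (locked_first is illustrative, not AWX code):

from django.db import transaction


def locked_first(queryset):
    # select_for_update() raises TransactionManagementError outside a
    # transaction, so only add the row lock when autocommit is off.
    if transaction.get_autocommit():
        return queryset.first()
    return queryset.select_for_update().first()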
@@ -823,7 +830,7 @@ class UnifiedJob(
                 update_fields.append(key)

         if parent_instance:
-            if self.status in ('pending', 'waiting', 'running'):
+            if self.status in ('pending', 'running'):
                 if parent_instance.current_job != self:
                     parent_instance_set('current_job', self)
                 # Update parent with all the 'good' states of its child
@@ -860,7 +867,7 @@ class UnifiedJob(
         # If this job already exists in the database, retrieve a copy of
         # the job in its prior state.
         # If update_fields are given without status, then that indicates no change
-        if self.pk and ((not update_fields) or ('status' in update_fields)):
+        if self.status != 'waiting' and self.pk and ((not update_fields) or ('status' in update_fields)):
             self_before = self.__class__.objects.get(pk=self.pk)
             if self_before.status != self.status:
                 status_before = self_before.status
@@ -902,7 +909,8 @@ class UnifiedJob(
             update_fields.append('elapsed')

         # Ensure that the job template information is current.
-        if self.unified_job_template != self._get_parent_instance():
+        # unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
+        if self.status != 'waiting' and self.unified_job_template != self._get_parent_instance():
             self.unified_job_template = self._get_parent_instance()
             if 'unified_job_template' not in update_fields:
                 update_fields.append('unified_job_template')
@@ -915,8 +923,9 @@ class UnifiedJob(
         # Okay; we're done. Perform the actual save.
         result = super(UnifiedJob, self).save(*args, **kwargs)

-        # If status changed, update the parent instance.
-        if self.status != status_before:
+        # If status changed, update the parent instance
+        # unless status is 'waiting', because this happens in large batches at end of task manager runs and is blocking
+        if self.status != status_before and self.status != 'waiting':
             # Update parent outside of the transaction for Job w/ allow_simultaneous=True
             # This dodges lock contention at the expense of the foreign key not being
             # completely correct.
awx/main/notifications/awssns_backend.py (new file, 70 lines)
@@ -0,0 +1,70 @@
+# Copyright (c) 2016 Ansible, Inc.
+# All Rights Reserved.
+import json
+import logging
+
+import boto3
+from botocore.exceptions import ClientError
+
+from awx.main.notifications.base import AWXBaseEmailBackend
+from awx.main.notifications.custom_notification_base import CustomNotificationBase
+
+logger = logging.getLogger('awx.main.notifications.awssns_backend')
+WEBSOCKET_TIMEOUT = 30
+
+
+class AWSSNSBackend(AWXBaseEmailBackend, CustomNotificationBase):
+    init_parameters = {
+        "aws_region": {"label": "AWS Region", "type": "string", "default": ""},
+        "aws_access_key_id": {"label": "Access Key ID", "type": "string", "default": ""},
+        "aws_secret_access_key": {"label": "Secret Access Key", "type": "password", "default": ""},
+        "aws_session_token": {"label": "Session Token", "type": "password", "default": ""},
+        "sns_topic_arn": {"label": "SNS Topic ARN", "type": "string", "default": ""},
+    }
+    recipient_parameter = "sns_topic_arn"
+    sender_parameter = None
+
+    DEFAULT_BODY = "{{ job_metadata }}"
+    default_messages = CustomNotificationBase.job_metadata_messages
+
+    def __init__(self, aws_region, aws_access_key_id, aws_secret_access_key, aws_session_token, fail_silently=False, **kwargs):
+        session = boto3.session.Session()
+        client_config = {"service_name": 'sns'}
+        if aws_region:
+            client_config["region_name"] = aws_region
+        if aws_secret_access_key:
+            client_config["aws_secret_access_key"] = aws_secret_access_key
+        if aws_access_key_id:
+            client_config["aws_access_key_id"] = aws_access_key_id
+        if aws_session_token:
+            client_config["aws_session_token"] = aws_session_token
+        self.client = session.client(**client_config)
+        super(AWSSNSBackend, self).__init__(fail_silently=fail_silently)
+
+    def _sns_publish(self, topic_arn, message):
+        self.client.publish(TopicArn=topic_arn, Message=message, MessageAttributes={})
+
+    def format_body(self, body):
+        if isinstance(body, str):
+            try:
+                body = json.loads(body)
+            except json.JSONDecodeError:
+                pass
+
+        if isinstance(body, dict):
+            # convert dict body to json string
+            body = json.dumps(body)
+        return body
+
+    def send_messages(self, messages):
+        sent_messages = 0
+        for message in messages:
+            sns_topic_arn = str(message.recipients()[0])
+            try:
+                self._sns_publish(topic_arn=sns_topic_arn, message=message.body)
+                sent_messages += 1
+            except ClientError as error:
+                if not self.fail_silently:
+                    raise error
+
+        return sent_messages
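A standalone sketch of what send_messages() boils down to per notification, using boto3's SNS publish call directly (the region, topic ARN, and message below are placeholders, not values from this changeset):

import boto3

client = boto3.session.Session().client(service_name='sns', region_name='us-east-1')
client.publish(
    TopicArn='arn:aws:sns:us-east-1:123456789012:awx-notifications',  # hypothetical topic
    Message='{"id": 42, "name": "Demo Job Template", "status": "successful"}',
    MessageAttributes={},
)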
@@ -32,3 +32,15 @@ class CustomNotificationBase(object):
             "denied": {"message": DEFAULT_APPROVAL_DENIED_MSG, "body": None},
         },
     }
+
+    job_metadata_messages = {
+        "started": {"body": "{{ job_metadata }}"},
+        "success": {"body": "{{ job_metadata }}"},
+        "error": {"body": "{{ job_metadata }}"},
+        "workflow_approval": {
+            "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
+            "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
+            "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
+            "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
+        },
+    }
@@ -27,17 +27,7 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
     sender_parameter = None

     DEFAULT_BODY = "{{ job_metadata }}"
-    default_messages = {
-        "started": {"body": DEFAULT_BODY},
-        "success": {"body": DEFAULT_BODY},
-        "error": {"body": DEFAULT_BODY},
-        "workflow_approval": {
-            "running": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" needs review. This node can be viewed at: {{ workflow_url }}"}'},
-            "approved": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was approved. {{ workflow_url }}"}'},
-            "timed_out": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" has timed out. {{ workflow_url }}"}'},
-            "denied": {"body": '{"body": "The approval node \\"{{ approval_node_name }}\\" was denied. {{ workflow_url }}"}'},
-        },
-    }
+    default_messages = CustomNotificationBase.job_metadata_messages

     def __init__(self, http_method, headers, disable_ssl_verification=False, fail_silently=False, username=None, password=None, **kwargs):
         self.http_method = http_method
@@ -63,6 +63,10 @@ websocket_urlpatterns = [
     re_path(r'api/websocket/$', consumers.EventConsumer.as_asgi()),
     re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
 ]
+
+if settings.OPTIONAL_API_URLPATTERN_PREFIX:
+    websocket_urlpatterns.append(re_path(r'api/{}/v2/websocket/$'.format(settings.OPTIONAL_API_URLPATTERN_PREFIX), consumers.EventConsumer.as_asgi()))
+
 websocket_relay_urlpatterns = [
     re_path(r'websocket/relay/$', consumers.RelayConsumer.as_asgi()),
 ]
@@ -36,6 +36,9 @@ import ansible_runner.cleanup
 # dateutil
 from dateutil.parser import parse as parse_date

+# django-ansible-base
+from ansible_base.resource_registry.tasks.sync import SyncExecutor
+
 # AWX
 from awx import __version__ as awx_application_version
 from awx.main.access import access_registry
@@ -964,3 +967,17 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, p
     permission_check_func(creater, copy_mapping.values())
     if isinstance(new_obj, Inventory):
         update_inventory_computed_fields.delay(new_obj.id)
+
+
+@task(queue=get_task_queuename)
+def periodic_resource_sync():
+    if not getattr(settings, 'RESOURCE_SERVER', None):
+        logger.debug("Skipping periodic resource_sync, RESOURCE_SERVER not configured")
+        return
+
+    with advisory_lock('periodic_resource_sync', wait=False) as acquired:
+        if acquired is False:
+            logger.debug("Not running periodic_resource_sync, another task holds lock")
+            return
+
+        SyncExecutor().run()
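advisory_lock here wraps a PostgreSQL advisory lock; with wait=False it attempts pg_try_advisory_lock() and yields a boolean rather than blocking, which is what lets overlapping periodic tasks bail out cleanly. A rough sketch of that contract, assuming the django-pglocks-style API that AWX builds on (the import is an assumption, not confirmed by this diff):

from pglocks import advisory_lock  # assumed import; AWX uses an equivalent helper

with advisory_lock('periodic_resource_sync', wait=False) as acquired:
    if acquired:
        print("lock held; safe to run the sync")
    else:
        print("another worker holds the lock; skipping")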
@@ -0,0 +1,5 @@
+{
+    "K8S_AUTH_HOST": "https://foo.invalid",
+    "K8S_AUTH_API_KEY": "fooo",
+    "K8S_AUTH_VERIFY_SSL": "False"
+}
@@ -9,8 +9,8 @@ def test_user_role_view_access(rando, inventory, mocker, post):
     role_pk = inventory.admin_role.pk
     data = {"id": role_pk}
     mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
-    with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
-        post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
+    mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
+    post(url=reverse('api:user_roles_list', kwargs={'pk': rando.pk}), data=data, user=rando, expect=403)
     mock_access.can_attach.assert_called_once_with(inventory.admin_role, rando, 'members', data, skip_sub_obj_read_check=False)

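These test hunks all make the same change: pytest-mock's mocker.patch() is not a context manager (it returns the mock object itself, and patches are undone automatically at test teardown), so the flat call replaces the misleading `with` form throughout. A minimal self-contained sketch of the corrected idiom:

import os


def test_mocker_patch_is_flat(mocker):
    # The patch takes effect immediately and the mocker fixture reverts it
    # after the test; no `with` block is needed (newer pytest-mock rejects one).
    mocker.patch.object(os, 'getcwd', return_value='/tmp')
    assert os.getcwd() == '/tmp'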
@@ -21,8 +21,8 @@ def test_team_role_view_access(rando, team, inventory, mocker, post):
     role_pk = inventory.admin_role.pk
     data = {"id": role_pk}
     mock_access = mocker.MagicMock(can_attach=mocker.MagicMock(return_value=False))
-    with mocker.patch('awx.main.access.RoleAccess', return_value=mock_access):
-        post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
+    mocker.patch('awx.main.access.RoleAccess', return_value=mock_access)
+    post(url=reverse('api:team_roles_list', kwargs={'pk': team.pk}), data=data, user=rando, expect=403)
     mock_access.can_attach.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)

@@ -33,8 +33,8 @@ def test_role_team_view_access(rando, team, inventory, mocker, post):
     role_pk = inventory.admin_role.pk
     data = {"id": team.pk}
     mock_access = mocker.MagicMock(return_value=False, __name__='mocked')
-    with mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access):
-        post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
+    mocker.patch('awx.main.access.RoleAccess.can_attach', mock_access)
+    post(url=reverse('api:role_teams_list', kwargs={'pk': role_pk}), data=data, user=rando, expect=403)
     mock_access.assert_called_once_with(inventory.admin_role, team, 'member_role.parents', data, skip_sub_obj_read_check=False)

@@ -30,7 +30,7 @@ def test_idempotent_credential_type_setup():


 @pytest.mark.django_db
-def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh):
+def test_create_user_credential_via_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
     params = {
         'credential_type': 1,
         'inputs': {'username': 'someusername'},
@@ -81,7 +81,7 @@ def test_credential_validation_error_with_multiple_owner_fields(post, admin, ali


 @pytest.mark.django_db
-def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh):
+def test_create_user_credential_via_user_credentials_list(post, get, alice, credentialtype_ssh, setup_managed_roles):
     params = {
         'credential_type': 1,
         'inputs': {'username': 'someusername'},

awx/main/tests/functional/api/test_immutablesharedfields.py (new file, 66 lines)
@@ -0,0 +1,66 @@
+import pytest
+
+from awx.api.versioning import reverse
+from awx.main.models import Organization
+
+
+@pytest.mark.django_db
+class TestImmutableSharedFields:
+    @pytest.fixture(autouse=True)
+    def configure_settings(self, settings):
+        settings.ALLOW_LOCAL_RESOURCE_MANAGEMENT = False
+
+    def test_create_raises_permission_denied(self, admin_user, post):
+        orgA = Organization.objects.create(name='orgA')
+        resp = post(
+            url=reverse('api:team_list'),
+            data={'name': 'teamA', 'organization': orgA.id},
+            user=admin_user,
+            expect=403,
+        )
+        assert "Creation of this resource is not allowed" in resp.data['detail']
+
+    def test_perform_delete_raises_permission_denied(self, admin_user, delete):
+        orgA = Organization.objects.create(name='orgA')
+        team = orgA.teams.create(name='teamA')
+        resp = delete(
+            url=reverse('api:team_detail', kwargs={'pk': team.id}),
+            user=admin_user,
+            expect=403,
+        )
+        assert "Deletion of this resource is not allowed" in resp.data['detail']
+
+    def test_perform_update(self, admin_user, patch):
+        orgA = Organization.objects.create(name='orgA')
+        team = orgA.teams.create(name='teamA')
+        # allow patching non-shared fields
+        patch(
+            url=reverse('api:team_detail', kwargs={'pk': team.id}),
+            data={"description": "can change this field"},
+            user=admin_user,
+            expect=200,
+        )
+        orgB = Organization.objects.create(name='orgB')
+        # prevent patching shared fields
+        resp = patch(url=reverse('api:team_detail', kwargs={'pk': team.id}), data={"organization": orgB.id}, user=admin_user, expect=403)
+        assert "Cannot change shared field" in resp.data['organization']
+
+    @pytest.mark.parametrize(
+        'role',
+        ['admin_role', 'member_role'],
+    )
+    @pytest.mark.parametrize('resource', ['organization', 'team'])
+    def test_prevent_assigning_member_to_organization_or_team(self, admin_user, post, resource, role):
+        orgA = Organization.objects.create(name='orgA')
+        if resource == 'organization':
+            role = getattr(orgA, role)
+        elif resource == 'team':
+            teamA = orgA.teams.create(name='teamA')
+            role = getattr(teamA, role)
+        resp = post(
+            url=reverse('api:user_roles_list', kwargs={'pk': admin_user.id}),
+            data={'id': role.id},
+            user=admin_user,
+            expect=403,
+        )
+        assert f"Cannot directly modify user membership to {resource}." in resp.data['msg']
@@ -131,11 +131,11 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad

     mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)

-    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
-        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
-            response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
-            assert JobTemplate.create_unified_job.called
-            assert JobTemplate.create_unified_job.call_args == ()
+    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
+    response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
+    assert JobTemplate.create_unified_job.called
+    assert JobTemplate.create_unified_job.call_args == ()

     # Check that job is serialized correctly
     job_id = response.data['job']
@@ -167,12 +167,12 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi

     mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)

-    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
-        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
-            response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
-            assert JobTemplate.create_unified_job.called
-            called_with = data_to_internal(runtime_data)
-            JobTemplate.create_unified_job.assert_called_with(**called_with)
+    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
+    response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, admin_user, expect=201)
+    assert JobTemplate.create_unified_job.called
+    called_with = data_to_internal(runtime_data)
+    JobTemplate.create_unified_job.assert_called_with(**called_with)

     job_id = response.data['job']
     assert job_id == 968
@@ -187,11 +187,11 @@ def test_job_accept_empty_tags(job_template_prompts, post, admin_user, mocker):

     mock_job = mocker.MagicMock(spec=Job, id=968)

-    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
-        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
-            post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
-            assert JobTemplate.create_unified_job.called
-            assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)
+    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
+    post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
+    assert JobTemplate.create_unified_job.called
+    assert JobTemplate.create_unified_job.call_args == ({'job_tags': '', 'skip_tags': ''},)

     mock_job.signal_start.assert_called_once()

@@ -203,14 +203,14 @@ def test_slice_timeout_forks_need_int(job_template_prompts, post, admin_user, mo

     mock_job = mocker.MagicMock(spec=Job, id=968)

-    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
-        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
-            response = post(
-                reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
-            )
-            assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
-            assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
-            assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'
+    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
+    response = post(
+        reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), {'timeout': '', 'job_slice_count': '', 'forks': ''}, admin_user, expect=400
+    )
+    assert 'forks' in response.data and response.data['forks'][0] == 'A valid integer is required.'
+    assert 'job_slice_count' in response.data and response.data['job_slice_count'][0] == 'A valid integer is required.'
+    assert 'timeout' in response.data and response.data['timeout'][0] == 'A valid integer is required.'


 @pytest.mark.django_db
@@ -244,12 +244,12 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,

     mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)

-    with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
-        with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
-            response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
-            assert JobTemplate.create_unified_job.called
-            expected_call = data_to_internal(runtime_data)
-            assert JobTemplate.create_unified_job.call_args == (expected_call,)
+    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation')
+    response = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}), runtime_data, rando, expect=201)
+    assert JobTemplate.create_unified_job.called
+    expected_call = data_to_internal(runtime_data)
+    assert JobTemplate.create_unified_job.call_args == (expected_call,)

     job_id = response.data['job']
     assert job_id == 968
@@ -641,18 +641,18 @@ def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job
     job_template.survey_spec = survey_spec_factory('survey_var')
     job_template.save()

-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
-        with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
-            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
-                response = post(
-                    reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
-                    dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
-                    admin_user,
-                    expect=201,
-                )
-                assert JobTemplate.create_unified_job.called
-                assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
+    mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
+    response = post(
+        reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
+        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
+        admin_user,
+        expect=201,
+    )
+    assert JobTemplate.create_unified_job.called
+    assert JobTemplate.create_unified_job.call_args == ({'extra_vars': {'survey_var': 4}},)

     job_id = response.data['job']
     assert job_id == 968
@@ -670,22 +670,22 @@ def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_tem
     job_template.survey_spec = survey_spec_factory('survey_var')
     job_template.save()

-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
-        with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
-            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
-                with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
-                    post(
-                        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
-                        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
-                        admin_user,
-                        expect=201,
-                        format='json',
-                    )
-                    assert UnifiedJobTemplate.create_unified_job.called
-                    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
-                    call_args.pop('_eager_fields', None)  # internal purposes
-                    assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
+    mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
+    mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
+    post(
+        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
+        admin_user,
+        expect=201,
+        format='json',
+    )
+    assert UnifiedJobTemplate.create_unified_job.called
+    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
+    call_args.pop('_eager_fields', None)  # internal purposes
+    assert call_args == {'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, 'limit': 'single-host'}

     mock_job.signal_start.assert_called_once()

@@ -697,22 +697,22 @@ def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_t
     job_template.host_config_key = "foo"
     job_template.save()

-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
-        with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job):
-            with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
-                with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]):
-                    post(
-                        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
-                        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
-                        admin_user,
-                        expect=201,
-                        format='json',
-                    )
-                    assert UnifiedJobTemplate.create_unified_job.called
-                    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
-                    call_args.pop('_eager_fields', None)  # internal purposes
-                    assert call_args == {'limit': 'single-host'}
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
+    mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job)
+    mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={})
+    mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host])
+    post(
+        reverse('api:job_template_callback', kwargs={'pk': job_template.pk}),
+        dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"),
+        admin_user,
+        expect=201,
+        format='json',
+    )
+    assert UnifiedJobTemplate.create_unified_job.called
+    call_args = UnifiedJobTemplate.create_unified_job.call_args[1]
+    call_args.pop('_eager_fields', None)  # internal purposes
+    assert call_args == {'limit': 'single-host'}

     mock_job.signal_start.assert_called_once()

@@ -725,9 +725,9 @@ def test_callback_find_matching_hosts(mocker, get, job_template_prompts, admin_u
     job_template.save()
     host_with_alias = Host(name='localhost', inventory=job_template.inventory)
     host_with_alias.save()
-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
-        assert tuple(r.data['matching_hosts']) == ('localhost',)
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
+    assert tuple(r.data['matching_hosts']) == ('localhost',)


 @pytest.mark.django_db
@@ -738,6 +738,6 @@ def test_callback_extra_var_takes_priority_over_host_name(mocker, get, job_templ
     job_template.save()
     host_with_alias = Host(name='localhost', variables={'ansible_host': 'foobar'}, inventory=job_template.inventory)
     host_with_alias.save()
-    with mocker.patch('awx.main.access.BaseAccess.check_license'):
-        r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
-        assert not r.data['matching_hosts']
+    mocker.patch('awx.main.access.BaseAccess.check_license')
+    r = get(reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), user=admin_user, expect=200)
+    assert not r.data['matching_hosts']
@@ -165,8 +165,8 @@ class TestAccessListCapabilities:
     def test_access_list_direct_access_capability(self, inventory, rando, get, mocker, mock_access_method):
         inventory.admin_role.members.add(rando)

-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)

         mock_access_method.assert_called_once_with(inventory.admin_role, rando, 'members', **self.extra_kwargs)
         self._assert_one_in_list(response.data)
@@ -174,8 +174,8 @@ class TestAccessListCapabilities:
         assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'

     def test_access_list_indirect_access_capability(self, inventory, organization, org_admin, get, mocker, mock_access_method):
-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)

         mock_access_method.assert_called_once_with(organization.admin_role, org_admin, 'members', **self.extra_kwargs)
         self._assert_one_in_list(response.data, sublist='indirect_access')
@@ -185,8 +185,8 @@ class TestAccessListCapabilities:
     def test_access_list_team_direct_access_capability(self, inventory, team, team_member, get, mocker, mock_access_method):
         team.member_role.children.add(inventory.admin_role)

-        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)
+        mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+        response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)

         mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', **self.extra_kwargs)
         self._assert_one_in_list(response.data)
@@ -198,8 +198,8 @@ class TestAccessListCapabilities:
 def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
     team.member_role.children.add(inventory.admin_role)

-    with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-        response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)
+    mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+    response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)

     # Did we assess whether team_member can remove team's permission to the inventory?
     mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', skip_sub_obj_read_check=True, data={})
@@ -212,8 +212,8 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
     organization.member_role.members.add(alice)
     organization.member_role.members.add(bob)

-    with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
-        response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)
+    mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method)
+    response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)

     # Did we assess whether bob can remove alice's permission to the inventory?
     mock_access_method.assert_called_once_with(organization.member_role, alice, 'members', skip_sub_obj_read_check=True, data={})

@@ -43,9 +43,9 @@ def run_command(name, *args, **options):
     ],
 )
 def test_update_password_command(mocker, username, password, expected, changed):
-    with mocker.patch.object(UpdatePassword, 'update_password', return_value=changed):
-        result, stdout, stderr = run_command('update_password', username=username, password=password)
-        if result is None:
-            assert stdout == expected
-        else:
-            assert str(result) == expected
+    mocker.patch.object(UpdatePassword, 'update_password', return_value=changed)
+    result, stdout, stderr = run_command('update_password', username=username, password=password)
+    if result is None:
+        assert stdout == expected
+    else:
+        assert str(result) == expected

@@ -16,6 +16,8 @@ from django.db.backends.sqlite3.base import SQLiteCursorWrapper

 from django.db.models.signals import post_migrate

+from awx.main.migrations._dab_rbac import setup_managed_role_definitions
+
 # AWX
 from awx.main.models.projects import Project
 from awx.main.models.ha import Instance
@@ -90,6 +92,12 @@ def deploy_jobtemplate(project, inventory, credential):
     return jt


+@pytest.fixture
+def setup_managed_roles():
+    "Run the migration script to pre-create managed role definitions"
+    setup_managed_role_definitions(apps, None)
+
+
 @pytest.fixture
 def team(organization):
     return organization.teams.create(name='test-team')

@@ -1,10 +0,0 @@
-import pytest
-from django.apps import apps
-
-from awx.main.migrations._dab_rbac import setup_managed_role_definitions
-
-
-@pytest.fixture
-def managed_roles():
-    "Run the migration script to pre-create managed role definitions"
-    setup_managed_role_definitions(apps, None)
@@ -1,45 +0,0 @@
-import pytest
-from django.apps import apps
-from django.test.utils import override_settings
-
-from awx.main.migrations._dab_rbac import setup_managed_role_definitions
-
-from ansible_base.rbac.models import RoleDefinition
-
-INVENTORY_OBJ_PERMISSIONS = ['view_inventory', 'adhoc_inventory', 'use_inventory', 'change_inventory', 'delete_inventory', 'update_inventory']
-
-
-@pytest.mark.django_db
-def test_managed_definitions_precreate():
-    with override_settings(
-        ANSIBLE_BASE_ROLE_PRECREATE={
-            'object_admin': '{cls._meta.model_name}-admin',
-            'org_admin': 'organization-admin',
-            'org_children': 'organization-{cls._meta.model_name}-admin',
-            'special': '{cls._meta.model_name}-{action}',
-        }
-    ):
-        setup_managed_role_definitions(apps, None)
-    rd = RoleDefinition.objects.get(name='inventory-admin')
-    assert rd.managed is True
-    # add permissions do not go in the object-level admin
-    assert set(rd.permissions.values_list('codename', flat=True)) == set(INVENTORY_OBJ_PERMISSIONS)
-
-    # test org-level object admin permissions
-    rd = RoleDefinition.objects.get(name='organization-inventory-admin')
-    assert rd.managed is True
-    assert set(rd.permissions.values_list('codename', flat=True)) == set(['add_inventory', 'view_organization'] + INVENTORY_OBJ_PERMISSIONS)
-
-
-@pytest.mark.django_db
-def test_managed_definitions_custom_obj_admin_name():
-    with override_settings(
-        ANSIBLE_BASE_ROLE_PRECREATE={
-            'object_admin': 'foo-{cls._meta.model_name}-foo',
-        }
-    ):
-        setup_managed_role_definitions(apps, None)
-    rd = RoleDefinition.objects.get(name='foo-inventory-foo')
-    assert rd.managed is True
-    # add permissions do not go in the object-level admin
-    assert set(rd.permissions.values_list('codename', flat=True)) == set(INVENTORY_OBJ_PERMISSIONS)
@@ -10,7 +10,7 @@ from ansible_base.rbac.models import RoleDefinition


 @pytest.mark.django_db
-def test_managed_roles_created(managed_roles):
+def test_managed_roles_created(setup_managed_roles):
     "Managed RoleDefinitions are created in post_migration signal, we expect to see them here"
     for cls in (JobTemplate, Inventory):
         ct = ContentType.objects.get_for_model(cls)
@@ -22,7 +22,7 @@ def test_managed_roles_created(managed_roles):


 @pytest.mark.django_db
-def test_custom_read_role(admin_user, post, managed_roles):
+def test_custom_read_role(admin_user, post, setup_managed_roles):
     rd_url = django_reverse('roledefinition-list')
     resp = post(
         url=rd_url, data={"name": "read role made for test", "content_type": "awx.inventory", "permissions": ['view_inventory']}, user=admin_user, expect=201
@@ -40,10 +40,25 @@ def test_custom_system_roles_prohibited(admin_user, post):


 @pytest.mark.django_db
-def test_assign_managed_role(admin_user, alice, rando, inventory, post, managed_roles):
+def test_assignment_to_invisible_user(admin_user, alice, rando, inventory, post, setup_managed_roles):
+    "Alice can not see rando, and so can not give them a role assignment"
     rd = RoleDefinition.objects.get(name='Inventory Admin')
     rd.give_permission(alice, inventory)
     # Now that alice has full permissions to the inventory, she will give rando permission
     url = django_reverse('roleuserassignment-list')
+    r = post(url=url, data={"user": rando.id, "role_definition": rd.id, "object_id": inventory.id}, user=alice, expect=400)
+    assert 'does not exist' in str(r.data)
+    assert not rando.has_obj_perm(inventory, 'change')
+
+
+@pytest.mark.django_db
+def test_assign_managed_role(admin_user, alice, rando, inventory, post, setup_managed_roles, organization):
+    rd = RoleDefinition.objects.get(name='Inventory Admin')
+    rd.give_permission(alice, inventory)
+    # When alice and rando are members of the same org, they can see each other
+    member_rd = RoleDefinition.objects.get(name='Organization Member')
+    for u in (alice, rando):
+        member_rd.give_permission(u, organization)
+    # Now that alice has full permissions to the inventory, and can see rando, she will give rando permission
+    url = django_reverse('roleuserassignment-list')
     post(url=url, data={"user": rando.id, "role_definition": rd.id, "object_id": inventory.id}, user=alice, expect=201)
     assert rando.has_obj_perm(inventory, 'change') is True
@@ -63,7 +78,7 @@ def test_assign_custom_delete_role(admin_user, rando, inventory, delete, patch):


 @pytest.mark.django_db
-def test_assign_custom_add_role(admin_user, rando, organization, post, managed_roles):
+def test_assign_custom_add_role(admin_user, rando, organization, post, setup_managed_roles):
     rd, _ = RoleDefinition.objects.get_or_create(
         name='inventory-add', permissions=['add_inventory', 'view_organization'], content_type=ContentType.objects.get_for_model(Organization)
     )

@@ -2,11 +2,15 @@ from unittest import mock

 import pytest

+from django.contrib.contenttypes.models import ContentType
+
+from crum import impersonate
+
 from awx.main.models.rbac import get_role_from_object_role, give_creator_permissions
 from awx.main.models import User, Organization, WorkflowJobTemplate, WorkflowJobTemplateNode, Team
 from awx.api.versioning import reverse

-from ansible_base.rbac.models import RoleUserAssignment
+from ansible_base.rbac.models import RoleUserAssignment, RoleDefinition


 @pytest.mark.django_db
@@ -14,7 +18,7 @@ from ansible_base.rbac.models import RoleUserAssignment
     'role_name',
     ['execution_environment_admin_role', 'project_admin_role', 'admin_role', 'auditor_role', 'read_role', 'execute_role', 'notification_admin_role'],
 )
-def test_round_trip_roles(organization, rando, role_name, managed_roles):
+def test_round_trip_roles(organization, rando, role_name, setup_managed_roles):
     """
     Make an assignment with the old-style role,
     get the equivalent new role
@@ -28,7 +32,39 @@ def test_round_trip_roles(organization, rando, role_name, managed_roles):


 @pytest.mark.django_db
-def test_organization_level_permissions(organization, inventory, managed_roles):
+def test_role_naming(setup_managed_roles):
+    qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='dmin')
+    assert qs.count() == 1  # sanity
+    rd = qs.first()
+    assert rd.name == 'JobTemplate Admin'
+    assert rd.description
+    assert rd.created_by is None
+
+
+@pytest.mark.django_db
+def test_action_role_naming(setup_managed_roles):
+    qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='ecute')
+    assert qs.count() == 1  # sanity
+    rd = qs.first()
+    assert rd.name == 'JobTemplate Execute'
+    assert rd.description
+    assert rd.created_by is None
+
+
+@pytest.mark.django_db
+def test_compat_role_naming(setup_managed_roles, job_template, rando, alice):
+    with impersonate(alice):
+        job_template.read_role.members.add(rando)
+    qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='ompat')
+    assert qs.count() == 1  # sanity
+    rd = qs.first()
+    assert rd.name == 'JobTemplate Read Compat'
+    assert rd.description
+    assert rd.created_by is None
+
+
+@pytest.mark.django_db
+def test_organization_level_permissions(organization, inventory, setup_managed_roles):
     u1 = User.objects.create(username='alice')
     u2 = User.objects.create(username='bob')

@@ -58,14 +94,14 @@ def test_organization_level_permissions(organization, inventory, managed_roles):


 @pytest.mark.django_db
-def test_organization_execute_role(organization, rando, managed_roles):
+def test_organization_execute_role(organization, rando, setup_managed_roles):
     organization.execute_role.members.add(rando)
     assert rando in organization.execute_role
     assert set(Organization.accessible_objects(rando, 'execute_role')) == set([organization])


 @pytest.mark.django_db
-def test_workflow_approval_list(get, post, admin_user, managed_roles):
+def test_workflow_approval_list(get, post, admin_user, setup_managed_roles):
     workflow_job_template = WorkflowJobTemplate.objects.create()
     approval_node = WorkflowJobTemplateNode.objects.create(workflow_job_template=workflow_job_template)
     url = reverse('api:workflow_job_template_node_create_approval', kwargs={'pk': approval_node.pk, 'version': 'v2'})
@@ -79,14 +115,14 @@ def test_workflow_approval_list(get, post, admin_user, managed_roles):


 @pytest.mark.django_db
-def test_creator_permission(rando, admin_user, inventory, managed_roles):
+def test_creator_permission(rando, admin_user, inventory, setup_managed_roles):
     give_creator_permissions(rando, inventory)
     assert rando in inventory.admin_role
     assert rando in inventory.admin_role.members.all()


 @pytest.mark.django_db
-def test_team_team_read_role(rando, team, admin_user, post, managed_roles):
+def test_team_team_read_role(rando, team, admin_user, post, setup_managed_roles):
     orgs = [Organization.objects.create(name=f'foo-{i}') for i in range(2)]
     teams = [Team.objects.create(name=f'foo-{i}', organization=orgs[i]) for i in range(2)]
     teams[1].member_role.members.add(rando)

@@ -21,13 +21,13 @@ class TestComputedFields:
     def test_computed_fields_normal_use(self, mocker, inventory):
         job = Job.objects.create(name='fake-job', inventory=inventory)
         with immediate_on_commit():
-            with mocker.patch.object(update_inventory_computed_fields, 'delay'):
-                job.delete()
-                update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)
+            mocker.patch.object(update_inventory_computed_fields, 'delay')
+            job.delete()
+            update_inventory_computed_fields.delay.assert_called_once_with(inventory.id)

     def test_disable_computed_fields(self, mocker, inventory):
         job = Job.objects.create(name='fake-job', inventory=inventory)
         with disable_computed_fields():
-            with mocker.patch.object(update_inventory_computed_fields, 'delay'):
-                job.delete()
-                update_inventory_computed_fields.delay.assert_not_called()
+            mocker.patch.object(update_inventory_computed_fields, 'delay')
+            job.delete()
+            update_inventory_computed_fields.delay.assert_not_called()

@@ -21,13 +21,13 @@ def test_multi_group_basic_job_launch(instance_factory, controlplane_instance_gr
     j2 = create_job(objects2.job_template)
     with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
         mock_task_impact.return_value = 500
-        with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
-            TaskManager().schedule()
-            TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])
+        mocker.patch("awx.main.scheduler.TaskManager.start_task")
+        TaskManager().schedule()
+        TaskManager.start_task.assert_has_calls([mock.call(j1, ig1, i1), mock.call(j2, ig2, i2)])


 @pytest.mark.django_db
-def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group, mocker, instance_group_factory, job_template_factory):
+def test_multi_group_with_shared_dependency(instance_factory, controlplane_instance_group, instance_group_factory, job_template_factory):
     i1 = instance_factory("i1")
     i2 = instance_factory("i2")
     ig1 = instance_group_factory("ig1", instances=[i1])
@@ -50,7 +50,7 @@ def test_multi_group_with_shared_dependency(instance_factory, controlplane_insta
     objects2 = job_template_factory('jt2', organization=objects1.organization, project=p, inventory='inv2', credential='cred2')
     objects2.job_template.instance_groups.add(ig2)
     j2 = create_job(objects2.job_template, dependencies_processed=False)
-    with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
+    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
         DependencyManager().schedule()
         TaskManager().schedule()
         pu = p.project_updates.first()
@@ -73,10 +73,10 @@ def test_workflow_job_no_instancegroup(workflow_job_template_factory, controlpla
     wfj = wfjt.create_unified_job()
     wfj.status = "pending"
     wfj.save()
-    with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
-        TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(wfj, None, None)
-        assert wfj.instance_group is None
+    mocker.patch("awx.main.scheduler.TaskManager.start_task")
+    TaskManager().schedule()
+    TaskManager.start_task.assert_called_once_with(wfj, None, None)
+    assert wfj.instance_group is None


 @pytest.mark.django_db

@@ -16,9 +16,9 @@ def test_single_job_scheduler_launch(hybrid_instance, controlplane_instance_grou
     instance = controlplane_instance_group.instances.all()[0]
     objects = job_template_factory('jt', organization='org1', project='proj', inventory='inv', credential='cred')
     j = create_job(objects.job_template)
-    with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
-        TaskManager().schedule()
-        TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)
+    mocker.patch("awx.main.scheduler.TaskManager.start_task")
+    TaskManager().schedule()
+    TaskManager.start_task.assert_called_once_with(j, controlplane_instance_group, instance)


 @pytest.mark.django_db

@@ -46,6 +46,8 @@ def generate_fake_var(element):

 def credential_kind(source):
     """Given the inventory source kind, return expected credential kind"""
+    if source == 'openshift_virtualization':
+        return 'kubernetes_bearer_token'
     return source.replace('ec2', 'aws')

@@ -1,6 +1,7 @@
 import pytest

 from django_test_migrations.plan import all_migrations, nodes_to_tuples
+from django.utils.timezone import now

 """
 Most tests that live in here can probably be deleted at some point. They are mainly
@@ -68,3 +69,19 @@ class TestMigrationSmoke:
         bar_peers = bar.peers.all()
         assert len(bar_peers) == 1
         assert fooaddr in bar_peers
+
+    def test_migrate_DAB_RBAC(self, migrator):
+        old_state = migrator.apply_initial_migration(('main', '0190_alter_inventorysource_source_and_more'))
+        Organization = old_state.apps.get_model('main', 'Organization')
+        User = old_state.apps.get_model('auth', 'User')
+
+        org = Organization.objects.create(name='arbitrary-org', created=now(), modified=now())
+        user = User.objects.create(username='random-user')
+        org.read_role.members.add(user)
+
+        new_state = migrator.apply_tested_migration(
+            ('main', '0192_custom_roles'),
+        )
+
+        RoleUserAssignment = new_state.apps.get_model('dab_rbac', 'RoleUserAssignment')
+        assert RoleUserAssignment.objects.filter(user=user.id, object_id=org.id).exists()
@@ -1,8 +1,6 @@
 # -*- coding: utf-8 -*-
 import pytest

-from django.conf import settings
-
 from awx.api.versioning import reverse
 from awx.main.middleware import URLModificationMiddleware
 from awx.main.models import (  # noqa
@@ -121,7 +119,7 @@ def test_notification_template(get, admin_user):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_instance(get, admin_user):
|
||||
def test_instance(get, admin_user, settings):
|
||||
test_instance = Instance.objects.create(uuid=settings.SYSTEM_UUID, hostname="localhost", capacity=100)
|
||||
url = reverse('api:instance_detail', kwargs={'pk': test_instance.pk})
|
||||
response = get(url, user=admin_user, expect=200)
|
||||
@@ -205,3 +203,65 @@ def test_403_vs_404(get):
|
||||
|
||||
get(f'/api/v2/users/{cindy.pk}/', expect=401)
|
||||
get('/api/v2/users/cindy/', expect=404)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestConvertNamedUrl:
|
||||
@pytest.mark.parametrize(
|
||||
"url",
|
||||
(
|
||||
"/api/",
|
||||
"/api/v2/",
|
||||
"/api/v2/hosts/",
|
||||
"/api/v2/hosts/1/",
|
||||
"/api/v2/organizations/1/inventories/",
|
||||
"/api/foo/",
|
||||
"/api/foo/v2/",
|
||||
"/api/foo/v2/organizations/",
|
||||
"/api/foo/v2/organizations/1/",
|
||||
"/api/foo/v2/organizations/1/inventories/",
|
||||
"/api/foobar/",
|
||||
"/api/foobar/v2/",
|
||||
"/api/foobar/v2/organizations/",
|
||||
"/api/foobar/v2/organizations/1/",
|
||||
"/api/foobar/v2/organizations/1/inventories/",
|
||||
"/api/foobar/v2/organizations/1/inventories/",
|
||||
),
|
||||
)
|
||||
def test_noop(self, url, settings):
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = ''
|
||||
assert URLModificationMiddleware._convert_named_url(url) == url
|
||||
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = 'foo'
|
||||
assert URLModificationMiddleware._convert_named_url(url) == url
|
||||
|
||||
def test_named_org(self):
|
||||
test_org = Organization.objects.create(name='test_org')
|
||||
|
||||
assert URLModificationMiddleware._convert_named_url('/api/v2/organizations/test_org/') == f'/api/v2/organizations/{test_org.pk}/'
|
||||
|
||||
def test_named_org_optional_api_urlpattern_prefix_interaction(self, settings):
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = 'bar'
|
||||
test_org = Organization.objects.create(name='test_org')
|
||||
|
||||
assert URLModificationMiddleware._convert_named_url('/api/bar/v2/organizations/test_org/') == f'/api/bar/v2/organizations/{test_org.pk}/'
|
||||
|
||||
@pytest.mark.parametrize("prefix", ['', 'bar'])
|
||||
def test_named_org_not_found(self, prefix, settings):
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = prefix
|
||||
if prefix:
|
||||
prefix += '/'
|
||||
|
||||
assert URLModificationMiddleware._convert_named_url(f'/api/{prefix}v2/organizations/does-not-exist/') == f'/api/{prefix}v2/organizations/0/'
|
||||
|
||||
@pytest.mark.parametrize("prefix", ['', 'bar'])
|
||||
def test_named_sub_resource(self, prefix, settings):
|
||||
settings.OPTIONAL_API_URLPATTERN_PREFIX = prefix
|
||||
test_org = Organization.objects.create(name='test_org')
|
||||
if prefix:
|
||||
prefix += '/'
|
||||
|
||||
assert (
|
||||
URLModificationMiddleware._convert_named_url(f'/api/{prefix}v2/organizations/test_org/inventories/')
|
||||
== f'/api/{prefix}v2/organizations/{test_org.pk}/inventories/'
|
||||
)
|
||||
|
||||
@@ -187,7 +187,7 @@ def test_remove_role_from_user(role, post, admin):


@pytest.mark.django_db
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN=True)
@override_settings(ANSIBLE_BASE_ALLOW_TEAM_ORG_ADMIN=True, ANSIBLE_BASE_ALLOW_TEAM_ORG_MEMBER=True)
def test_get_teams_roles_list(get, team, organization, admin):
    team.member_role.children.add(organization.admin_role)
    url = reverse('api:team_roles_list', kwargs={'pk': team.id})

@@ -165,7 +165,7 @@ class TestOrphanJobTemplate:

@pytest.mark.django_db
@pytest.mark.job_permissions
def test_job_template_creator_access(project, organization, rando, post):
def test_job_template_creator_access(project, organization, rando, post, setup_managed_roles):
    project.use_role.members.add(rando)
    response = post(
        url=reverse('api:job_template_list'),
@@ -76,15 +76,15 @@ class TestJobTemplateSerializerGetRelated:
class TestJobTemplateSerializerGetSummaryFields:
    def test_survey_spec_exists(self, test_get_summary_fields, mocker, job_template):
        job_template.survey_spec = {'name': 'blah', 'description': 'blah blah'}
        with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
            mock_rj.return_value = []
            test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')
        mock_rj = mocker.patch.object(JobTemplateSerializer, '_recent_jobs')
        mock_rj.return_value = []
        test_get_summary_fields(JobTemplateSerializer, job_template, 'survey')

    def test_survey_spec_absent(self, get_summary_fields_mock_and_run, mocker, job_template):
        job_template.survey_spec = None
        with mocker.patch.object(JobTemplateSerializer, '_recent_jobs') as mock_rj:
            mock_rj.return_value = []
            summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
        mock_rj = mocker.patch.object(JobTemplateSerializer, '_recent_jobs')
        mock_rj.return_value = []
        summary = get_summary_fields_mock_and_run(JobTemplateSerializer, job_template)
        assert 'survey' not in summary

    def test_copy_edit_standard(self, mocker, job_template_factory):
@@ -107,10 +107,10 @@ class TestJobTemplateSerializerGetSummaryFields:
        view.kwargs = {}
        serializer.context['view'] = view

        with mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie'):
            with mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar'):
                with mocker.patch("awx.main.access.JobTemplateAccess.can_copy", return_value='foo'):
                    response = serializer.get_summary_fields(jt_obj)
        mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie')
        mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar')
        mocker.patch("awx.main.access.JobTemplateAccess.can_copy", return_value='foo')
        response = serializer.get_summary_fields(jt_obj)

        assert response['user_capabilities']['copy'] == 'foo'
        assert response['user_capabilities']['edit'] == 'foobar'

@@ -189,8 +189,8 @@ class TestWorkflowJobTemplateNodeSerializerSurveyPasswords:
        serializer = WorkflowJobTemplateNodeSerializer()
        wfjt = WorkflowJobTemplate.objects.create(name='fake-wfjt')
        serializer.instance = WorkflowJobTemplateNode(workflow_job_template=wfjt, unified_job_template=jt, extra_data={'var1': '$encrypted$foooooo'})
        with mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo'):
            attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
        mocker.patch('awx.main.models.mixins.decrypt_value', return_value='foo')
        attrs = serializer.validate({'unified_job_template': jt, 'workflow_job_template': wfjt, 'extra_data': {'var1': '$encrypted$'}})
        assert 'survey_passwords' in attrs
        assert 'var1' in attrs['survey_passwords']
        assert attrs['extra_data']['var1'] == '$encrypted$foooooo'

@@ -191,16 +191,16 @@ class TestResourceAccessList:

    def test_parent_access_check_failed(self, mocker, mock_organization):
        mock_access = mocker.MagicMock(__name__='for logger', return_value=False)
        with mocker.patch('awx.main.access.BaseAccess.can_read', mock_access):
            with pytest.raises(PermissionDenied):
                self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
            mock_access.assert_called_once_with(mock_organization)
        mocker.patch('awx.main.access.BaseAccess.can_read', mock_access)
        with pytest.raises(PermissionDenied):
            self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
        mock_access.assert_called_once_with(mock_organization)

    def test_parent_access_check_worked(self, mocker, mock_organization):
        mock_access = mocker.MagicMock(__name__='for logger', return_value=True)
        with mocker.patch('awx.main.access.BaseAccess.can_read', mock_access):
            self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
            mock_access.assert_called_once_with(mock_organization)
        mocker.patch('awx.main.access.BaseAccess.can_read', mock_access)
        self.mock_view(parent=mock_organization).check_permissions(self.mock_request())
        mock_access.assert_called_once_with(mock_organization)

def test_related_search_reverse_FK_field():

@@ -66,7 +66,7 @@ class TestJobTemplateLabelList:
        mock_request = mock.MagicMock()

        super(JobTemplateLabelList, view).unattach(mock_request, None, None)
        assert mixin_unattach.called_with(mock_request, None, None)
        mixin_unattach.assert_called_with(mock_request, None, None)


class TestInventoryInventorySourcesUpdate:
@@ -108,15 +108,16 @@ class TestInventoryInventorySourcesUpdate:
        mock_request = mocker.MagicMock()
        mock_request.user.can_access.return_value = can_access

        with mocker.patch.object(InventoryInventorySourcesUpdate, 'get_object', return_value=obj):
            with mocker.patch.object(InventoryInventorySourcesUpdate, 'get_serializer_context', return_value=None):
                with mocker.patch('awx.api.serializers.InventoryUpdateDetailSerializer') as serializer_class:
                    serializer = serializer_class.return_value
                    serializer.to_representation.return_value = {}
        mocker.patch.object(InventoryInventorySourcesUpdate, 'get_object', return_value=obj)
        mocker.patch.object(InventoryInventorySourcesUpdate, 'get_serializer_context', return_value=None)
        serializer_class = mocker.patch('awx.api.serializers.InventoryUpdateDetailSerializer')

                    view = InventoryInventorySourcesUpdate()
                    response = view.post(mock_request)
                    assert response.data == expected
        serializer = serializer_class.return_value
        serializer.to_representation.return_value = {}

        view = InventoryInventorySourcesUpdate()
        response = view.post(mock_request)
        assert response.data == expected


class TestSurveySpecValidation:
@@ -155,35 +155,35 @@ def test_node_getter_and_setters():
class TestWorkflowJobCreate:
    def test_create_no_prompts(self, wfjt_node_no_prompts, workflow_job_unit, mocker):
        mock_create = mocker.MagicMock()
        with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
            wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
            mock_create.assert_called_once_with(
                all_parents_must_converge=False,
                extra_data={},
                survey_passwords={},
                char_prompts=wfjt_node_no_prompts.char_prompts,
                inventory=None,
                unified_job_template=wfjt_node_no_prompts.unified_job_template,
                workflow_job=workflow_job_unit,
                identifier=mocker.ANY,
                execution_environment=None,
            )
        mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create)
        wfjt_node_no_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
        mock_create.assert_called_once_with(
            all_parents_must_converge=False,
            extra_data={},
            survey_passwords={},
            char_prompts=wfjt_node_no_prompts.char_prompts,
            inventory=None,
            unified_job_template=wfjt_node_no_prompts.unified_job_template,
            workflow_job=workflow_job_unit,
            identifier=mocker.ANY,
            execution_environment=None,
        )

    def test_create_with_prompts(self, wfjt_node_with_prompts, workflow_job_unit, credential, mocker):
        mock_create = mocker.MagicMock()
        with mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create):
            wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
            mock_create.assert_called_once_with(
                all_parents_must_converge=False,
                extra_data={},
                survey_passwords={},
                char_prompts=wfjt_node_with_prompts.char_prompts,
                inventory=wfjt_node_with_prompts.inventory,
                unified_job_template=wfjt_node_with_prompts.unified_job_template,
                workflow_job=workflow_job_unit,
                identifier=mocker.ANY,
                execution_environment=None,
            )
        mocker.patch('awx.main.models.WorkflowJobNode.objects.create', mock_create)
        wfjt_node_with_prompts.create_workflow_job_node(workflow_job=workflow_job_unit)
        mock_create.assert_called_once_with(
            all_parents_must_converge=False,
            extra_data={},
            survey_passwords={},
            char_prompts=wfjt_node_with_prompts.char_prompts,
            inventory=wfjt_node_with_prompts.inventory,
            unified_job_template=wfjt_node_with_prompts.unified_job_template,
            workflow_job=workflow_job_unit,
            identifier=mocker.ANY,
            execution_environment=None,
        )


@pytest.mark.django_db
26 awx/main/tests/unit/notifications/test_awssns.py Normal file
@@ -0,0 +1,26 @@
from unittest import mock
from django.core.mail.message import EmailMessage

import awx.main.notifications.awssns_backend as awssns_backend


def test_send_messages():
    with mock.patch('awx.main.notifications.awssns_backend.AWSSNSBackend._sns_publish') as sns_publish_mock:
        aws_region = 'us-east-1'
        sns_topic = f"arn:aws:sns:{aws_region}:111111111111:topic-mock"
        backend = awssns_backend.AWSSNSBackend(aws_region=aws_region, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None)
        message = EmailMessage(
            'test subject',
            {'body': 'test body'},
            [],
            [
                sns_topic,
            ],
        )
        sent_messages = backend.send_messages(
            [
                message,
            ]
        )
        sns_publish_mock.assert_called_once_with(topic_arn=sns_topic, message=message.body)
        assert sent_messages == 1
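For orientation, the `_sns_publish` method mocked in the test above presumably wraps an SNS publish call. A minimal hypothetical sketch with boto3 might look like the following; this is an assumption for illustration, not the backend's actual implementation:

# Hypothetical sketch; the real AWSSNSBackend._sns_publish may differ.
import boto3

def _sns_publish(topic_arn, message, aws_region='us-east-1'):
    # boto3's standard SNS publish call: topic ARN plus message body
    client = boto3.client('sns', region_name=aws_region)
    client.publish(TopicArn=topic_arn, Message=message)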
@@ -137,10 +137,10 @@ def test_send_notifications_not_list():


def test_send_notifications_job_id(mocker):
    with mocker.patch('awx.main.models.UnifiedJob.objects.get'):
        system.send_notifications([], job_id=1)
        assert UnifiedJob.objects.get.called
        assert UnifiedJob.objects.get.called_with(id=1)
    mocker.patch('awx.main.models.UnifiedJob.objects.get')
    system.send_notifications([], job_id=1)
    assert UnifiedJob.objects.get.called
    assert UnifiedJob.objects.get.called_with(id=1)


@mock.patch('awx.main.models.UnifiedJob.objects.get')
@@ -7,15 +7,15 @@ def test_produce_supervisor_command(mocker):
    mock_process = mocker.MagicMock()
    mock_process.communicate = communicate_mock
    Popen_mock = mocker.MagicMock(return_value=mock_process)
    with mocker.patch.object(reload.subprocess, 'Popen', Popen_mock):
        reload.supervisor_service_command("restart")
        reload.subprocess.Popen.assert_called_once_with(
            [
                'supervisorctl',
                'restart',
                'tower-processes:*',
            ],
            stderr=-1,
            stdin=-1,
            stdout=-1,
        )
    mocker.patch.object(reload.subprocess, 'Popen', Popen_mock)
    reload.supervisor_service_command("restart")
    reload.subprocess.Popen.assert_called_once_with(
        [
            'supervisorctl',
            'restart',
            'tower-processes:*',
        ],
        stderr=-1,
        stdin=-1,
        stdout=-1,
    )
@@ -2,9 +2,11 @@
# All Rights Reserved.

# Python
import base64
import logging
import sys
import traceback
import os
from datetime import datetime

# Django
@@ -15,6 +17,15 @@ from django.utils.encoding import force_str
# AWX
from awx.main.exceptions import PostRunError

# OTEL
from opentelemetry._logs import set_logger_provider
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter as OTLPGrpcLogExporter
from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter as OTLPHttpLogExporter

from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
from opentelemetry.sdk.resources import Resource


class RSysLogHandler(logging.handlers.SysLogHandler):
    append_nul = False
@@ -133,3 +144,39 @@ if settings.COLOR_LOGS is True:
        pass
else:
    ColorHandler = logging.StreamHandler


class OTLPHandler(LoggingHandler):
    def __init__(self, endpoint=None, protocol='grpc', service_name=None, instance_id=None, auth=None, username=None, password=None):
        if not endpoint:
            raise ValueError("endpoint required")

        if auth == 'basic' and (username is None or password is None):
            raise ValueError("auth type basic requires username and password parameters")

        self.endpoint = endpoint
        self.service_name = service_name or (sys.argv[1] if len(sys.argv) > 1 else (sys.argv[0] or 'unknown_service'))
        self.instance_id = instance_id or os.uname().nodename

        logger_provider = LoggerProvider(
            resource=Resource.create(
                {
                    "service.name": self.service_name,
                    "service.instance.id": self.instance_id,
                }
            ),
        )
        set_logger_provider(logger_provider)

        headers = {}
        if auth == 'basic':
            secret = f'{username}:{password}'
            headers['Authorization'] = "Basic " + base64.b64encode(secret.encode()).decode()

        if protocol == 'grpc':
            otlp_exporter = OTLPGrpcLogExporter(endpoint=self.endpoint, insecure=True, headers=headers)
        elif protocol == 'http':
            otlp_exporter = OTLPHttpLogExporter(endpoint=self.endpoint, headers=headers)
        logger_provider.add_log_record_processor(BatchLogRecordProcessor(otlp_exporter))

        super().__init__(level=logging.NOTSET, logger_provider=logger_provider)
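A handler like this would typically be wired in through Django's LOGGING dict. A minimal sketch, assuming a dotted path and collector endpoint that are illustrative only (the settings diff further down only registers a NullHandler default under the 'otel' key):

# Hypothetical wiring; the module path and endpoint are assumptions, not from this diff.
LOGGING = {
    'version': 1,
    'handlers': {
        'otel': {
            'class': 'awx.main.utils.handlers.OTLPHandler',  # assumed dotted path
            'endpoint': 'http://otel-collector:4317',  # assumed OTLP gRPC endpoint
            'protocol': 'grpc',
        },
    },
    'loggers': {
        'awx': {'handlers': ['otel'], 'level': 'INFO'},
    },
}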
@@ -285,8 +285,6 @@ class WebSocketRelayManager(object):
            except asyncio.CancelledError:
                # Handle the case where the task was already cancelled by the time we got here.
                pass
            except Exception as e:
                logger.warning(f"Failed to cancel relay connection for {hostname}: {e}")

        del self.relay_connections[hostname]

@@ -297,8 +295,6 @@ class WebSocketRelayManager(object):
            self.stats_mgr.delete_remote_host_stats(hostname)
        except KeyError:
            pass
        except Exception as e:
            logger.warning(f"Failed to delete stats for {hostname}: {e}")

    async def run(self):
        event_loop = asyncio.get_running_loop()
@@ -306,7 +302,6 @@ class WebSocketRelayManager(object):
        self.stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
        self.stats_mgr.start()

        # Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
        database_conf = deepcopy(settings.DATABASES['default'])
        database_conf['OPTIONS'] = deepcopy(database_conf.get('OPTIONS', {}))

@@ -318,79 +313,54 @@ class WebSocketRelayManager(object):
        if 'PASSWORD' in database_conf:
            database_conf['OPTIONS']['password'] = database_conf.pop('PASSWORD')

        task = None
        async_conn = await psycopg.AsyncConnection.connect(
            dbname=database_conf['NAME'],
            host=database_conf['HOST'],
            user=database_conf['USER'],
            port=database_conf['PORT'],
            **database_conf.get("OPTIONS", {}),
        )

        # Managing the async_conn here so that we can close it if we need to restart the connection
        async_conn = None
        await async_conn.set_autocommit(True)
        on_ws_heartbeat_task = event_loop.create_task(self.on_ws_heartbeat(async_conn))

        # Establishes a websocket connection to /websocket/relay on all API servers
        try:
            while True:
                if not task or task.done():
                    try:
                        # Try to close the connection if it's open
                        if async_conn:
                            try:
                                await async_conn.close()
                            except Exception as e:
                                logger.warning(f"Failed to close connection to database for pg_notify: {e}")
        while True:
            if on_ws_heartbeat_task.done():
                raise Exception("on_ws_heartbeat_task has exited")

                        # and re-establish the connection
                        async_conn = await psycopg.AsyncConnection.connect(
                            dbname=database_conf['NAME'],
                            host=database_conf['HOST'],
                            user=database_conf['USER'],
                            port=database_conf['PORT'],
                            **database_conf.get("OPTIONS", {}),
                        )
                        await async_conn.set_autocommit(True)
            future_remote_hosts = self.known_hosts.keys()
            current_remote_hosts = self.relay_connections.keys()
            deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
            new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts)

                        # before creating the task that uses the connection
                        task = event_loop.create_task(self.on_ws_heartbeat(async_conn), name="on_ws_heartbeat")
                        logger.info("Creating `on_ws_heartbeat` task in event loop.")
            # This loop handles if we get an advertisement from a host we already know about but
            # the advertisement has a different IP than we are currently connected to.
            for hostname, address in self.known_hosts.items():
                if hostname not in self.relay_connections:
                    # We've picked up a new hostname that we don't know about yet.
                    continue

                    except Exception as e:
                        logger.warning(f"Failed to connect to database for pg_notify: {e}")
                if address != self.relay_connections[hostname].remote_host:
                    deleted_remote_hosts.add(hostname)
                    new_remote_hosts.add(hostname)

                future_remote_hosts = self.known_hosts.keys()
                current_remote_hosts = self.relay_connections.keys()
                deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
                new_remote_hosts = set(future_remote_hosts) - set(current_remote_hosts)
            # Delete any hosts with closed connections
            for hostname, relay_conn in self.relay_connections.items():
                if not relay_conn.connected:
                    deleted_remote_hosts.add(hostname)

                # This loop handles if we get an advertisement from a host we already know about but
                # the advertisement has a different IP than we are currently connected to.
                for hostname, address in self.known_hosts.items():
                    if hostname not in self.relay_connections:
                        # We've picked up a new hostname that we don't know about yet.
                        continue
            if deleted_remote_hosts:
                logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list")
                await asyncio.gather(*[self.cleanup_offline_host(h) for h in deleted_remote_hosts])

                    if address != self.relay_connections[hostname].remote_host:
                        deleted_remote_hosts.add(hostname)
                        new_remote_hosts.add(hostname)
            if new_remote_hosts:
                logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")

                # Delete any hosts with closed connections
                for hostname, relay_conn in self.relay_connections.items():
                    if not relay_conn.connected:
                        deleted_remote_hosts.add(hostname)
            for h in new_remote_hosts:
                stats = self.stats_mgr.new_remote_host_stats(h)
                relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])
                relay_connection.start()
                self.relay_connections[h] = relay_connection

                if deleted_remote_hosts:
                    logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list")
                    await asyncio.gather(*[self.cleanup_offline_host(h) for h in deleted_remote_hosts])

                if new_remote_hosts:
                    logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")

                for h in new_remote_hosts:
                    stats = self.stats_mgr.new_remote_host_stats(h)
                    relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])
                    relay_connection.start()
                    self.relay_connections[h] = relay_connection

                await asyncio.sleep(settings.BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS)
        finally:
            if async_conn:
                logger.info("Shutting down db connection for wsrelay.")
                try:
                    await async_conn.close()
                except Exception as e:
                    logger.info(f"Failed to close connection to database for pg_notify: {e}")
            await asyncio.sleep(settings.BROADCAST_WEBSOCKET_NEW_INSTANCE_POLL_RATE_SECONDS)
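Both sides of this hunk lean on the same psycopg 3 async primitives for the pg_notify consumer. A self-contained sketch of that connect/autocommit pattern, with placeholder connection parameters and channel name:

# Minimal sketch of the psycopg 3 async pattern used above; values are placeholders.
import asyncio
import psycopg

async def main():
    conn = await psycopg.AsyncConnection.connect(dbname='awx', host='localhost', user='awx', port=5432)
    await conn.set_autocommit(True)  # autocommit so LISTEN/NOTIFY events are delivered immediately
    try:
        await conn.execute("LISTEN some_channel")  # channel name is a placeholder
    finally:
        await conn.close()

asyncio.run(main())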
@@ -114,6 +114,7 @@ MEDIA_ROOT = os.path.join(BASE_DIR, 'public', 'media')
MEDIA_URL = '/media/'

LOGIN_URL = '/api/login/'
LOGOUT_ALLOWED_HOSTS = None

# Absolute filesystem path to the directory to host projects (with playbooks).
# This directory should not be web-accessible.
@@ -277,6 +278,9 @@ SESSION_COOKIE_SECURE = True
# Note: This setting may be overridden by database settings.
SESSION_COOKIE_AGE = 1800

# Option to change userLoggedIn cookie SameSite policy.
USER_COOKIE_SAMESITE = 'Lax'

# Name of the cookie that contains the session information.
# Note: Changing this value may require changes to any clients.
SESSION_COOKIE_NAME = 'awx_sessionid'
@@ -488,6 +492,7 @@ CELERYBEAT_SCHEDULE = {
    'cleanup_images': {'task': 'awx.main.tasks.system.cleanup_images_and_files', 'schedule': timedelta(hours=3)},
    'cleanup_host_metrics': {'task': 'awx.main.tasks.host_metrics.cleanup_host_metrics', 'schedule': timedelta(hours=3, minutes=30)},
    'host_metric_summary_monthly': {'task': 'awx.main.tasks.host_metrics.host_metric_summary_monthly', 'schedule': timedelta(hours=4)},
    'periodic_resource_sync': {'task': 'awx.main.tasks.system.periodic_resource_sync', 'schedule': timedelta(minutes=15)},
}

# Django Caching Configuration
@@ -652,6 +657,10 @@ AWX_ANSIBLE_CALLBACK_PLUGINS = ""
# Automatically remove nodes that have missed their heartbeats after some time
AWX_AUTO_DEPROVISION_INSTANCES = False

# If False, do not allow creation of resources that are shared with the platform ingress
# e.g. organizations, teams, and users
ALLOW_LOCAL_RESOURCE_MANAGEMENT = True

# Enable Pendo on the UI, possible values are 'off', 'anonymous', and 'detailed'
# Note: This setting may be overridden by database settings.
PENDO_TRACKING_STATE = "off"
@@ -774,6 +783,11 @@ INSIGHTS_EXCLUDE_EMPTY_GROUPS = False
TERRAFORM_INSTANCE_ID_VAR = 'id'
TERRAFORM_EXCLUDE_EMPTY_GROUPS = True

# ------------------------
# OpenShift Virtualization
# ------------------------
OPENSHIFT_VIRTUALIZATION_EXCLUDE_EMPTY_GROUPS = True

# ---------------------
# ----- Custom -----
# ---------------------
@@ -876,6 +890,7 @@ LOGGING = {
            'address': '/var/run/awx-rsyslog/rsyslog.sock',
            'filters': ['external_log_enabled', 'dynamic_level_filter', 'guid'],
        },
        'otel': {'class': 'logging.NullHandler'},
    },
    'loggers': {
        'django': {'handlers': ['console']},
@@ -1145,13 +1160,8 @@ ANSIBLE_BASE_CUSTOM_VIEW_PARENT = 'awx.api.generics.APIView'

# Settings for the ansible_base RBAC system

# Only used internally, names of the managed RoleDefinitions to create
ANSIBLE_BASE_ROLE_PRECREATE = {
    'object_admin': '{cls.__name__} Admin',
    'org_admin': 'Organization Admin',
    'org_children': 'Organization {cls.__name__} Admin',
    'special': '{cls.__name__} {action}',
}
# This has been moved to data migration code
ANSIBLE_BASE_ROLE_PRECREATE = {}

# Name for auto-created roles that give users permissions to what they create
ANSIBLE_BASE_ROLE_CREATOR_NAME = '{cls.__name__} Creator'
@@ -1162,9 +1172,6 @@ ANSIBLE_BASE_ROLE_SYSTEM_ACTIVATED = True
# Permissions a user will get when creating a new item
ANSIBLE_BASE_CREATOR_DEFAULTS = ['change', 'delete', 'execute', 'use', 'adhoc', 'approve', 'update', 'view']

# This is a stopgap, will delete after resource registry integration
ANSIBLE_BASE_SERVICE_PREFIX = "awx"

# Temporary, for old roles API compatibility, save child permissions at organization level
ANSIBLE_BASE_CACHE_PARENT_PERMISSIONS = True

@@ -1178,6 +1185,3 @@ ANSIBLE_BASE_ALLOW_SINGLETON_ROLES_API = False # Do not allow creating user-def

# system username for django-ansible-base
SYSTEM_USERNAME = None

# Use AWX base view, to give 401 on unauthenticated requests
ANSIBLE_BASE_CUSTOM_VIEW_PARENT = 'awx.api.generics.APIView'
3004 awx/sso/conf.py
File diff suppressed because it is too large
@@ -7,18 +7,18 @@ from django.core.cache import cache

def test_ldap_default_settings(mocker):
    from_db = mocker.Mock(**{'order_by.return_value': []})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db):
        settings = LDAPSettings()
        assert settings.ORGANIZATION_MAP == {}
        assert settings.TEAM_MAP == {}
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db)
    settings = LDAPSettings()
    assert settings.ORGANIZATION_MAP == {}
    assert settings.TEAM_MAP == {}


def test_ldap_default_network_timeout(mocker):
    cache.clear()  # clearing cache avoids picking up stray default for OPT_REFERRALS
    from_db = mocker.Mock(**{'order_by.return_value': []})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db):
        settings = LDAPSettings()
        assert settings.CONNECTION_OPTIONS[ldap.OPT_NETWORK_TIMEOUT] == 30
    mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db)
    settings = LDAPSettings()
    assert settings.CONNECTION_OPTIONS[ldap.OPT_NETWORK_TIMEOUT] == 30


def test_ldap_filter_validator():

@@ -38,7 +38,9 @@ class CompleteView(BaseRedirectView):
        response = super(CompleteView, self).dispatch(request, *args, **kwargs)
        if self.request.user and self.request.user.is_authenticated:
            logger.info(smart_str(u"User {} logged in".format(self.request.user.username)))
            response.set_cookie('userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
            response.set_cookie(
                'userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False), samesite=getattr(settings, 'USER_COOKIE_SAMESITE', 'Lax')
            )
            response.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
        return response

@@ -62,7 +62,7 @@ function CredentialLookup({
        ? { credential_type: credentialTypeId }
        : {};
      const typeKindParams = credentialTypeKind
        ? { credential_type__kind: credentialTypeKind }
        ? { credential_type__kind__in: credentialTypeKind }
        : {};
      const typeNamespaceParams = credentialTypeNamespace
        ? { credential_type__namespace: credentialTypeNamespace }
@@ -125,7 +125,7 @@ function CredentialLookup({
        ? { credential_type: credentialTypeId }
        : {};
      const typeKindParams = credentialTypeKind
        ? { credential_type__kind: credentialTypeKind }
        ? { credential_type__kind__in: credentialTypeKind }
        : {};
      const typeNamespaceParams = credentialTypeNamespace
        ? { credential_type__namespace: credentialTypeNamespace }

@@ -190,6 +190,7 @@ function NotificationList({
      name: t`Notification type`,
      key: 'or__notification_type',
      options: [
        ['awssns', t`AWS SNS`],
        ['email', t`Email`],
        ['grafana', t`Grafana`],
        ['hipchat', t`Hipchat`],

@@ -12,7 +12,7 @@ const Inner = styled.div`
  border-radius: 2px;
  color: white;
  left: 10px;
  max-width: 300px;
  max-width: 500px;
  padding: 5px 10px;
  position: absolute;
  top: 10px;

@@ -12,6 +12,7 @@ const GridDL = styled.dl`
  column-gap: 15px;
  display: grid;
  grid-template-columns: max-content;
  overflow-wrap: anywhere;
  row-gap: 0px;
  dt {
    grid-column-start: 1;
@@ -56,6 +56,10 @@ describe('<InventorySourceAdd />', () => {
          ['satellite6', 'Red Hat Satellite 6'],
          ['openstack', 'OpenStack'],
          ['rhv', 'Red Hat Virtualization'],
          [
            'openshift_virtualization',
            'Red Hat OpenShift Virtualization',
          ],
          ['controller', 'Red Hat Ansible Automation Platform'],
        ],
      },

@@ -22,7 +22,9 @@ const ansibleDocUrls = {
  constructed:
    'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
  terraform:
    'https://github.com/ansible-collections/cloud.terraform/blob/stable-statefile-inventory/plugins/inventory/terraform_state.py',
    'https://github.com/ansible-collections/cloud.terraform/blob/main/docs/cloud.terraform.terraform_state_inventory.rst',
  openshift_virtualization:
    'https://kubevirt.io/kubevirt.core/latest/plugins/kubevirt.html',
};
const getInventoryHelpTextStrings = () => ({
@@ -121,7 +123,7 @@ const getInventoryHelpTextStrings = () => ({
        <br />
        {value && (
          <div>
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            and also go to `}
            <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
            {t`and click on Update Revision on Launch.`}
@@ -140,7 +142,7 @@ const getInventoryHelpTextStrings = () => ({
        <br />
        {value && (
          <div>
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            and also go to `}
            <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
            {t`and click on Update Revision on Launch`}
@@ -26,6 +26,7 @@ import {
  TerraformSubForm,
  VMwareSubForm,
  VirtualizationSubForm,
  OpenShiftVirtualizationSubForm,
} from './InventorySourceSubForms';

const buildSourceChoiceOptions = (options) => {
@@ -231,6 +232,15 @@ const InventorySourceFormFields = ({
                sourceOptions={sourceOptions}
              />
            ),
            openshift_virtualization: (
              <OpenShiftVirtualizationSubForm
                autoPopulateCredential={
                  !source?.id ||
                  source?.source !== 'openshift_virtualization'
                }
                sourceOptions={sourceOptions}
              />
            ),
          }[sourceField.value]
        }
      </FormColumnLayout>
@@ -0,0 +1,64 @@
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';

import { t } from '@lingui/macro';
import { useConfig } from 'contexts/Config';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
  OptionsField,
  VerbosityField,
  EnabledVarField,
  EnabledValueField,
  HostFilterField,
  SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';

const OpenShiftVirtualizationSubForm = ({ autoPopulateCredential }) => {
  const helpText = getHelpText();
  const { setFieldValue, setFieldTouched } = useFormikContext();
  const [credentialField, credentialMeta, credentialHelpers] =
    useField('credential');
  const config = useConfig();

  const handleCredentialUpdate = useCallback(
    (value) => {
      setFieldValue('credential', value);
      setFieldTouched('credential', true, false);
    },
    [setFieldValue, setFieldTouched]
  );

  const docsBaseUrl = getDocsBaseUrl(config);
  return (
    <>
      <CredentialLookup
        credentialTypeNamespace="kubernetes_bearer_token"
        label={t`Credential`}
        helperTextInvalid={credentialMeta.error}
        isValid={!credentialMeta.touched || !credentialMeta.error}
        onBlur={() => credentialHelpers.setTouched()}
        onChange={handleCredentialUpdate}
        value={credentialField.value}
        required
        autoPopulate={autoPopulateCredential}
        validate={required(t`Select a value for this field`)}
      />
      <VerbosityField />
      <HostFilterField />
      <EnabledVarField />
      <EnabledValueField />
      <OptionsField />
      <SourceVarsField
        popoverContent={helpText.sourceVars(
          docsBaseUrl,
          'openshift_virtualization'
        )}
      />
    </>
  );
};

export default OpenShiftVirtualizationSubForm;
@@ -0,0 +1,65 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import VirtualizationSubForm from './VirtualizationSubForm';

jest.mock('../../../../api');

const initialValues = {
  credential: null,
  overwrite: false,
  overwrite_vars: false,
  source_path: '',
  source_project: null,
  source_script: null,
  source_vars: '---\n',
  update_cache_timeout: 0,
  update_on_launch: true,
  verbosity: 1,
};

describe('<VirtualizationSubForm />', () => {
  let wrapper;

  beforeEach(async () => {
    CredentialsAPI.read.mockResolvedValue({
      data: { count: 0, results: [] },
    });

    await act(async () => {
      wrapper = mountWithContexts(
        <Formik initialValues={initialValues}>
          <VirtualizationSubForm />
        </Formik>
      );
    });
  });

  afterAll(() => {
    jest.clearAllMocks();
  });

  test('should render subform fields', () => {
    expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
    expect(
      wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
    ).toHaveLength(1);
    expect(
      wrapper.find('VariablesField[label="Source variables"]')
    ).toHaveLength(1);
  });

  test('should make expected api calls', () => {
    expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
    expect(CredentialsAPI.read).toHaveBeenCalledWith({
      credential_type__namespace: 'rhv',
      order_by: 'name',
      page: 1,
      page_size: 5,
    });
  });
});
@@ -87,7 +87,7 @@ const SCMSubForm = ({ autoPopulateProject }) => {
        />
      )}
      <CredentialLookup
        credentialTypeKind="cloud"
        credentialTypeKind="cloud,kubernetes"
        label={t`Credential`}
        value={credentialField.value}
        onChange={handleCredentialUpdate}

@@ -9,3 +9,4 @@ export { default as ControllerSubForm } from './ControllerSubForm';
export { default as TerraformSubForm } from './TerraformSubForm';
export { default as VMwareSubForm } from './VMwareSubForm';
export { default as VirtualizationSubForm } from './VirtualizationSubForm';
export { default as OpenShiftVirtualizationSubForm } from './OpenShiftVirtualizationSubForm';
@@ -138,6 +138,25 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
          }
          dataCy="nt-detail-type"
        />
        {template.notification_type === 'awssns' && (
          <>
            <Detail
              label={t`AWS Region`}
              value={configuration.aws_region}
              dataCy="nt-detail-aws-region"
            />
            <Detail
              label={t`Access Key ID`}
              value={configuration.aws_access_key_id}
              dataCy="nt-detail-aws-access-key-id"
            />
            <Detail
              label={t`SNS Topic ARN`}
              value={configuration.sns_topic_arn}
              dataCy="nt-detail-sns-topic-arn"
            />
          </>
        )}
        {template.notification_type === 'email' && (
          <>
            <Detail
@@ -455,8 +474,8 @@ function NotificationTemplateDetail({ template, defaultMessages }) {
}

function CustomMessageDetails({ messages, defaults, type }) {
  const showMessages = type !== 'webhook';
  const showBodies = ['email', 'pagerduty', 'webhook'].includes(type);
  const showMessages = !['awssns', 'webhook'].includes(type);
  const showBodies = ['email', 'pagerduty', 'webhook', 'awssns'].includes(type);

  return (
    <>
@@ -120,7 +120,7 @@ function NotificationTemplatesList() {
      toolbarSearchColumns={[
        {
          name: t`Name`,
          key: 'name',
          key: 'name__icontains',
          isDefault: true,
        },
        {
@@ -131,6 +131,7 @@ function NotificationTemplatesList() {
          name: t`Notification type`,
          key: 'or__notification_type',
          options: [
            ['awssns', t`AWS SNS`],
            ['email', t`Email`],
            ['grafana', t`Grafana`],
            ['hipchat', t`Hipchat`],

@@ -1,5 +1,6 @@
/* eslint-disable-next-line import/prefer-default-export */
export const NOTIFICATION_TYPES = {
  awssns: 'AWS SNS',
  email: 'Email',
  grafana: 'Grafana',
  irc: 'IRC',

@@ -11,8 +11,8 @@ import getDocsBaseUrl from 'util/getDocsBaseUrl';

function CustomMessagesSubForm({ defaultMessages, type }) {
  const [useCustomField, , useCustomHelpers] = useField('useCustomMessages');
  const showMessages = type !== 'webhook';
  const showBodies = ['email', 'pagerduty', 'webhook'].includes(type);
  const showMessages = !['webhook', 'awssns'].includes(type);
  const showBodies = ['email', 'pagerduty', 'webhook', 'awssns'].includes(type);

  const { setFieldValue } = useFormikContext();
  const config = useConfig();

@@ -78,6 +78,7 @@ function NotificationTemplateFormFields({ defaultMessages, template }) {
      label: t`Choose a Notification Type`,
      isDisabled: true,
    },
    { value: 'awssns', key: 'awssns', label: t`AWS SNS` },
    { value: 'email', key: 'email', label: t`E-mail` },
    { value: 'grafana', key: 'grafana', label: 'Grafana' },
    { value: 'irc', key: 'irc', label: 'IRC' },
@@ -29,6 +29,7 @@ import Popover from '../../../components/Popover/Popover';
import getHelpText from './Notifications.helptext';

const TypeFields = {
  awssns: AWSSNSFields,
  email: EmailFields,
  grafana: GrafanaFields,
  irc: IRCFields,
@@ -58,6 +59,44 @@ TypeInputsSubForm.propTypes = {

export default TypeInputsSubForm;

function AWSSNSFields() {
  return (
    <>
      <FormField
        id="awssns-aws-region"
        label={t`AWS Region`}
        name="notification_configuration.aws_region"
        type="text"
        isRequired
      />
      <FormField
        id="awssns-aws-access-key-id"
        label={t`Access Key ID`}
        name="notification_configuration.aws_access_key_id"
        type="text"
      />
      <PasswordField
        id="awssns-aws-secret-access-key"
        label={t`Secret Access Key`}
        name="notification_configuration.aws_secret_access_key"
      />
      <PasswordField
        id="awssns-aws-session-token"
        label={t`Session Token`}
        name="notification_configuration.aws_session_token"
      />
      <FormField
        id="awssns-sns-topic-arn"
        label={t`SNS Topic ARN`}
        name="notification_configuration.sns_topic_arn"
        type="text"
        validate={required(null)}
        isRequired
      />
    </>
  );
}

function EmailFields() {
  const helpText = getHelpText();
  return (
@@ -203,6 +203,39 @@
      }
    }
  },
  "awssns": {
    "started": {
      "body": "{{ job_metadata }}"
    },
    "success": {
      "body": "{{ job_metadata }}"
    },
    "error": {
      "body": "{{ job_metadata }}"
    },
    "workflow_approval": {
      "running": {
        "body": {
          "body": "The approval node \"{{ approval_node_name }}\" needs review. This node can be viewed at: {{ workflow_url }}"
        }
      },
      "approved": {
        "body": {
          "body": "The approval node \"{{ approval_node_name }}\" was approved. {{ workflow_url }}"
        }
      },
      "timed_out": {
        "body": {
          "body": "The approval node \"{{ approval_node_name }}\" has timed out. {{ workflow_url }}"
        }
      },
      "denied": {
        "body": {
          "body": "The approval node \"{{ approval_node_name }}\" was denied. {{ workflow_url }}"
        }
      }
    }
  },
  "mattermost": {
    "started": {
      "message": "{{ job_friendly_name }} #{{ job.id }} '{{ job.name }}' {{ job.status }}: {{ url }}",
@@ -1,4 +1,11 @@
const typeFieldNames = {
  awssns: [
    'aws_region',
    'aws_access_key_id',
    'aws_secret_access_key',
    'aws_session_token',
    'sns_topic_arn',
  ],
  email: [
    'username',
    'password',
@@ -78,12 +78,14 @@ function MiscAuthenticationEdit() {
        default: OAUTH2_PROVIDER_OPTIONS.default.ACCESS_TOKEN_EXPIRE_SECONDS,
        type: OAUTH2_PROVIDER_OPTIONS.child.type,
        label: t`Access Token Expiration`,
        help_text: t`Access Token Expiration in seconds`,
      },
      REFRESH_TOKEN_EXPIRE_SECONDS: {
        ...OAUTH2_PROVIDER_OPTIONS,
        default: OAUTH2_PROVIDER_OPTIONS.default.REFRESH_TOKEN_EXPIRE_SECONDS,
        type: OAUTH2_PROVIDER_OPTIONS.child.type,
        label: t`Refresh Token Expiration`,
        help_text: t`Refresh Token Expiration in seconds`,
      },
      AUTHORIZATION_CODE_EXPIRE_SECONDS: {
        ...OAUTH2_PROVIDER_OPTIONS,
@@ -91,6 +93,7 @@ function MiscAuthenticationEdit() {
          OAUTH2_PROVIDER_OPTIONS.default.AUTHORIZATION_CODE_EXPIRE_SECONDS,
        type: OAUTH2_PROVIDER_OPTIONS.child.type,
        label: t`Authorization Code Expiration`,
        help_text: t`Authorization Code Expiration in seconds`,
      },
    };

@@ -374,6 +374,7 @@ export const CredentialType = shape({
});

export const NotificationType = oneOf([
  'awssns',
  'email',
  'grafana',
  'irc',
@@ -17,7 +17,7 @@ import time
import re
from json import loads, dumps
from os.path import isfile, expanduser, split, join, exists, isdir
from os import access, R_OK, getcwd, environ
from os import access, R_OK, getcwd, environ, getenv


try:
@@ -107,7 +107,7 @@ class ControllerModule(AnsibleModule):
        # Perform magic depending on whether controller_oauthtoken is a string or a dict
        if self.params.get('controller_oauthtoken'):
            token_param = self.params.get('controller_oauthtoken')
            if type(token_param) is dict:
            if isinstance(token_param, dict):
                if 'token' in token_param:
                    self.oauth_token = self.params.get('controller_oauthtoken')['token']
                else:
@@ -148,9 +148,10 @@ class ControllerModule(AnsibleModule):
        # Make sure we start with /api/vX
        if not endpoint.startswith("/"):
            endpoint = "/{0}".format(endpoint)
        prefix = self.url_prefix.rstrip("/")
        if not endpoint.startswith(prefix + "/api/"):
            endpoint = prefix + "/api/v2{0}".format(endpoint)
        hostname_prefix = self.url_prefix.rstrip("/")
        api_path = self.api_path()
        if not endpoint.startswith(hostname_prefix + api_path):
            endpoint = hostname_prefix + f"{api_path}v2{endpoint}"
        if not endpoint.endswith('/') and '?' not in endpoint:
            endpoint = "{0}/".format(endpoint)
@@ -215,7 +216,7 @@ class ControllerModule(AnsibleModule):
        try:
            config_data = yaml.load(config_string, Loader=yaml.SafeLoader)
            # If this is an actual ini file, yaml will return the whole thing as a string instead of a dict
            if type(config_data) is not dict:
            if not isinstance(config_data, dict):
                raise AssertionError("The yaml config file is not properly formatted as a dict.")
            try_config_parsing = False

@@ -257,7 +258,7 @@ class ControllerModule(AnsibleModule):
            if honorred_setting in config_data:
                # Verify SSL must be a boolean
                if honorred_setting == 'verify_ssl':
                    if type(config_data[honorred_setting]) is str:
                    if isinstance(config_data[honorred_setting], str):
                        setattr(self, honorred_setting, strtobool(config_data[honorred_setting]))
                    else:
                        setattr(self, honorred_setting, bool(config_data[honorred_setting]))
@@ -603,6 +604,14 @@ class ControllerAPIModule(ControllerModule):
            status_code = response.status
        return {'status_code': status_code, 'json': response_json}

    def api_path(self):
        default_api_path = "/api/"
        if self._COLLECTION_TYPE != "awx":
            default_api_path = "/api/controller/"
        prefix = getenv('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', default_api_path)
        return prefix

    def authenticate(self, **kwargs):
        if self.username and self.password:
            # Attempt to get a token from /api/v2/tokens/ by giving it our username/password combo
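Endpoint resolution with the new api_path() helper above can be reasoned about like this (all paths follow directly from the code shown; nothing here is invented beyond the worked values):

# Behavior of api_path() as defined above, per collection type:
#   _COLLECTION_TYPE == "awx"           -> "/api/"
#   any other collection type           -> "/api/controller/"
# Either default is overridden by the CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX env var.
# So an endpoint built as url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/'
# resolves, for the awx collection with an empty url_prefix, to '/api/v2/tokens/'.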
@@ -613,7 +622,7 @@ class ControllerAPIModule(ControllerModule):
                "scope": "write",
            }
            # Preserve URL prefix
            endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/'
            endpoint = self.url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/'
            # Post to the tokens endpoint with basic auth to try and get a token
            api_token_url = (self.url._replace(path=endpoint)).geturl()
@@ -1002,7 +1011,7 @@ class ControllerAPIModule(ControllerModule):
        if self.authenticated and self.oauth_token_id:
            # Attempt to delete our current token from /api/v2/tokens/
            # Post to the tokens endpoint with basic auth to try and get a token
            endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/{0}/'.format(self.oauth_token_id)
            endpoint = self.url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/{self.oauth_token_id}/'
            api_token_url = (self.url._replace(path=endpoint, query=None)).geturl()  # in error cases, fail_json exits before exception handling

            try:
@@ -1038,7 +1047,10 @@ class ControllerAPIModule(ControllerModule):
        # Grab our start time to compare against for the timeout
        start = time.time()
        result = self.get_endpoint(url)
        while not result['json']['finished']:
        wait_on_field = 'event_processing_finished'
        if wait_on_field not in result['json']:
            wait_on_field = 'finished'
        while not result['json'][wait_on_field]:
            # If we are past our time out fail with a message
            if timeout and timeout < time.time() - start:
                # Account for Legacy messages
@@ -163,7 +163,7 @@ def main():
    for arg in ['job_type', 'limit', 'forks', 'verbosity', 'extra_vars', 'become_enabled', 'diff_mode']:
        if module.params.get(arg):
            # extra_vars can receive a dict or a string; if a dict, convert it to a string
            if arg == 'extra_vars' and type(module.params.get(arg)) is not str:
            if arg == 'extra_vars' and not isinstance(module.params.get(arg), str):
                post_data[arg] = json.dumps(module.params.get(arg))
            else:
                post_data[arg] = module.params.get(arg)
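The `type(...) is` to `isinstance(...)` changes running through this series follow the standard Python idiom; the practical difference is subclass handling, as this small self-contained illustration shows:

# Why isinstance() is preferred over a type() identity check:
class MyStr(str):
    pass

s = MyStr('x')
assert isinstance(s, str)   # True: subclasses of str still count
assert type(s) is not str   # a type() identity check misses subclasses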
@@ -121,6 +121,7 @@ def main():
    client_type = module.params.get('client_type')
    organization = module.params.get('organization')
    redirect_uris = module.params.get('redirect_uris')
    skip_authorization = module.params.get('skip_authorization')
    state = module.params.get('state')

    # Attempt to look up the related items the user specified (these will fail the module if not found)
@@ -146,6 +147,8 @@ def main():
        application_fields['description'] = description
    if redirect_uris is not None:
        application_fields['redirect_uris'] = ' '.join(redirect_uris)
    if skip_authorization is not None:
        application_fields['skip_authorization'] = skip_authorization

    response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False)
    if 'client_id' in response:
@@ -56,7 +56,7 @@ import logging

# In this module we don't use EXPORTABLE_RESOURCES, we just want to validate that our installed awxkit has import/export
try:
    from awxkit.api.pages.api import EXPORTABLE_RESOURCES  # noqa
    from awxkit.api.pages.api import EXPORTABLE_RESOURCES  # noqa: F401; pylint: disable=unused-import

    HAS_EXPORTABLE_RESOURCES = True
except ImportError:
@@ -42,7 +42,8 @@ options:
    source:
      description:
        - The source to use for this group.
      choices: [ "scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights" ]
      choices: [ "scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights", "terraform",
                 "openshift_virtualization" ]
      type: str
    source_path:
      description:
@@ -170,7 +171,22 @@ def main():
        #
        # How do we handle manual and file? The controller does not seem to be able to activate them
        #
        source=dict(choices=["scm", "ec2", "gce", "azure_rm", "vmware", "satellite6", "openstack", "rhv", "controller", "insights"]),
        source=dict(
            choices=[
                "scm",
                "ec2",
                "gce",
                "azure_rm",
                "vmware",
                "satellite6",
                "openstack",
                "rhv",
                "controller",
                "insights",
                "terraform",
                "openshift_virtualization",
            ]
        ),
        source_path=dict(),
        source_vars=dict(type='dict'),
        enabled_var=dict(),
@@ -50,6 +50,7 @@ options:
    description:
      - The type of notification to be sent.
    choices:
      - 'awssns'
      - 'email'
      - 'grafana'
      - 'irc'
@@ -219,7 +220,7 @@ def main():
        copy_from=dict(),
        description=dict(),
        organization=dict(),
        notification_type=dict(choices=['email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
        notification_type=dict(choices=['awssns', 'email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
        notification_configuration=dict(type='dict'),
        messages=dict(type='dict'),
        state=dict(choices=['present', 'absent', 'exists'], default='present'),
@@ -19,7 +19,7 @@ from ansible.module_utils.six import raise_from

from ansible_base.rbac.models import RoleDefinition, DABPermission
from awx.main.tests.functional.conftest import _request
from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh  # noqa: F401; pylint: disable=unused-variable
from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh  # noqa: F401; pylint: disable=unused-import
from awx.main.models import (
    Organization,
    Project,
1 awx_collection/tests/sanity/ignore-2.17.txt Normal file
@@ -0,0 +1 @@
plugins/modules/export.py validate-modules:nonexistent-parameter-documented # needs awxkit to construct argspec
Some files were not shown because too many files have changed in this diff.