Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 03:54:44 -03:30)

Compare commits: 12640-Refa… → 21.8.0 (100 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 1bedf32baf |  |
|  | c5cf39abb7 |  |
|  | 6b315f39de |  |
|  | 529a936d0a |  |
|  | e40824bded |  |
|  | ed318ea784 |  |
|  | d2b69e05f6 |  |
|  | b57ae592ed |  |
|  | e22f887765 |  |
|  | fc838ba44b |  |
|  | b19aa4a88d |  |
|  | eba24db74c |  |
|  | 153a197fad |  |
|  | 8f4c329c2a |  |
|  | 368eb46f5b |  |
|  | d6fea77082 |  |
|  | aaf6f5f17e |  |
|  | 3303f7bfcf |  |
|  | 95dba81a9d |  |
|  | 4b308d313a |  |
|  | d80db763bc |  |
|  | 41fd6ea37f |  |
|  | 4808a0053f |  |
|  | de41601f27 |  |
|  | ddd09461fb |  |
|  | 6d192927ae |  |
|  | e655e1dbc2 |  |
|  | e41f20320a |  |
|  | 192f45bbd0 |  |
|  | e013d25e2d |  |
|  | 8a6ad47ca5 |  |
|  | cba780a8f8 |  |
|  | 3fc67dc76c |  |
|  | 6f85aef5fe |  |
|  | 4d9b8400da |  |
|  | eeb9d61488 |  |
|  | 234ce529fc |  |
|  | 4f36943b47 |  |
|  | 25737ba7c6 |  |
|  | 7127d18072 |  |
|  | e5c834383c |  |
|  | b9c9800210 |  |
|  | c94dc08cf3 |  |
|  | a0594c8948 |  |
|  | ab5ea46006 |  |
|  | 6b471e468c |  |
|  | 50614b961e |  |
|  | a2be320605 |  |
|  | 8a959e9586 |  |
|  | 1db189c7ee |  |
|  | 39c2fcd8c2 |  |
|  | da857ea334 |  |
|  | d50c97ae22 |  |
|  | 0f150aa3b3 |  |
|  | cdb51a75b8 |  |
|  | 22b6ae6903 |  |
|  | 871175f97f |  |
|  | e6497be200 |  |
|  | 3b9333be9f |  |
|  | 04b814cfd8 |  |
|  | bb2e5cba0a |  |
|  | 42a4e9f10f |  |
|  | 882d2fdbe8 |  |
|  | 0d69d40859 |  |
|  | 2e38bbcbcd |  |
|  | 6f741b909a |  |
|  | bbb00e0674 |  |
|  | 560b952dd6 |  |
|  | 62c773e912 |  |
|  | fd38c926b2 |  |
|  | 7a8874b947 |  |
|  | 150c55c72a |  |
|  | 417ac3b88c |  |
|  | 9e0d1a678c |  |
|  | 1a766c09e7 |  |
|  | 7849c0fb1e |  |
|  | 35a7e43f22 |  |
|  | 47a6a73fc5 |  |
|  | 805091cfc1 |  |
|  | 8d05e339ae |  |
|  | 8472e3a26d |  |
|  | 174121cdbe |  |
|  | 385a2eabce |  |
|  | a64467c5a6 |  |
|  | 58772d79c7 |  |
|  | 235ed2f0d0 |  |
|  | 03eaeac459 |  |
|  | a4fba37222 |  |
|  | 3a09522d3e |  |
|  | b5db710c8b |  |
|  | b964905c80 |  |
|  | 37717ce3d5 |  |
|  | e7c75f3510 |  |
|  | d3eb2c1975 |  |
|  | 5551874352 |  |
|  | 80a0842df1 |  |
|  | 2dd2931ab2 |  |
|  | e83a4d7234 |  |
|  | 8e2003a36b |  |
|  | 4f52343cd9 |  |
18 .github/workflows/pr_body_check.yml (vendored)
@@ -13,21 +13,13 @@ jobs:
      packages: write
      contents: read
    steps:
      - name: Write PR body to a file
        run: |
          cat >> pr.body << __SOME_RANDOM_PR_EOF__
          ${{ github.event.pull_request.body }}
          __SOME_RANDOM_PR_EOF__

      - name: Display the received body for troubleshooting
        run: cat pr.body

      # We want to write these out individually just in case the options were joined on a single line
      - name: Check for each of the lines
        env:
          PR_BODY: ${{ github.event.pull_request.body }}
        run: |
          grep "Bug, Docs Fix or other nominal change" pr.body > Z
          grep "New or Enhanced Feature" pr.body > Y
          grep "Breaking Change" pr.body > X
          echo $PR_BODY | grep "Bug, Docs Fix or other nominal change" > Z
          echo $PR_BODY | grep "New or Enhanced Feature" > Y
          echo $PR_BODY | grep "Breaking Change" > X
          exit 0
        # We exit 0 and set the shell to prevent the returns from the greps from failing this step
        # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference
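For readers tracing the check's behavior, here is a minimal Python sketch of the same idea: match each changelog phrase against the PR body regardless of line layout. The phrases come from the workflow above; the function name and the whitespace-flattening step are illustrative only.

```python
# Sketch of the workflow's check, not AWX code: look for each changelog
# phrase anywhere in the PR body, mirroring `echo $PR_BODY | grep ...`.
PHRASES = (
    "Bug, Docs Fix or other nominal change",
    "New or Enhanced Feature",
    "Breaking Change",
)


def classify_pr_body(body: str) -> dict:
    # Collapsing whitespace mirrors how `echo $PR_BODY` joins the body into
    # one line before grepping, so the body's line layout does not matter.
    flat = " ".join(body.split())
    return {phrase: phrase in flat for phrase in PHRASES}


if __name__ == "__main__":
    body = "Type of change:\nBug, Docs Fix or other nominal change"
    print(classify_pr_body(body))
```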
24 Makefile
@@ -85,6 +85,7 @@ clean: clean-ui clean-api clean-awxkit clean-dist

clean-api:
	rm -rf build $(NAME)-$(VERSION) *.egg-info
	rm -rf .tox
	find . -type f -regex ".*\.py[co]$$" -delete
	find . -type d -name "__pycache__" -delete
	rm -f awx/awx_test.sqlite3*

@@ -181,7 +182,7 @@ collectstatic:
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi; \
	mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1

DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*

@@ -377,6 +378,8 @@ clean-ui:
	rm -rf awx/ui/build
	rm -rf awx/ui/src/locales/_build
	rm -rf $(UI_BUILD_FLAG_FILE)
	# the collectstatic command doesn't like it if this dir doesn't exist.
	mkdir -p awx/ui/build/static

awx/ui/node_modules:
	NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci

@@ -386,16 +389,14 @@ $(UI_BUILD_FLAG_FILE):
	$(PYTHON) tools/scripts/compilemessages.py
	$(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
	$(NPM_BIN) --prefix awx/ui --loglevel warn run build
	mkdir -p awx/public/static/css
	mkdir -p awx/public/static/js
	mkdir -p awx/public/static/media
	cp -r awx/ui/build/static/css/* awx/public/static/css
	cp -r awx/ui/build/static/js/* awx/public/static/js
	cp -r awx/ui/build/static/media/* awx/public/static/media
	mkdir -p /var/lib/awx/public/static/css
	mkdir -p /var/lib/awx/public/static/js
	mkdir -p /var/lib/awx/public/static/media
	cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css
	cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js
	cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media
	touch $@

ui-release: $(UI_BUILD_FLAG_FILE)

ui-devel: awx/ui/node_modules

@@ -453,6 +454,7 @@ COMPOSE_OPTS ?=
CONTROL_PLANE_NODE_COUNT ?= 1
EXECUTION_NODE_COUNT ?= 2
MINIKUBE_CONTAINER_GROUP ?= false
MINIKUBE_SETUP ?= false # if false, run minikube separately
EXTRA_SOURCES_ANSIBLE_OPTS ?=

ifneq ($(ADMIN_PASSWORD),)

@@ -461,7 +463,7 @@ endif

docker-compose-sources: .git/hooks/pre-commit
	@if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
		ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
		ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
	fi;

	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \

@@ -635,4 +637,4 @@ help/generate:
	} \
	} \
	{ lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
	@printf "\n"
	@printf "\n"
@@ -6,7 +6,6 @@ import inspect
import logging
import time
import uuid
import urllib.parse

# Django
from django.conf import settings

@@ -14,7 +13,7 @@ from django.contrib.auth import views as auth_views
from django.contrib.contenttypes.models import ContentType
from django.core.cache import cache
from django.core.exceptions import FieldDoesNotExist
from django.db import connection
from django.db import connection, transaction
from django.db.models.fields.related import OneToOneRel
from django.http import QueryDict
from django.shortcuts import get_object_or_404

@@ -30,7 +29,7 @@ from rest_framework.response import Response
from rest_framework import status
from rest_framework import views
from rest_framework.permissions import AllowAny
from rest_framework.renderers import StaticHTMLRenderer, JSONRenderer
from rest_framework.renderers import StaticHTMLRenderer
from rest_framework.negotiation import DefaultContentNegotiation

# AWX

@@ -41,7 +40,7 @@ from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd,
from awx.main.utils.db import get_all_field_names
from awx.main.utils.licensing import server_product_name
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning
from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
from awx.conf import settings_registry

@@ -65,6 +64,7 @@ __all__ = [
    'ParentMixin',
    'SubListAttachDetachAPIView',
    'CopyAPIView',
    'GenericCancelView',
    'BaseUsersList',
]

@@ -90,13 +90,9 @@ class LoggedLoginView(auth_views.LoginView):

    def post(self, request, *args, **kwargs):
        ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
        if request.user.is_authenticated:
            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
            ret.set_cookie('userLoggedIn', 'true')
            current_user = UserSerializer(self.request.user)
            current_user = smart_str(JSONRenderer().render(current_user.data))
            current_user = urllib.parse.quote('%s' % current_user, '')
            ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))

        return ret

@@ -253,7 +249,7 @@ class APIView(views.APIView):
            response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)

        if getattr(self, 'deprecated', False):
            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'  # noqa
            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'

        return response

@@ -990,6 +986,23 @@ class CopyAPIView(GenericAPIView):
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)


class GenericCancelView(RetrieveAPIView):
    # In subclass set model, serializer_class
    obj_permission_type = 'cancel'

    @transaction.non_atomic_requests
    def dispatch(self, *args, **kwargs):
        return super(GenericCancelView, self).dispatch(*args, **kwargs)

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class BaseUsersList(SubListCreateAttachDetachAPIView):
    def post(self, request, *args, **kwargs):
        ret = super(BaseUsersList, self).post(request, *args, **kwargs)
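From a client's point of view, every endpoint that later adopts GenericCancelView behaves identically. A sketch using requests (the host, job id, and credentials are placeholders, not values from this diff):

```python
import requests

# Sketch, not AWX code: POST to a cancel endpoint backed by
# GenericCancelView. A cancellable job answers 202 Accepted; one that
# cannot be canceled falls through to http_method_not_allowed (405).
resp = requests.post(
    "https://awx.example.com/api/v2/jobs/42/cancel/",  # placeholder host and id
    auth=("admin", "password"),  # placeholder credentials
)
print(resp.status_code)  # 202 if obj.can_cancel, otherwise 405
```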
@@ -24,7 +24,6 @@ __all__ = [
    'InventoryInventorySourcesUpdatePermission',
    'UserPermission',
    'IsSystemAdminOrAuditor',
    'InstanceGroupTowerPermission',
    'WorkflowApprovalPermission',
]
@@ -29,6 +29,7 @@ from django.utils.translation import gettext_lazy as _
from django.utils.encoding import force_str
from django.utils.text import capfirst
from django.utils.timezone import now
from django.core.validators import RegexValidator, MaxLengthValidator

# Django REST Framework
from rest_framework.exceptions import ValidationError, PermissionDenied

@@ -120,6 +121,9 @@ from awx.main.validators import vars_validate_or_raise
from awx.api.versioning import reverse
from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, VerbatimField, DeprecatedCredentialField

# AWX Utils
from awx.api.validators import HostnameRegexValidator

logger = logging.getLogger('awx.api.serializers')

# Fields that should be summarized regardless of object type.

@@ -3746,7 +3750,11 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):

        # Build unsaved version of this config, use it to detect prompts errors
        mock_obj = self._build_mock_obj(attrs)
        accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
        if set(list(ujt.get_ask_mapping().keys()) + ['extra_data']) & set(attrs.keys()):
            accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
        else:
            # Only perform validation of prompts if prompts fields are provided
            errors = {}

        # Remove all unprocessed $encrypted$ strings, indicating default usage
        if 'extra_data' in attrs and password_dict:

@@ -4921,6 +4929,19 @@ class InstanceSerializer(BaseSerializer):
        extra_kwargs = {
            'node_type': {'initial': Instance.Types.EXECUTION, 'default': Instance.Types.EXECUTION},
            'node_state': {'initial': Instance.States.INSTALLED, 'default': Instance.States.INSTALLED},
            'hostname': {
                'validators': [
                    MaxLengthValidator(limit_value=250),
                    validators.UniqueValidator(queryset=Instance.objects.all()),
                    RegexValidator(
                        regex=r'^localhost$|^127(?:\.[0-9]+){0,2}\.[0-9]+$|^(?:0*\:)*?:?0*1$',
                        flags=re.IGNORECASE,
                        inverse_match=True,
                        message="hostname cannot be localhost or 127.0.0.1",
                    ),
                    HostnameRegexValidator(),
                ],
            },
        }

    def get_related(self, obj):

@@ -4931,7 +4952,7 @@ class InstanceSerializer(BaseSerializer):
        res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
        res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
        if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
            if obj.node_type != 'hop':
            if obj.node_type == 'execution':
                res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
        return res

@@ -4991,6 +5012,10 @@ class InstanceSerializer(BaseSerializer):
        return value

    def validate_hostname(self, value):
        """
        - Hostname cannot be "localhost" - but can be something like localhost.domain
        - Cannot change the hostname of an already-instantiated & initialized Instance object
        """
        if self.instance and self.instance.hostname != value:
            raise serializers.ValidationError("Cannot change hostname.")
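The inverse_match RegexValidator above is easiest to understand by running its pattern directly. A sketch (the sample values are chosen to mirror the serializer's intent; they are not from the diff):

```python
import re

# Sketch: the localhost-rejecting pattern from InstanceSerializer's
# hostname validators. Because the serializer passes inverse_match=True,
# a *match* here means the hostname is rejected.
LOCALHOST_RE = re.compile(r'^localhost$|^127(?:\.[0-9]+){0,2}\.[0-9]+$|^(?:0*\:)*?:?0*1$', re.IGNORECASE)

for candidate in ("localhost", "127.0.0.1", "::1", "node1.example.com"):
    verdict = "rejected" if LOCALHOST_RE.match(candidate) else "allowed"
    print(f"{candidate}: {verdict}")
```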
@@ -1,3 +1,5 @@
receptor_user: awx
receptor_group: awx
receptor_verify: true
receptor_tls: true
receptor_work_commands:

@@ -10,12 +12,12 @@ custom_worksign_public_keyfile: receptor/work-public-key.pem
custom_tls_certfile: receptor/tls/receptor.crt
custom_tls_keyfile: receptor/tls/receptor.key
custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
receptor_user: awx
receptor_group: awx
receptor_protocol: 'tcp'
receptor_listener: true
receptor_port: {{ instance.listener_port }}
receptor_dependencies:
  - podman
  - crun
  - python39-pip
{% verbatim %}
podman_user: "{{ receptor_user }}"
podman_group: "{{ receptor_group }}"
{% endverbatim %}
@@ -9,10 +9,12 @@
    shell: /bin/bash
- name: Enable Copr repo for Receptor
  command: dnf copr enable ansible-awx/receptor -y
- import_role:
    name: ansible.receptor.podman
- import_role:
    name: ansible.receptor.setup
- name: Install ansible-runner
  pip:
    name: ansible-runner
    executable: pip3.9
{% endverbatim %}
{% endverbatim %}
@@ -1,6 +1,4 @@
---
collections:
  - name: ansible.receptor
    source: https://github.com/ansible/receptor-collection/
    type: git
    version: 0.1.1
    version: 1.1.0
@@ -9,9 +9,9 @@ from awx.api.views import (
    InstanceUnifiedJobsList,
    InstanceInstanceGroupsList,
    InstanceHealthCheck,
    InstanceInstallBundle,
    InstancePeersList,
)
from awx.api.views.instance_install_bundle import InstanceInstallBundle


urls = [
@@ -3,26 +3,28 @@

from django.urls import re_path

from awx.api.views import (
from awx.api.views.inventory import (
    InventoryList,
    InventoryDetail,
    InventoryHostsList,
    InventoryGroupsList,
    InventoryRootGroupsList,
    InventoryVariableData,
    InventoryScriptView,
    InventoryTreeView,
    InventoryInventorySourcesList,
    InventoryInventorySourcesUpdate,
    InventoryActivityStreamList,
    InventoryJobTemplateList,
    InventoryAdHocCommandsList,
    InventoryAccessList,
    InventoryObjectRolesList,
    InventoryInstanceGroupsList,
    InventoryLabelList,
    InventoryCopy,
)
from awx.api.views import (
    InventoryHostsList,
    InventoryGroupsList,
    InventoryInventorySourcesList,
    InventoryInventorySourcesUpdate,
    InventoryAdHocCommandsList,
    InventoryRootGroupsList,
    InventoryScriptView,
    InventoryTreeView,
    InventoryVariableData,
)


urls = [
@@ -3,6 +3,9 @@

from django.urls import re_path

from awx.api.views.inventory import (
    InventoryUpdateEventsList,
)
from awx.api.views import (
    InventoryUpdateList,
    InventoryUpdateDetail,

@@ -10,7 +13,6 @@ from awx.api.views import (
    InventoryUpdateStdout,
    InventoryUpdateNotificationsList,
    InventoryUpdateCredentialsList,
    InventoryUpdateEventsList,
)
@@ -10,7 +10,7 @@ from oauthlib import oauth2
from oauth2_provider import views

from awx.main.models import RefreshToken
from awx.api.views import ApiOAuthAuthorizationRootView
from awx.api.views.root import ApiOAuthAuthorizationRootView


class TokenView(views.TokenView):
@@ -3,7 +3,7 @@

from django.urls import re_path

from awx.api.views import (
from awx.api.views.organization import (
    OrganizationList,
    OrganizationDetail,
    OrganizationUsersList,

@@ -14,7 +14,6 @@ from awx.api.views import (
    OrganizationJobTemplatesList,
    OrganizationWorkflowJobTemplatesList,
    OrganizationTeamsList,
    OrganizationCredentialList,
    OrganizationActivityStreamList,
    OrganizationNotificationTemplatesList,
    OrganizationNotificationTemplatesErrorList,

@@ -25,8 +24,8 @@ from awx.api.views import (
    OrganizationGalaxyCredentialsList,
    OrganizationObjectRolesList,
    OrganizationAccessList,
    OrganizationApplicationList,
)
from awx.api.views import OrganizationCredentialList, OrganizationApplicationList


urls = [
@@ -6,13 +6,15 @@ from django.urls import include, re_path

from awx import MODE
from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views import (
from awx.api.views.root import (
    ApiRootView,
    ApiV2RootView,
    ApiV2PingView,
    ApiV2ConfigView,
    ApiV2SubscriptionView,
    ApiV2AttachView,
)
from awx.api.views import (
    AuthView,
    UserMeList,
    DashboardView,

@@ -28,8 +30,8 @@ from awx.api.views import (
    OAuth2TokenList,
    ApplicationOAuth2TokenList,
    OAuth2ApplicationDetail,
    MeshVisualizer,
)
from awx.api.views.mesh_visualizer import MeshVisualizer

from awx.api.views.metrics import MetricsView
@@ -1,6 +1,6 @@
from django.urls import re_path

from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver


urlpatterns = [
55 awx/api/validators.py (new file)
@@ -0,0 +1,55 @@
import re

from django.core.validators import RegexValidator, validate_ipv46_address
from django.core.exceptions import ValidationError


class HostnameRegexValidator(RegexValidator):
    """
    Fully validates a domain name that is compliant with norms in Linux/RHEL
    - Cannot start with a hyphen
    - Cannot begin with, or end with a "."
    - Cannot contain any whitespaces
    - Entire hostname is max 255 chars (including dots)
    - Each domain/label is between 1 and 63 characters, except top level domain, which must be at least 2 characters
    - Supports ipv4, ipv6, simple hostnames and FQDNs
    - Follows RFC 9210 (modern RFC 1123, 1178) requirements

    Accepts an IP Address or Hostname as the argument
    """

    regex = '^[a-z0-9][-a-z0-9]*$|^([a-z0-9][-a-z0-9]{0,62}[.])*[a-z0-9][-a-z0-9]{1,62}$'
    flags = re.IGNORECASE

    def __call__(self, value):
        regex_matches, err = self.__validate(value)
        invalid_input = regex_matches if self.inverse_match else not regex_matches
        if invalid_input:
            if err is None:
                err = ValidationError(self.message, code=self.code, params={"value": value})
            raise err

    def __str__(self):
        return f"regex={self.regex}, message={self.message}, code={self.code}, inverse_match={self.inverse_match}, flags={self.flags}"

    def __validate(self, value):
        if ' ' in value:
            return False, ValidationError("whitespaces in hostnames are illegal")

        """
        If we have an IP address, try and validate it.
        """
        try:
            validate_ipv46_address(value)
            return True, None
        except ValidationError:
            pass

        """
        By this point in the code, we probably have a simple hostname, FQDN or a strange hostname like "192.localhost.domain.101"
        """
        if not self.regex.match(value):
            return False, ValidationError(f"illegal characters detected in hostname={value}. Please verify.")

        return True, None
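A quick way to exercise the new validator, assuming a configured Django environment (the sample values echo the API test table that appears later in this diff):

```python
from django.core.exceptions import ValidationError

from awx.api.validators import HostnameRegexValidator

validate = HostnameRegexValidator()
for value in ("foo.bar.baz", "192.168.56.101", "--yoooo", "white space.example"):
    try:
        validate(value)
        print(f"{value}: valid")
    except ValidationError as e:
        # whitespace and leading hyphens are rejected; IPs pass the
        # validate_ipv46_address fast path before the regex is consulted
        print(f"{value}: rejected ({e})")
```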
@@ -69,6 +69,7 @@ from awx.api.generics import (
    APIView,
    BaseUsersList,
    CopyAPIView,
    GenericCancelView,
    GenericAPIView,
    ListAPIView,
    ListCreateAPIView,

@@ -122,56 +123,6 @@ from awx.api.views.mixin import (
    UnifiedJobDeletionMixin,
    NoTruncateMixin,
)
from awx.api.views.instance_install_bundle import InstanceInstallBundle  # noqa
from awx.api.views.inventory import (  # noqa
    InventoryList,
    InventoryDetail,
    InventoryUpdateEventsList,
    InventoryList,
    InventoryDetail,
    InventoryActivityStreamList,
    InventoryInstanceGroupsList,
    InventoryAccessList,
    InventoryObjectRolesList,
    InventoryJobTemplateList,
    InventoryLabelList,
    InventoryCopy,
)
from awx.api.views.mesh_visualizer import MeshVisualizer  # noqa
from awx.api.views.organization import (  # noqa
    OrganizationList,
    OrganizationDetail,
    OrganizationInventoriesList,
    OrganizationUsersList,
    OrganizationAdminsList,
    OrganizationExecutionEnvironmentsList,
    OrganizationProjectsList,
    OrganizationJobTemplatesList,
    OrganizationWorkflowJobTemplatesList,
    OrganizationTeamsList,
    OrganizationActivityStreamList,
    OrganizationNotificationTemplatesList,
    OrganizationNotificationTemplatesAnyList,
    OrganizationNotificationTemplatesErrorList,
    OrganizationNotificationTemplatesStartedList,
    OrganizationNotificationTemplatesSuccessList,
    OrganizationNotificationTemplatesApprovalList,
    OrganizationInstanceGroupsList,
    OrganizationGalaxyCredentialsList,
    OrganizationAccessList,
    OrganizationObjectRolesList,
)
from awx.api.views.root import (  # noqa
    ApiRootView,
    ApiOAuthAuthorizationRootView,
    ApiVersionRootView,
    ApiV2RootView,
    ApiV2PingView,
    ApiV2ConfigView,
    ApiV2SubscriptionView,
    ApiV2AttachView,
)
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver  # noqa
from awx.api.pagination import UnifiedJobEventPagination
from awx.main.utils import set_environ

@@ -441,8 +392,8 @@ class InstanceHealthCheck(GenericAPIView):
    permission_classes = (IsSystemAdminOrAuditor,)

    def get_queryset(self):
        return super().get_queryset().filter(node_type='execution')
        # FIXME: For now, we don't have a good way of checking the health of a hop node.
        return super().get_queryset().exclude(node_type='hop')

    def get(self, request, *args, **kwargs):
        obj = self.get_object()

@@ -462,9 +413,10 @@ class InstanceHealthCheck(GenericAPIView):

            execution_node_health_check.apply_async([obj.hostname])
        else:
            from awx.main.tasks.system import cluster_node_health_check

            cluster_node_health_check.apply_async([obj.hostname], queue=obj.hostname)
            return Response(
                {"error": f"Cannot run a health check on instances of type {obj.node_type}. Health checks can only be run on execution nodes."},
                status=status.HTTP_400_BAD_REQUEST,
            )
        return Response({'msg': f"Health check is running for {obj.hostname}."}, status=status.HTTP_200_OK)

@@ -1026,20 +978,11 @@ class SystemJobEventsList(SubListAPIView):
        return job.get_event_queryset()


class ProjectUpdateCancel(RetrieveAPIView):
class ProjectUpdateCancel(GenericCancelView):

    model = models.ProjectUpdate
    obj_permission_type = 'cancel'
    serializer_class = serializers.ProjectUpdateCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class ProjectUpdateNotificationsList(SubListAPIView):

@@ -2312,20 +2255,11 @@ class InventoryUpdateCredentialsList(SubListAPIView):
    relationship = 'credentials'


class InventoryUpdateCancel(RetrieveAPIView):
class InventoryUpdateCancel(GenericCancelView):

    model = models.InventoryUpdate
    obj_permission_type = 'cancel'
    serializer_class = serializers.InventoryUpdateCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class InventoryUpdateNotificationsList(SubListAPIView):

@@ -3100,8 +3034,7 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
    search_fields = ('unified_job_template__name', 'unified_job_template__description')

    #
    # Limit the set of WorkflowJobeNodes to the related nodes of specified by
    #'relationship'
    # Limit the set of WorkflowJobNodes to the related nodes of specified by self.relationship
    #
    def get_queryset(self):
        parent = self.get_parent_object()

@@ -3403,20 +3336,15 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
        return super(WorkflowJobWorkflowNodesList, self).get_queryset().order_by('id')


class WorkflowJobCancel(RetrieveAPIView):
class WorkflowJobCancel(GenericCancelView):

    model = models.WorkflowJob
    obj_permission_type = 'cancel'
    serializer_class = serializers.WorkflowJobCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            ScheduleWorkflowManager().schedule()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)
        r = super().post(request, *args, **kwargs)
        ScheduleWorkflowManager().schedule()
        return r


class WorkflowJobNotificationsList(SubListAPIView):

@@ -3572,20 +3500,11 @@ class JobActivityStreamList(SubListAPIView):
    search_fields = ('changes',)


class JobCancel(RetrieveAPIView):
class JobCancel(GenericCancelView):

    model = models.Job
    obj_permission_type = 'cancel'
    serializer_class = serializers.JobCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class JobRelaunch(RetrieveAPIView):

@@ -4056,20 +3975,11 @@ class AdHocCommandDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):
    serializer_class = serializers.AdHocCommandDetailSerializer


class AdHocCommandCancel(RetrieveAPIView):
class AdHocCommandCancel(GenericCancelView):

    model = models.AdHocCommand
    obj_permission_type = 'cancel'
    serializer_class = serializers.AdHocCommandCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class AdHocCommandRelaunch(GenericAPIView):

@@ -4204,20 +4114,11 @@ class SystemJobDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):
    serializer_class = serializers.SystemJobSerializer


class SystemJobCancel(RetrieveAPIView):
class SystemJobCancel(GenericCancelView):

    model = models.SystemJob
    obj_permission_type = 'cancel'
    serializer_class = serializers.SystemJobCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class SystemJobNotificationsList(SubListAPIView):

@@ -178,7 +178,7 @@ def generate_receptor_tls(instance_obj):
        .public_key(csr.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime.utcnow())
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=10))
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
        .add_extension(
            csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
            critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,
6241 awx/locale/translations/es/django.po (new file; diff suppressed because it is too large)
10833 awx/locale/translations/es/messages.po (new file; diff suppressed because it is too large)
6243 awx/locale/translations/fr/django.po (new file; diff suppressed because it is too large)
10713 awx/locale/translations/fr/messages.po (new file; diff suppressed because it is too large)
6240 awx/locale/translations/ja/django.po (new file; diff suppressed because it is too large)
10739 awx/locale/translations/ja/messages.po (new file; diff suppressed because it is too large)
6240 awx/locale/translations/ko/django.po (new file; diff suppressed because it is too large)
10700 awx/locale/translations/ko/messages.po (new file; diff suppressed because it is too large)
6241 awx/locale/translations/nl/django.po (new file; diff suppressed because it is too large)
10725 awx/locale/translations/nl/messages.po (new file; diff suppressed because it is too large)
6242 awx/locale/translations/zh/django.po (new file; diff suppressed because it is too large)
10698 awx/locale/translations/zh/messages.po (new file; diff suppressed because it is too large)
@@ -993,9 +993,6 @@ class HostAccess(BaseAccess):
        if data and 'name' in data:
            self.check_license(add_host_name=data['name'])

            # Check the per-org limit
            self.check_org_host_limit({'inventory': obj.inventory}, add_host_name=data['name'])

        # Checks for admin or change permission on inventory, controls whether
        # the user can edit variable data.
        return obj and self.user in obj.inventory.admin_role
@@ -166,11 +166,7 @@ class Metrics:
        elif settings.IS_TESTING():
            self.instance_name = "awx_testing"
        else:
            try:
                self.instance_name = Instance.objects.me().hostname
            except Exception as e:
                self.instance_name = settings.CLUSTER_HOST_ID
                logger.info(f'Instance {self.instance_name} seems to be unregistered, error: {e}')
            self.instance_name = Instance.objects.my_hostname()

        # metric name, help_text
        METRICSLIST = [
@@ -3,6 +3,7 @@ import uuid
import json

from django.conf import settings
from django.db import connection
import redis

from awx.main.dispatch import get_local_queuename

@@ -49,7 +50,10 @@ class Control(object):
        reply_queue = Control.generate_reply_queue_name()
        self.result = None

        with pg_bus_conn(new_connection=True) as conn:
        if not connection.get_autocommit():
            raise RuntimeError('Control-with-reply messages can only be done in autocommit mode')

        with pg_bus_conn() as conn:
            conn.listen(reply_queue)
            send_data = {'control': command, 'reply_to': reply_queue}
            if extra_data:
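The guard exists because the reply travels over PostgreSQL LISTEN/NOTIFY, and notifications may not be delivered to a connection sitting inside an open transaction; refusing early is safer than waiting forever. A sketch of the check in isolation (the transaction.atomic usage is illustrative, not from the diff):

```python
from django.db import connection, transaction

def control_with_reply_guard():
    # Mirrors the check the diff adds: get_autocommit() is False inside
    # transaction.atomic(), where a LISTEN-based reply could be missed.
    if not connection.get_autocommit():
        raise RuntimeError('Control-with-reply messages can only be done in autocommit mode')

# with transaction.atomic():
#     control_with_reply_guard()  # would raise: autocommit is off here
```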
@@ -387,6 +387,8 @@ class AutoscalePool(WorkerPool):
                        reaper.reap_job(j, 'failed')
                    except Exception:
                        logger.exception('failed to reap job UUID {}'.format(w.current_task['uuid']))
                else:
                    logger.warning(f'Worker was told to quit but has not, pid={w.pid}')
                orphaned.extend(w.orphaned_tasks)
                self.workers.remove(w)
            elif w.idle and len(self.workers) > self.min_workers:

@@ -450,9 +452,6 @@ class AutoscalePool(WorkerPool):
        try:
            if isinstance(body, dict) and body.get('bind_kwargs'):
                self.add_bind_kwargs(body)
            # when the cluster heartbeat occurs, clean up internally
            if isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']:
                self.cleanup()
            if self.should_grow:
                self.up()
            # we don't care about "preferred queue" round robin distribution, just
@@ -16,12 +16,7 @@ def startup_reaping():
    If this particular instance is starting, then we know that any running jobs are invalid
    so we will reap those jobs as a special action here
    """
    try:
        me = Instance.objects.me()
    except RuntimeError as e:
        logger.warning(f'Local instance is not registered, not running startup reaper: {e}')
        return
    jobs = UnifiedJob.objects.filter(status='running', controller_node=me.hostname)
    jobs = UnifiedJob.objects.filter(status='running', controller_node=Instance.objects.my_hostname())
    job_ids = []
    for j in jobs:
        job_ids.append(j.id)

@@ -62,16 +57,13 @@ def reap_waiting(instance=None, status='failed', job_explanation=None, grace_per
    if grace_period is None:
        grace_period = settings.JOB_WAITING_GRACE_PERIOD + settings.TASK_MANAGER_TIMEOUT

    me = instance
    if me is None:
        try:
            me = Instance.objects.me()
        except RuntimeError as e:
            logger.warning(f'Local instance is not registered, not running reaper: {e}')
            return
    if instance is None:
        hostname = Instance.objects.my_hostname()
    else:
        hostname = instance.hostname
    if ref_time is None:
        ref_time = tz_now()
    jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=me.hostname)
    jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=hostname)
    if excluded_uuids:
        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
    for j in jobs:

@@ -82,16 +74,13 @@ def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=No
    """
    Reap all jobs in running for this instance.
    """
    me = instance
    if me is None:
        try:
            me = Instance.objects.me()
        except RuntimeError as e:
            logger.warning(f'Local instance is not registered, not running reaper: {e}')
            return
    if instance is None:
        hostname = Instance.objects.my_hostname()
    else:
        hostname = instance.hostname
    workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
    jobs = UnifiedJob.objects.filter(
        Q(status='running') & (Q(execution_node=me.hostname) | Q(controller_node=me.hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
        Q(status='running') & (Q(execution_node=hostname) | Q(controller_node=hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
    )
    if excluded_uuids:
        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
@@ -114,7 +114,6 @@ class AWXConsumerBase(object):
            queue = 0
        self.pool.write(queue, body)
        self.total_messages += 1
        self.record_statistics()

    @log_excess_runtime(logger)
    def record_statistics(self):

@@ -156,6 +155,16 @@ class AWXConsumerPG(AWXConsumerBase):
        # if no successful loops have run since startup, then we should fail right away
        self.pg_is_down = True  # set so that we fail if we get database errors on startup
        self.pg_down_time = time.time() - self.pg_max_wait  # allow no grace period
        self.last_cleanup = time.time()

    def run_periodic_tasks(self):
        self.record_statistics()  # maintains time buffer in method

        if time.time() - self.last_cleanup > 60:  # same as cluster_node_heartbeat
            # NOTE: if we run out of database connections, it is important to still run cleanup
            # so that we scale down workers and free up connections
            self.pool.cleanup()
            self.last_cleanup = time.time()

    def run(self, *args, **kwargs):
        super(AWXConsumerPG, self).run(*args, **kwargs)

@@ -171,8 +180,10 @@ class AWXConsumerPG(AWXConsumerBase):
                if init is False:
                    self.worker.on_start()
                    init = True
                for e in conn.events():
                    self.process_task(json.loads(e.payload))
                for e in conn.events(yield_timeouts=True):
                    if e is not None:
                        self.process_task(json.loads(e.payload))
                    self.run_periodic_tasks()
                self.pg_is_down = False
                if self.should_stop:
                    return

@@ -229,6 +240,8 @@ class BaseWorker(object):
                # so we can establish a new connection
                conn.close_if_unusable_or_obsolete()
                self.perform_work(body, *args)
            except Exception:
                logger.exception(f'Unhandled exception in perform_work in worker pid={os.getpid()}')
            finally:
                if 'uuid' in body:
                    uuid = body['uuid']
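The switch to events(yield_timeouts=True) turns a blocking listen loop into one that also wakes up between notifications. A self-contained sketch of the pattern, with a generator standing in for pg_bus_conn.events (the timing values are placeholders, not the real 60-second heartbeat interval):

```python
import time

def events(yield_timeouts=True, timeout=1.0, duration=3.0):
    # Stand-in for pg_bus_conn.events(yield_timeouts=True): yields None
    # whenever the wait times out, instead of blocking until a
    # notification actually arrives.
    end = time.time() + duration
    while time.time() < end:
        time.sleep(timeout)
        yield None  # no notification in this interval

last_cleanup = time.time()
for e in events():
    if e is not None:
        print("process task", e)
    # periodic work runs even when the bus is quiet, as in run_periodic_tasks()
    if time.time() - last_cleanup >= 2:
        print("cleanup")
        last_cleanup = time.time()
```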
@@ -25,7 +25,7 @@ class Command(BaseCommand):
        with connection.cursor() as cursor:
            cursor.execute(
                f'''
                SELECT
                SELECT
                    b.id, b.job_id, b.host_name, b.created - a.created delta,
                    b.task task,
                    b.event_data::json->'task_action' task_action,
@@ -53,7 +53,7 @@ class Command(BaseCommand):
        return lines

    @classmethod
    def get_connection_status(cls, me, hostnames, data):
    def get_connection_status(cls, hostnames, data):
        host_stats = [('hostname', 'state', 'start time', 'duration (sec)')]
        for h in hostnames:
            connection_color = '91'  # red

@@ -78,7 +78,7 @@ class Command(BaseCommand):
        return host_stats

    @classmethod
    def get_connection_stats(cls, me, hostnames, data):
    def get_connection_stats(cls, hostnames, data):
        host_stats = [('hostname', 'total', 'per minute')]
        for h in hostnames:
            h_safe = safe_name(h)

@@ -119,8 +119,8 @@ class Command(BaseCommand):
            return

        try:
            me = Instance.objects.me()
            logger.info('Active instance with hostname {} is registered.'.format(me.hostname))
            my_hostname = Instance.objects.my_hostname()
            logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
        except RuntimeError as e:
            # the CLUSTER_HOST_ID in the task, and web instance must match and
            # ensure network connectivity between the task and web instance

@@ -145,19 +145,19 @@ class Command(BaseCommand):
                else:
                    data[family.name] = family.samples[0].value

            me = Instance.objects.me()
            hostnames = [i.hostname for i in Instance.objects.exclude(hostname=me.hostname)]
            my_hostname = Instance.objects.my_hostname()
            hostnames = [i.hostname for i in Instance.objects.exclude(hostname=my_hostname)]

            host_stats = Command.get_connection_status(me, hostnames, data)
            host_stats = Command.get_connection_status(hostnames, data)
            lines = Command._format_lines(host_stats)

            print(f'Broadcast websocket connection status from "{me.hostname}" to:')
            print(f'Broadcast websocket connection status from "{my_hostname}" to:')
            print('\n'.join(lines))

            host_stats = Command.get_connection_stats(me, hostnames, data)
            host_stats = Command.get_connection_stats(hostnames, data)
            lines = Command._format_lines(host_stats)

            print(f'\nBroadcast websocket connection stats from "{me.hostname}" to:')
            print(f'\nBroadcast websocket connection stats from "{my_hostname}" to:')
            print('\n'.join(lines))

            return
@@ -99,9 +99,12 @@ class InstanceManager(models.Manager):
    instance or role.
    """

    def my_hostname(self):
        return settings.CLUSTER_HOST_ID

    def me(self):
        """Return the currently active instance."""
        node = self.filter(hostname=settings.CLUSTER_HOST_ID)
        node = self.filter(hostname=self.my_hostname())
        if node.exists():
            return node[0]
        raise RuntimeError("No instance found with the current cluster host id")
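The practical difference for callers, shown as a sketch (the settings stub and its value replace Django settings for illustration):

```python
class _Settings:
    CLUSTER_HOST_ID = "awx-1"  # placeholder value

settings = _Settings()

def my_hostname():
    # Mirrors InstanceManager.my_hostname(): a plain settings read. Unlike
    # me(), it needs no database row and cannot raise RuntimeError when the
    # local instance is not yet registered, which is why the reaper,
    # metrics, and websocket-status call sites in this diff switch to it.
    return settings.CLUSTER_HOST_ID

print(my_hostname())  # awx-1
```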
@@ -4,7 +4,7 @@ from django.utils.timezone import now

logger = logging.getLogger('awx.main.migrations')

__all__ = ['create_collection_jt', 'create_clearsessions_jt', 'create_cleartokens_jt']
__all__ = ['create_clearsessions_jt', 'create_cleartokens_jt']

'''
These methods are called by migrations to create various system job templates
@@ -44,7 +44,7 @@ def migrate_galaxy_settings(apps, schema_editor):
            credential_type=galaxy_type,
            inputs={'url': 'https://galaxy.ansible.com/'},
        )
    except:
    except Exception:
        # Needed for new migrations, tests
        public_galaxy_credential = Credential(
            created=now(), modified=now(), name='Ansible Galaxy', managed=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
@@ -282,7 +282,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
                return field['default']
            if 'default' in kwargs:
                return kwargs['default']
            raise AttributeError
            raise AttributeError(field_name)
        if field_name in self.inputs:
            return self.inputs[field_name]
        if 'default' in kwargs:
||||
@@ -247,6 +247,19 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
||||
return (number, step)
|
||||
|
||||
def get_sliced_hosts(self, host_queryset, slice_number, slice_count):
|
||||
"""
|
||||
Returns a slice of Hosts given a slice number and total slice count, or
|
||||
the original queryset if slicing is not requested.
|
||||
|
||||
NOTE: If slicing is performed, this will return a List[Host] with the
|
||||
resulting slice. If slicing is not performed it will return the
|
||||
original queryset (not evaluating it or forcing it to a list). This
|
||||
puts the burden on the caller to check the resulting type. This is
|
||||
non-ideal because it's easy to get wrong, but I think the only way
|
||||
around it is to force the queryset which has memory implications for
|
||||
large inventories.
|
||||
"""
|
||||
|
||||
if slice_count > 1 and slice_number > 0:
|
||||
offset = slice_number - 1
|
||||
host_queryset = host_queryset[offset::slice_count]
|
||||
|
||||
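The slicing itself is a stride over the ordered hosts. A sketch with plain lists (slice numbers are 1-indexed, exactly as in the model code):

```python
def sliced(hosts, slice_number, slice_count):
    # Same arithmetic as get_sliced_hosts: slice N of M takes every Mth
    # element starting at offset N-1, so the slices partition the hosts.
    if slice_count > 1 and slice_number > 0:
        offset = slice_number - 1
        return hosts[offset::slice_count]
    return hosts

hosts = ["h1", "h2", "h3", "h4", "h5", "h6", "h7"]
for n in (1, 2, 3):
    print(n, sliced(hosts, n, 3))
# 1 ['h1', 'h4', 'h7']
# 2 ['h2', 'h5']
# 3 ['h3', 'h6']
```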
@@ -15,6 +15,7 @@ from urllib.parse import urljoin

from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.query import QuerySet

# from django.core.cache import cache
from django.utils.encoding import smart_str

@@ -844,22 +845,30 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
    def get_notification_friendly_name(self):
        return "Job"

    def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id']):
    def _get_inventory_hosts(self, only=('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id'), **filters):
        """Return value is an iterable for the relevant hosts for this job"""
        if not self.inventory:
            return []
        host_queryset = self.inventory.hosts.only(*only)
        return self.inventory.get_sliced_hosts(host_queryset, self.job_slice_number, self.job_slice_count)
        if filters:
            host_queryset = host_queryset.filter(**filters)
        host_queryset = self.inventory.get_sliced_hosts(host_queryset, self.job_slice_number, self.job_slice_count)
        if isinstance(host_queryset, QuerySet):
            return host_queryset.iterator()
        return host_queryset

    def start_job_fact_cache(self, destination, modification_times, timeout=None):
        self.log_lifecycle("start_job_fact_cache")
        os.makedirs(destination, mode=0o700)
        hosts = self._get_inventory_hosts()

        if timeout is None:
            timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
        if timeout > 0:
            # exclude hosts with fact data older than `settings.ANSIBLE_FACT_CACHE_TIMEOUT seconds`
            timeout = now() - datetime.timedelta(seconds=timeout)
            hosts = hosts.filter(ansible_facts_modified__gte=timeout)
            hosts = self._get_inventory_hosts(ansible_facts_modified__gte=timeout)
        else:
            hosts = self._get_inventory_hosts()
        for host in hosts:
            filepath = os.sep.join(map(str, [destination, host.name]))
            if not os.path.realpath(filepath).startswith(destination):
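How the timeout becomes a queryset filter: the cutoff is an absolute timestamp, so passing it into _get_inventory_hosts as ansible_facts_modified__gte keeps the filtering in SQL instead of on an already-sliced list. A sketch of the cutoff arithmetic (the setting value is a placeholder):

```python
import datetime

ANSIBLE_FACT_CACHE_TIMEOUT = 3600  # placeholder: seconds, read from settings in AWX

# Hosts whose facts were modified before this instant are skipped when
# priming the fact cache directory.
cutoff = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(seconds=ANSIBLE_FACT_CACHE_TIMEOUT)
print(f"cache hosts with ansible_facts_modified >= {cutoff.isoformat()}")
```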
@@ -153,7 +153,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
        #

        # Find the DTSTART rule or raise an error, it's usually the first rule but that is not strictly enforced
        start_date_rule = re.sub('^.*(DTSTART[^\s]+)\s.*$', r'\1', rrule)
        start_date_rule = re.sub(r'^.*(DTSTART[^\s]+)\s.*$', r'\1', rrule)
        if not start_date_rule:
            raise ValueError('A DTSTART field needs to be in the rrule')
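The change only adds the raw-string prefix, but the regex is worth seeing in action. A sketch:

```python
import re

# Sketch: extracting the DTSTART component the way Schedule does. The r''
# prefix stops Python from treating \s as a (deprecated) string escape;
# the pattern itself is unchanged.
rrule = "DTSTART:20300308T050000Z RRULE:INTERVAL=1;FREQ=DAILY"
start_date_rule = re.sub(r'^.*(DTSTART[^\s]+)\s.*$', r'\1', rrule)
print(start_date_rule)  # DTSTART:20300308T050000Z
```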
@@ -1305,6 +1305,8 @@ class UnifiedJob(
            status_data['instance_group_name'] = None
        elif status in ['successful', 'failed', 'canceled'] and self.finished:
            status_data['finished'] = datetime.datetime.strftime(self.finished, "%Y-%m-%dT%H:%M:%S.%fZ")
        elif status == 'running':
            status_data['started'] = datetime.datetime.strftime(self.finished, "%Y-%m-%dT%H:%M:%S.%fZ")
        status_data.update(self.websocket_emit_data())
        status_data['group_name'] = 'jobs'
        if getattr(self, 'unified_job_template_id', None):

@@ -1465,23 +1467,23 @@ class UnifiedJob(
            self.job_explanation = job_explanation
            cancel_fields.append('job_explanation')

            # Important to save here before sending cancel signal to dispatcher to cancel because
            # the job control process will use the cancel_flag to distinguish a shutdown from a cancel
            self.save(update_fields=cancel_fields)

            controller_notified = False
            if self.celery_task_id:
                controller_notified = self.cancel_dispatcher_process()

            else:
                # Avoid race condition where we have stale model from pending state but job has already started,
                # it's checking signal but not cancel_flag, so re-send signal after this database commit
                connection.on_commit(self.fallback_cancel)

            # If a SIGTERM signal was sent to the control process, and acked by the dispatcher
            # then we want to let its own cleanup change status, otherwise change status now
            if not controller_notified:
                if self.status != 'canceled':
                    self.status = 'canceled'
                    cancel_fields.append('status')

                self.save(update_fields=cancel_fields)
                self.save(update_fields=['status'])
                # Avoid race condition where we have stale model from pending state but job has already started,
                # it's checking signal but not cancel_flag, so re-send signal after updating cancel fields
                self.fallback_cancel()

        return self.cancel_flag
@@ -700,7 +700,7 @@ class SourceControlMixin(BaseTask):

    def spawn_project_sync(self, project, sync_needs, scm_branch=None):
        pu_ig = self.instance.instance_group
        pu_en = Instance.objects.me().hostname
        pu_en = Instance.objects.my_hostname()

        sync_metafields = dict(
            launch_type="sync",
@@ -208,7 +208,10 @@ def run_until_complete(node, timing_data=None, **kwargs):
        if state_name.lower() == 'failed':
            work_detail = status.get('Detail', '')
            if work_detail:
                raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}')
                if stdout:
                    raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}\nstdout:\n{stdout}')
                else:
                    raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}')
            else:
                raise RemoteJobError(f'Unknown ansible-runner error on node {node}, stdout:\n{stdout}')
@@ -4,8 +4,10 @@ from awx.api.versioning import reverse
from awx.main.models.activity_stream import ActivityStream
from awx.main.models.ha import Instance

from django.test.utils import override_settings

INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, memory=36000000000, cpu_capacity=6, mem_capacity=42)
INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, node_type='execution', memory=36000000000, cpu_capacity=6, mem_capacity=42)


@pytest.mark.django_db

@@ -54,3 +56,33 @@ def test_health_check_usage(get, post, admin_user):
    get(url=url, user=admin_user, expect=200)
    r = post(url=url, user=admin_user, expect=200)
    assert r.data['msg'] == f"Health check is running for {instance.hostname}."


def test_custom_hostname_regex(post, admin_user):
    url = reverse('api:instance_list')
    with override_settings(IS_K8S=True):
        for value in [
            ("foo.bar.baz", 201),
            ("f.bar.bz", 201),
            ("foo.bar.b", 400),
            ("a.b.c", 400),
            ("localhost", 400),
            ("127.0.0.1", 400),
            ("192.168.56.101", 201),
            ("2001:0db8:85a3:0000:0000:8a2e:0370:7334", 201),
            ("foobar", 201),
            ("--yoooo", 400),
            ("$3$@foobar@#($!@#*$", 400),
            ("999.999.999.999", 201),
            ("0000:0000:0000:0000:0000:0000:0000:0001", 400),
            ("whitespaces are bad for hostnames", 400),
            ("0:0:0:0:0:0:0:1", 400),
            ("192.localhost.domain.101", 201),
            ("F@$%(@#$H%^(I@#^HCTQEWRFG", 400),
        ]:
            data = {
                "hostname": value[0],
                "node_type": "execution",
                "node_state": "installed",
            }
            post(url=url, user=admin_user, data=data, expect=value[1])
@@ -216,7 +216,7 @@ def test_instance_attach_to_instance_group(post, instance_group, node_type_insta

    count = ActivityStream.objects.count()

    url = reverse(f'api:instance_group_instance_list', kwargs={'pk': instance_group.pk})
    url = reverse('api:instance_group_instance_list', kwargs={'pk': instance_group.pk})
    post(url, {'associate': True, 'id': instance.id}, admin, expect=204 if node_type != 'control' else 400)

    new_activity = ActivityStream.objects.all()[count:]

@@ -240,7 +240,7 @@ def test_instance_unattach_from_instance_group(post, instance_group, node_type_i

    count = ActivityStream.objects.count()

    url = reverse(f'api:instance_group_instance_list', kwargs={'pk': instance_group.pk})
    url = reverse('api:instance_group_instance_list', kwargs={'pk': instance_group.pk})
    post(url, {'disassociate': True, 'id': instance.id}, admin, expect=204 if node_type != 'control' else 400)

    new_activity = ActivityStream.objects.all()[count:]

@@ -263,7 +263,7 @@ def test_instance_group_attach_to_instance(post, instance_group, node_type_insta

    count = ActivityStream.objects.count()

    url = reverse(f'api:instance_instance_groups_list', kwargs={'pk': instance.pk})
    url = reverse('api:instance_instance_groups_list', kwargs={'pk': instance.pk})
    post(url, {'associate': True, 'id': instance_group.id}, admin, expect=204 if node_type != 'control' else 400)

    new_activity = ActivityStream.objects.all()[count:]

@@ -287,7 +287,7 @@ def test_instance_group_unattach_from_instance(post, instance_group, node_type_i

    count = ActivityStream.objects.count()

    url = reverse(f'api:instance_instance_groups_list', kwargs={'pk': instance.pk})
    url = reverse('api:instance_instance_groups_list', kwargs={'pk': instance.pk})
    post(url, {'disassociate': True, 'id': instance_group.id}, admin, expect=204 if node_type != 'control' else 400)

    new_activity = ActivityStream.objects.all()[count:]

@@ -314,4 +314,4 @@ def test_cannot_remove_controlplane_hybrid_instances(post, controlplane_instance

    url = reverse('api:instance_instance_groups_list', kwargs={'pk': instance.pk})
    r = post(url, {'disassociate': True, 'id': controlplane_instance_group.id}, admin_user, expect=400)
    assert f'Cannot disassociate hybrid instance' in str(r.data)
    assert 'Cannot disassociate hybrid instance' in str(r.data)
@@ -105,6 +105,30 @@ def test_encrypted_survey_answer(post, patch, admin_user, project, inventory, su
assert decrypt_value(get_encryption_key('value', pk=None), schedule.extra_data['var1']) == 'bar'


@pytest.mark.django_db
def test_survey_password_default(post, patch, admin_user, project, inventory, survey_spec_factory):
job_template = JobTemplate.objects.create(
name='test-jt',
project=project,
playbook='helloworld.yml',
inventory=inventory,
ask_variables_on_launch=False,
survey_enabled=True,
survey_spec=survey_spec_factory([{'variable': 'var1', 'question_name': 'Q1', 'type': 'password', 'required': True, 'default': 'foobar'}]),
)

# test removal of $encrypted$
url = reverse('api:job_template_schedules_list', kwargs={'pk': job_template.id})
r = post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'extra_data': '{"var1": "$encrypted$"}'}, admin_user, expect=201)
schedule = Schedule.objects.get(pk=r.data['id'])
assert schedule.extra_data == {}
assert schedule.enabled is True

# test an unrelated change
patch(schedule.get_absolute_url(), data={'enabled': False}, user=admin_user, expect=200)
patch(schedule.get_absolute_url(), data={'enabled': True}, user=admin_user, expect=200)


@pytest.mark.django_db
@pytest.mark.parametrize(
'rrule, error',
@@ -123,19 +147,19 @@ def test_encrypted_survey_answer(post, patch, admin_user, project, inventory, su
("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"), # noqa
("DTSTART:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000", "COUNT > 999 is unsupported"), # noqa
# Individual rule test with multiple rules
## Bad Rule: RRULE:NONSENSE
# Bad Rule: RRULE:NONSENSE
("DTSTART:20300308T050000Z RRULE:NONSENSE RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU", "INTERVAL required in rrule"),
## Bad Rule: RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO
# Bad Rule: RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO
(
"DTSTART:20300308T050000Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO",
"BYDAY with numeric prefix not supported",
), # noqa
## Bad Rule: RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z
# Bad Rule: RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z
(
"DTSTART:20030925T104941Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z",
"RRULE may not contain both COUNT and UNTIL",
), # noqa
## Bad Rule: RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000
# Bad Rule: RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000
(
"DTSTART:20300308T050000Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000",
"COUNT > 999 is unsupported",

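The new test above pins down how the API treats the '$encrypted$' placeholder: a survey password answer equal to the sentinel is dropped rather than stored, so the schedule falls back to the survey default. A minimal, illustrative sketch of that sentinel-stripping rule (not the actual serializer code):

    def strip_encrypted_sentinels(extra_data):
        # '$encrypted$' means "no real value was submitted"; dropping the key
        # lets the survey default apply instead.
        return {k: v for k, v in extra_data.items() if v != '$encrypted$'}

    assert strip_encrypted_sentinels({'var1': '$encrypted$'}) == {}
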
@@ -5,7 +5,8 @@ from unittest import mock

from collections import namedtuple

from awx.api.views import ApiVersionRootView, JobTemplateLabelList, InventoryInventorySourcesUpdate, JobTemplateSurveySpec
from awx.api.views.root import ApiVersionRootView
from awx.api.views import JobTemplateLabelList, InventoryInventorySourcesUpdate, JobTemplateSurveySpec

from awx.main.views import handle_error

@@ -23,7 +24,7 @@ class TestApiRootView:
endpoints = [
'ping',
'config',
#'settings',
# 'settings',
'me',
'dashboard',
'organizations',

@@ -50,7 +50,10 @@ def test_cancel(unified_job):
# Some more thought may want to go into only emitting canceled if/when the job record
# status is changed to canceled. Unlike, currently, where it's emitted unconditionally.
unified_job.websocket_emit_status.assert_called_with("canceled")
unified_job.save.assert_called_with(update_fields=['cancel_flag', 'start_args', 'status'])
assert [(args, kwargs) for args, kwargs in unified_job.save.call_args_list] == [
((), {'update_fields': ['cancel_flag', 'start_args']}),
((), {'update_fields': ['status']}),
]


def test_cancel_job_explanation(unified_job):
@@ -60,7 +63,10 @@ def test_cancel_job_explanation(unified_job):
unified_job.cancel(job_explanation=job_explanation)

assert unified_job.job_explanation == job_explanation
unified_job.save.assert_called_with(update_fields=['cancel_flag', 'start_args', 'job_explanation', 'status'])
assert [(args, kwargs) for args, kwargs in unified_job.save.call_args_list] == [
((), {'update_fields': ['cancel_flag', 'start_args', 'job_explanation']}),
((), {'update_fields': ['status']}),
]


def test_organization_copy_to_jobs():

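The rewritten assertions above switch from assert_called_with (which only inspects the most recent call) to call_args_list, because cancel() now issues two separate save() calls with different update_fields. A small standalone sketch of the difference, using plain unittest.mock rather than the test's fixture:

    from unittest import mock

    job = mock.Mock()
    job.save(update_fields=['cancel_flag', 'start_args'])
    job.save(update_fields=['status'])

    # assert_called_with only checks the *last* call, so it says nothing
    # about the first save():
    job.save.assert_called_with(update_fields=['status'])

    # call_args_list verifies the whole sequence, like the updated tests:
    assert job.save.call_args_list == [
        mock.call(update_fields=['cancel_flag', 'start_args']),
        mock.call(update_fields=['status']),
    ]
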
@@ -3,6 +3,7 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
import os
import re
import pytest
from uuid import uuid4
import json
@@ -12,9 +13,13 @@ from unittest import mock
from rest_framework.exceptions import ParseError

from awx.main.utils import common
from awx.api.validators import HostnameRegexValidator

from awx.main.models import Job, AdHocCommand, InventoryUpdate, ProjectUpdate, SystemJob, WorkflowJob, Inventory, JobTemplate, UnifiedJobTemplate, UnifiedJob

from django.core.exceptions import ValidationError
from django.utils.regex_helper import _lazy_re_compile


@pytest.mark.parametrize(
'input_, output',
@@ -194,3 +199,136 @@ def test_extract_ansible_vars():
redacted, var_list = common.extract_ansible_vars(json.dumps(my_dict))
assert var_list == set(['ansible_connetion_setting'])
assert redacted == {"foobar": "baz"}


@pytest.mark.parametrize(
'scm_type, url, username, password, check_special_cases, scp_format, expected',
[
# General/random cases
('git', '', True, True, True, False, ''),
('git', 'git://example.com/foo.git', True, True, True, False, 'git://example.com/foo.git'),
('git', 'http://example.com/foo.git', True, True, True, False, 'http://example.com/foo.git'),
('git', 'example.com:bar.git', True, True, True, False, 'git+ssh://example.com/bar.git'),
('git', 'user@example.com:bar.git', True, True, True, False, 'git+ssh://user@example.com/bar.git'),
('git', '127.0.0.1:bar.git', True, True, True, False, 'git+ssh://127.0.0.1/bar.git'),
('git', 'git+ssh://127.0.0.1/bar.git', True, True, True, True, '127.0.0.1:bar.git'),
('git', 'ssh://127.0.0.1:22/bar.git', True, True, True, False, 'ssh://127.0.0.1:22/bar.git'),
('git', 'ssh://root@127.0.0.1:22/bar.git', True, True, True, False, 'ssh://root@127.0.0.1:22/bar.git'),
('git', 'some/path', True, True, True, False, 'file:///some/path'),
('git', '/some/path', True, True, True, False, 'file:///some/path'),
# Invalid URLs - ensure we error properly
('cvs', 'anything', True, True, True, False, ValueError('Unsupported SCM type "cvs"')),
('svn', 'anything-without-colon-slash-slash', True, True, True, False, ValueError('Invalid svn URL')),
('git', 'http://example.com:123invalidport/foo.git', True, True, True, False, ValueError('Invalid git URL')),
('git', 'git+ssh://127.0.0.1/bar.git', True, True, True, False, ValueError('Unsupported git URL')),
('git', 'git@example.com:3000:/git/repo.git', True, True, True, False, ValueError('Invalid git URL')),
('insights', 'git://example.com/foo.git', True, True, True, False, ValueError('Unsupported insights URL')),
('svn', 'file://example/path', True, True, True, False, ValueError('Unsupported host "example" for file:// URL')),
('svn', 'svn:///example', True, True, True, False, ValueError('Host is required for svn URL')),
# Username/password cases
('git', 'https://example@example.com/bar.git', False, True, True, False, 'https://example.com/bar.git'),
('git', 'https://example@example.com/bar.git', 'user', True, True, False, 'https://user@example.com/bar.git'),
('git', 'https://example@example.com/bar.git', 'user:pw', True, True, False, 'https://user%3Apw@example.com/bar.git'),
('git', 'https://example@example.com/bar.git', False, 'pw', True, False, 'https://example.com/bar.git'),
('git', 'https://some:example@example.com/bar.git', True, False, True, False, 'https://some@example.com/bar.git'),
('git', 'https://some:example@example.com/bar.git', False, False, True, False, 'https://example.com/bar.git'),
('git', 'https://example.com/bar.git', 'user', 'pw', True, False, 'https://user:pw@example.com/bar.git'),
('git', 'https://example@example.com/bar.git', False, 'something', True, False, 'https://example.com/bar.git'),
# Special github/bitbucket cases
('git', 'notgit@github.com:ansible/awx.git', True, True, True, False, ValueError('Username must be "git" for SSH access to github.com.')),
(
'git',
'notgit@bitbucket.org:does-not-exist/example.git',
True,
True,
True,
False,
ValueError('Username must be "git" for SSH access to bitbucket.org.'),
),
(
'git',
'notgit@altssh.bitbucket.org:does-not-exist/example.git',
True,
True,
True,
False,
ValueError('Username must be "git" for SSH access to altssh.bitbucket.org.'),
),
('git', 'git:password@github.com:ansible/awx.git', True, True, True, False, 'git+ssh://git@github.com/ansible/awx.git'),
# Disabling the special handling should not raise an error
('git', 'notgit@github.com:ansible/awx.git', True, True, False, False, 'git+ssh://notgit@github.com/ansible/awx.git'),
('git', 'notgit@bitbucket.org:does-not-exist/example.git', True, True, False, False, 'git+ssh://notgit@bitbucket.org/does-not-exist/example.git'),
(
'git',
'notgit@altssh.bitbucket.org:does-not-exist/example.git',
True,
True,
False,
False,
'git+ssh://notgit@altssh.bitbucket.org/does-not-exist/example.git',
),
# awx#12992 - IPv6
('git', 'http://[fd00:1234:2345:6789::11]:3000/foo.git', True, True, True, False, 'http://[fd00:1234:2345:6789::11]:3000/foo.git'),
('git', 'http://foo:bar@[fd00:1234:2345:6789::11]:3000/foo.git', True, True, True, False, 'http://foo:bar@[fd00:1234:2345:6789::11]:3000/foo.git'),
('git', 'example@[fd00:1234:2345:6789::11]:example/foo.git', True, True, True, False, 'git+ssh://example@[fd00:1234:2345:6789::11]/example/foo.git'),
],
)
def test_update_scm_url(scm_type, url, username, password, check_special_cases, scp_format, expected):
if isinstance(expected, Exception):
with pytest.raises(type(expected)) as excinfo:
common.update_scm_url(scm_type, url, username, password, check_special_cases, scp_format)
assert str(excinfo.value) == str(expected)
else:
assert common.update_scm_url(scm_type, url, username, password, check_special_cases, scp_format) == expected


class TestHostnameRegexValidator:
@pytest.fixture
def regex_expr(self):
return '^[a-z0-9][-a-z0-9]*$|^([a-z0-9][-a-z0-9]{0,62}[.])*[a-z0-9][-a-z0-9]{1,62}$'

@pytest.fixture
def re_flags(self):
return re.IGNORECASE

@pytest.fixture
def custom_err_message(self):
return "foobar"

def test_hostame_regex_validator_constructor_with_args(self, regex_expr, re_flags, custom_err_message):
h = HostnameRegexValidator(regex=regex_expr, flags=re_flags, message=custom_err_message)
assert h.regex == _lazy_re_compile(regex_expr, re_flags)
assert h.message == 'foobar'
assert h.code == 'invalid'
assert h.inverse_match == False
assert h.flags == re_flags

def test_hostame_regex_validator_default_constructor(self, regex_expr, re_flags):
h = HostnameRegexValidator()
assert h.regex == _lazy_re_compile(regex_expr, re_flags)
assert h.message == 'Enter a valid value.'
assert h.code == 'invalid'
assert h.inverse_match == False
assert h.flags == re_flags

def test_good_call(self, regex_expr, re_flags):
h = HostnameRegexValidator(regex=regex_expr, flags=re_flags)
assert (h("192.168.56.101"), None)

def test_bad_call(self, regex_expr, re_flags):
h = HostnameRegexValidator(regex=regex_expr, flags=re_flags)
try:
h("@#$%)$#(TUFAS_DG")
except ValidationError as e:
assert e.message is not None

def test_good_call_with_inverse(self, regex_expr, re_flags, inverse_match=True):
h = HostnameRegexValidator(regex=regex_expr, flags=re_flags, inverse_match=inverse_match)
try:
h("1.2.3.4")
except ValidationError as e:
assert e.message is not None

def test_bad_call_with_inverse(self, regex_expr, re_flags, inverse_match=True):
h = HostnameRegexValidator(regex=regex_expr, flags=re_flags, inverse_match=inverse_match)
assert (h("@#$%)$#(TUFAS_DG"), None)

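One editorial note on the validator tests above: a bare try/except passes silently when no exception is raised at all. A hedged sketch of the more explicit pytest idiom, assuming the HostnameRegexValidator import shown in this diff:

    import pytest
    from django.core.exceptions import ValidationError
    from awx.api.validators import HostnameRegexValidator

    def test_bad_call_raises():
        # Fails loudly if no exception is raised, unlike a bare try/except.
        h = HostnameRegexValidator()
        with pytest.raises(ValidationError):
            h("@#$%)$#(TUFAS_DG")
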
@@ -264,9 +264,15 @@ def update_scm_url(scm_type, url, username=True, password=True, check_special_ca
userpass, hostpath = url.split('@', 1)
else:
userpass, hostpath = '', url
if hostpath.count(':') > 1:
# Handle IPv6 here. In this case, we might have hostpath of:
# [fd00:1234:2345:6789::11]:example/foo.git
if hostpath.startswith('[') and ']:' in hostpath:
host, path = hostpath.split(']:', 1)
host = host + ']'
elif hostpath.count(':') > 1:
raise ValueError(_('Invalid %s URL') % scm_type)
host, path = hostpath.split(':', 1)
else:
host, path = hostpath.split(':', 1)
# if not path.startswith('/') and not path.startswith('~/'):
# path = '~/%s' % path
# if path.startswith('/'):
@@ -325,7 +331,11 @@ def update_scm_url(scm_type, url, username=True, password=True, check_special_ca
netloc = u':'.join([urllib.parse.quote(x, safe='') for x in (netloc_username, netloc_password) if x])
else:
netloc = u''
netloc = u'@'.join(filter(None, [netloc, parts.hostname]))
# urllib.parse strips brackets from IPv6 addresses, so we need to add them back in
hostname = parts.hostname
if hostname and ':' in hostname and '[' in url and ']' in url:
hostname = f'[{hostname}]'
netloc = u'@'.join(filter(None, [netloc, hostname]))
if parts.port:
netloc = u':'.join([netloc, str(parts.port)])
new_url = urllib.parse.urlunsplit([parts.scheme, netloc, parts.path, parts.query, parts.fragment])

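The core of the IPv6 fix above is the host/path split: an scp-style location like '[fd00::11]:example/foo.git' carries colons inside the brackets, so splitting on the first plain ':' would mangle it. A standalone sketch of that splitting logic (simplified from the helper, and assuming the input contains at least one colon):

    def split_scp_hostpath(hostpath):
        # IPv6 hosts carry colons inside the brackets, so split on ']:' first
        # and re-append the bracket that split() consumed.
        if hostpath.startswith('[') and ']:' in hostpath:
            host, path = hostpath.split(']:', 1)
            return host + ']', path
        if hostpath.count(':') > 1:
            raise ValueError('Invalid URL')
        host, path = hostpath.split(':', 1)
        return host, path

    assert split_scp_hostpath('[fd00:1234:2345:6789::11]:example/foo.git') == ('[fd00:1234:2345:6789::11]', 'example/foo.git')
    assert split_scp_hostpath('example.com:bar.git') == ('example.com', 'bar.git')
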
@@ -110,7 +110,7 @@ if settings.COLOR_LOGS is True:
# logs rendered with cyan text
previous_level_map = self.level_map.copy()
if record.name == "awx.analytics.job_lifecycle":
self.level_map[logging.DEBUG] = (None, 'cyan', True)
self.level_map[logging.INFO] = (None, 'cyan', True)
msg = super(ColorHandler, self).colorize(line, record)
self.level_map = previous_level_map
return msg

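The hunk above uses a save/mutate/restore pattern: copy the handler's color map, override it for one record, colorize, then put the original back so other loggers are unaffected. An illustrative stand-in (not the AWX ColorHandler; it also adds try/finally so the map is restored even if colorizing raises):

    import logging

    class LifecycleColorizer:
        """Illustrative stand-in for the ColorHandler logic above."""

        def __init__(self):
            self.level_map = {logging.DEBUG: 'default', logging.INFO: 'default'}

        def colorize(self, line, record):
            previous_level_map = self.level_map.copy()
            try:
                if record.name == 'awx.analytics.job_lifecycle':
                    self.level_map[logging.DEBUG] = 'cyan'
                    self.level_map[logging.INFO] = 'cyan'
                return f"[{self.level_map.get(record.levelno, 'default')}] {line}"
            finally:
                # Restore the shared map so other records keep their colors.
                self.level_map = previous_level_map
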
@@ -35,7 +35,7 @@ def unwrap_broadcast_msg(payload: dict):
def get_broadcast_hosts():
Instance = apps.get_model('main', 'Instance')
instances = (
Instance.objects.exclude(hostname=Instance.objects.me().hostname)
Instance.objects.exclude(hostname=Instance.objects.my_hostname())
.exclude(node_type='execution')
.exclude(node_type='hop')
.order_by('hostname')
@@ -47,7 +47,7 @@ def get_broadcast_hosts():

def get_local_host():
Instance = apps.get_model('main', 'Instance')
return Instance.objects.me().hostname
return Instance.objects.my_hostname()


class WebsocketTask:

@@ -5,7 +5,6 @@ __metaclass__ = type
import gnupg
import os
import tempfile
from ansible.module_utils.basic import *
from ansible.plugins.action import ActionBase
from ansible.utils.display import Display

@@ -15,7 +14,7 @@ from ansible_sign.checksum import (
InvalidChecksumLine,
)
from ansible_sign.checksum.differ import DistlibManifestChecksumFileExistenceDiffer
from ansible_sign.signing import *
from ansible_sign.signing import GPGVerifier

display = Display()


@@ -101,7 +101,7 @@ USE_L10N = True

USE_TZ = True

STATICFILES_DIRS = (os.path.join(BASE_DIR, 'ui', 'build', 'static'), os.path.join(BASE_DIR, 'static'))
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'ui', 'build', 'static'), os.path.join(BASE_DIR, 'static')]

# Absolute filesystem path to the directory where static files are collected via
# the collectstatic command.
@@ -360,7 +360,7 @@ REST_FRAMEWORK = {
# For swagger schema generation
# see https://github.com/encode/django-rest-framework/pull/6532
'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.AutoSchema',
#'URL_FORMAT_OVERRIDE': None,
# 'URL_FORMAT_OVERRIDE': None,
}

AUTHENTICATION_BACKENDS = (

@@ -101,5 +101,5 @@ except IOError:
# The below runs AFTER all of the custom settings are imported.

DATABASES.setdefault('default', dict()).setdefault('OPTIONS', dict()).setdefault(
'application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]
'application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]  # NOQA
)  # noqa

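For context on the [:63] slice above: PostgreSQL truncates application_name (like other identifiers) to NAMEDATALEN - 1 = 63 bytes by default, so the setting pre-trims the string rather than letting the server warn. A tiny sketch of the same computation with an illustrative CLUSTER_HOST_ID:

    import os
    import sys

    CLUSTER_HOST_ID = 'awx-1'  # illustrative; in settings this comes from the install

    application_name = f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]
    assert len(application_name) <= 63  # PostgreSQL truncates beyond NAMEDATALEN - 1
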
@@ -11,9 +11,11 @@ import ldap
# Django
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.conf import settings as django_settings
from django.core.signals import setting_changed
from django.utils.encoding import force_str
from django.db.utils import IntegrityError

# django-auth-ldap
from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings
@@ -327,31 +329,32 @@ class SAMLAuth(BaseSAMLAuth):
return super(SAMLAuth, self).get_user(user_id)


def _update_m2m_from_groups(user, ldap_user, related, opts, remove=True):
def _update_m2m_from_groups(ldap_user, opts, remove=True):
"""
Helper function to update m2m relationship based on LDAP group membership.
Helper function to evaluate the LDAP team/org options to determine if the LDAP user should
be a member of the team/org based on their ldap group dns.

Returns:
True - User should be added
False - User should be removed
None - User's membership should not be changed
"""
should_add = False
if opts is None:
return
return None
elif not opts:
pass
elif opts is True:
should_add = True
elif isinstance(opts, bool) and opts is True:
return True
else:
if isinstance(opts, str):
opts = [opts]
# If any of the user's groups matches any of the list options
for group_dn in opts:
if not isinstance(group_dn, str):
continue
if ldap_user._get_groups().is_member_of(group_dn):
should_add = True
if should_add:
user.save()
related.add(user)
elif remove and user in related.all():
user.save()
related.remove(user)
return True
return False

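The rewritten helper's tri-state return (True, False, None) moves side effects out of the evaluation step: the caller decides what to do with each verdict. An illustrative sketch of how such a tri-state is consumed (the wiring here is hypothetical, not the AWX call site):

    def apply_membership(state, grant, revoke):
        # None: option was not configured, leave membership alone.
        if state is None:
            return
        if state:
            grant()
        else:
            revoke()

    desired = {'member_role': True, 'auditor_role': False, 'admin_role': None}
    for role, state in desired.items():
        apply_membership(
            state,
            grant=lambda r=role: print(f'add {r}'),
            revoke=lambda r=role: print(f'remove {r}'),
        )
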
@receiver(populate_user, dispatch_uid='populate-ldap-user')
@@ -383,31 +386,73 @@ def on_populate_user(sender, **kwargs):
force_user_update = True
logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))

# Update organization membership based on group memberships.
org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
for org_name, org_opts in org_map.items():
org, created = Organization.objects.get_or_create(name=org_name)
remove = bool(org_opts.get('remove', True))
admins_opts = org_opts.get('admins', None)
remove_admins = bool(org_opts.get('remove_admins', remove))
_update_m2m_from_groups(user, ldap_user, org.admin_role.members, admins_opts, remove_admins)
auditors_opts = org_opts.get('auditors', None)
remove_auditors = bool(org_opts.get('remove_auditors', remove))
_update_m2m_from_groups(user, ldap_user, org.auditor_role.members, auditors_opts, remove_auditors)
users_opts = org_opts.get('users', None)
remove_users = bool(org_opts.get('remove_users', remove))
_update_m2m_from_groups(user, ldap_user, org.member_role.members, users_opts, remove_users)

# Update team membership based on group memberships.
team_map = getattr(backend.settings, 'TEAM_MAP', {})

# Move this junk into save of the settings for performance later, there is no need to do that here
# with maybe the exception of someone defining this in settings before the server is started?
# ==============================================================================================================

# Get all of the IDs and names of orgs in the DB and create any new org defined in LDAP that does not exist in the DB
existing_orgs = {}
for (org_id, org_name) in Organization.objects.all().values_list('id', 'name'):
existing_orgs[org_name] = org_id

# Create any orgs (if needed) for all entries in the org and team maps
for org_name in set(list(org_map.keys()) + [item.get('organization', None) for item in team_map.values()]):
if org_name and org_name not in existing_orgs:
logger.info("LDAP adapter is creating org {}".format(org_name))
try:
new_org = Organization.objects.create(name=org_name)
except IntegrityError:
# Another thread must have created this org before we did so now we need to get it
new_org = Organization.objects.get(name=org_name)
# Add the org name to the existing orgs since we created it and we may need it to build the teams below
existing_orgs[org_name] = new_org.id

# Do the same for teams
existing_team_names = list(Team.objects.all().values_list('name', flat=True))
for team_name, team_opts in team_map.items():
if not team_opts.get('organization', None):
# You can't save the LDAP config in the UI w/o an org (or '' or null as the org) so if we somehow got this condition it's an error
logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))
continue
if team_name not in existing_team_names:
try:
Team.objects.create(name=team_name, organization_id=existing_orgs[team_opts['organization']])
except IntegrityError:
# If another process got here before us that is ok because we don't need the ID from this team or anything
pass
# End move some day
# ==============================================================================================================

# Compute in memory what the state is of the different LDAP orgs
org_roles_and_ldap_attributes = {'admin_role': 'admins', 'auditor_role': 'auditors', 'member_role': 'users'}
desired_org_states = {}
for org_name, org_opts in org_map.items():
remove = bool(org_opts.get('remove', True))
desired_org_states[org_name] = {}
for org_role_name in org_roles_and_ldap_attributes.keys():
ldap_name = org_roles_and_ldap_attributes[org_role_name]
opts = org_opts.get(ldap_name, None)
remove = bool(org_opts.get('remove_{}'.format(ldap_name), remove))
desired_org_states[org_name][org_role_name] = _update_m2m_from_groups(ldap_user, opts, remove)

# If everything returned None (because there was no configuration) we can remove this org from our map
# This will prevent us from loading the org in the next query
if all(desired_org_states[org_name][org_role_name] is None for org_role_name in org_roles_and_ldap_attributes.keys()):
del desired_org_states[org_name]

# Compute in memory what the state is of the different LDAP teams
desired_team_states = {}
for team_name, team_opts in team_map.items():
if 'organization' not in team_opts:
continue
org, created = Organization.objects.get_or_create(name=team_opts['organization'])
team, created = Team.objects.get_or_create(name=team_name, organization=org)
users_opts = team_opts.get('users', None)
remove = bool(team_opts.get('remove', True))
_update_m2m_from_groups(user, ldap_user, team.member_role.members, users_opts, remove)
state = _update_m2m_from_groups(ldap_user, users_opts, remove)
if state is not None:
desired_team_states[team_name] = {'member_role': state}

# Check if user.profile is available, otherwise force user.save()
try:

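The org/team creation above handles a race: two login handlers can try to create the same row, so creation falls back to a fetch when the unique constraint fires. A minimal sketch of that create-or-fetch idiom with Django's ORM, reusing the Organization model from this diff:

    from django.db.utils import IntegrityError

    from awx.main.models import Organization

    def ensure_org(name):
        try:
            return Organization.objects.create(name=name)
        except IntegrityError:
            # Another process won the race; the row now exists, so fetch it.
            return Organization.objects.get(name=name)

get_or_create would also work, but when creation is the expected path this shape avoids the extra SELECT on the happy path.
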
@@ -423,3 +468,62 @@ def on_populate_user(sender, **kwargs):
if profile.ldap_dn != ldap_user.dn:
profile.ldap_dn = ldap_user.dn
profile.save()

reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, 'LDAP')


def reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, source):
from awx.main.models import Organization, Team

content_types = []
reconcile_items = []
if desired_org_states:
content_types.append(ContentType.objects.get_for_model(Organization))
reconcile_items.append(('organization', desired_org_states, Organization))
if desired_team_states:
content_types.append(ContentType.objects.get_for_model(Team))
reconcile_items.append(('team', desired_team_states, Team))

if not content_types:
# If both desired states were empty we can simply return because there is nothing to reconcile
return

# users_roles is a flat set of IDs
users_roles = set(user.roles.filter(content_type__in=content_types).values_list('pk', flat=True))

for object_type, desired_states, model in reconcile_items:
# Get all of the roles in the desired states for efficient DB extraction
roles = []
for sub_dict in desired_states.values():
for role_name in sub_dict:
if sub_dict[role_name] is None:
continue
if role_name not in roles:
roles.append(role_name)

# Get a set of named tuples for the org/team name plus all of the roles we got above
model_roles = model.objects.filter(name__in=desired_states.keys()).values_list('name', *roles, named=True)
for row in model_roles:
for role_name in roles:
desired_state = desired_states.get(row.name, {})
if desired_state[role_name] is None:
# The mapping was not defined for this [org/team]/role so we can just pass
pass

# If somehow the auth adapter knows about an item's role but that role is not defined in the DB we are going to print a pretty error
# This is your classic safety net that we should never hit; but here you are reading this comment... good luck and Godspeed.
role_id = getattr(row, role_name, None)
if role_id is None:
logger.error("{} adapter wanted to manage role {} of {} {} but that role is not defined".format(source, role_name, object_type, row.name))
continue

if desired_state[role_name]:
# The desired state was the user mapped into the object_type, if the user was not mapped in map them in
if role_id not in users_roles:
logger.debug("{} adapter adding user {} to {} {} as {}".format(source, user.username, object_type, row.name, role_name))
user.roles.add(role_id)
else:
# The desired state was the user was not mapped into the org, if the user has the permission remove it
if role_id in users_roles:
logger.debug("{} adapter removing user {} permission of {} from {} {}".format(source, user.username, role_name, object_type, row.name))
user.roles.remove(role_id)

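The reconcile function leans on values_list(..., named=True), which returns rows as named tuples whose attributes are the requested fields; a foreign-key field yields its primary key, so role IDs can be read with getattr. A small sketch of that query shape (field names here are illustrative, borrowed from the diff):

    from awx.main.models import Organization

    rows = Organization.objects.filter(name__in=['Default']).values_list(
        'name', 'member_role', named=True
    )
    for row in rows:
        # row is a named tuple: row.name and row.member_role (the role's pk)
        role_id = getattr(row, 'member_role', None)
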
@@ -53,7 +53,7 @@ SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT = _(
'''\
Mapping to organization admins/users from social auth accounts. This setting
controls which users are placed into which organizations based on their
username and email address. Configuration details are available in the
username and email address. Configuration details are available in the
documentation.\
'''
)

@@ -6,7 +6,7 @@ _values_to_change = ['is_superuser_value', 'is_superuser_role', 'is_system_audit

def _get_setting():
with connection.cursor() as cursor:
cursor.execute(f'SELECT value FROM conf_setting WHERE key= %s', ['SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'])
cursor.execute('SELECT value FROM conf_setting WHERE key= %s', ['SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'])
row = cursor.fetchone()
if row == None:
return {}
@@ -24,7 +24,7 @@ def _get_setting():

def _set_setting(value):
with connection.cursor() as cursor:
cursor.execute(f'UPDATE conf_setting SET value = %s WHERE key = %s', [json.dumps(value), 'SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'])
cursor.execute('UPDATE conf_setting SET value = %s WHERE key = %s', [json.dumps(value), 'SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'])


def forwards(app, schema_editor):

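The f-string prefixes removed above were inert: the SQL uses driver-level %s placeholders, and the database adapter performs the substitution and quoting, so there is nothing for Python string interpolation to do. A self-contained sketch of the parameterized form:

    from django.db import connection

    with connection.cursor() as cursor:
        # %s placeholders are filled by the database driver with proper
        # quoting; no Python-level interpolation is involved.
        cursor.execute(
            'SELECT value FROM conf_setting WHERE key = %s',
            ['SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'],
        )
        row = cursor.fetchone()
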
@@ -163,9 +163,9 @@ class TestSAMLAttr:
'PersonImmutableID': [],
},
},
#'social': <UserSocialAuth: cmeyers@redhat.com>,
# 'social': <UserSocialAuth: cmeyers@redhat.com>,
'social': None,
#'strategy': <awx.sso.strategies.django_strategy.AWXDjangoStrategy object at 0x8523a10>,
# 'strategy': <awx.sso.strategies.django_strategy.AWXDjangoStrategy object at 0x8523a10>,
'strategy': None,
'new_association': False,
}

@@ -11,8 +11,6 @@ from django.http import HttpResponse
from django.views.generic import View
from django.views.generic.base import RedirectView
from django.utils.encoding import smart_str
from awx.api.serializers import UserSerializer
from rest_framework.renderers import JSONRenderer
from django.conf import settings

logger = logging.getLogger('awx.sso.views')
@@ -42,9 +40,6 @@ class CompleteView(BaseRedirectView):
if self.request.user and self.request.user.is_authenticated:
logger.info(smart_str(u"User {} logged in".format(self.request.user.username)))
response.set_cookie('userLoggedIn', 'true')
current_user = UserSerializer(self.request.user)
current_user = smart_str(JSONRenderer().render(current_user.data))
current_user = urllib.parse.quote('%s' % current_user, '')
response.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
return response

awx/ui/package-lock.json (generated): 184 changed lines
@@ -8,14 +8,14 @@
"dependencies": {
"@lingui/react": "3.14.0",
"@patternfly/patternfly": "4.210.2",
"@patternfly/react-core": "^4.221.3",
"@patternfly/react-icons": "4.75.1",
"@patternfly/react-table": "4.100.8",
"@patternfly/react-core": "^4.239.0",
"@patternfly/react-icons": "4.90.0",
"@patternfly/react-table": "4.108.0",
"ace-builds": "^1.10.1",
"ansi-to-html": "0.7.2",
"axios": "0.27.2",
"codemirror": "^6.0.1",
"d3": "7.4.4",
"d3": "7.6.1",
"dagre": "^0.8.4",
"dompurify": "2.4.0",
"formik": "2.2.9",
@@ -31,7 +31,7 @@
"react-router-dom": "^5.3.3",
"react-virtualized": "^9.21.1",
"rrule": "2.7.1",
"styled-components": "5.3.5"
"styled-components": "5.3.6"
},
"devDependencies": {
"@babel/core": "^7.16.10",
@@ -3752,13 +3752,13 @@
"integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
},
"node_modules/@patternfly/react-core": {
"version": "4.231.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.231.8.tgz",
"integrity": "sha512-2ClqlYCvSADppMfVfkUGIA/8XlO6jX8batoClXLxZDwqGoOfr61XyUgQ6SSlE4w60czoNeX4Nf6cfQKUH4RIKw==",
"version": "4.239.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.239.0.tgz",
"integrity": "sha512-6CmYABCJLUXTlzCk6C3WouMNZpS0BCT+aHU8CvYpFQ/NrpYp3MJaDsYbqgCRWV42rmIO5iXun/4WhXBJzJEoQg==",
"dependencies": {
"@patternfly/react-icons": "^4.82.8",
"@patternfly/react-styles": "^4.81.8",
"@patternfly/react-tokens": "^4.83.8",
"@patternfly/react-icons": "^4.90.0",
"@patternfly/react-styles": "^4.89.0",
"@patternfly/react-tokens": "^4.91.0",
"focus-trap": "6.9.2",
"react-dropzone": "9.0.0",
"tippy.js": "5.1.2",
@@ -3769,43 +3769,34 @@
"react-dom": "^16.8.0 || ^17.0.0"
}
},
"node_modules/@patternfly/react-core/node_modules/@patternfly/react-icons": {
"version": "4.82.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.82.8.tgz",
"integrity": "sha512-cKixprTiMLZRe/+kmdZ5suvYb9ly9p1f/HjlcNiWBfsiA8ZDEPmxJnVdend/YsafelC8YC9QGcQf97ay5PNhcw==",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0",
"react-dom": "^16.8.0 || ^17.0.0"
}
},
"node_modules/@patternfly/react-core/node_modules/tslib": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
"integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
},
"node_modules/@patternfly/react-icons": {
"version": "4.75.1",
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.75.1.tgz",
"integrity": "sha512-1ly8SVi/kcc0zkiViOjUd8D5BEr7GeqWGmDPuDSBtD60l1dYf3hZc44IWFVkRM/oHZML/musdrJkLfh4MDqX9w==",
"version": "4.90.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.90.0.tgz",
"integrity": "sha512-qEnQKbxbUgyiosiKSkeKEBwmhgJwWEqniIAFyoxj+kpzAdeu7ueWe5iBbqo06mvDOedecFiM5mIE1N0MXwk8Yw==",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0",
"react-dom": "^16.8.0 || ^17.0.0"
}
},
"node_modules/@patternfly/react-styles": {
"version": "4.81.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.81.8.tgz",
"integrity": "sha512-Q5FiureSSCMIuz+KLMcEm1317TzbXcwmg2q5iNDRKyf/K+5CT6tJp0Wbtk3FlfRvzli4u/7YfXipahia5TL+tA=="
"version": "4.89.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.89.0.tgz",
"integrity": "sha512-SkT+qx3Xqu70T5s+i/AUT2hI2sKAPDX4ffeiJIUDu/oyWiFdk+/9DEivnLSyJMruroXXN33zKibvzb5rH7DKTQ=="
},
"node_modules/@patternfly/react-table": {
"version": "4.100.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.100.8.tgz",
"integrity": "sha512-80XZCZzoYN9gsoufNdXUB/dk33SuWF9lUnOJs7ilezD6noTSD7ARqO1h532eaEPIbPBp4uIVkEUdfGSHd0HJtg==",
"version": "4.108.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.108.0.tgz",
"integrity": "sha512-EUvd3rlkE1UXobAm7L6JHgNE3TW8IYTaVwwH/px4Mkn5mBayDO6f+w6QM3OeoDQVZcXK6IYFe7QQaYd/vWIJCQ==",
"dependencies": {
"@patternfly/react-core": "^4.231.8",
"@patternfly/react-icons": "^4.82.8",
"@patternfly/react-styles": "^4.81.8",
"@patternfly/react-tokens": "^4.83.8",
"@patternfly/react-core": "^4.239.0",
"@patternfly/react-icons": "^4.90.0",
"@patternfly/react-styles": "^4.89.0",
"@patternfly/react-tokens": "^4.91.0",
"lodash": "^4.17.19",
"tslib": "^2.0.0"
},
@@ -3814,24 +3805,15 @@
"react-dom": "^16.8.0 || ^17.0.0"
}
},
"node_modules/@patternfly/react-table/node_modules/@patternfly/react-icons": {
"version": "4.82.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.82.8.tgz",
"integrity": "sha512-cKixprTiMLZRe/+kmdZ5suvYb9ly9p1f/HjlcNiWBfsiA8ZDEPmxJnVdend/YsafelC8YC9QGcQf97ay5PNhcw==",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0",
"react-dom": "^16.8.0 || ^17.0.0"
}
},
"node_modules/@patternfly/react-table/node_modules/tslib": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
"integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
},
"node_modules/@patternfly/react-tokens": {
"version": "4.83.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.83.8.tgz",
"integrity": "sha512-Z/MHXNY8PQOuBFGUar2yzPVbz3BNJuhB+Dnk5RJcc/iIn3S+VlSru7g6v5jqoV/+a5wLqZtLGEBp8uhCZ7Xkig=="
"version": "4.91.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.91.0.tgz",
"integrity": "sha512-QeQCy8o8E/16fAr8mxqXIYRmpTsjCHJXi5p5jmgEDFmYMesN6Pqfv6N5D0FHb+CIaNOZWRps7GkWvlIMIE81sw=="
},
"node_modules/@pmmmwh/react-refresh-webpack-plugin": {
"version": "0.5.4",
@@ -7482,16 +7464,16 @@
"integrity": "sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw=="
},
"node_modules/d3": {
"version": "7.4.4",
"resolved": "https://registry.npmjs.org/d3/-/d3-7.4.4.tgz",
"integrity": "sha512-97FE+MYdAlV3R9P74+R3Uar7wUKkIFu89UWMjEaDhiJ9VxKvqaMxauImy8PC2DdBkdM2BxJOIoLxPrcZUyrKoQ==",
"version": "7.6.1",
"resolved": "https://registry.npmjs.org/d3/-/d3-7.6.1.tgz",
"integrity": "sha512-txMTdIHFbcpLx+8a0IFhZsbp+PfBBPt8yfbmukZTQFroKuFqIwqswF0qE5JXWefylaAVpSXFoKm3yP+jpNLFLw==",
"dependencies": {
"d3-array": "3",
"d3-axis": "3",
"d3-brush": "3",
"d3-chord": "3",
"d3-color": "3",
"d3-contour": "3",
"d3-contour": "4",
"d3-delaunay": "6",
"d3-dispatch": "3",
"d3-drag": "3",
@@ -7522,9 +7504,9 @@
}
},
"node_modules/d3-array": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.1.1.tgz",
"integrity": "sha512-33qQ+ZoZlli19IFiQx4QEpf2CBEayMRzhlisJHSCsSUbDXv6ZishqS1x7uFVClKG4Wr7rZVHvaAttoLow6GqdQ==",
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz",
"integrity": "sha512-3yXFQo0oG3QCxbF06rMPFyGRMGJNS7NvsV1+2joOjbBE+9xvWQ8+GcMJAjRCzw06zQ3/arXeJgbPYcjUCuC+3g==",
"dependencies": {
"internmap": "1 - 2"
},
@@ -7575,11 +7557,11 @@
}
},
"node_modules/d3-contour": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-3.0.1.tgz",
"integrity": "sha512-0Oc4D0KyhwhM7ZL0RMnfGycLN7hxHB8CMmwZ3+H26PWAG0ozNuYG5hXSDNgmP1SgJkQMrlG6cP20HoaSbvcJTQ==",
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.0.tgz",
"integrity": "sha512-7aQo0QHUTu/Ko3cP9YK9yUTxtoDEiDGwnBHyLxG5M4vqlBkO/uixMRele3nfsfj6UXOcuReVpVXzAboGraYIJw==",
"dependencies": {
"d3-array": "2 - 3"
"d3-array": "^3.2.0"
},
"engines": {
"node": ">=12"
@@ -20234,9 +20216,9 @@
"integrity": "sha512-OPhtyEjyyN9x3nhPsu76f52yUGXiZcgvsrFVtvTkyGRQJ0XK+GPc6ov1z+lRpbeabka+MYEQxOYRnt5nF30aMw=="
},
"node_modules/styled-components": {
"version": "5.3.5",
"resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.5.tgz",
"integrity": "sha512-ndETJ9RKaaL6q41B69WudeqLzOpY1A/ET/glXkNZ2T7dPjPqpPCXXQjDFYZWwNnE5co0wX+gTCqx9mfxTmSIPg==",
"version": "5.3.6",
"resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz",
"integrity": "sha512-hGTZquGAaTqhGWldX7hhfzjnIYBZ0IXQXkCYdvF1Sq3DsUaLx6+NTHC5Jj1ooM2F68sBiVz3lvhfwQs/S3l6qg==",
"hasInstallScript": true,
"dependencies": {
"@babel/helper-module-imports": "^7.0.0",
@@ -25112,25 +25094,19 @@
"integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
},
"@patternfly/react-core": {
"version": "4.231.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.231.8.tgz",
"integrity": "sha512-2ClqlYCvSADppMfVfkUGIA/8XlO6jX8batoClXLxZDwqGoOfr61XyUgQ6SSlE4w60czoNeX4Nf6cfQKUH4RIKw==",
"version": "4.239.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.239.0.tgz",
"integrity": "sha512-6CmYABCJLUXTlzCk6C3WouMNZpS0BCT+aHU8CvYpFQ/NrpYp3MJaDsYbqgCRWV42rmIO5iXun/4WhXBJzJEoQg==",
"requires": {
"@patternfly/react-icons": "^4.82.8",
"@patternfly/react-styles": "^4.81.8",
"@patternfly/react-tokens": "^4.83.8",
"@patternfly/react-icons": "^4.90.0",
"@patternfly/react-styles": "^4.89.0",
"@patternfly/react-tokens": "^4.91.0",
"focus-trap": "6.9.2",
"react-dropzone": "9.0.0",
"tippy.js": "5.1.2",
"tslib": "^2.0.0"
},
"dependencies": {
"@patternfly/react-icons": {
"version": "4.82.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.82.8.tgz",
"integrity": "sha512-cKixprTiMLZRe/+kmdZ5suvYb9ly9p1f/HjlcNiWBfsiA8ZDEPmxJnVdend/YsafelC8YC9QGcQf97ay5PNhcw==",
"requires": {}
},
"tslib": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
@@ -25139,35 +25115,29 @@
}
},
"@patternfly/react-icons": {
"version": "4.75.1",
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.75.1.tgz",
"integrity": "sha512-1ly8SVi/kcc0zkiViOjUd8D5BEr7GeqWGmDPuDSBtD60l1dYf3hZc44IWFVkRM/oHZML/musdrJkLfh4MDqX9w==",
"version": "4.90.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.90.0.tgz",
"integrity": "sha512-qEnQKbxbUgyiosiKSkeKEBwmhgJwWEqniIAFyoxj+kpzAdeu7ueWe5iBbqo06mvDOedecFiM5mIE1N0MXwk8Yw==",
"requires": {}
},
"@patternfly/react-styles": {
"version": "4.81.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.81.8.tgz",
"integrity": "sha512-Q5FiureSSCMIuz+KLMcEm1317TzbXcwmg2q5iNDRKyf/K+5CT6tJp0Wbtk3FlfRvzli4u/7YfXipahia5TL+tA=="
"version": "4.89.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.89.0.tgz",
"integrity": "sha512-SkT+qx3Xqu70T5s+i/AUT2hI2sKAPDX4ffeiJIUDu/oyWiFdk+/9DEivnLSyJMruroXXN33zKibvzb5rH7DKTQ=="
},
"@patternfly/react-table": {
"version": "4.100.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.100.8.tgz",
"integrity": "sha512-80XZCZzoYN9gsoufNdXUB/dk33SuWF9lUnOJs7ilezD6noTSD7ARqO1h532eaEPIbPBp4uIVkEUdfGSHd0HJtg==",
"version": "4.108.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.108.0.tgz",
"integrity": "sha512-EUvd3rlkE1UXobAm7L6JHgNE3TW8IYTaVwwH/px4Mkn5mBayDO6f+w6QM3OeoDQVZcXK6IYFe7QQaYd/vWIJCQ==",
"requires": {
"@patternfly/react-core": "^4.231.8",
"@patternfly/react-icons": "^4.82.8",
"@patternfly/react-styles": "^4.81.8",
"@patternfly/react-tokens": "^4.83.8",
"@patternfly/react-core": "^4.239.0",
"@patternfly/react-icons": "^4.90.0",
"@patternfly/react-styles": "^4.89.0",
"@patternfly/react-tokens": "^4.91.0",
"lodash": "^4.17.19",
"tslib": "^2.0.0"
},
"dependencies": {
"@patternfly/react-icons": {
"version": "4.82.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.82.8.tgz",
"integrity": "sha512-cKixprTiMLZRe/+kmdZ5suvYb9ly9p1f/HjlcNiWBfsiA8ZDEPmxJnVdend/YsafelC8YC9QGcQf97ay5PNhcw==",
"requires": {}
},
"tslib": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
@@ -25176,9 +25146,9 @@
}
},
"@patternfly/react-tokens": {
"version": "4.83.8",
"resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.83.8.tgz",
"integrity": "sha512-Z/MHXNY8PQOuBFGUar2yzPVbz3BNJuhB+Dnk5RJcc/iIn3S+VlSru7g6v5jqoV/+a5wLqZtLGEBp8uhCZ7Xkig=="
"version": "4.91.0",
"resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.91.0.tgz",
"integrity": "sha512-QeQCy8o8E/16fAr8mxqXIYRmpTsjCHJXi5p5jmgEDFmYMesN6Pqfv6N5D0FHb+CIaNOZWRps7GkWvlIMIE81sw=="
},
"@pmmmwh/react-refresh-webpack-plugin": {
"version": "0.5.4",
@@ -28082,16 +28052,16 @@
"integrity": "sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw=="
},
"d3": {
"version": "7.4.4",
"resolved": "https://registry.npmjs.org/d3/-/d3-7.4.4.tgz",
"integrity": "sha512-97FE+MYdAlV3R9P74+R3Uar7wUKkIFu89UWMjEaDhiJ9VxKvqaMxauImy8PC2DdBkdM2BxJOIoLxPrcZUyrKoQ==",
"version": "7.6.1",
"resolved": "https://registry.npmjs.org/d3/-/d3-7.6.1.tgz",
"integrity": "sha512-txMTdIHFbcpLx+8a0IFhZsbp+PfBBPt8yfbmukZTQFroKuFqIwqswF0qE5JXWefylaAVpSXFoKm3yP+jpNLFLw==",
"requires": {
"d3-array": "3",
"d3-axis": "3",
"d3-brush": "3",
"d3-chord": "3",
"d3-color": "3",
"d3-contour": "3",
"d3-contour": "4",
"d3-delaunay": "6",
"d3-dispatch": "3",
"d3-drag": "3",
@@ -28119,9 +28089,9 @@
}
},
"d3-array": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.1.1.tgz",
"integrity": "sha512-33qQ+ZoZlli19IFiQx4QEpf2CBEayMRzhlisJHSCsSUbDXv6ZishqS1x7uFVClKG4Wr7rZVHvaAttoLow6GqdQ==",
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz",
"integrity": "sha512-3yXFQo0oG3QCxbF06rMPFyGRMGJNS7NvsV1+2joOjbBE+9xvWQ8+GcMJAjRCzw06zQ3/arXeJgbPYcjUCuC+3g==",
"requires": {
"internmap": "1 - 2"
}
@@ -28157,11 +28127,11 @@
"integrity": "sha512-6/SlHkDOBLyQSJ1j1Ghs82OIUXpKWlR0hCsw0XrLSQhuUPuCSmLQ1QPH98vpnQxMUQM2/gfAkUEWsupVpd9JGw=="
},
"d3-contour": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-3.0.1.tgz",
"integrity": "sha512-0Oc4D0KyhwhM7ZL0RMnfGycLN7hxHB8CMmwZ3+H26PWAG0ozNuYG5hXSDNgmP1SgJkQMrlG6cP20HoaSbvcJTQ==",
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.0.tgz",
"integrity": "sha512-7aQo0QHUTu/Ko3cP9YK9yUTxtoDEiDGwnBHyLxG5M4vqlBkO/uixMRele3nfsfj6UXOcuReVpVXzAboGraYIJw==",
"requires": {
"d3-array": "2 - 3"
"d3-array": "^3.2.0"
}
},
"d3-delaunay": {
@@ -37705,9 +37675,9 @@
"integrity": "sha512-OPhtyEjyyN9x3nhPsu76f52yUGXiZcgvsrFVtvTkyGRQJ0XK+GPc6ov1z+lRpbeabka+MYEQxOYRnt5nF30aMw=="
},
"styled-components": {
"version": "5.3.5",
"resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.5.tgz",
"integrity": "sha512-ndETJ9RKaaL6q41B69WudeqLzOpY1A/ET/glXkNZ2T7dPjPqpPCXXQjDFYZWwNnE5co0wX+gTCqx9mfxTmSIPg==",
"version": "5.3.6",
"resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz",
"integrity": "sha512-hGTZquGAaTqhGWldX7hhfzjnIYBZ0IXQXkCYdvF1Sq3DsUaLx6+NTHC5Jj1ooM2F68sBiVz3lvhfwQs/S3l6qg==",
"requires": {
"@babel/helper-module-imports": "^7.0.0",
"@babel/traverse": "^7.4.5",

@@ -8,14 +8,14 @@
"dependencies": {
"@lingui/react": "3.14.0",
"@patternfly/patternfly": "4.210.2",
"@patternfly/react-core": "^4.221.3",
"@patternfly/react-icons": "4.75.1",
"@patternfly/react-table": "4.100.8",
"@patternfly/react-core": "^4.239.0",
"@patternfly/react-icons": "4.90.0",
"@patternfly/react-table": "4.108.0",
"ace-builds": "^1.10.1",
"ansi-to-html": "0.7.2",
"axios": "0.27.2",
"codemirror": "^6.0.1",
"d3": "7.4.4",
"d3": "7.6.1",
"dagre": "^0.8.4",
"dompurify": "2.4.0",
"formik": "2.2.9",
@@ -31,7 +31,7 @@
"react-router-dom": "^5.3.3",
"react-virtualized": "^9.21.1",
"rrule": "2.7.1",
"styled-components": "5.3.5"
"styled-components": "5.3.6"
},
"devDependencies": {
"@babel/core": "^7.16.10",

@@ -3,7 +3,12 @@ import { Plural, t } from '@lingui/macro';
import { Button, DropdownItem, Tooltip } from '@patternfly/react-core';
import { useKebabifiedMenu } from 'contexts/Kebabified';

function HealthCheckButton({ isDisabled, onClick, selectedItems }) {
function HealthCheckButton({
isDisabled,
onClick,
selectedItems,
healthCheckPending,
}) {
const { isKebabified } = useKebabifiedMenu();

const selectedItemsCount = selectedItems.length;
@@ -28,8 +33,10 @@ function HealthCheckButton({ isDisabled, onClick, selectedItems }) {
component="button"
onClick={onClick}
ouiaId="health-check"
isLoading={healthCheckPending}
spinnerAriaLabel={t`Running health check`}
>
{t`Run health check`}
{healthCheckPending ? t`Running health check` : t`Run health check`}
</DropdownItem>
</Tooltip>
);
@@ -42,7 +49,11 @@ function HealthCheckButton({ isDisabled, onClick, selectedItems }) {
variant="secondary"
ouiaId="health-check"
onClick={onClick}
>{t`Run health check`}</Button>
isLoading={healthCheckPending}
spinnerAriaLabel={t`Running health check`}
>
{healthCheckPending ? t`Running health check` : t`Run health check`}
</Button>
</div>
</Tooltip>
);

@@ -107,6 +107,17 @@ function LaunchButton({ resource, children }) {
jobPromise = JobsAPI.relaunch(resource.id, params || {});
} else if (resource.type === 'workflow_job') {
jobPromise = WorkflowJobsAPI.relaunch(resource.id, params || {});
} else if (resource.type === 'ad_hoc_command') {
if (params?.credential_passwords) {
// The API expects the passwords at the top level of the object instead of nested
// in credential_passwords like the other relaunch endpoints
Object.keys(params.credential_passwords).forEach((key) => {
params[key] = params.credential_passwords[key];
});

delete params.credential_passwords;
}
jobPromise = AdHocCommandsAPI.relaunch(resource.id, params || {});
}

const { data: job } = await jobPromise;

@@ -129,7 +129,7 @@ function PromptModalForm({
}}
title={t`Launch | ${resource.name}`}
description={
resource.description.length > 512 ? (
resource.description?.length > 512 ? (
<ExpandableSection
toggleText={
showDescription ? t`Hide description` : t`Show description`

@@ -67,14 +67,14 @@ function ScheduleForm({
if (schedule.id) {
if (
resource.type === 'job_template' &&
launchConfig.ask_credential_on_launch
launchConfig?.ask_credential_on_launch
) {
const {
data: { results },
} = await SchedulesAPI.readCredentials(schedule.id);
creds = results;
}
if (launchConfig.ask_labels_on_launch) {
if (launchConfig?.ask_labels_on_launch) {
const {
data: { results },
} = await SchedulesAPI.readAllLabels(schedule.id);
@@ -82,7 +82,7 @@ function ScheduleForm({
}
if (
resource.type === 'job_template' &&
launchConfig.ask_instance_groups_on_launch
launchConfig?.ask_instance_groups_on_launch
) {
const {
data: { results },
@@ -91,7 +91,7 @@ function ScheduleForm({
}
} else {
if (resource.type === 'job_template') {
if (launchConfig.ask_labels_on_launch) {
if (launchConfig?.ask_labels_on_launch) {
const {
data: { results },
} = await JobTemplatesAPI.readAllLabels(resource.id);
@@ -100,7 +100,7 @@ function ScheduleForm({
}
if (
resource.type === 'workflow_job_template' &&
launchConfig.ask_labels_on_launch
launchConfig?.ask_labels_on_launch
) {
const {
data: { results },
@@ -123,14 +123,7 @@ function ScheduleForm({
zoneLinks: data.links,
credentials: creds,
};
}, [
schedule,
resource.id,
resource.type,
launchConfig.ask_labels_on_launch,
launchConfig.ask_instance_groups_on_launch,
launchConfig.ask_credential_on_launch,
]),
}, [schedule, resource.id, resource.type, launchConfig]),
{
zonesOptions: [],
zoneLinks: {},
@@ -146,7 +139,7 @@ function ScheduleForm({
const missingRequiredInventory = useCallback(() => {
let missingInventory = false;
if (
launchConfig.inventory_needed_to_start &&
launchConfig?.inventory_needed_to_start &&
!schedule?.summary_fields?.inventory?.id
) {
missingInventory = true;
@@ -423,8 +416,14 @@ function ScheduleForm({

if (options.end === 'onDate') {
if (
DateTime.fromISO(values.startDate) >=
DateTime.fromISO(options.endDate)
DateTime.fromFormat(
`${values.startDate} ${values.startTime}`,
'yyyy-LL-dd h:mm a'
).toMillis() >=
DateTime.fromFormat(
`${options.endDate} ${options.endTime}`,
'yyyy-LL-dd h:mm a'
).toMillis()
) {
freqErrors.endDate = t`Please select an end date/time that comes after the start date/time.`;
}

@@ -900,6 +900,36 @@ describe('<ScheduleForm />', () => {
);
});

test('should create schedule with the same start and end date provided that the end date is at a later time', async () => {
const today = DateTime.now().toFormat('yyyy-LL-dd');
const laterTime = DateTime.now().plus({ hours: 1 }).toFormat('h:mm a');
await act(async () => {
wrapper.find('DatePicker[aria-label="End date"]').prop('onChange')(
today,
new Date(today)
);
});
wrapper.update();
expect(
wrapper
.find('FormGroup[data-cy="schedule-End date/time"]')
.prop('helperTextInvalid')
).toBe(
'Please select an end date/time that comes after the start date/time.'
);
await act(async () => {
wrapper.find('TimePicker[aria-label="End time"]').prop('onChange')(
laterTime
);
});
wrapper.update();
expect(
wrapper
.find('FormGroup[data-cy="schedule-End date/time"]')
.prop('helperTextInvalid')
).toBe(undefined);
});

test('error shown when on day number is not between 1 and 31', async () => {
await act(async () => {
wrapper.find('FrequencySelect#schedule-frequency').invoke('onChange')([

awx/ui/src/locales/translations/es/django.po (normal file, 6241 lines; diff suppressed: too large)
awx/ui/src/locales/translations/es/messages.po (normal file, 10833 lines; diff suppressed: too large)
awx/ui/src/locales/translations/fr/django.po (normal file, 6243 lines; diff suppressed: too large)
awx/ui/src/locales/translations/fr/messages.po (normal file, 10713 lines; diff suppressed: too large)
awx/ui/src/locales/translations/ja/django.po (normal file, 6240 lines; diff suppressed: too large)
awx/ui/src/locales/translations/ja/messages.po (normal file, 10739 lines; diff suppressed: too large)
awx/ui/src/locales/translations/ko/django.po (normal file, 6240 lines; diff suppressed: too large)
awx/ui/src/locales/translations/ko/messages.po (normal file, 10700 lines; diff suppressed: too large)
awx/ui/src/locales/translations/nl/django.po (normal file, 6241 lines; diff suppressed: too large)
awx/ui/src/locales/translations/nl/messages.po (normal file, 10725 lines; diff suppressed: too large)
awx/ui/src/locales/translations/zh/django.po (normal file, 6242 lines; diff suppressed: too large)
awx/ui/src/locales/translations/zh/messages.po (normal file, 10698 lines; diff suppressed: too large)
@@ -12,7 +12,7 @@ import {
   Tooltip,
   Slider,
 } from '@patternfly/react-core';
-import { CaretLeftIcon } from '@patternfly/react-icons';
+import { CaretLeftIcon, OutlinedClockIcon } from '@patternfly/react-icons';
 import styled from 'styled-components';
 
 import { useConfig } from 'contexts/Config';
@@ -23,6 +23,7 @@ import ErrorDetail from 'components/ErrorDetail';
 import DisassociateButton from 'components/DisassociateButton';
 import InstanceToggle from 'components/InstanceToggle';
 import { CardBody, CardActionsRow } from 'components/Card';
+import getDocsBaseUrl from 'util/getDocsBaseUrl';
 import { formatDateString } from 'util/dates';
 import RoutedTabs from 'components/RoutedTabs';
 import ContentError from 'components/ContentError';
@@ -62,7 +63,7 @@ function computeForks(memCapacity, cpuCapacity, selectedCapacityAdjustment) {
 }
 
 function InstanceDetails({ setBreadcrumb, instanceGroup }) {
-  const { me = {} } = useConfig();
+  const config = useConfig();
   const { id, instanceId } = useParams();
   const history = useHistory();
 
@@ -115,15 +116,9 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
   useEffect(() => {
     fetchDetails();
   }, [fetchDetails]);
-  const {
-    error: healthCheckError,
-    isLoading: isRunningHealthCheck,
-    request: fetchHealthCheck,
-  } = useRequest(
+  const { error: healthCheckError, request: fetchHealthCheck } = useRequest(
     useCallback(async () => {
       const { status } = await InstancesAPI.healthCheck(instanceId);
-      const { data } = await InstancesAPI.readHealthCheckDetail(instanceId);
-      setHealthCheck(data);
       if (status === 200) {
         setShowHealthCheckAlert(true);
       }
@@ -161,6 +156,18 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
     debounceUpdateInstance({ capacity_adjustment: roundedValue });
   };
 
+  const formatHealthCheckTimeStamp = (last) => (
+    <>
+      {formatDateString(last)}
+      {instance.health_check_pending ? (
+        <>
+          {' '}
+          <OutlinedClockIcon />
+        </>
+      ) : null}
+    </>
+  );
+
   const { error, dismissError } = useDismissableError(
     disassociateError || updateInstanceError || healthCheckError
   );
@@ -189,6 +196,8 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
     return <ContentLoading />;
   }
 
+  const isExecutionNode = instance.node_type === 'execution';
+
   return (
     <>
       <RoutedTabs tabsArray={tabsArray} />
@@ -218,7 +227,22 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
           <Detail label={t`Total Jobs`} value={instance.jobs_total} />
           <Detail
             label={t`Last Health Check`}
-            value={formatDateString(healthCheck?.last_health_check)}
+            helpText={
+              <>
+                {t`Health checks are asynchronous tasks. See the`}{' '}
+                <a
+                  href={`${getDocsBaseUrl(
+                    config
+                  )}/html/administration/instances.html#health-check`}
+                  target="_blank"
+                  rel="noopener noreferrer"
+                >
+                  {t`documentation`}
+                </a>{' '}
+                {t`for more info.`}
+              </>
+            }
+            value={formatHealthCheckTimeStamp(instance.last_health_check)}
           />
           <Detail label={t`Node Type`} value={instance.node_type} />
           <Detail
@@ -237,7 +261,7 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
                 step={0.1}
                 value={instance.capacity_adjustment}
                 onChange={handleChangeValue}
-                isDisabled={!me?.is_superuser || !instance.enabled}
+                isDisabled={!config?.me?.is_superuser || !instance.enabled}
                 data-cy="slider"
               />
             </SliderForks>
@@ -274,19 +298,25 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
           )}
         </DetailList>
         <CardActionsRow>
-          <Tooltip content={t`Run a health check on the instance`}>
-            <Button
-              isDisabled={!me.is_superuser || isRunningHealthCheck}
-              variant="primary"
-              ouiaId="health-check-button"
-              onClick={fetchHealthCheck}
-              isLoading={isRunningHealthCheck}
-              spinnerAriaLabel={t`Running health check`}
-            >
-              {t`Run health check`}
-            </Button>
-          </Tooltip>
-          {me.is_superuser && instance.node_type !== 'control' && (
+          {isExecutionNode && (
+            <Tooltip content={t`Run a health check on the instance`}>
+              <Button
+                isDisabled={
+                  !config?.me?.is_superuser || instance.health_check_pending
+                }
+                variant="primary"
+                ouiaId="health-check-button"
+                onClick={fetchHealthCheck}
+                isLoading={instance.health_check_pending}
+                spinnerAriaLabel={t`Running health check`}
+              >
+                {instance.health_check_pending
+                  ? t`Running health check`
+                  : t`Run health check`}
+              </Button>
+            </Tooltip>
+          )}
+          {config?.me?.is_superuser && instance.node_type !== 'control' && (
             <DisassociateButton
               verifyCannotDisassociate={instanceGroup.name === 'controlplane'}
              key="disassociate"
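
A note on the recurring pattern in this file: the transient isLoading flag returned by the request hook is dropped in favor of the server-reported health_check_pending field, so a pending check reads the same after a page reload and across the list and detail views. A minimal sketch of that idea (hypothetical component, not the AWX source):

import React from 'react';
import { Button } from '@patternfly/react-core';

// Hypothetical: button state driven by a persisted server flag rather
// than a client-side "request in flight" boolean.
function RunHealthCheck({ instance, isSuperuser, onRun }) {
  const pending = instance.health_check_pending;
  return (
    <Button isDisabled={!isSuperuser || pending} isLoading={pending} onClick={onRun}>
      {pending ? 'Running health check' : 'Run health check'}
    </Button>
  );
}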
@@ -87,8 +87,9 @@ describe('<InstanceDetails/>', () => {
       mem_capacity: 38,
       enabled: true,
       managed_by_policy: true,
-      node_type: 'hybrid',
+      node_type: 'execution',
       node_state: 'ready',
+      health_check_pending: false,
     },
   });
   InstancesAPI.readHealthCheckDetail.mockResolvedValue({
@@ -347,6 +348,67 @@ describe('<InstanceDetails/>', () => {
     expect(wrapper.find('ErrorDetail')).toHaveLength(1);
   });
 
+  test.each([
+    [1, 'hybrid', 0],
+    [2, 'hop', 0],
+    [3, 'control', 0],
+  ])(
+    'hide health check button for non-execution type nodes',
+    async (a, b, expected) => {
+      InstancesAPI.readDetail.mockResolvedValue({
+        data: {
+          id: a,
+          type: 'instance',
+          url: '/api/v2/instances/1/',
+          related: {
+            named_url: '/api/v2/instances/awx_1/',
+            jobs: '/api/v2/instances/1/jobs/',
+            instance_groups: '/api/v2/instances/1/instance_groups/',
+            health_check: '/api/v2/instances/1/health_check/',
+          },
+          uuid: '00000000-0000-0000-0000-000000000000',
+          hostname: 'awx_1',
+          created: '2021-09-08T17:10:34.484569Z',
+          modified: '2021-09-09T13:55:44.219900Z',
+          last_seen: '2021-09-09T20:20:31.623148Z',
+          last_health_check: '2021-09-09T20:20:31.623148Z',
+          errors: '',
+          capacity_adjustment: '1.00',
+          version: '19.1.0',
+          capacity: 38,
+          consumed_capacity: 0,
+          percent_capacity_remaining: 100.0,
+          jobs_running: 0,
+          jobs_total: 0,
+          cpu: 8,
+          memory: 6232231936,
+          cpu_capacity: 32,
+          mem_capacity: 38,
+          enabled: true,
+          managed_by_policy: true,
+          node_type: b,
+          node_state: 'ready',
+          health_check_pending: false,
+        },
+      });
+      jest.spyOn(ConfigContext, 'useConfig').mockImplementation(() => ({
+        me: { is_superuser: true },
+      }));
+      await act(async () => {
+        wrapper = mountWithContexts(
+          <InstanceDetails
+            instanceGroup={instanceGroup}
+            setBreadcrumb={() => {}}
+          />
+        );
+      });
+      await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0);
+      expect(wrapper.find("Button[ouiaId='health-check-button']")).toHaveLength(
+        expected
+      );
+    }
+  );
+
   test('Should call disassociate', async () => {
     InstanceGroupsAPI.readInstances.mockResolvedValue({
       data: {
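
The regression test added above uses Jest's table-driven test.each: every row becomes one invocation of the test body, with the row values bound to the callback parameters and interpolated into the test name. A stripped-down, runnable sketch of the mechanics (buttonsFor is a stand-in for the mounted component):

// Stand-in for "how many health check buttons does this node type render".
const buttonsFor = (nodeType) => (nodeType === 'execution' ? 1 : 0);

test.each([
  ['hybrid', 0],
  ['hop', 0],
  ['control', 0],
  ['execution', 1],
])('a %s node renders %i health check buttons', (nodeType, expected) => {
  expect(buttonsFor(nodeType)).toBe(expected);
});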
@@ -35,6 +35,8 @@ const QS_CONFIG = getQSConfig('instance', {
 function InstanceList({ instanceGroup }) {
   const [isModalOpen, setIsModalOpen] = useState(false);
   const [showHealthCheckAlert, setShowHealthCheckAlert] = useState(false);
+  const [pendingHealthCheck, setPendingHealthCheck] = useState(false);
+  const [canRunHealthCheck, setCanRunHealthCheck] = useState(true);
   const location = useLocation();
   const { id: instanceGroupId } = useParams();
 
@@ -56,6 +58,10 @@ function InstanceList({ instanceGroup }) {
         InstanceGroupsAPI.readInstances(instanceGroupId, params),
         InstanceGroupsAPI.readInstanceOptions(instanceGroupId),
       ]);
+      const isPending = response.data.results.some(
+        (i) => i.health_check_pending === true
+      );
+      setPendingHealthCheck(isPending);
       return {
         instances: response.data.results,
         count: response.data.count,
@@ -90,7 +96,7 @@ function InstanceList({ instanceGroup }) {
     useCallback(async () => {
       const [...response] = await Promise.all(
         selected
-          .filter(({ node_type }) => node_type !== 'hop')
+          .filter(({ node_type }) => node_type === 'execution')
           .map(({ id }) => InstancesAPI.healthCheck(id))
       );
       if (response) {
@@ -99,6 +105,18 @@ function InstanceList({ instanceGroup }) {
     }, [selected])
   );
 
+  useEffect(() => {
+    if (selected) {
+      selected.forEach((i) => {
+        if (i.node_type === 'execution') {
+          setCanRunHealthCheck(true);
+        } else {
+          setCanRunHealthCheck(false);
+        }
+      });
+    }
+  }, [selected]);
+
   const handleHealthCheck = async () => {
     await fetchHealthCheck();
     clearSelected();
@@ -246,9 +264,10 @@ function InstanceList({ instanceGroup }) {
           isProtectedInstanceGroup={instanceGroup.name === 'controlplane'}
         />,
         <HealthCheckButton
-          isDisabled={!canAdd}
+          isDisabled={!canAdd || !canRunHealthCheck}
           onClick={handleHealthCheck}
           selectedItems={selected}
+          healthCheckPending={pendingHealthCheck}
         />,
       ]}
       emptyStateControls={
@@ -263,7 +282,10 @@ function InstanceList({ instanceGroup }) {
       )}
       headerRow={
         <HeaderRow qsConfig={QS_CONFIG} isExpandable>
-          <HeaderCell sortKey="hostname">{t`Name`}</HeaderCell>
+          <HeaderCell
+            tooltip={t`Health checks can only be run on execution nodes.`}
+            sortKey="hostname"
+          >{t`Name`}</HeaderCell>
           <HeaderCell sortKey="errors">{t`Status`}</HeaderCell>
           <HeaderCell sortKey="node_type">{t`Node Type`}</HeaderCell>
           <HeaderCell>{t`Capacity Adjustment`}</HeaderCell>
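
One caveat about the effect added above: setCanRunHealthCheck fires once per selected row, so the final value reflects only the last row visited. In practice the checkboxes of non-execution rows are disabled, so mixed selections should not occur, but a variant that guards against them explicitly (hypothetical, not part of this patch) reads:

useEffect(() => {
  if (selected.length) {
    // Disable the button if any selected node is not an execution node.
    setCanRunHealthCheck(selected.every((i) => i.node_type === 'execution'));
  }
}, [selected]);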
@@ -172,7 +172,7 @@ describe('<InstanceList/>', () => {
     await act(async () =>
       wrapper.find('Button[ouiaId="health-check"]').prop('onClick')()
     );
-    expect(InstancesAPI.healthCheck).toBeCalledTimes(3);
+    expect(InstancesAPI.healthCheck).toBeCalledTimes(1);
   });
   test('should render health check error', async () => {
     InstancesAPI.healthCheck.mockRejectedValue(
@@ -11,7 +11,9 @@ import {
   Slider,
   Tooltip,
 } from '@patternfly/react-core';
+import { OutlinedClockIcon } from '@patternfly/react-icons';
 import { Tr, Td, ExpandableRowContent } from '@patternfly/react-table';
+import getDocsBaseUrl from 'util/getDocsBaseUrl';
 import { formatDateString } from 'util/dates';
 import { ActionsTd, ActionItem } from 'components/PaginatedTable';
 import InstanceToggle from 'components/InstanceToggle';
@@ -52,7 +54,7 @@ function InstanceListItem({
   fetchInstances,
   rowIndex,
 }) {
-  const { me = {} } = useConfig();
+  const config = useConfig();
   const { id } = useParams();
   const [forks, setForks] = useState(
     computeForks(
@@ -100,6 +102,18 @@ function InstanceListItem({
     debounceUpdateInstance({ capacity_adjustment: roundedValue });
   };
 
+  const formatHealthCheckTimeStamp = (last) => (
+    <>
+      {formatDateString(last)}
+      {instance.health_check_pending ? (
+        <>
+          {' '}
+          <OutlinedClockIcon />
+        </>
+      ) : null}
+    </>
+  );
+
   return (
     <>
       <Tr
@@ -154,7 +168,7 @@ function InstanceListItem({
                 step={0.1}
                 value={instance.capacity_adjustment}
                 onChange={handleChangeValue}
-                isDisabled={!me?.is_superuser || !instance.enabled}
+                isDisabled={!config?.me?.is_superuser || !instance.enabled}
                 data-cy="slider"
               />
             </SliderForks>
@@ -206,7 +220,22 @@ function InstanceListItem({
             <Detail
               data-cy="last-health-check"
               label={t`Last Health Check`}
-              value={formatDateString(instance.last_health_check)}
+              helpText={
+                <>
+                  {t`Health checks are asynchronous tasks. See the`}{' '}
+                  <a
+                    href={`${getDocsBaseUrl(
+                      config
+                    )}/html/administration/instances.html#health-check`}
+                    target="_blank"
+                    rel="noopener noreferrer"
+                  >
+                    {t`documentation`}
+                  </a>{' '}
+                  {t`for more info.`}
+                </>
+              }
+              value={formatHealthCheckTimeStamp(instance.last_health_check)}
             />
           </DetailList>
         </ExpandableRowContent>
@@ -281,8 +281,8 @@ describe('<InstanceListItem/>', () => {
     expect(wrapper.find('Detail[label="Policy Type"]').prop('value')).toBe(
       'Auto'
     );
-    expect(
-      wrapper.find('Detail[label="Last Health Check"]').prop('value')
-    ).toBe('9/15/2021, 6:02:07 PM');
+    expect(wrapper.find('Detail[label="Last Health Check"]').text()).toBe(
+      'Last Health Check9/15/2021, 6:02:07 PM'
+    );
   });
 });
@@ -13,7 +13,7 @@ import {
   Slider,
   Label,
 } from '@patternfly/react-core';
-import { DownloadIcon } from '@patternfly/react-icons';
+import { DownloadIcon, OutlinedClockIcon } from '@patternfly/react-icons';
 import styled from 'styled-components';
 
 import { useConfig } from 'contexts/Config';
@@ -23,6 +23,7 @@ import AlertModal from 'components/AlertModal';
 import ErrorDetail from 'components/ErrorDetail';
 import InstanceToggle from 'components/InstanceToggle';
 import { CardBody, CardActionsRow } from 'components/Card';
+import getDocsBaseUrl from 'util/getDocsBaseUrl';
 import { formatDateString } from 'util/dates';
 import ContentError from 'components/ContentError';
 import ContentLoading from 'components/ContentLoading';
@@ -62,7 +63,8 @@ function computeForks(memCapacity, cpuCapacity, selectedCapacityAdjustment) {
 }
 
 function InstanceDetail({ setBreadcrumb, isK8s }) {
-  const { me = {} } = useConfig();
+  const config = useConfig();
+
   const { id } = useParams();
   const [forks, setForks] = useState();
   const history = useHistory();
@@ -85,8 +87,7 @@ function InstanceDetail({ setBreadcrumb, isK8s }) {
       InstancesAPI.readDetail(id),
       InstancesAPI.readInstanceGroup(id),
     ]);
-
-    if (details.node_type !== 'hop') {
+    if (details.node_type === 'execution') {
       const { data: healthCheckData } =
         await InstancesAPI.readHealthCheckDetail(id);
       setHealthCheck(healthCheckData);
@@ -115,15 +116,9 @@ function InstanceDetail({ setBreadcrumb, isK8s }) {
       setBreadcrumb(instance);
     }
   }, [instance, setBreadcrumb]);
-  const {
-    error: healthCheckError,
-    isLoading: isRunningHealthCheck,
-    request: fetchHealthCheck,
-  } = useRequest(
+  const { error: healthCheckError, request: fetchHealthCheck } = useRequest(
     useCallback(async () => {
       const { status } = await InstancesAPI.healthCheck(id);
-      const { data } = await InstancesAPI.readHealthCheckDetail(id);
-      setHealthCheck(data);
       if (status === 200) {
         setShowHealthCheckAlert(true);
       }
@@ -149,6 +144,18 @@ function InstanceDetail({ setBreadcrumb, isK8s }) {
     debounceUpdateInstance({ capacity_adjustment: roundedValue });
   };
 
+  const formatHealthCheckTimeStamp = (last) => (
+    <>
+      {formatDateString(last)}
+      {instance.health_check_pending ? (
+        <>
+          {' '}
+          <OutlinedClockIcon />
+        </>
+      ) : null}
+    </>
+  );
+
   const buildLinkURL = (inst) =>
     inst.is_container_group
       ? '/instance_groups/container_group/'
@@ -179,6 +186,7 @@ function InstanceDetail({ setBreadcrumb, isK8s }) {
     return <ContentLoading />;
   }
   const isHopNode = instance.node_type === 'hop';
+  const isExecutionNode = instance.node_type === 'execution';
 
   return (
     <>
@@ -242,7 +250,22 @@ function InstanceDetail({ setBreadcrumb, isK8s }) {
           <Detail
             label={t`Last Health Check`}
             dataCy="last-health-check"
-            value={formatDateString(healthCheck?.last_health_check)}
+            helpText={
+              <>
+                {t`Health checks are asynchronous tasks. See the`}{' '}
+                <a
+                  href={`${getDocsBaseUrl(
+                    config
+                  )}/html/administration/instances.html#health-check`}
+                  target="_blank"
+                  rel="noopener noreferrer"
+                >
+                  {t`documentation`}
+                </a>{' '}
+                {t`for more info.`}
+              </>
+            }
+            value={formatHealthCheckTimeStamp(instance.last_health_check)}
           />
           {instance.related?.install_bundle && (
             <Detail
@@ -280,7 +303,9 @@ function InstanceDetail({ setBreadcrumb, isK8s }) {
                 step={0.1}
                 value={instance.capacity_adjustment}
                 onChange={handleChangeValue}
-                isDisabled={!me?.is_superuser || !instance.enabled}
+                isDisabled={
+                  !config?.me?.is_superuser || !instance.enabled
+                }
                 data-cy="slider"
               />
             </SliderForks>
@@ -324,7 +349,7 @@ function InstanceDetail({ setBreadcrumb, isK8s }) {
         </DetailList>
         {!isHopNode && (
           <CardActionsRow>
-            {me.is_superuser && isK8s && instance.node_type === 'execution' && (
+            {config?.me?.is_superuser && isK8s && isExecutionNode && (
               <RemoveInstanceButton
                 dataCy="remove-instance-button"
                 itemsToRemove={[instance]}
@@ -332,18 +357,24 @@ function InstanceDetail({ setBreadcrumb, isK8s }) {
                 onRemove={removeInstances}
               />
             )}
-            <Tooltip content={t`Run a health check on the instance`}>
-              <Button
-                isDisabled={!me.is_superuser || isRunningHealthCheck}
-                variant="primary"
-                ouiaId="health-check-button"
-                onClick={fetchHealthCheck}
-                isLoading={isRunningHealthCheck}
-                spinnerAriaLabel={t`Running health check`}
-              >
-                {t`Run health check`}
-              </Button>
-            </Tooltip>
+            {isExecutionNode && (
+              <Tooltip content={t`Run a health check on the instance`}>
+                <Button
+                  isDisabled={
+                    !config?.me?.is_superuser || instance.health_check_pending
+                  }
+                  variant="primary"
+                  ouiaId="health-check-button"
+                  onClick={fetchHealthCheck}
+                  isLoading={instance.health_check_pending}
+                  spinnerAriaLabel={t`Running health check`}
+                >
+                  {instance.health_check_pending
+                    ? t`Running health check`
+                    : t`Run health check`}
+                </Button>
+              </Tooltip>
+            )}
             <InstanceToggle
               css="display: inline-flex;"
               fetchInstances={fetchDetails}
@@ -49,8 +49,9 @@ describe('<InstanceDetail/>', () => {
       mem_capacity: 38,
       enabled: true,
       managed_by_policy: true,
-      node_type: 'hybrid',
+      node_type: 'execution',
       node_state: 'ready',
+      health_check_pending: false,
     },
   });
   InstancesAPI.readInstanceGroup.mockResolvedValue({
@@ -37,6 +37,8 @@ function InstanceList() {
   const location = useLocation();
   const { me } = useConfig();
   const [showHealthCheckAlert, setShowHealthCheckAlert] = useState(false);
+  const [pendingHealthCheck, setPendingHealthCheck] = useState(false);
+  const [canRunHealthCheck, setCanRunHealthCheck] = useState(true);
 
   const {
     result: { instances, count, relatedSearchableKeys, searchableKeys, isK8s },
@@ -51,6 +53,10 @@ function InstanceList() {
         InstancesAPI.readOptions(),
         SettingsAPI.readCategory('system'),
       ]);
+      const isPending = response.data.results.some(
+        (i) => i.health_check_pending === true
+      );
+      setPendingHealthCheck(isPending);
       return {
         instances: response.data.results,
         isK8s: sysSettings.data.IS_K8S,
@@ -87,7 +93,7 @@ function InstanceList() {
     useCallback(async () => {
       const [...response] = await Promise.all(
         selected
-          .filter(({ node_type }) => node_type !== 'hop')
+          .filter(({ node_type }) => node_type === 'execution')
          .map(({ id }) => InstancesAPI.healthCheck(id))
       );
       if (response) {
@@ -96,6 +102,18 @@ function InstanceList() {
     }, [selected])
   );
 
+  useEffect(() => {
+    if (selected) {
+      selected.forEach((i) => {
+        if (i.node_type === 'execution') {
+          setCanRunHealthCheck(true);
+        } else {
+          setCanRunHealthCheck(false);
+        }
+      });
+    }
+  }, [selected]);
+
   const handleHealthCheck = async () => {
     await fetchHealthCheck();
     clearSelected();
@@ -189,6 +207,8 @@ function InstanceList() {
             onClick={handleHealthCheck}
             key="healthCheck"
             selectedItems={selected}
+            healthCheckPending={pendingHealthCheck}
+            isDisabled={!canRunHealthCheck}
           />,
         ]}
       />
@@ -196,7 +216,7 @@ function InstanceList() {
       headerRow={
         <HeaderRow qsConfig={QS_CONFIG} isExpandable>
           <HeaderCell
-            tooltip={t`Cannot run health check on hop nodes.`}
+            tooltip={t`Health checks can only be run on execution nodes.`}
             sortKey="hostname"
           >{t`Name`}</HeaderCell>
           <HeaderCell sortKey="errors">{t`Status`}</HeaderCell>
@@ -32,7 +32,7 @@ const instances = [
     jobs_running: 0,
     jobs_total: 68,
     cpu: 6,
-    node_type: 'control',
+    node_type: 'execution',
     node_state: 'ready',
     memory: 2087469056,
     cpu_capacity: 24,
@@ -52,7 +52,7 @@ const instances = [
     jobs_running: 0,
     jobs_total: 68,
     cpu: 6,
-    node_type: 'hybrid',
+    node_type: 'execution',
     node_state: 'ready',
     memory: 2087469056,
     cpu_capacity: 24,
@@ -11,7 +11,9 @@ import {
   Slider,
   Tooltip,
 } from '@patternfly/react-core';
+import { OutlinedClockIcon } from '@patternfly/react-icons';
 import { Tr, Td, ExpandableRowContent } from '@patternfly/react-table';
+import getDocsBaseUrl from 'util/getDocsBaseUrl';
 import { formatDateString } from 'util/dates';
 import computeForks from 'util/computeForks';
 import { ActionsTd, ActionItem } from 'components/PaginatedTable';
@@ -52,7 +54,7 @@ function InstanceListItem({
   fetchInstances,
   rowIndex,
 }) {
-  const { me = {} } = useConfig();
+  const config = useConfig();
   const [forks, setForks] = useState(
     computeForks(
       instance.mem_capacity,
@@ -98,7 +100,21 @@ function InstanceListItem({
     );
     debounceUpdateInstance({ capacity_adjustment: roundedValue });
   };
+
+  const formatHealthCheckTimeStamp = (last) => (
+    <>
+      {formatDateString(last)}
+      {instance.health_check_pending ? (
+        <>
+          {' '}
+          <OutlinedClockIcon />
+        </>
+      ) : null}
+    </>
+  );
+
   const isHopNode = instance.node_type === 'hop';
+  const isExecutionNode = instance.node_type === 'execution';
   return (
     <>
       <Tr
@@ -121,7 +137,7 @@ function InstanceListItem({
             rowIndex,
             isSelected,
             onSelect,
-            disable: isHopNode,
+            disable: !isExecutionNode,
           }}
           dataLabel={t`Selected`}
         />
@@ -164,7 +180,7 @@ function InstanceListItem({
                 step={0.1}
                 value={instance.capacity_adjustment}
                 onChange={handleChangeValue}
-                isDisabled={!me?.is_superuser || !instance.enabled}
+                isDisabled={!config?.me?.is_superuser || !instance.enabled}
                 data-cy="slider"
               />
             </SliderForks>
@@ -221,7 +237,22 @@ function InstanceListItem({
             <Detail
               data-cy="last-health-check"
               label={t`Last Health Check`}
-              value={formatDateString(instance.last_health_check)}
+              helpText={
+                <>
+                  {t`Health checks are asynchronous tasks. See the`}{' '}
+                  <a
+                    href={`${getDocsBaseUrl(
+                      config
+                    )}/html/administration/instances.html#health-check`}
+                    target="_blank"
+                    rel="noopener noreferrer"
+                  >
+                    {t`documentation`}
+                  </a>{' '}
+                  {t`for more info.`}
+                </>
+              }
+              value={formatHealthCheckTimeStamp(instance.last_health_check)}
             />
           </DetailList>
         </ExpandableRowContent>
@@ -272,9 +272,9 @@ describe('<InstanceListItem/>', () => {
     expect(wrapper.find('Detail[label="Policy Type"]').prop('value')).toBe(
       'Auto'
     );
-    expect(
-      wrapper.find('Detail[label="Last Health Check"]').prop('value')
-    ).toBe('9/15/2021, 6:02:07 PM');
+    expect(wrapper.find('Detail[label="Last Health Check"]').text()).toBe(
+      'Last Health Check9/15/2021, 6:02:07 PM'
+    );
   });
   test('Hop should not render some things', async () => {
     const onSelect = jest.fn();
@@ -91,9 +91,7 @@ const SmartInventoryFormFields = ({ inventory }) => {
           id="variables"
           name="variables"
           label={t`Variables`}
-          tooltip={t`Enter inventory variables using either JSON or YAML syntax.
-          Use the radio button to toggle between the two. Refer to the
-          Ansible Controller documentation for example syntax.`}
+          tooltip={t`Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Controller documentation for example syntax.`}
         />
       </FormFullWidthLayout>
     </>
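
The tooltip change above looks cosmetic but is not: a template literal preserves the source file's newlines and indentation, so the wrapped string, and with it the extracted translation entry, carried runs of leading spaces. Collapsing it to one line keeps the marked string clean. A two-line illustration:

const multi = `Enter inventory variables
            using either JSON or YAML syntax.`;
JSON.stringify(multi);
// => "Enter inventory variables\n            using either JSON or YAML syntax."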
@@ -40,6 +40,8 @@ const processCodeEditorValue = (value) => {
     codeEditorValue = '';
   } else if (typeof value === 'string') {
     codeEditorValue = encode(value);
+  } else if (Array.isArray(value)) {
+    codeEditorValue = encode(value.join(' '));
   } else {
     codeEditorValue = value;
   }
@@ -60,7 +62,7 @@ const getStdOutValue = (hostEvent) => {
   ) {
     stdOut = res.results.join('\n');
   } else if (res?.stdout) {
-    stdOut = res.stdout;
+    stdOut = Array.isArray(res.stdout) ? res.stdout.join(' ') : res.stdout;
   }
   return stdOut;
 };
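
Both hunks above apply the same normalization: Ansible modules may report stdout either as a single string or as a list of strings (the cisco.ios ios_command module referenced in the test fixture below is one example), while the code editor expects a string. The same idea as a standalone function (the name is made up):

const normalizeStdOut = (stdout) =>
  Array.isArray(stdout) ? stdout.join(' ') : stdout;

normalizeStdOut('plain text');        // 'plain text'
normalizeStdOut(['line one', 'two']); // 'line one two'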
@@ -52,6 +52,60 @@ const hostEvent = {
   },
 };
 
+/*
+  Some libraries return a list of string in stdout
+  Example: https://github.com/ansible-collections/cisco.ios/blob/main/plugins/modules/ios_command.py#L124-L128
+*/
+const hostEventWithArray = {
+  changed: true,
+  event: 'runner_on_ok',
+  event_data: {
+    host: 'foo',
+    play: 'all',
+    playbook: 'run_command.yml',
+    res: {
+      ansible_loop_var: 'item',
+      changed: true,
+      item: '1',
+      msg: 'This is a debug message: 1',
+      stdout: [
+        ' total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023',
+      ],
+      stderr: 'problems',
+      cmd: ['free', '-m'],
+      stderr_lines: [],
+      stdout_lines: [
+        ' total used free shared buff/cache available',
+        'Mem: 7973 3005 960 30 4007 4582',
+        'Swap: 1023 0 1023',
+      ],
+    },
+    task: 'command',
+    task_action: 'command',
+  },
+  event_display: 'Host OK',
+  event_level: 3,
+  failed: false,
+  host: 1,
+  host_name: 'foo',
+  id: 123,
+  job: 4,
+  play: 'all',
+  playbook: 'run_command.yml',
+  stdout: `stdout: "[0;33mchanged: [localhost] => {"changed": true, "cmd": ["free", "-m"], "delta": "0:00:01.479609", "end": "2019-09-10 14:21:45.469533", "rc": 0, "start": "2019-09-10 14:21:43.989924", "stderr": "", "stderr_lines": [], "stdout": " total used free shared buff/cache available\nMem: 7973 3005 960 30 4007 4582\nSwap: 1023 0 1023", "stdout_lines": [" total used free shared buff/cache available", "Mem: 7973 3005 960 30 4007 4582", "Swap: 1023 0 1023"]}[0m"
+`,
+  task: 'command',
+  type: 'job_event',
+  url: '/api/v2/job_events/123/',
+  summary_fields: {
+    host: {
+      id: 1,
+      name: 'foo',
+      description: 'Bar',
+    },
+  },
+};
+
 /* eslint-disable no-useless-escape */
 const jsonValue = `{
   \"ansible_loop_var\": \"item\",
@@ -281,4 +335,25 @@ describe('HostEventModal', () => {
     expect(codeEditor.prop('readOnly')).toBe(true);
     expect(codeEditor.prop('value')).toEqual('baz\nbar');
   });
+
+  test('should display Standard Out array stdout content', () => {
+    const wrapper = shallow(
+      <HostEventModal
+        hostEvent={hostEventWithArray}
+        onClose={() => {}}
+        isOpen
+      />
+    );
+
+    const handleTabClick = wrapper.find('Tabs').prop('onSelect');
+    handleTabClick(null, 2);
+    wrapper.update();
+
+    const codeEditor = wrapper.find('Tab[eventKey=2] CodeEditor');
+    expect(codeEditor.prop('mode')).toBe('javascript');
+    expect(codeEditor.prop('readOnly')).toBe(true);
+    expect(codeEditor.prop('value')).toEqual(
+      hostEventWithArray.event_data.res.stdout.join(' ')
+    );
+  });
 });
@@ -1,6 +1,6 @@
-import React from 'react';
+import React, { useEffect, useState } from 'react';
 import styled from 'styled-components';
 
+import { DateTime, Duration } from 'luxon';
 import { t } from '@lingui/macro';
 import { bool, shape, func } from 'prop-types';
 import {
@@ -41,18 +41,18 @@ const Wrapper = styled.div`
   flex-flow: row wrap;
   font-size: 14px;
 `;
+const calculateElapsed = (started) => {
+  const now = DateTime.now();
+  const duration = now
+    .diff(DateTime.fromISO(`${started}`), [
+      'milliseconds',
+      'seconds',
+      'minutes',
+      'hours',
+    ])
+    .toObject();
 
-const toHHMMSS = (elapsed) => {
-  const sec_num = parseInt(elapsed, 10);
-  const hours = Math.floor(sec_num / 3600);
-  const minutes = Math.floor(sec_num / 60) % 60;
-  const seconds = sec_num % 60;
-
-  const stampHours = hours < 10 ? `0${hours}` : hours;
-  const stampMinutes = minutes < 10 ? `0${minutes}` : minutes;
-  const stampSeconds = seconds < 10 ? `0${seconds}` : seconds;
-
-  return `${stampHours}:${stampMinutes}:${stampSeconds}`;
+  return Duration.fromObject({ ...duration }).toFormat('hh:mm:ss');
 };
 
 const OUTPUT_NO_COUNT_JOB_TYPES = [
@@ -62,6 +62,7 @@ const OUTPUT_NO_COUNT_JOB_TYPES = [
 ];
 
 const OutputToolbar = ({ job, onDelete, isDeleteDisabled, jobStatus }) => {
+  const [activeJobElapsedTime, setActiveJobElapsedTime] = useState('00:00:00');
   const hideCounts = OUTPUT_NO_COUNT_JOB_TYPES.includes(job.type);
 
   const playCount = job?.playbook_counts?.play_count;
@@ -76,6 +77,20 @@ const OutputToolbar = ({ job, onDelete, isDeleteDisabled, jobStatus }) => {
       : 0;
   const { me } = useConfig();
 
+  useEffect(() => {
+    let secTimer;
+    if (job.finished) {
+      return () => clearInterval(secTimer);
+    }
+
+    secTimer = setInterval(() => {
+      const elapsedTime = calculateElapsed(job.started);
+      setActiveJobElapsedTime(elapsedTime);
+    }, 1000);
+
+    return () => clearInterval(secTimer);
+  }, [job.started, job.finished]);
+
   return (
     <Wrapper>
       {!hideCounts && (
@@ -124,7 +139,13 @@ const OutputToolbar = ({ job, onDelete, isDeleteDisabled, jobStatus }) => {
           <BadgeGroup aria-label={t`Elapsed Time`}>
             <div>{t`Elapsed`}</div>
             <Tooltip content={t`Elapsed time that the job ran`}>
-              <Badge isRead>{toHHMMSS(job.elapsed)}</Badge>
+              <Badge isRead>
+                {job.finished
+                  ? Duration.fromObject({ seconds: job.elapsed }).toFormat(
+                      'hh:mm:ss'
+                    )
+                  : activeJobElapsedTime}
+              </Badge>
             </Tooltip>
           </BadgeGroup>
           {['pending', 'waiting', 'running'].includes(jobStatus) &&
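
The toolbar rewrite leans on two pieces of the Luxon API: Duration.toFormat, which shifts a duration into the units named in the format string, and DateTime.diff, which produces a Duration between two timestamps. A compact sketch of both halves of the new behavior (elapsedSince is an illustrative name, not AWX code):

import { DateTime, Duration } from 'luxon';

// Finished jobs: the API reports elapsed seconds; format them directly.
Duration.fromObject({ seconds: 3723 }).toFormat('hh:mm:ss'); // '01:02:03'

// Running jobs: re-derive elapsed time from the start timestamp
// (the component recomputes this every second via setInterval).
const elapsedSince = (startedISO) =>
  DateTime.now()
    .diff(DateTime.fromISO(startedISO), ['hours', 'minutes', 'seconds'])
    .toFormat('hh:mm:ss');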
Some files were not shown because too many files have changed in this diff.