Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 12:04:44 -03:30)

Compare commits: 21.7.0...test_cyber (137 commits)
| Author | SHA1 | Date |
|---|---|---|
| | c437a37be7 | |
| | b59cee97d8 | |
| | 9ca554ce75 | |
| | 81e20c727d | |
| | 8e5af2b5f2 | |
| | 918db89dc8 | |
| | 6e25a552d3 | |
| | 83c48bb5fa | |
| | 1c65339a24 | |
| | 75e6366c5e | |
| | af6fec5592 | |
| | 893dba7076 | |
| | b28cc34ff3 | |
| | 776d39f057 | |
| | 61b242d194 | |
| | 22b81f5dd3 | |
| | 99e1920d42 | |
| | 2218fd5c25 | |
| | bd7635e74e | |
| | 0faa999ceb | |
| | 1bedf32baf | |
| | 577f102e53 | |
| | c5cf39abb7 | |
| | 6b315f39de | |
| | 529a936d0a | |
| | 6538d34b48 | |
| | e40824bded | |
| | ed318ea784 | |
| | d2b69e05f6 | |
| | b57ae592ed | |
| | f3482f4038 | |
| | e22f887765 | |
| | fc838ba44b | |
| | b19aa4a88d | |
| | eba24db74c | |
| | 153a197fad | |
| | 8f4c329c2a | |
| | 368eb46f5b | |
| | d6fea77082 | |
| | 878035c13b | |
| | 2cc971a43f | |
| | 9d77c54612 | |
| | 546fabbb97 | |
| | ef651a3a21 | |
| | aaf6f5f17e | |
| | 68862d5085 | |
| | 3303f7bfcf | |
| | 95dba81a9d | |
| | 66c7d5e9be | |
| | 4b308d313a | |
| | d80db763bc | |
| | 41fd6ea37f | |
| | 4808a0053f | |
| | de41601f27 | |
| | ddd09461fb | |
| | 6d192927ae | |
| | 487efb77ce | |
| | e655e1dbc2 | |
| | e41f20320a | |
| | 192f45bbd0 | |
| | e013d25e2d | |
| | 8a6ad47ca5 | |
| | cba780a8f8 | |
| | 3fc67dc76c | |
| | 6f85aef5fe | |
| | 4d9b8400da | |
| | 4a7335676d | |
| | eeb9d61488 | |
| | 234ce529fc | |
| | 4f36943b47 | |
| | 25737ba7c6 | |
| | 7127d18072 | |
| | e5c834383c | |
| | b9c9800210 | |
| | c94dc08cf3 | |
| | a0594c8948 | |
| | ab5ea46006 | |
| | 6b471e468c | |
| | 50614b961e | |
| | a2be320605 | |
| | 8a959e9586 | |
| | 1db189c7ee | |
| | 39c2fcd8c2 | |
| | da857ea334 | |
| | d50c97ae22 | |
| | 0f150aa3b3 | |
| | cdb51a75b8 | |
| | 22b6ae6903 | |
| | 871175f97f | |
| | e6497be200 | |
| | 3b9333be9f | |
| | 04b814cfd8 | |
| | bb2e5cba0a | |
| | 42a4e9f10f | |
| | 882d2fdbe8 | |
| | 0d69d40859 | |
| | 2e38bbcbcd | |
| | 6f741b909a | |
| | bbb00e0674 | |
| | 560b952dd6 | |
| | 62c773e912 | |
| | fd38c926b2 | |
| | 7a8874b947 | |
| | 150c55c72a | |
| | 417ac3b88c | |
| | 9e0d1a678c | |
| | 1a766c09e7 | |
| | 7849c0fb1e | |
| | 35a7e43f22 | |
| | 47a6a73fc5 | |
| | 805091cfc1 | |
| | 8d05e339ae | |
| | 8472e3a26d | |
| | 174121cdbe | |
| | 385a2eabce | |
| | a64467c5a6 | |
| | 58772d79c7 | |
| | 235ed2f0d0 | |
| | 03eaeac459 | |
| | 63fd18edcb | |
| | 208254ab81 | |
| | aae57378f0 | |
| | a4fba37222 | |
| | 3a09522d3e | |
| | b5db710c8b | |
| | b964905c80 | |
| | 37717ce3d5 | |
| | e7c75f3510 | |
| | 8e83f9b134 | |
| | d3eb2c1975 | |
| | 5551874352 | |
| | 80a0842df1 | |
| | 2dd2931ab2 | |
| | e83a4d7234 | |
| | 8e2003a36b | |
| | 4f52343cd9 | |
| | d0b95c063b | |
24 .github/workflows/feature_branch_deletion.yml (vendored, new file)
@@ -0,0 +1,24 @@
---
name: Feature branch deletion cleanup
on:
  delete:
    branches:
      - feature_**
jobs:
  push:
    runs-on: ubuntu-latest
    permissions:
      packages: write
      contents: read
    steps:
      - name: Delete API Schema
        env:
          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
          AWS_REGION: 'us-east-1'
        run: |
          ansible localhost -c local -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
          ansible localhost -c local -m aws_s3 \
            -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"
18 .github/workflows/pr_body_check.yml (vendored)
@@ -13,21 +13,13 @@ jobs:
      packages: write
      contents: read
    steps:
      - name: Write PR body to a file
        run: |
          cat >> pr.body << __SOME_RANDOM_PR_EOF__
          ${{ github.event.pull_request.body }}
          __SOME_RANDOM_PR_EOF__

      - name: Display the received body for troubleshooting
        run: cat pr.body

      # We want to write these out individually just incase the options were joined on a single line
      - name: Check for each of the lines
        env:
          PR_BODY: ${{ github.event.pull_request.body }}
        run: |
          grep "Bug, Docs Fix or other nominal change" pr.body > Z
          grep "New or Enhanced Feature" pr.body > Y
          grep "Breaking Change" pr.body > X
          echo $PR_BODY | grep "Bug, Docs Fix or other nominal change" > Z
          echo $PR_BODY | grep "New or Enhanced Feature" > Y
          echo $PR_BODY | grep "Breaking Change" > X
          exit 0
          # We exit 0 and set the shell to prevent the returns from the greps from failing this step
          # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference
1 .github/workflows/upload_schema.yml (vendored)
@@ -5,6 +5,7 @@ on:
    branches:
      - devel
      - release_**
      - feature_**
jobs:
  push:
    runs-on: ubuntu-latest
24 Makefile
@@ -85,6 +85,7 @@ clean: clean-ui clean-api clean-awxkit clean-dist

clean-api:
        rm -rf build $(NAME)-$(VERSION) *.egg-info
        rm -rf .tox
        find . -type f -regex ".*\.py[co]$$" -delete
        find . -type d -name "__pycache__" -delete
        rm -f awx/awx_test.sqlite3*
@@ -181,7 +182,7 @@ collectstatic:
        @if [ "$(VENV_BASE)" ]; then \
                . $(VENV_BASE)/awx/bin/activate; \
        fi; \
        mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
        $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1

DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*

@@ -377,6 +378,8 @@ clean-ui:
        rm -rf awx/ui/build
        rm -rf awx/ui/src/locales/_build
        rm -rf $(UI_BUILD_FLAG_FILE)
        # the collectstatic command doesn't like it if this dir doesn't exist.
        mkdir -p awx/ui/build/static

awx/ui/node_modules:
        NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci
@@ -386,16 +389,14 @@ $(UI_BUILD_FLAG_FILE):
        $(PYTHON) tools/scripts/compilemessages.py
        $(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
        $(NPM_BIN) --prefix awx/ui --loglevel warn run build
        mkdir -p awx/public/static/css
        mkdir -p awx/public/static/js
        mkdir -p awx/public/static/media
        cp -r awx/ui/build/static/css/* awx/public/static/css
        cp -r awx/ui/build/static/js/* awx/public/static/js
        cp -r awx/ui/build/static/media/* awx/public/static/media
        mkdir -p /var/lib/awx/public/static/css
        mkdir -p /var/lib/awx/public/static/js
        mkdir -p /var/lib/awx/public/static/media
        cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css
        cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js
        cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media
        touch $@


ui-release: $(UI_BUILD_FLAG_FILE)

ui-devel: awx/ui/node_modules
@@ -453,6 +454,7 @@ COMPOSE_OPTS ?=
CONTROL_PLANE_NODE_COUNT ?= 1
EXECUTION_NODE_COUNT ?= 2
MINIKUBE_CONTAINER_GROUP ?= false
MINIKUBE_SETUP ?= false # if false, run minikube separately
EXTRA_SOURCES_ANSIBLE_OPTS ?=

ifneq ($(ADMIN_PASSWORD),)
@@ -461,7 +463,7 @@ endif

docker-compose-sources: .git/hooks/pre-commit
        @if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
                ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
                ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
        fi;

        ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
@@ -635,4 +637,4 @@ help/generate:
        } \
        } \
        { lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
        @printf "\n"
        @printf "\n"
@@ -6,7 +6,6 @@ import inspect
import logging
import time
import uuid
import urllib.parse

# Django
from django.conf import settings
@@ -14,7 +13,7 @@ from django.contrib.auth import views as auth_views
from django.contrib.contenttypes.models import ContentType
from django.core.cache import cache
from django.core.exceptions import FieldDoesNotExist
from django.db import connection
from django.db import connection, transaction
from django.db.models.fields.related import OneToOneRel
from django.http import QueryDict
from django.shortcuts import get_object_or_404
@@ -30,7 +29,7 @@ from rest_framework.response import Response
from rest_framework import status
from rest_framework import views
from rest_framework.permissions import AllowAny
from rest_framework.renderers import StaticHTMLRenderer, JSONRenderer
from rest_framework.renderers import StaticHTMLRenderer
from rest_framework.negotiation import DefaultContentNegotiation

# AWX
@@ -41,7 +40,7 @@ from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd,
from awx.main.utils.db import get_all_field_names
from awx.main.utils.licensing import server_product_name
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
from awx.api.versioning import URLPathVersioning
from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
from awx.conf import settings_registry
@@ -65,6 +64,7 @@ __all__ = [
    'ParentMixin',
    'SubListAttachDetachAPIView',
    'CopyAPIView',
    'GenericCancelView',
    'BaseUsersList',
]

@@ -90,13 +90,9 @@ class LoggedLoginView(auth_views.LoginView):

    def post(self, request, *args, **kwargs):
        ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
        if request.user.is_authenticated:
            logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
            ret.set_cookie('userLoggedIn', 'true')
            current_user = UserSerializer(self.request.user)
            current_user = smart_str(JSONRenderer().render(current_user.data))
            current_user = urllib.parse.quote('%s' % current_user, '')
            ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))

        return ret
@@ -253,7 +249,7 @@ class APIView(views.APIView):
            response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)

        if getattr(self, 'deprecated', False):
            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'  # noqa
            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'

        return response

@@ -990,6 +986,23 @@ class CopyAPIView(GenericAPIView):
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)


class GenericCancelView(RetrieveAPIView):
    # In subclass set model, serializer_class
    obj_permission_type = 'cancel'

    @transaction.non_atomic_requests
    def dispatch(self, *args, **kwargs):
        return super(GenericCancelView, self).dispatch(*args, **kwargs)

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class BaseUsersList(SubListCreateAttachDetachAPIView):
    def post(self, request, *args, **kwargs):
        ret = super(BaseUsersList, self).post(request, *args, **kwargs)
@@ -24,7 +24,6 @@ __all__ = [
    'InventoryInventorySourcesUpdatePermission',
    'UserPermission',
    'IsSystemAdminOrAuditor',
    'InstanceGroupTowerPermission',
    'WorkflowApprovalPermission',
]
@@ -29,6 +29,7 @@ from django.utils.translation import gettext_lazy as _
from django.utils.encoding import force_str
from django.utils.text import capfirst
from django.utils.timezone import now
from django.core.validators import RegexValidator, MaxLengthValidator

# Django REST Framework
from rest_framework.exceptions import ValidationError, PermissionDenied
@@ -120,6 +121,9 @@ from awx.main.validators import vars_validate_or_raise
from awx.api.versioning import reverse
from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, VerbatimField, DeprecatedCredentialField

# AWX Utils
from awx.api.validators import HostnameRegexValidator

logger = logging.getLogger('awx.api.serializers')

# Fields that should be summarized regardless of object type.
@@ -615,7 +619,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
    def validate(self, attrs):
        attrs = super(BaseSerializer, self).validate(attrs)
        try:
            # Create/update a model instance and run it's full_clean() method to
            # Create/update a model instance and run its full_clean() method to
            # do any validation implemented on the model class.
            exclusions = self.get_validation_exclusions(self.instance)
            obj = self.instance or self.Meta.model()
@@ -2217,6 +2221,15 @@ class InventorySourceUpdateSerializer(InventorySourceSerializer):
    class Meta:
        fields = ('can_update',)

    def validate(self, attrs):
        project = self.instance.source_project
        if project:
            failed_reason = project.get_reason_if_failed()
            if failed_reason:
                raise serializers.ValidationError(failed_reason)

        return super(InventorySourceUpdateSerializer, self).validate(attrs)


class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):

@@ -3229,7 +3242,7 @@ class JobCreateScheduleSerializer(LabelsListMixin, BaseSerializer):
            ret['labels'] = self._summary_field_labels(config)
            return ret
        except JobLaunchConfig.DoesNotExist:
            return {'all': _('Unknown, job may have been ran before launch configurations were saved.')}
            return {'all': _('Unknown, job may have been run before launch configurations were saved.')}


class AdHocCommandSerializer(UnifiedJobSerializer):
@@ -3746,7 +3759,11 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):

        # Build unsaved version of this config, use it to detect prompts errors
        mock_obj = self._build_mock_obj(attrs)
        accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
        if set(list(ujt.get_ask_mapping().keys()) + ['extra_data']) & set(attrs.keys()):
            accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
        else:
            # Only perform validation of prompts if prompts fields are provided
            errors = {}

        # Remove all unprocessed $encrypted$ strings, indicating default usage
        if 'extra_data' in attrs and password_dict:
@@ -4264,17 +4281,10 @@ class JobLaunchSerializer(BaseSerializer):
        # Basic validation - cannot run a playbook without a playbook
        if not template.project:
            errors['project'] = _("A project is required to run a job.")
        elif template.project.status in ('error', 'failed'):
            errors['playbook'] = _("Missing a revision to run due to failed project update.")

            latest_update = template.project.project_updates.last()
            if latest_update is not None and latest_update.failed:
                failed_validation_tasks = latest_update.project_update_events.filter(
                    event='runner_on_failed',
                    play="Perform project signature/checksum verification",
                )
                if failed_validation_tasks:
                    errors['playbook'] = _("Last project update failed due to signature validation failure.")
        else:
            failure_reason = template.project.get_reason_if_failed()
            if failure_reason:
                errors['playbook'] = failure_reason

        # cannot run a playbook without an inventory
        if template.inventory and template.inventory.pending_deletion is True:
@@ -4921,6 +4931,19 @@ class InstanceSerializer(BaseSerializer):
        extra_kwargs = {
            'node_type': {'initial': Instance.Types.EXECUTION, 'default': Instance.Types.EXECUTION},
            'node_state': {'initial': Instance.States.INSTALLED, 'default': Instance.States.INSTALLED},
            'hostname': {
                'validators': [
                    MaxLengthValidator(limit_value=250),
                    validators.UniqueValidator(queryset=Instance.objects.all()),
                    RegexValidator(
                        regex=r'^localhost$|^127(?:\.[0-9]+){0,2}\.[0-9]+$|^(?:0*\:)*?:?0*1$',
                        flags=re.IGNORECASE,
                        inverse_match=True,
                        message="hostname cannot be localhost or 127.0.0.1",
                    ),
                    HostnameRegexValidator(),
                ],
            },
        }

    def get_related(self, obj):
@@ -4931,7 +4954,7 @@ class InstanceSerializer(BaseSerializer):
        res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
        res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
        if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
            if obj.node_type != 'hop':
            if obj.node_type == 'execution':
                res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
        return res
@@ -4991,6 +5014,10 @@ class InstanceSerializer(BaseSerializer):
        return value

    def validate_hostname(self, value):
        """
        - Hostname cannot be "localhost" - but can be something like localhost.domain
        - Cannot change the hostname of an-already instantiated & initialized Instance object
        """
        if self.instance and self.instance.hostname != value:
            raise serializers.ValidationError("Cannot change hostname.")
@@ -1,5 +1,5 @@
Launch a Job Template:

{% ifmeth GET %}
Make a GET request to this resource to determine if the job_template can be
launched and whether any passwords are required to launch the job_template.
The response will include the following fields:
@@ -29,8 +29,8 @@ The response will include the following fields:
* `inventory_needed_to_start`: Flag indicating the presence of an inventory
  associated with the job template. If not then one should be supplied when
  launching the job (boolean, read-only)

Make a POST request to this resource to launch the job_template. If any
{% endifmeth %}
{% ifmeth POST %}Make a POST request to this resource to launch the job_template. If any
passwords, inventory, or extra variables (extra_vars) are required, they must
be passed via POST data, with extra_vars given as a YAML or JSON string and
escaped parentheses. If the `inventory_needed_to_start` is `True` then the
@@ -41,3 +41,4 @@ are not provided, a 400 status code will be returned. If the job cannot be
launched, a 405 status code will be returned. If the provided credential or
inventory are not allowed to be used by the user, then a 403 status code will
be returned.
{% endifmeth %}
@@ -1,3 +1,5 @@
receptor_user: awx
receptor_group: awx
receptor_verify: true
receptor_tls: true
receptor_work_commands:
@@ -10,12 +12,12 @@ custom_worksign_public_keyfile: receptor/work-public-key.pem
custom_tls_certfile: receptor/tls/receptor.crt
custom_tls_keyfile: receptor/tls/receptor.key
custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
receptor_user: awx
receptor_group: awx
receptor_protocol: 'tcp'
receptor_listener: true
receptor_port: {{ instance.listener_port }}
receptor_dependencies:
  - podman
  - crun
  - python39-pip
{% verbatim %}
podman_user: "{{ receptor_user }}"
podman_group: "{{ receptor_group }}"
{% endverbatim %}
@@ -9,10 +9,12 @@
    shell: /bin/bash
- name: Enable Copr repo for Receptor
  command: dnf copr enable ansible-awx/receptor -y
- import_role:
    name: ansible.receptor.podman
- import_role:
    name: ansible.receptor.setup
- name: Install ansible-runner
  pip:
    name: ansible-runner
    executable: pip3.9
{% endverbatim %}
{% endverbatim %}
@@ -1,6 +1,4 @@
---
collections:
  - name: ansible.receptor
    source: https://github.com/ansible/receptor-collection/
    type: git
    version: 0.1.1
    version: 1.1.0
@@ -9,9 +9,9 @@ from awx.api.views import (
    InstanceUnifiedJobsList,
    InstanceInstanceGroupsList,
    InstanceHealthCheck,
    InstanceInstallBundle,
    InstancePeersList,
)
from awx.api.views.instance_install_bundle import InstanceInstallBundle


urls = [
@@ -3,26 +3,28 @@

from django.urls import re_path

from awx.api.views import (
from awx.api.views.inventory import (
    InventoryList,
    InventoryDetail,
    InventoryHostsList,
    InventoryGroupsList,
    InventoryRootGroupsList,
    InventoryVariableData,
    InventoryScriptView,
    InventoryTreeView,
    InventoryInventorySourcesList,
    InventoryInventorySourcesUpdate,
    InventoryActivityStreamList,
    InventoryJobTemplateList,
    InventoryAdHocCommandsList,
    InventoryAccessList,
    InventoryObjectRolesList,
    InventoryInstanceGroupsList,
    InventoryLabelList,
    InventoryCopy,
)
from awx.api.views import (
    InventoryHostsList,
    InventoryGroupsList,
    InventoryInventorySourcesList,
    InventoryInventorySourcesUpdate,
    InventoryAdHocCommandsList,
    InventoryRootGroupsList,
    InventoryScriptView,
    InventoryTreeView,
    InventoryVariableData,
)


urls = [
@@ -3,6 +3,9 @@

from django.urls import re_path

from awx.api.views.inventory import (
    InventoryUpdateEventsList,
)
from awx.api.views import (
    InventoryUpdateList,
    InventoryUpdateDetail,
@@ -10,7 +13,6 @@ from awx.api.views import (
    InventoryUpdateStdout,
    InventoryUpdateNotificationsList,
    InventoryUpdateCredentialsList,
    InventoryUpdateEventsList,
)
@@ -10,7 +10,7 @@ from oauthlib import oauth2
from oauth2_provider import views

from awx.main.models import RefreshToken
from awx.api.views import ApiOAuthAuthorizationRootView
from awx.api.views.root import ApiOAuthAuthorizationRootView


class TokenView(views.TokenView):
@@ -3,7 +3,7 @@

from django.urls import re_path

from awx.api.views import (
from awx.api.views.organization import (
    OrganizationList,
    OrganizationDetail,
    OrganizationUsersList,
@@ -14,7 +14,6 @@ from awx.api.views import (
    OrganizationJobTemplatesList,
    OrganizationWorkflowJobTemplatesList,
    OrganizationTeamsList,
    OrganizationCredentialList,
    OrganizationActivityStreamList,
    OrganizationNotificationTemplatesList,
    OrganizationNotificationTemplatesErrorList,
@@ -25,8 +24,8 @@ from awx.api.views import (
    OrganizationGalaxyCredentialsList,
    OrganizationObjectRolesList,
    OrganizationAccessList,
    OrganizationApplicationList,
)
from awx.api.views import OrganizationCredentialList, OrganizationApplicationList


urls = [
@@ -6,13 +6,15 @@ from django.urls import include, re_path

from awx import MODE
from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views import (
from awx.api.views.root import (
    ApiRootView,
    ApiV2RootView,
    ApiV2PingView,
    ApiV2ConfigView,
    ApiV2SubscriptionView,
    ApiV2AttachView,
)
from awx.api.views import (
    AuthView,
    UserMeList,
    DashboardView,
@@ -28,8 +30,8 @@ from awx.api.views import (
    OAuth2TokenList,
    ApplicationOAuth2TokenList,
    OAuth2ApplicationDetail,
    MeshVisualizer,
)
from awx.api.views.mesh_visualizer import MeshVisualizer

from awx.api.views.metrics import MetricsView
@@ -1,6 +1,6 @@
from django.urls import re_path

from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver


urlpatterns = [
55 awx/api/validators.py (new file)
@@ -0,0 +1,55 @@
import re

from django.core.validators import RegexValidator, validate_ipv46_address
from django.core.exceptions import ValidationError


class HostnameRegexValidator(RegexValidator):
    """
    Fully validates a domain name that is compliant with norms in Linux/RHEL
    - Cannot start with a hyphen
    - Cannot begin with, or end with a "."
    - Cannot contain any whitespaces
    - Entire hostname is max 255 chars (including dots)
    - Each domain/label is between 1 and 63 characters, except top level domain, which must be at least 2 characters
    - Supports ipv4, ipv6, simple hostnames and FQDNs
    - Follows RFC 9210 (modern RFC 1123, 1178) requirements

    Accepts an IP Address or Hostname as the argument
    """

    regex = '^[a-z0-9][-a-z0-9]*$|^([a-z0-9][-a-z0-9]{0,62}[.])*[a-z0-9][-a-z0-9]{1,62}$'
    flags = re.IGNORECASE

    def __call__(self, value):
        regex_matches, err = self.__validate(value)
        invalid_input = regex_matches if self.inverse_match else not regex_matches
        if invalid_input:
            if err is None:
                err = ValidationError(self.message, code=self.code, params={"value": value})
            raise err

    def __str__(self):
        return f"regex={self.regex}, message={self.message}, code={self.code}, inverse_match={self.inverse_match}, flags={self.flags}"

    def __validate(self, value):
        if ' ' in value:
            return False, ValidationError("whitespaces in hostnames are illegal")

        # If we have an IP address, try and validate it.
        try:
            validate_ipv46_address(value)
            return True, None
        except ValidationError:
            pass

        # By this point in the code, we probably have a simple hostname,
        # FQDN or a strange hostname like "192.localhost.domain.101"
        if not self.regex.match(value):
            return False, ValidationError(f"illegal characters detected in hostname={value}. Please verify.")

        return True, None
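Below is a minimal usage sketch of the new validator (an illustration, not part of the commit range; it assumes a configured Django settings module, since the error type comes from django.core.exceptions):

# Hypothetical illustration only -- not code from the diff above.
from django.core.exceptions import ValidationError

from awx.api.validators import HostnameRegexValidator

validator = HostnameRegexValidator()

for candidate in ("node1.example.com", "10.0.0.5", "::1", "bad host", "-dash.example.com"):
    try:
        validator(candidate)  # raises ValidationError for invalid hostnames
        print(f"{candidate!r}: accepted")
    except ValidationError as exc:
        print(f"{candidate!r}: rejected ({exc.messages[0]})")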
@@ -69,6 +69,7 @@ from awx.api.generics import (
    APIView,
    BaseUsersList,
    CopyAPIView,
    GenericCancelView,
    GenericAPIView,
    ListAPIView,
    ListCreateAPIView,
@@ -122,56 +123,6 @@ from awx.api.views.mixin import (
    UnifiedJobDeletionMixin,
    NoTruncateMixin,
)
from awx.api.views.instance_install_bundle import InstanceInstallBundle  # noqa
from awx.api.views.inventory import (  # noqa
    InventoryList,
    InventoryDetail,
    InventoryUpdateEventsList,
    InventoryList,
    InventoryDetail,
    InventoryActivityStreamList,
    InventoryInstanceGroupsList,
    InventoryAccessList,
    InventoryObjectRolesList,
    InventoryJobTemplateList,
    InventoryLabelList,
    InventoryCopy,
)
from awx.api.views.mesh_visualizer import MeshVisualizer  # noqa
from awx.api.views.organization import (  # noqa
    OrganizationList,
    OrganizationDetail,
    OrganizationInventoriesList,
    OrganizationUsersList,
    OrganizationAdminsList,
    OrganizationExecutionEnvironmentsList,
    OrganizationProjectsList,
    OrganizationJobTemplatesList,
    OrganizationWorkflowJobTemplatesList,
    OrganizationTeamsList,
    OrganizationActivityStreamList,
    OrganizationNotificationTemplatesList,
    OrganizationNotificationTemplatesAnyList,
    OrganizationNotificationTemplatesErrorList,
    OrganizationNotificationTemplatesStartedList,
    OrganizationNotificationTemplatesSuccessList,
    OrganizationNotificationTemplatesApprovalList,
    OrganizationInstanceGroupsList,
    OrganizationGalaxyCredentialsList,
    OrganizationAccessList,
    OrganizationObjectRolesList,
)
from awx.api.views.root import (  # noqa
    ApiRootView,
    ApiOAuthAuthorizationRootView,
    ApiVersionRootView,
    ApiV2RootView,
    ApiV2PingView,
    ApiV2ConfigView,
    ApiV2SubscriptionView,
    ApiV2AttachView,
)
from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver  # noqa
from awx.api.pagination import UnifiedJobEventPagination
from awx.main.utils import set_environ
@@ -441,8 +392,8 @@ class InstanceHealthCheck(GenericAPIView):
    permission_classes = (IsSystemAdminOrAuditor,)

    def get_queryset(self):
        return super().get_queryset().filter(node_type='execution')
        # FIXME: For now, we don't have a good way of checking the health of a hop node.
        return super().get_queryset().exclude(node_type='hop')

    def get(self, request, *args, **kwargs):
        obj = self.get_object()
@@ -462,9 +413,10 @@ class InstanceHealthCheck(GenericAPIView):

            execution_node_health_check.apply_async([obj.hostname])
        else:
            from awx.main.tasks.system import cluster_node_health_check

            cluster_node_health_check.apply_async([obj.hostname], queue=obj.hostname)
            return Response(
                {"error": f"Cannot run a health check on instances of type {obj.node_type}. Health checks can only be run on execution nodes."},
                status=status.HTTP_400_BAD_REQUEST,
            )
        return Response({'msg': f"Health check is running for {obj.hostname}."}, status=status.HTTP_200_OK)
@@ -1026,20 +978,11 @@ class SystemJobEventsList(SubListAPIView):
        return job.get_event_queryset()


class ProjectUpdateCancel(RetrieveAPIView):
class ProjectUpdateCancel(GenericCancelView):

    model = models.ProjectUpdate
    obj_permission_type = 'cancel'
    serializer_class = serializers.ProjectUpdateCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class ProjectUpdateNotificationsList(SubListAPIView):

@@ -2278,6 +2221,8 @@ class InventorySourceUpdateView(RetrieveAPIView):

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        serializer = self.get_serializer(instance=obj, data=request.data)
        serializer.is_valid(raise_exception=True)
        if obj.can_update:
            update = obj.update()
            if not update:
@@ -2312,20 +2257,11 @@ class InventoryUpdateCredentialsList(SubListAPIView):
    relationship = 'credentials'


class InventoryUpdateCancel(RetrieveAPIView):
class InventoryUpdateCancel(GenericCancelView):

    model = models.InventoryUpdate
    obj_permission_type = 'cancel'
    serializer_class = serializers.InventoryUpdateCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class InventoryUpdateNotificationsList(SubListAPIView):

@@ -2406,9 +2342,8 @@ class JobTemplateLaunch(RetrieveAPIView):
        """
        modern_data = data.copy()

        id_fd = '{}_id'.format('inventory')
        if 'inventory' not in modern_data and id_fd in modern_data:
            modern_data['inventory'] = modern_data[id_fd]
        if 'inventory' not in modern_data and 'inventory_id' in modern_data:
            modern_data['inventory'] = modern_data['inventory_id']

        # credential passwords were historically provided as top-level attributes
        if 'credential_passwords' not in modern_data:
@@ -3101,8 +3036,7 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
    search_fields = ('unified_job_template__name', 'unified_job_template__description')

    #
    # Limit the set of WorkflowJobeNodes to the related nodes of specified by
    #'relationship'
    # Limit the set of WorkflowJobNodes to the related nodes of specified by self.relationship
    #
    def get_queryset(self):
        parent = self.get_parent_object()
@@ -3404,20 +3338,15 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
        return super(WorkflowJobWorkflowNodesList, self).get_queryset().order_by('id')


class WorkflowJobCancel(RetrieveAPIView):
class WorkflowJobCancel(GenericCancelView):

    model = models.WorkflowJob
    obj_permission_type = 'cancel'
    serializer_class = serializers.WorkflowJobCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            ScheduleWorkflowManager().schedule()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)
        r = super().post(request, *args, **kwargs)
        ScheduleWorkflowManager().schedule()
        return r


class WorkflowJobNotificationsList(SubListAPIView):
@@ -3573,20 +3502,11 @@ class JobActivityStreamList(SubListAPIView):
    search_fields = ('changes',)


class JobCancel(RetrieveAPIView):
class JobCancel(GenericCancelView):

    model = models.Job
    obj_permission_type = 'cancel'
    serializer_class = serializers.JobCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class JobRelaunch(RetrieveAPIView):

@@ -4057,20 +3977,11 @@ class AdHocCommandDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):
    serializer_class = serializers.AdHocCommandDetailSerializer


class AdHocCommandCancel(RetrieveAPIView):
class AdHocCommandCancel(GenericCancelView):

    model = models.AdHocCommand
    obj_permission_type = 'cancel'
    serializer_class = serializers.AdHocCommandCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class AdHocCommandRelaunch(GenericAPIView):

@@ -4205,20 +4116,11 @@ class SystemJobDetail(UnifiedJobDeletionMixin, RetrieveDestroyAPIView):
    serializer_class = serializers.SystemJobSerializer


class SystemJobCancel(RetrieveAPIView):
class SystemJobCancel(GenericCancelView):

    model = models.SystemJob
    obj_permission_type = 'cancel'
    serializer_class = serializers.SystemJobCancelSerializer

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if obj.can_cancel:
            obj.cancel()
            return Response(status=status.HTTP_202_ACCEPTED)
        else:
            return self.http_method_not_allowed(request, *args, **kwargs)


class SystemJobNotificationsList(SubListAPIView):
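For reference, a sketch (not from the diff) of what each converted cancel endpoint now reduces to: subclasses declare only the model and serializer, inheriting post() from GenericCancelView.

# Hypothetical subclass for illustration; JobCancel above is the real pattern.
from awx.api.generics import GenericCancelView
from awx.main import models
from awx.api import serializers


class ExampleUpdateCancel(GenericCancelView):
    model = models.ProjectUpdate
    serializer_class = serializers.ProjectUpdateCancelSerializer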
@@ -178,7 +178,7 @@ def generate_receptor_tls(instance_obj):
        .public_key(csr.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime.utcnow())
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=10))
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
        .add_extension(
            csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
            critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,
6241 awx/locale/translations/es/django.po (new file; diff suppressed because it is too large)
10833 awx/locale/translations/es/messages.po (new file; diff suppressed because it is too large)
6243 awx/locale/translations/fr/django.po (new file; diff suppressed because it is too large)
10713 awx/locale/translations/fr/messages.po (new file; diff suppressed because it is too large)
6240 awx/locale/translations/ja/django.po (new file; diff suppressed because it is too large)
10739 awx/locale/translations/ja/messages.po (new file; diff suppressed because it is too large)
6240 awx/locale/translations/ko/django.po (new file; diff suppressed because it is too large)
10700 awx/locale/translations/ko/messages.po (new file; diff suppressed because it is too large)
6241 awx/locale/translations/nl/django.po (new file; diff suppressed because it is too large)
10725 awx/locale/translations/nl/messages.po (new file; diff suppressed because it is too large)
6242 awx/locale/translations/zh/django.po (new file; diff suppressed because it is too large)
10698 awx/locale/translations/zh/messages.po (new file; diff suppressed because it is too large)
@@ -993,9 +993,6 @@ class HostAccess(BaseAccess):
        if data and 'name' in data:
            self.check_license(add_host_name=data['name'])

            # Check the per-org limit
            self.check_org_host_limit({'inventory': obj.inventory}, add_host_name=data['name'])

        # Checks for admin or change permission on inventory, controls whether
        # the user can edit variable data.
        return obj and self.user in obj.inventory.admin_role
@@ -166,11 +166,7 @@ class Metrics:
        elif settings.IS_TESTING():
            self.instance_name = "awx_testing"
        else:
            try:
                self.instance_name = Instance.objects.me().hostname
            except Exception as e:
                self.instance_name = settings.CLUSTER_HOST_ID
                logger.info(f'Instance {self.instance_name} seems to be unregistered, error: {e}')
            self.instance_name = Instance.objects.my_hostname()

        # metric name, help_text
        METRICSLIST = [
@@ -9,10 +9,16 @@ aim_inputs = {
    'fields': [
        {
            'id': 'url',
            'label': _('CyberArk AIM URL'),
            'label': _('CyberArk CCP URL'),
            'type': 'string',
            'format': 'url',
        },
        {
            'id': 'webservice_id',
            'label': _('Web Service ID'),
            'type': 'string',
            'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
        },
        {
            'id': 'app_id',
            'label': _('Application ID'),
@@ -64,10 +70,13 @@ def aim_backend(**kwargs):
    client_cert = kwargs.get('client_cert', None)
    client_key = kwargs.get('client_key', None)
    verify = kwargs['verify']
    webservice_id = kwargs['webservice_id']
    app_id = kwargs['app_id']
    object_query = kwargs['object_query']
    object_query_format = kwargs['object_query_format']
    reason = kwargs.get('reason', None)
    if webservice_id == '':
        webservice_id = 'AIMWebService'

    query_params = {
        'AppId': app_id,
@@ -78,7 +87,7 @@ def aim_backend(**kwargs):
        query_params['reason'] = reason

    request_qs = '?' + urlencode(query_params, quote_via=quote)
    request_url = urljoin(url, '/'.join(['AIMWebService', 'api', 'Accounts']))
    request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))

    with CertFiles(client_cert, client_key) as cert:
        res = requests.get(
@@ -92,4 +101,4 @@ def aim_backend(**kwargs):
    return res.json()['Content']


aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
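A sketch (an assumption for illustration, not code from the diff) of the resulting URL construction, showing the new webservice_id field falling back to the historical AIMWebService path segment:

# Hypothetical standalone rendering of the changed URL logic.
from urllib.parse import urljoin, urlencode, quote

def build_ccp_url(url, webservice_id='', app_id='', object_query=''):
    # An empty webservice_id preserves the old hard-coded behavior.
    if webservice_id == '':
        webservice_id = 'AIMWebService'
    request_qs = '?' + urlencode({'AppId': app_id, 'Query': object_query}, quote_via=quote)
    return urljoin(url, '/'.join([webservice_id, 'api', 'Accounts'])) + request_qs

# e.g. https://ccp.example.com/AIMWebService/api/Accounts?AppId=awx&Query=Safe%3DTest
print(build_ccp_url('https://ccp.example.com', app_id='awx', object_query='Safe=Test'))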
@@ -1,6 +1,5 @@
from .plugin import CredentialPlugin, CertFiles, raise_for_status

import base64
from urllib.parse import urljoin, quote

from django.utils.translation import gettext_lazy as _
@@ -61,7 +60,7 @@ def conjur_backend(**kwargs):
    cacert = kwargs.get('cacert', None)

    auth_kwargs = {
        'headers': {'Content-Type': 'text/plain'},
        'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
        'data': api_key,
        'allow_redirects': False,
    }
@@ -69,9 +68,9 @@ def conjur_backend(**kwargs):
    with CertFiles(cacert) as cert:
        # https://www.conjur.org/api.html#authentication-authenticate-post
        auth_kwargs['verify'] = cert
        resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
        resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
    raise_for_status(resp)
    token = base64.b64encode(resp.content).decode('utf-8')
    token = resp.content.decode('utf-8')

    lookup_kwargs = {
        'headers': {'Authorization': 'Token token="{}"'.format(token)},
@@ -79,9 +78,10 @@ def conjur_backend(**kwargs):
    }

    # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
    path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
    path = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
    if version:
        path = '?'.join([path, version])
        ver = "version={}".format(version)
        path = '?'.join([path, ver])

    with CertFiles(cacert) as cert:
        lookup_kwargs['verify'] = cert
@@ -90,4 +90,4 @@ def conjur_backend(**kwargs):
    return resp.text


conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
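The net effect on the request flow, sketched below (an assumption for illustration, not diff content): both endpoints gain an api/ prefix, the authenticate response is requested base64-encoded via the Accept-Encoding header instead of being encoded client-side, and the secret version becomes a proper version= query parameter.

# Hypothetical standalone rendering of the changed join logic;
# real lookups also need the auth token header shown in the diff.
from urllib.parse import urljoin

def conjur_urls(url, account, username, secret_path, version=None):
    auth = urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate']))
    lookup = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
    if version:
        lookup = '?'.join([lookup, 'version={}'.format(version)])
    return auth, lookup

print(conjur_urls('https://conjur.example.com/', 'myorg', 'host%2Fawx', 'db%2Fpassword', version=2))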
@@ -3,6 +3,7 @@ import uuid
import json

from django.conf import settings
from django.db import connection
import redis

from awx.main.dispatch import get_local_queuename
@@ -49,7 +50,10 @@ class Control(object):
        reply_queue = Control.generate_reply_queue_name()
        self.result = None

        with pg_bus_conn(new_connection=True) as conn:
        if not connection.get_autocommit():
            raise RuntimeError('Control-with-reply messages can only be done in autocommit mode')

        with pg_bus_conn() as conn:
            conn.listen(reply_queue)
            send_data = {'control': command, 'reply_to': reply_queue}
            if extra_data:
@@ -387,6 +387,8 @@ class AutoscalePool(WorkerPool):
                            reaper.reap_job(j, 'failed')
                    except Exception:
                        logger.exception('failed to reap job UUID {}'.format(w.current_task['uuid']))
                else:
                    logger.warning(f'Worker was told to quit but has not, pid={w.pid}')
                orphaned.extend(w.orphaned_tasks)
                self.workers.remove(w)
            elif w.idle and len(self.workers) > self.min_workers:
@@ -450,9 +452,6 @@ class AutoscalePool(WorkerPool):
        try:
            if isinstance(body, dict) and body.get('bind_kwargs'):
                self.add_bind_kwargs(body)
            # when the cluster heartbeat occurs, clean up internally
            if isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']:
                self.cleanup()
            if self.should_grow:
                self.up()
            # we don't care about "preferred queue" round robin distribution, just
@@ -16,12 +16,7 @@ def startup_reaping():
    If this particular instance is starting, then we know that any running jobs are invalid
    so we will reap those jobs as a special action here
    """
    try:
        me = Instance.objects.me()
    except RuntimeError as e:
        logger.warning(f'Local instance is not registered, not running startup reaper: {e}')
        return
    jobs = UnifiedJob.objects.filter(status='running', controller_node=me.hostname)
    jobs = UnifiedJob.objects.filter(status='running', controller_node=Instance.objects.my_hostname())
    job_ids = []
    for j in jobs:
        job_ids.append(j.id)
@@ -62,16 +57,13 @@ def reap_waiting(instance=None, status='failed', job_explanation=None, grace_per
    if grace_period is None:
        grace_period = settings.JOB_WAITING_GRACE_PERIOD + settings.TASK_MANAGER_TIMEOUT

    me = instance
    if me is None:
        try:
            me = Instance.objects.me()
        except RuntimeError as e:
            logger.warning(f'Local instance is not registered, not running reaper: {e}')
            return
    if instance is None:
        hostname = Instance.objects.my_hostname()
    else:
        hostname = instance.hostname
    if ref_time is None:
        ref_time = tz_now()
    jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=me.hostname)
    jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=hostname)
    if excluded_uuids:
        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
    for j in jobs:
@@ -82,16 +74,13 @@ def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=No
    """
    Reap all jobs in running for this instance.
    """
    me = instance
    if me is None:
        try:
            me = Instance.objects.me()
        except RuntimeError as e:
            logger.warning(f'Local instance is not registered, not running reaper: {e}')
            return
    if instance is None:
        hostname = Instance.objects.my_hostname()
    else:
        hostname = instance.hostname
    workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
    jobs = UnifiedJob.objects.filter(
        Q(status='running') & (Q(execution_node=me.hostname) | Q(controller_node=me.hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
        Q(status='running') & (Q(execution_node=hostname) | Q(controller_node=hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
    )
    if excluded_uuids:
        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
@@ -114,7 +114,6 @@ class AWXConsumerBase(object):
            queue = 0
        self.pool.write(queue, body)
        self.total_messages += 1
        self.record_statistics()

    @log_excess_runtime(logger)
    def record_statistics(self):
@@ -156,6 +155,16 @@ class AWXConsumerPG(AWXConsumerBase):
        # if no successful loops have ran since startup, then we should fail right away
        self.pg_is_down = True  # set so that we fail if we get database errors on startup
        self.pg_down_time = time.time() - self.pg_max_wait  # allow no grace period
        self.last_cleanup = time.time()

    def run_periodic_tasks(self):
        self.record_statistics()  # maintains time buffer in method

        if time.time() - self.last_cleanup > 60:  # same as cluster_node_heartbeat
            # NOTE: if we run out of database connections, it is important to still run cleanup
            # so that we scale down workers and free up connections
            self.pool.cleanup()
            self.last_cleanup = time.time()

    def run(self, *args, **kwargs):
        super(AWXConsumerPG, self).run(*args, **kwargs)
@@ -171,8 +180,10 @@ class AWXConsumerPG(AWXConsumerBase):
                if init is False:
                    self.worker.on_start()
                    init = True
                for e in conn.events():
                    self.process_task(json.loads(e.payload))
                for e in conn.events(yield_timeouts=True):
                    if e is not None:
                        self.process_task(json.loads(e.payload))
                    self.run_periodic_tasks()
                self.pg_is_down = False
                if self.should_stop:
                    return
@@ -229,6 +240,8 @@ class BaseWorker(object):
                # so we can establish a new connection
                conn.close_if_unusable_or_obsolete()
                self.perform_work(body, *args)
            except Exception:
                logger.exception(f'Unhandled exception in perform_work in worker pid={os.getpid()}')
            finally:
                if 'uuid' in body:
                    uuid = body['uuid']
@@ -25,7 +25,7 @@ class Command(BaseCommand):
        with connection.cursor() as cursor:
            cursor.execute(
                f'''
                    SELECT
                SELECT
                    b.id, b.job_id, b.host_name, b.created - a.created delta,
                    b.task task,
                    b.event_data::json->'task_action' task_action,
@@ -53,7 +53,7 @@ class Command(BaseCommand):
        return lines

    @classmethod
    def get_connection_status(cls, me, hostnames, data):
    def get_connection_status(cls, hostnames, data):
        host_stats = [('hostname', 'state', 'start time', 'duration (sec)')]
        for h in hostnames:
            connection_color = '91'  # red
@@ -78,7 +78,7 @@ class Command(BaseCommand):
        return host_stats

    @classmethod
    def get_connection_stats(cls, me, hostnames, data):
    def get_connection_stats(cls, hostnames, data):
        host_stats = [('hostname', 'total', 'per minute')]
        for h in hostnames:
            h_safe = safe_name(h)
@@ -119,8 +119,8 @@ class Command(BaseCommand):
            return

        try:
            me = Instance.objects.me()
            logger.info('Active instance with hostname {} is registered.'.format(me.hostname))
            my_hostname = Instance.objects.my_hostname()
            logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
        except RuntimeError as e:
            # the CLUSTER_HOST_ID in the task, and web instance must match and
            # ensure network connectivity between the task and web instance
@@ -145,19 +145,19 @@ class Command(BaseCommand):
            else:
                data[family.name] = family.samples[0].value

            me = Instance.objects.me()
            hostnames = [i.hostname for i in Instance.objects.exclude(hostname=me.hostname)]
            my_hostname = Instance.objects.my_hostname()
            hostnames = [i.hostname for i in Instance.objects.exclude(hostname=my_hostname)]

            host_stats = Command.get_connection_status(me, hostnames, data)
            host_stats = Command.get_connection_status(hostnames, data)
            lines = Command._format_lines(host_stats)

            print(f'Broadcast websocket connection status from "{me.hostname}" to:')
            print(f'Broadcast websocket connection status from "{my_hostname}" to:')
            print('\n'.join(lines))

            host_stats = Command.get_connection_stats(me, hostnames, data)
            host_stats = Command.get_connection_stats(hostnames, data)
            lines = Command._format_lines(host_stats)

            print(f'\nBroadcast websocket connection stats from "{me.hostname}" to:')
            print(f'\nBroadcast websocket connection stats from "{my_hostname}" to:')
            print('\n'.join(lines))

            return
@@ -99,9 +99,12 @@ class InstanceManager(models.Manager):
    instance or role.
    """

    def my_hostname(self):
        return settings.CLUSTER_HOST_ID

    def me(self):
        """Return the currently active instance."""
        node = self.filter(hostname=settings.CLUSTER_HOST_ID)
        node = self.filter(hostname=self.my_hostname())
        if node.exists():
            return node[0]
        raise RuntimeError("No instance found with the current cluster host id")
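The point of the new accessor, sketched below (an illustration, not diff content): my_hostname() never touches the database, so callers that only need the local hostname no longer risk the RuntimeError that me() raises on unregistered nodes.

# Hypothetical caller-side contrast.
from awx.main.models import Instance

hostname = Instance.objects.my_hostname()  # just settings.CLUSTER_HOST_ID; cannot fail

try:
    instance = Instance.objects.me()  # requires a registered Instance row
except RuntimeError:
    instance = None  # e.g. early in startup, before registration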
@@ -4,7 +4,7 @@ from django.utils.timezone import now

logger = logging.getLogger('awx.main.migrations')

__all__ = ['create_collection_jt', 'create_clearsessions_jt', 'create_cleartokens_jt']
__all__ = ['create_clearsessions_jt', 'create_cleartokens_jt']

'''
These methods are called by migrations to create various system job templates
@@ -44,7 +44,7 @@ def migrate_galaxy_settings(apps, schema_editor):
            credential_type=galaxy_type,
            inputs={'url': 'https://galaxy.ansible.com/'},
        )
    except:
    except Exception:
        # Needed for new migrations, tests
        public_galaxy_credential = Credential(
            created=now(), modified=now(), name='Ansible Galaxy', managed=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
@@ -282,7 +282,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
                return field['default']
            if 'default' in kwargs:
                return kwargs['default']
            raise AttributeError
            raise AttributeError(field_name)
        if field_name in self.inputs:
            return self.inputs[field_name]
        if 'default' in kwargs:
@@ -247,6 +247,19 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
         return (number, step)

+    def get_sliced_hosts(self, host_queryset, slice_number, slice_count):
+        """
+        Returns a slice of Hosts given a slice number and total slice count, or
+        the original queryset if slicing is not requested.
+
+        NOTE: If slicing is performed, this will return a List[Host] with the
+        resulting slice. If slicing is not performed it will return the
+        original queryset (not evaluating it or forcing it to a list). This
+        puts the burden on the caller to check the resulting type. This is
+        non-ideal because it's easy to get wrong, but I think the only way
+        around it is to force the queryset which has memory implications for
+        large inventories.
+        """
+        if slice_count > 1 and slice_number > 0:
+            offset = slice_number - 1
+            host_queryset = host_queryset[offset::slice_count]

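The extended slice in `get_sliced_hosts` deals hosts out round-robin, so each slice receives within one host of an equal share. A standalone sketch with plain lists (hypothetical host names, not AWX code):

hosts = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'h7']
slice_count = 3
for slice_number in (1, 2, 3):
    offset = slice_number - 1
    print(slice_number, hosts[offset::slice_count])
# 1 ['h1', 'h4', 'h7']
# 2 ['h2', 'h5']
# 3 ['h3', 'h6']
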
@@ -15,6 +15,7 @@ from urllib.parse import urljoin
 from django.conf import settings
 from django.core.exceptions import ValidationError
 from django.db import models
+from django.db.models.query import QuerySet

 # from django.core.cache import cache
 from django.utils.encoding import smart_str

@@ -844,22 +845,30 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
     def get_notification_friendly_name(self):
         return "Job"

-    def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id']):
+    def _get_inventory_hosts(self, only=('name', 'ansible_facts', 'ansible_facts_modified', 'modified', 'inventory_id'), **filters):
+        """Return value is an iterable for the relevant hosts for this job"""
         if not self.inventory:
             return []
         host_queryset = self.inventory.hosts.only(*only)
-        return self.inventory.get_sliced_hosts(host_queryset, self.job_slice_number, self.job_slice_count)
+        if filters:
+            host_queryset = host_queryset.filter(**filters)
+        host_queryset = self.inventory.get_sliced_hosts(host_queryset, self.job_slice_number, self.job_slice_count)
+        if isinstance(host_queryset, QuerySet):
+            return host_queryset.iterator()
+        return host_queryset

     def start_job_fact_cache(self, destination, modification_times, timeout=None):
         self.log_lifecycle("start_job_fact_cache")
         os.makedirs(destination, mode=0o700)
-        hosts = self._get_inventory_hosts()

         if timeout is None:
             timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
         if timeout > 0:
             # exclude hosts with fact data older than `settings.ANSIBLE_FACT_CACHE_TIMEOUT seconds`
             timeout = now() - datetime.timedelta(seconds=timeout)
-            hosts = hosts.filter(ansible_facts_modified__gte=timeout)
+            hosts = self._get_inventory_hosts(ansible_facts_modified__gte=timeout)
+        else:
+            hosts = self._get_inventory_hosts()
         for host in hosts:
             filepath = os.sep.join(map(str, [destination, host.name]))
             if not os.path.realpath(filepath).startswith(destination):

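The rewritten fact-cache path turns the timeout in seconds into a datetime cutoff and passes it down as an ORM filter, so stale hosts are excluded by the database rather than fetched and skipped in Python. A minimal sketch of the cutoff arithmetic (illustrative values, not AWX code):

from datetime import datetime, timedelta, timezone

timeout_seconds = 3600  # hypothetical ANSIBLE_FACT_CACHE_TIMEOUT value
cutoff = datetime.now(timezone.utc) - timedelta(seconds=timeout_seconds)
# equivalent in spirit to: hosts = self._get_inventory_hosts(ansible_facts_modified__gte=cutoff)
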
@@ -471,6 +471,29 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
     def get_absolute_url(self, request=None):
         return reverse('api:project_detail', kwargs={'pk': self.pk}, request=request)

+    def get_reason_if_failed(self):
+        """
+        If the project is in a failed or errored state, return a human-readable
+        error message explaining why. Otherwise return None.
+
+        This is used during validation in the serializer and also by
+        RunProjectUpdate/RunInventoryUpdate.
+        """
+        if self.status not in ('error', 'failed'):
+            return None
+
+        latest_update = self.project_updates.last()
+        if latest_update is not None and latest_update.failed:
+            failed_validation_tasks = latest_update.project_update_events.filter(
+                event='runner_on_failed',
+                play="Perform project signature/checksum verification",
+            )
+            if failed_validation_tasks:
+                return _("Last project update failed due to signature validation failure.")
+
+        return _("Missing a revision to run due to failed project update.")
+
 '''
 RelatedJobsMixin
 '''

@@ -153,7 +153,7 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
         #

         # Find the DTSTART rule or raise an error; it's usually the first rule, but that is not strictly enforced
-        start_date_rule = re.sub('^.*(DTSTART[^\s]+)\s.*$', r'\1', rrule)
+        start_date_rule = re.sub(r'^.*(DTSTART[^\s]+)\s.*$', r'\1', rrule)
         if not start_date_rule:
             raise ValueError('A DTSTART field needs to be in the rrule')

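The one-character fix above makes the pattern a raw string, so `\s` reaches the regex engine instead of being treated as an invalid (deprecation-warned) Python string escape. A quick standalone check of the same substitution:

import re

rrule = 'DTSTART:20300308T050000Z RRULE:INTERVAL=1;FREQ=DAILY'
print(re.sub(r'^.*(DTSTART[^\s]+)\s.*$', r'\1', rrule))  # DTSTART:20300308T050000Z
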
@@ -1305,6 +1305,8 @@ class UnifiedJob(
             status_data['instance_group_name'] = None
         elif status in ['successful', 'failed', 'canceled'] and self.finished:
             status_data['finished'] = datetime.datetime.strftime(self.finished, "%Y-%m-%dT%H:%M:%S.%fZ")
+        elif status == 'running':
+            status_data['started'] = datetime.datetime.strftime(self.finished, "%Y-%m-%dT%H:%M:%S.%fZ")
         status_data.update(self.websocket_emit_data())
         status_data['group_name'] = 'jobs'
         if getattr(self, 'unified_job_template_id', None):

@@ -1465,23 +1467,23 @@ class UnifiedJob(
             self.job_explanation = job_explanation
             cancel_fields.append('job_explanation')

+        # Important to save here before sending cancel signal to dispatcher to cancel because
+        # the job control process will use the cancel_flag to distinguish a shutdown from a cancel
+        self.save(update_fields=cancel_fields)
+
         controller_notified = False
         if self.celery_task_id:
             controller_notified = self.cancel_dispatcher_process()
+        else:
+            # Avoid race condition where we have stale model from pending state but job has already started,
+            # it's checking signal but not cancel_flag, so re-send signal after this database commit
+            connection.on_commit(self.fallback_cancel)

         # If a SIGTERM signal was sent to the control process, and acked by the dispatcher
         # then we want to let its own cleanup change status, otherwise change status now
         if not controller_notified:
             if self.status != 'canceled':
                 self.status = 'canceled'
                 cancel_fields.append('status')
-            self.save(update_fields=cancel_fields)
-            # Avoid race condition where we have stale model from pending state but job has already started,
-            # it's checking signal but not cancel_flag, so re-send signal after updating cancel fields
-            self.fallback_cancel()
+            self.save(update_fields=['status'])

         return self.cancel_flag

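The reordering persists `cancel_flag` before any signal goes out, and the new `else` branch defers the fallback signal until the transaction commits so the job process cannot act on a row it cannot yet see. A minimal sketch of the save-then-on_commit pattern (hypothetical wrapper, not the AWX method):

from django.db import connection

def cancel(job):
    job.cancel_flag = True
    job.save(update_fields=['cancel_flag'])   # flag written inside the current transaction
    connection.on_commit(job.fallback_cancel)  # signal deferred until the row is visible to others
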
@@ -5,9 +5,6 @@ import json
 import logging
 import requests

-from django.utils.encoding import smart_str
-from django.utils.translation import gettext_lazy as _
-
 from awx.main.notifications.base import AWXBaseEmailBackend
 from awx.main.utils import get_awx_http_client_headers
 from awx.main.notifications.custom_notification_base import CustomNotificationBase

@@ -17,6 +14,8 @@ logger = logging.getLogger('awx.main.notifications.webhook_backend')

 class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
+
+    MAX_RETRIES = 5

     init_parameters = {
         "url": {"label": "Target URL", "type": "string"},
         "http_method": {"label": "HTTP Method", "type": "string", "default": "POST"},

@@ -64,20 +63,67 @@ class WebhookBackend(AWXBaseEmailBackend, CustomNotificationBase):
         if self.http_method.lower() not in ['put', 'post']:
             raise ValueError("HTTP method must be either 'POST' or 'PUT'.")
         chosen_method = getattr(requests, self.http_method.lower(), None)

         for m in messages:

             auth = None
             if self.username or self.password:
                 auth = (self.username, self.password)
-            r = chosen_method(
-                "{}".format(m.recipients()[0]),
-                auth=auth,
-                data=json.dumps(m.body, ensure_ascii=False).encode('utf-8'),
-                headers=dict(list(get_awx_http_client_headers().items()) + list((self.headers or {}).items())),
-                verify=(not self.disable_ssl_verification),
-            )
-            if r.status_code >= 400:
-                logger.error(smart_str(_("Error sending notification webhook: {}").format(r.status_code)))
-                if not self.fail_silently:
-                    raise Exception(smart_str(_("Error sending notification webhook: {}").format(r.status_code)))
-            sent_messages += 1
+
+            # the constructor for EmailMessage - https://docs.djangoproject.com/en/4.1/_modules/django/core/mail/message will turn an empty dictionary to an empty string
+            # sometimes an empty dict is intentional and we added this conditional to enforce that
+            if not m.body:
+                m.body = {}
+
+            url = str(m.recipients()[0])
+            data = json.dumps(m.body, ensure_ascii=False).encode('utf-8')
+            headers = {**(get_awx_http_client_headers()), **(self.headers or {})}
+
+            err = None
+
+            for retries in range(self.MAX_RETRIES):
+
+                # Sometimes we hit redirect URLs. We must account for this. We still extract the redirect URL from the response headers and try again. Max retries == 5
+                resp = chosen_method(
+                    url=url,
+                    auth=auth,
+                    data=data,
+                    headers=headers,
+                    verify=(not self.disable_ssl_verification),
+                    allow_redirects=False,  # override default behaviour for redirects
+                )
+
+                # either success or error reached if this conditional fires
+                if resp.status_code not in [301, 307]:
+                    break
+
+                # we've hit a redirect. extract the redirect URL out of the first response header and try again
+                logger.warning(
+                    f"Received a {resp.status_code} from {url}, trying to reach redirect url {resp.headers.get('Location', None)}; attempt #{retries+1}"
+                )
+
+                # take the first redirect URL in the response header and try that
+                url = resp.headers.get("Location", None)
+
+                if url is None:
+                    err = f"Webhook notification received redirect to a blank URL from {url}. Response headers={resp.headers}"
+                    break
+            else:
+                # no break condition in the loop encountered; therefore we have hit the maximum number of retries
+                err = f"Webhook notification max number of retries [{self.MAX_RETRIES}] exceeded. Failed to send webhook notification to {url}"
+
+            if resp.status_code >= 400:
+                err = f"Error sending webhook notification: {resp.status_code}"
+
+            # log error message
+            if err:
+                logger.error(err)
+                if not self.fail_silently:
+                    raise Exception(err)
+
+            # no errors were encountered therefore we successfully sent off the notification webhook
+            if resp.status_code in range(200, 299):
+                logger.debug(f"Notification webhook successfully sent to {url}. Received {resp.status_code}")
+                sent_messages += 1

         return sent_messages

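The retry loop leans on Python's `for`/`else`: the `else` suite runs only when the loop finishes without `break`, which here means every attempt came back as a redirect. A tiny self-contained illustration:

codes = [301, 307, 200]  # hypothetical response sequence
for attempt, status in enumerate(codes):
    if status not in (301, 307):
        print(f'done after {attempt + 1} attempts')
        break
else:
    print('max retries exceeded')  # reached only if no break occurred
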
@@ -3,6 +3,8 @@

 from django.db.models.signals import pre_save, post_save, pre_delete, m2m_changed

+from taggit.managers import TaggableManager
+

 class ActivityStreamRegistrar(object):
     def __init__(self):

@@ -19,6 +21,8 @@ class ActivityStreamRegistrar(object):
             pre_delete.connect(activity_stream_delete, sender=model, dispatch_uid=str(self.__class__) + str(model) + "_delete")

             for m2mfield in model._meta.many_to_many:
+                if isinstance(m2mfield, TaggableManager):
+                    continue  # Special case for taggit app
                 try:
                     m2m_attr = getattr(model, m2mfield.name)
                     m2m_changed.connect(

@@ -2,8 +2,6 @@ import json
 import time
 import logging
 from collections import deque
-import os
-import stat

 # Django
 from django.conf import settings

@@ -206,21 +204,6 @@ class RunnerCallback:
             self.instance = self.update_model(self.instance.pk, job_args=json.dumps(runner_config.command), job_cwd=runner_config.cwd, job_env=job_env)
             # We opened a connection just for that save, close it here now
             connections.close_all()
-        elif status_data['status'] == 'failed':
-            # For encrypted ssh_key_data, ansible-runner worker will open and write the
-            # ssh_key_data to a named pipe. Then, once the podman container starts, ssh-agent will
-            # read from this named pipe so that the key can be used in ansible-playbook.
-            # Once the podman container exits, the named pipe is deleted.
-            # However, if the podman container fails to start in the first place, e.g. the image
-            # name is incorrect, then this pipe is not cleaned up. Eventually ansible-runner
-            # processor will attempt to write artifacts to the private data dir via unstream_dir, requiring
-            # that it open this named pipe. This leads to a hang. Thus, before any artifacts
-            # are written by the processor, it's important to remove this ssh_key_data pipe.
-            private_data_dir = self.instance.job_env.get('AWX_PRIVATE_DATA_DIR', None)
-            if private_data_dir:
-                key_data_file = os.path.join(private_data_dir, 'artifacts', str(self.instance.id), 'ssh_key_data')
-                if os.path.exists(key_data_file) and stat.S_ISFIFO(os.stat(key_data_file).st_mode):
-                    os.remove(key_data_file)
         elif status_data['status'] == 'error':
             result_traceback = status_data.get('result_traceback', None)
             if result_traceback:

@@ -700,7 +700,7 @@ class SourceControlMixin(BaseTask):

     def spawn_project_sync(self, project, sync_needs, scm_branch=None):
         pu_ig = self.instance.instance_group
-        pu_en = Instance.objects.me().hostname
+        pu_en = Instance.objects.my_hostname()

         sync_metafields = dict(
             launch_type="sync",

@@ -767,6 +767,10 @@ class SourceControlMixin(BaseTask):

         try:
             original_branch = None
+            failed_reason = project.get_reason_if_failed()
+            if failed_reason:
+                self.update_model(self.instance.pk, status='failed', job_explanation=failed_reason)
+                raise RuntimeError(failed_reason)
             project_path = project.get_project_path(check_if_exists=False)
             if project.scm_type == 'git' and (scm_branch and scm_branch != project.scm_branch):
                 if os.path.exists(project_path):

@@ -1056,10 +1060,6 @@ class RunJob(SourceControlMixin, BaseTask):
             error = _('Job could not start because no Execution Environment could be found.')
             self.update_model(job.pk, status='error', job_explanation=error)
             raise RuntimeError(error)
-        elif job.project.status in ('error', 'failed'):
-            msg = _('The project revision for this job template is unknown due to a failed update.')
-            job = self.update_model(job.pk, status='failed', job_explanation=msg)
-            raise RuntimeError(msg)

         if job.inventory.kind == 'smart':
             # cache smart inventory memberships so that the host_filter query is not

@@ -208,7 +208,10 @@ def run_until_complete(node, timing_data=None, **kwargs):
         if state_name.lower() == 'failed':
             work_detail = status.get('Detail', '')
             if work_detail:
-                raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}')
+                if stdout:
+                    raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}\nstdout:\n{stdout}')
+                else:
+                    raise RemoteJobError(f'Receptor error from {node}, detail:\n{work_detail}')
             else:
                 raise RemoteJobError(f'Unknown ansible-runner error on node {node}, stdout:\n{stdout}')

@@ -4,8 +4,10 @@ from awx.api.versioning import reverse
 from awx.main.models.activity_stream import ActivityStream
 from awx.main.models.ha import Instance

+from django.test.utils import override_settings
+
-INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, memory=36000000000, cpu_capacity=6, mem_capacity=42)
+INSTANCE_KWARGS = dict(hostname='example-host', cpu=6, node_type='execution', memory=36000000000, cpu_capacity=6, mem_capacity=42)


 @pytest.mark.django_db

@@ -54,3 +56,33 @@ def test_health_check_usage(get, post, admin_user):
     get(url=url, user=admin_user, expect=200)
     r = post(url=url, user=admin_user, expect=200)
     assert r.data['msg'] == f"Health check is running for {instance.hostname}."
+
+
+def test_custom_hostname_regex(post, admin_user):
+    url = reverse('api:instance_list')
+    with override_settings(IS_K8S=True):
+        for value in [
+            ("foo.bar.baz", 201),
+            ("f.bar.bz", 201),
+            ("foo.bar.b", 400),
+            ("a.b.c", 400),
+            ("localhost", 400),
+            ("127.0.0.1", 400),
+            ("192.168.56.101", 201),
+            ("2001:0db8:85a3:0000:0000:8a2e:0370:7334", 201),
+            ("foobar", 201),
+            ("--yoooo", 400),
+            ("$3$@foobar@#($!@#*$", 400),
+            ("999.999.999.999", 201),
+            ("0000:0000:0000:0000:0000:0000:0000:0001", 400),
+            ("whitespaces are bad for hostnames", 400),
+            ("0:0:0:0:0:0:0:1", 400),
+            ("192.localhost.domain.101", 201),
+            ("F@$%(@#$H%^(I@#^HCTQEWRFG", 400),
+        ]:
+            data = {
+                "hostname": value[0],
+                "node_type": "execution",
+                "node_state": "installed",
+            }
+            post(url=url, user=admin_user, data=data, expect=value[1])

@@ -216,7 +216,7 @@ def test_instance_attach_to_instance_group(post, instance_group, node_type_insta

     count = ActivityStream.objects.count()

-    url = reverse(f'api:instance_group_instance_list', kwargs={'pk': instance_group.pk})
+    url = reverse('api:instance_group_instance_list', kwargs={'pk': instance_group.pk})
     post(url, {'associate': True, 'id': instance.id}, admin, expect=204 if node_type != 'control' else 400)

     new_activity = ActivityStream.objects.all()[count:]

@@ -240,7 +240,7 @@ def test_instance_unattach_from_instance_group(post, instance_group, node_type_i

     count = ActivityStream.objects.count()

-    url = reverse(f'api:instance_group_instance_list', kwargs={'pk': instance_group.pk})
+    url = reverse('api:instance_group_instance_list', kwargs={'pk': instance_group.pk})
     post(url, {'disassociate': True, 'id': instance.id}, admin, expect=204 if node_type != 'control' else 400)

     new_activity = ActivityStream.objects.all()[count:]

@@ -263,7 +263,7 @@ def test_instance_group_attach_to_instance(post, instance_group, node_type_insta

     count = ActivityStream.objects.count()

-    url = reverse(f'api:instance_instance_groups_list', kwargs={'pk': instance.pk})
+    url = reverse('api:instance_instance_groups_list', kwargs={'pk': instance.pk})
     post(url, {'associate': True, 'id': instance_group.id}, admin, expect=204 if node_type != 'control' else 400)

     new_activity = ActivityStream.objects.all()[count:]

@@ -287,7 +287,7 @@ def test_instance_group_unattach_from_instance(post, instance_group, node_type_i

     count = ActivityStream.objects.count()

-    url = reverse(f'api:instance_instance_groups_list', kwargs={'pk': instance.pk})
+    url = reverse('api:instance_instance_groups_list', kwargs={'pk': instance.pk})
     post(url, {'disassociate': True, 'id': instance_group.id}, admin, expect=204 if node_type != 'control' else 400)

     new_activity = ActivityStream.objects.all()[count:]

@@ -314,4 +314,4 @@ def test_cannot_remove_controlplane_hybrid_instances(post, controlplane_instance

     url = reverse('api:instance_instance_groups_list', kwargs={'pk': instance.pk})
     r = post(url, {'disassociate': True, 'id': controlplane_instance_group.id}, admin_user, expect=400)
-    assert f'Cannot disassociate hybrid instance' in str(r.data)
+    assert 'Cannot disassociate hybrid instance' in str(r.data)

@@ -105,6 +105,30 @@ def test_encrypted_survey_answer(post, patch, admin_user, project, inventory, su
     assert decrypt_value(get_encryption_key('value', pk=None), schedule.extra_data['var1']) == 'bar'


+@pytest.mark.django_db
+def test_survey_password_default(post, patch, admin_user, project, inventory, survey_spec_factory):
+    job_template = JobTemplate.objects.create(
+        name='test-jt',
+        project=project,
+        playbook='helloworld.yml',
+        inventory=inventory,
+        ask_variables_on_launch=False,
+        survey_enabled=True,
+        survey_spec=survey_spec_factory([{'variable': 'var1', 'question_name': 'Q1', 'type': 'password', 'required': True, 'default': 'foobar'}]),
+    )
+
+    # test removal of $encrypted$
+    url = reverse('api:job_template_schedules_list', kwargs={'pk': job_template.id})
+    r = post(url, {'name': 'test sch', 'rrule': RRULE_EXAMPLE, 'extra_data': '{"var1": "$encrypted$"}'}, admin_user, expect=201)
+    schedule = Schedule.objects.get(pk=r.data['id'])
+    assert schedule.extra_data == {}
+    assert schedule.enabled is True
+
+    # test an unrelated change
+    patch(schedule.get_absolute_url(), data={'enabled': False}, user=admin_user, expect=200)
+    patch(schedule.get_absolute_url(), data={'enabled': True}, user=admin_user, expect=200)
+
+
 @pytest.mark.django_db
 @pytest.mark.parametrize(
     'rrule, error',

@@ -123,19 +147,19 @@ def test_encrypted_survey_answer(post, patch, admin_user, project, inventory, su
         ("DTSTART:20030925T104941Z RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z", "RRULE may not contain both COUNT and UNTIL"),  # noqa
         ("DTSTART:20300308T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000", "COUNT > 999 is unsupported"),  # noqa
         # Individual rule test with multiple rules
-        ## Bad Rule: RRULE:NONSENSE
+        # Bad Rule: RRULE:NONSENSE
         ("DTSTART:20300308T050000Z RRULE:NONSENSE RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU", "INTERVAL required in rrule"),
-        ## Bad Rule: RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO
+        # Bad Rule: RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO
         (
            "DTSTART:20300308T050000Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=YEARLY;INTERVAL=1;BYDAY=5MO",
            "BYDAY with numeric prefix not supported",
         ),  # noqa
-        ## Bad Rule: RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z
+        # Bad Rule: RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z
         (
            "DTSTART:20030925T104941Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=DAILY;INTERVAL=10;COUNT=500;UNTIL=20040925T104941Z",
            "RRULE may not contain both COUNT and UNTIL",
         ),  # noqa
-        ## Bad Rule: RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000
+        # Bad Rule: RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000
         (
            "DTSTART:20300308T050000Z RRULE:INTERVAL=1;FREQ=DAILY EXRULE:FREQ=WEEKLY;INTERVAL=1;BYDAY=SU RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2000",
            "COUNT > 999 is unsupported",

@@ -75,6 +75,7 @@ def test_encrypted_subfields(get, post, user, organization):
     url = reverse('api:notification_template_detail', kwargs={'pk': response.data['id']})
     response = get(url, u)
     assert response.data['notification_configuration']['account_token'] == "$encrypted$"
+
     with mock.patch.object(notification_template_actual.notification_class, "send_messages", assert_send):
         notification_template_actual.send("Test", {'body': "Test"})

@@ -175,3 +176,46 @@ def test_custom_environment_injection(post, user, organization):

     fake_send.side_effect = _send_side_effect
     template.send('subject', 'message')
+
+
+def mock_post(*args, **kwargs):
+    class MockGoodResponse:
+        def __init__(self):
+            self.status_code = 200
+
+    class MockRedirectResponse:
+        def __init__(self):
+            self.status_code = 301
+            self.headers = {"Location": "http://goodendpoint"}
+
+    if kwargs['url'] == "http://goodendpoint":
+        return MockGoodResponse()
+    else:
+        return MockRedirectResponse()
+
+
+@pytest.mark.django_db
+@mock.patch('requests.post', side_effect=mock_post)
+def test_webhook_notification_pointed_to_a_redirect_launch_endpoint(post, admin, organization):
+
+    n1 = NotificationTemplate.objects.create(
+        name="test-webhook",
+        description="test webhook",
+        organization=organization,
+        notification_type="webhook",
+        notification_configuration=dict(
+            url="http://some.fake.url",
+            disable_ssl_verification=True,
+            http_method="POST",
+            headers={
+                "Content-Type": "application/json",
+            },
+            username=admin.username,
+            password=admin.password,
+        ),
+        messages={
+            "success": {"message": "", "body": "{}"},
+        },
+    )
+
+    assert n1.send("", n1.messages.get("success").get("body")) == 1

@@ -5,7 +5,8 @@ from unittest import mock

 from collections import namedtuple

-from awx.api.views import ApiVersionRootView, JobTemplateLabelList, InventoryInventorySourcesUpdate, JobTemplateSurveySpec
+from awx.api.views.root import ApiVersionRootView
+from awx.api.views import JobTemplateLabelList, InventoryInventorySourcesUpdate, JobTemplateSurveySpec

 from awx.main.views import handle_error

@@ -23,7 +24,7 @@ class TestApiRootView:
         endpoints = [
             'ping',
             'config',
-            #'settings',
+            # 'settings',
             'me',
             'dashboard',
             'organizations',

@@ -50,7 +50,10 @@ def test_cancel(unified_job):
     # Some more thought may want to go into only emitting canceled if/when the job record
     # status is changed to canceled. Unlike, currently, where it's emitted unconditionally.
     unified_job.websocket_emit_status.assert_called_with("canceled")
-    unified_job.save.assert_called_with(update_fields=['cancel_flag', 'start_args', 'status'])
+    assert [(args, kwargs) for args, kwargs in unified_job.save.call_args_list] == [
+        ((), {'update_fields': ['cancel_flag', 'start_args']}),
+        ((), {'update_fields': ['status']}),
+    ]


 def test_cancel_job_explanation(unified_job):

@@ -60,7 +63,10 @@ def test_cancel_job_explanation(unified_job):
     unified_job.cancel(job_explanation=job_explanation)

     assert unified_job.job_explanation == job_explanation
-    unified_job.save.assert_called_with(update_fields=['cancel_flag', 'start_args', 'job_explanation', 'status'])
+    assert [(args, kwargs) for args, kwargs in unified_job.save.call_args_list] == [
+        ((), {'update_fields': ['cancel_flag', 'start_args', 'job_explanation']}),
+        ((), {'update_fields': ['status']}),
+    ]


 def test_organization_copy_to_jobs():

|
||||
]
|
||||
)
|
||||
requests_mock.post.assert_called_once_with(
|
||||
'http://example.com',
|
||||
url='http://example.com',
|
||||
auth=None,
|
||||
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
|
||||
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
|
||||
verify=True,
|
||||
allow_redirects=False,
|
||||
)
|
||||
assert sent_messages == 1
|
||||
|
||||
@@ -57,11 +58,12 @@ def test_send_messages_as_PUT():
|
||||
]
|
||||
)
|
||||
requests_mock.put.assert_called_once_with(
|
||||
'http://example.com',
|
||||
url='http://example.com',
|
||||
auth=None,
|
||||
data=json.dumps({'text': 'test body 2'}, ensure_ascii=False).encode('utf-8'),
|
||||
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
|
||||
verify=True,
|
||||
allow_redirects=False,
|
||||
)
|
||||
assert sent_messages == 1
|
||||
|
||||
@@ -87,11 +89,12 @@ def test_send_messages_with_username():
|
||||
]
|
||||
)
|
||||
requests_mock.post.assert_called_once_with(
|
||||
'http://example.com',
|
||||
url='http://example.com',
|
||||
auth=('userstring', None),
|
||||
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
|
||||
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
|
||||
verify=True,
|
||||
allow_redirects=False,
|
||||
)
|
||||
assert sent_messages == 1
|
||||
|
||||
@@ -117,11 +120,12 @@ def test_send_messages_with_password():
|
||||
]
|
||||
)
|
||||
requests_mock.post.assert_called_once_with(
|
||||
'http://example.com',
|
||||
url='http://example.com',
|
||||
auth=(None, 'passwordstring'),
|
||||
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
|
||||
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
|
||||
verify=True,
|
||||
allow_redirects=False,
|
||||
)
|
||||
assert sent_messages == 1
|
||||
|
||||
@@ -147,11 +151,12 @@ def test_send_messages_with_username_and_password():
|
||||
]
|
||||
)
|
||||
requests_mock.post.assert_called_once_with(
|
||||
'http://example.com',
|
||||
url='http://example.com',
|
||||
auth=('userstring', 'passwordstring'),
|
||||
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
|
||||
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
|
||||
verify=True,
|
||||
allow_redirects=False,
|
||||
)
|
||||
assert sent_messages == 1
|
||||
|
||||
@@ -177,11 +182,12 @@ def test_send_messages_with_no_verify_ssl():
|
||||
]
|
||||
)
|
||||
requests_mock.post.assert_called_once_with(
|
||||
'http://example.com',
|
||||
url='http://example.com',
|
||||
auth=None,
|
||||
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
|
||||
headers={'Content-Type': 'application/json', 'User-Agent': 'AWX 0.0.1.dev (open)'},
|
||||
verify=False,
|
||||
allow_redirects=False,
|
||||
)
|
||||
assert sent_messages == 1
|
||||
|
||||
@@ -207,7 +213,7 @@ def test_send_messages_with_additional_headers():
|
||||
]
|
||||
)
|
||||
requests_mock.post.assert_called_once_with(
|
||||
'http://example.com',
|
||||
url='http://example.com',
|
||||
auth=None,
|
||||
data=json.dumps({'text': 'test body'}, ensure_ascii=False).encode('utf-8'),
|
||||
headers={
|
||||
@@ -217,5 +223,6 @@ def test_send_messages_with_additional_headers():
|
||||
'X-Test-Header2': 'test-content-2',
|
||||
},
|
||||
verify=True,
|
||||
allow_redirects=False,
|
||||
)
|
||||
assert sent_messages == 1
|
||||
|
||||
@@ -3,6 +3,7 @@
 # Copyright (c) 2017 Ansible, Inc.
 # All Rights Reserved.
 import os
+import re
 import pytest
 from uuid import uuid4
 import json

@@ -12,9 +13,13 @@ from unittest import mock
 from rest_framework.exceptions import ParseError

 from awx.main.utils import common
+from awx.api.validators import HostnameRegexValidator

 from awx.main.models import Job, AdHocCommand, InventoryUpdate, ProjectUpdate, SystemJob, WorkflowJob, Inventory, JobTemplate, UnifiedJobTemplate, UnifiedJob

+from django.core.exceptions import ValidationError
+from django.utils.regex_helper import _lazy_re_compile
+

 @pytest.mark.parametrize(
     'input_, output',

@@ -194,3 +199,136 @@ def test_extract_ansible_vars():
     redacted, var_list = common.extract_ansible_vars(json.dumps(my_dict))
     assert var_list == set(['ansible_connetion_setting'])
     assert redacted == {"foobar": "baz"}
+
+
+@pytest.mark.parametrize(
+    'scm_type, url, username, password, check_special_cases, scp_format, expected',
+    [
+        # General/random cases
+        ('git', '', True, True, True, False, ''),
+        ('git', 'git://example.com/foo.git', True, True, True, False, 'git://example.com/foo.git'),
+        ('git', 'http://example.com/foo.git', True, True, True, False, 'http://example.com/foo.git'),
+        ('git', 'example.com:bar.git', True, True, True, False, 'git+ssh://example.com/bar.git'),
+        ('git', 'user@example.com:bar.git', True, True, True, False, 'git+ssh://user@example.com/bar.git'),
+        ('git', '127.0.0.1:bar.git', True, True, True, False, 'git+ssh://127.0.0.1/bar.git'),
+        ('git', 'git+ssh://127.0.0.1/bar.git', True, True, True, True, '127.0.0.1:bar.git'),
+        ('git', 'ssh://127.0.0.1:22/bar.git', True, True, True, False, 'ssh://127.0.0.1:22/bar.git'),
+        ('git', 'ssh://root@127.0.0.1:22/bar.git', True, True, True, False, 'ssh://root@127.0.0.1:22/bar.git'),
+        ('git', 'some/path', True, True, True, False, 'file:///some/path'),
+        ('git', '/some/path', True, True, True, False, 'file:///some/path'),
+        # Invalid URLs - ensure we error properly
+        ('cvs', 'anything', True, True, True, False, ValueError('Unsupported SCM type "cvs"')),
+        ('svn', 'anything-without-colon-slash-slash', True, True, True, False, ValueError('Invalid svn URL')),
+        ('git', 'http://example.com:123invalidport/foo.git', True, True, True, False, ValueError('Invalid git URL')),
+        ('git', 'git+ssh://127.0.0.1/bar.git', True, True, True, False, ValueError('Unsupported git URL')),
+        ('git', 'git@example.com:3000:/git/repo.git', True, True, True, False, ValueError('Invalid git URL')),
+        ('insights', 'git://example.com/foo.git', True, True, True, False, ValueError('Unsupported insights URL')),
+        ('svn', 'file://example/path', True, True, True, False, ValueError('Unsupported host "example" for file:// URL')),
+        ('svn', 'svn:///example', True, True, True, False, ValueError('Host is required for svn URL')),
+        # Username/password cases
+        ('git', 'https://example@example.com/bar.git', False, True, True, False, 'https://example.com/bar.git'),
+        ('git', 'https://example@example.com/bar.git', 'user', True, True, False, 'https://user@example.com/bar.git'),
+        ('git', 'https://example@example.com/bar.git', 'user:pw', True, True, False, 'https://user%3Apw@example.com/bar.git'),
+        ('git', 'https://example@example.com/bar.git', False, 'pw', True, False, 'https://example.com/bar.git'),
+        ('git', 'https://some:example@example.com/bar.git', True, False, True, False, 'https://some@example.com/bar.git'),
+        ('git', 'https://some:example@example.com/bar.git', False, False, True, False, 'https://example.com/bar.git'),
+        ('git', 'https://example.com/bar.git', 'user', 'pw', True, False, 'https://user:pw@example.com/bar.git'),
+        ('git', 'https://example@example.com/bar.git', False, 'something', True, False, 'https://example.com/bar.git'),
+        # Special github/bitbucket cases
+        ('git', 'notgit@github.com:ansible/awx.git', True, True, True, False, ValueError('Username must be "git" for SSH access to github.com.')),
+        (
+            'git',
+            'notgit@bitbucket.org:does-not-exist/example.git',
+            True,
+            True,
+            True,
+            False,
+            ValueError('Username must be "git" for SSH access to bitbucket.org.'),
+        ),
+        (
+            'git',
+            'notgit@altssh.bitbucket.org:does-not-exist/example.git',
+            True,
+            True,
+            True,
+            False,
+            ValueError('Username must be "git" for SSH access to altssh.bitbucket.org.'),
+        ),
+        ('git', 'git:password@github.com:ansible/awx.git', True, True, True, False, 'git+ssh://git@github.com/ansible/awx.git'),
+        # Disabling the special handling should not raise an error
+        ('git', 'notgit@github.com:ansible/awx.git', True, True, False, False, 'git+ssh://notgit@github.com/ansible/awx.git'),
+        ('git', 'notgit@bitbucket.org:does-not-exist/example.git', True, True, False, False, 'git+ssh://notgit@bitbucket.org/does-not-exist/example.git'),
+        (
+            'git',
+            'notgit@altssh.bitbucket.org:does-not-exist/example.git',
+            True,
+            True,
+            False,
+            False,
+            'git+ssh://notgit@altssh.bitbucket.org/does-not-exist/example.git',
+        ),
+        # awx#12992 - IPv6
+        ('git', 'http://[fd00:1234:2345:6789::11]:3000/foo.git', True, True, True, False, 'http://[fd00:1234:2345:6789::11]:3000/foo.git'),
+        ('git', 'http://foo:bar@[fd00:1234:2345:6789::11]:3000/foo.git', True, True, True, False, 'http://foo:bar@[fd00:1234:2345:6789::11]:3000/foo.git'),
+        ('git', 'example@[fd00:1234:2345:6789::11]:example/foo.git', True, True, True, False, 'git+ssh://example@[fd00:1234:2345:6789::11]/example/foo.git'),
+    ],
+)
+def test_update_scm_url(scm_type, url, username, password, check_special_cases, scp_format, expected):
+    if isinstance(expected, Exception):
+        with pytest.raises(type(expected)) as excinfo:
+            common.update_scm_url(scm_type, url, username, password, check_special_cases, scp_format)
+        assert str(excinfo.value) == str(expected)
+    else:
+        assert common.update_scm_url(scm_type, url, username, password, check_special_cases, scp_format) == expected

+
+class TestHostnameRegexValidator:
+    @pytest.fixture
+    def regex_expr(self):
+        return '^[a-z0-9][-a-z0-9]*$|^([a-z0-9][-a-z0-9]{0,62}[.])*[a-z0-9][-a-z0-9]{1,62}$'
+
+    @pytest.fixture
+    def re_flags(self):
+        return re.IGNORECASE
+
+    @pytest.fixture
+    def custom_err_message(self):
+        return "foobar"
+
+    def test_hostame_regex_validator_constructor_with_args(self, regex_expr, re_flags, custom_err_message):
+        h = HostnameRegexValidator(regex=regex_expr, flags=re_flags, message=custom_err_message)
+        assert h.regex == _lazy_re_compile(regex_expr, re_flags)
+        assert h.message == 'foobar'
+        assert h.code == 'invalid'
+        assert h.inverse_match == False
+        assert h.flags == re_flags
+
+    def test_hostame_regex_validator_default_constructor(self, regex_expr, re_flags):
+        h = HostnameRegexValidator()
+        assert h.regex == _lazy_re_compile(regex_expr, re_flags)
+        assert h.message == 'Enter a valid value.'
+        assert h.code == 'invalid'
+        assert h.inverse_match == False
+        assert h.flags == re_flags
+
+    def test_good_call(self, regex_expr, re_flags):
+        h = HostnameRegexValidator(regex=regex_expr, flags=re_flags)
+        assert (h("192.168.56.101"), None)
+
+    def test_bad_call(self, regex_expr, re_flags):
+        h = HostnameRegexValidator(regex=regex_expr, flags=re_flags)
+        try:
+            h("@#$%)$#(TUFAS_DG")
+        except ValidationError as e:
+            assert e.message is not None
+
+    def test_good_call_with_inverse(self, regex_expr, re_flags, inverse_match=True):
+        h = HostnameRegexValidator(regex=regex_expr, flags=re_flags, inverse_match=inverse_match)
+        try:
+            h("1.2.3.4")
+        except ValidationError as e:
+            assert e.message is not None
+
+    def test_bad_call_with_inverse(self, regex_expr, re_flags, inverse_match=True):
+        h = HostnameRegexValidator(regex=regex_expr, flags=re_flags, inverse_match=inverse_match)
+        assert (h("@#$%)$#(TUFAS_DG"), None)

@@ -264,9 +264,15 @@ def update_scm_url(scm_type, url, username=True, password=True, check_special_ca
         userpass, hostpath = url.split('@', 1)
     else:
         userpass, hostpath = '', url
-    if hostpath.count(':') > 1:
+    # Handle IPv6 here. In this case, we might have hostpath of:
+    # [fd00:1234:2345:6789::11]:example/foo.git
+    if hostpath.startswith('[') and ']:' in hostpath:
+        host, path = hostpath.split(']:', 1)
+        host = host + ']'
+    elif hostpath.count(':') > 1:
         raise ValueError(_('Invalid %s URL') % scm_type)
-    host, path = hostpath.split(':', 1)
+    else:
+        host, path = hostpath.split(':', 1)
     # if not path.startswith('/') and not path.startswith('~/'):
     #     path = '~/%s' % path
     # if path.startswith('/'):

@@ -325,7 +331,11 @@
         netloc = u':'.join([urllib.parse.quote(x, safe='') for x in (netloc_username, netloc_password) if x])
     else:
         netloc = u''
-    netloc = u'@'.join(filter(None, [netloc, parts.hostname]))
+    # urllib.parse strips brackets from IPv6 addresses, so we need to add them back in
+    hostname = parts.hostname
+    if hostname and ':' in hostname and '[' in url and ']' in url:
+        hostname = f'[{hostname}]'
+    netloc = u'@'.join(filter(None, [netloc, hostname]))
     if parts.port:
         netloc = u':'.join([netloc, str(parts.port)])
     new_url = urllib.parse.urlunsplit([parts.scheme, netloc, parts.path, parts.query, parts.fragment])

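Splitting on `']:'` before counting colons is what keeps a bracketed IPv6 address intact, since the address itself is full of colons. A standalone check of the same logic:

hostpath = '[fd00:1234:2345:6789::11]:example/foo.git'
if hostpath.startswith('[') and ']:' in hostpath:
    host, path = hostpath.split(']:', 1)
    host = host + ']'
print(host, path)  # [fd00:1234:2345:6789::11] example/foo.git
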
@@ -110,7 +110,7 @@ if settings.COLOR_LOGS is True:
                 # logs rendered with cyan text
                 previous_level_map = self.level_map.copy()
                 if record.name == "awx.analytics.job_lifecycle":
-                    self.level_map[logging.DEBUG] = (None, 'cyan', True)
+                    self.level_map[logging.INFO] = (None, 'cyan', True)
                 msg = super(ColorHandler, self).colorize(line, record)
                 self.level_map = previous_level_map
                 return msg

@@ -35,7 +35,7 @@ def unwrap_broadcast_msg(payload: dict):
 def get_broadcast_hosts():
     Instance = apps.get_model('main', 'Instance')
     instances = (
-        Instance.objects.exclude(hostname=Instance.objects.me().hostname)
+        Instance.objects.exclude(hostname=Instance.objects.my_hostname())
         .exclude(node_type='execution')
         .exclude(node_type='hop')
         .order_by('hostname')

@@ -47,7 +47,7 @@ def get_broadcast_hosts():

 def get_local_host():
     Instance = apps.get_model('main', 'Instance')
-    return Instance.objects.me().hostname
+    return Instance.objects.my_hostname()


 class WebsocketTask:

@@ -5,7 +5,6 @@ __metaclass__ = type
 import gnupg
 import os
 import tempfile
-from ansible.module_utils.basic import *
 from ansible.plugins.action import ActionBase
 from ansible.utils.display import Display

@@ -15,7 +14,7 @@ from ansible_sign.checksum import (
     InvalidChecksumLine,
 )
 from ansible_sign.checksum.differ import DistlibManifestChecksumFileExistenceDiffer
-from ansible_sign.signing import *
+from ansible_sign.signing import GPGVerifier

 display = Display()

@@ -101,7 +101,7 @@ USE_L10N = True

 USE_TZ = True

-STATICFILES_DIRS = (os.path.join(BASE_DIR, 'ui', 'build', 'static'), os.path.join(BASE_DIR, 'static'))
+STATICFILES_DIRS = [os.path.join(BASE_DIR, 'ui', 'build', 'static'), os.path.join(BASE_DIR, 'static')]

 # Absolute filesystem path to the directory where static files are collected via
 # the collectstatic command.

@@ -254,6 +254,14 @@ START_TASK_LIMIT = 100
 TASK_MANAGER_TIMEOUT = 300
 TASK_MANAGER_TIMEOUT_GRACE_PERIOD = 60

+# Number of seconds _in addition to_ the task manager timeout a job can stay
+# in waiting without being reaped
+JOB_WAITING_GRACE_PERIOD = 60
+
+# Number of seconds after a container group job finished time to wait
+# before the awx_k8s_reaper task will tear down the pods
+K8S_POD_REAPER_GRACE_PERIOD = 60
+
 # Disallow sending session cookies over insecure connections
 SESSION_COOKIE_SECURE = True

@@ -360,7 +368,7 @@ REST_FRAMEWORK = {
     # For swagger schema generation
     # see https://github.com/encode/django-rest-framework/pull/6532
     'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.AutoSchema',
-    #'URL_FORMAT_OVERRIDE': None,
+    # 'URL_FORMAT_OVERRIDE': None,
 }

 AUTHENTICATION_BACKENDS = (

@@ -1004,16 +1012,5 @@ DEFAULT_CONTAINER_RUN_OPTIONS = ['--network', 'slirp4netns:enable_ipv6=true']
 # Mount exposed paths as hostPath resource in k8s/ocp
 AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False

-# Time out task managers if they take longer than this many seconds
-TASK_MANAGER_TIMEOUT = 300
-
-# Number of seconds _in addition to_ the task manager timeout a job can stay
-# in waiting without being reaped
-JOB_WAITING_GRACE_PERIOD = 60
-
-# Number of seconds after a container group job finished time to wait
-# before the awx_k8s_reaper task will tear down the pods
-K8S_POD_REAPER_GRACE_PERIOD = 60
-
 # This is overridden downstream via /etc/tower/conf.d/cluster_host_id.py
 CLUSTER_HOST_ID = socket.gethostname()

@@ -101,5 +101,5 @@ except IOError:
 # The below runs AFTER all of the custom settings are imported.

 DATABASES.setdefault('default', dict()).setdefault('OPTIONS', dict()).setdefault(
-    'application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]
+    'application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]  # NOQA
 )  # noqa

@@ -11,9 +11,11 @@ import ldap
 # Django
 from django.dispatch import receiver
 from django.contrib.auth.models import User
+from django.contrib.contenttypes.models import ContentType
 from django.conf import settings as django_settings
 from django.core.signals import setting_changed
 from django.utils.encoding import force_str
+from django.db.utils import IntegrityError

 # django-auth-ldap
 from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings

@@ -327,31 +329,32 @@ class SAMLAuth(BaseSAMLAuth):
         return super(SAMLAuth, self).get_user(user_id)


-def _update_m2m_from_groups(user, ldap_user, related, opts, remove=True):
+def _update_m2m_from_groups(ldap_user, opts, remove=True):
     """
-    Helper function to update m2m relationship based on LDAP group membership.
+    Helper function to evaluate the LDAP team/org options to determine if LDAP user should
+    be a member of the team/org based on their ldap group dns.
+
+    Returns:
+        True - User should be added
+        False - User should be removed
+        None - User's membership should not be changed
     """
-    should_add = False
     if opts is None:
-        return
+        return None
     elif not opts:
         pass
-    elif opts is True:
-        should_add = True
+    elif isinstance(opts, bool) and opts is True:
+        return True
     else:
         if isinstance(opts, str):
            opts = [opts]
         # If any of the users groups matches any of the list options
         for group_dn in opts:
             if not isinstance(group_dn, str):
                 continue
             if ldap_user._get_groups().is_member_of(group_dn):
-                should_add = True
-    if should_add:
-        user.save()
-        related.add(user)
-    elif remove and user in related.all():
-        user.save()
-        related.remove(user)
+                return True
+    return False

@receiver(populate_user, dispatch_uid='populate-ldap-user')
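The refactored helper above no longer touches memberships itself; it returns a tri-state decision (True add, False remove, None leave alone) that the caller collects into desired-state maps and reconciles in one pass. A sketch of consuming that contract (hypothetical map contents):

desired_org_states = {
    'Engineering': {'admin_role': True, 'auditor_role': None, 'member_role': False},
}
for org, roles in desired_org_states.items():
    for role, state in roles.items():
        if state is None:
            continue  # mapping not configured; leave membership unchanged
        print(('grant' if state else 'revoke'), role, 'on', org)
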
@@ -383,31 +386,73 @@ def on_populate_user(sender, **kwargs):
             force_user_update = True
             logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))

     # Update organization membership based on group memberships.
     org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
-    for org_name, org_opts in org_map.items():
-        org, created = Organization.objects.get_or_create(name=org_name)
-        remove = bool(org_opts.get('remove', True))
-        admins_opts = org_opts.get('admins', None)
-        remove_admins = bool(org_opts.get('remove_admins', remove))
-        _update_m2m_from_groups(user, ldap_user, org.admin_role.members, admins_opts, remove_admins)
-        auditors_opts = org_opts.get('auditors', None)
-        remove_auditors = bool(org_opts.get('remove_auditors', remove))
-        _update_m2m_from_groups(user, ldap_user, org.auditor_role.members, auditors_opts, remove_auditors)
-        users_opts = org_opts.get('users', None)
-        remove_users = bool(org_opts.get('remove_users', remove))
-        _update_m2m_from_groups(user, ldap_user, org.member_role.members, users_opts, remove_users)

     # Update team membership based on group memberships.
     team_map = getattr(backend.settings, 'TEAM_MAP', {})

+    # Move this junk into save of the settings for performance later, there is no need to do that here
+    # with maybe the exception of someone defining this in settings before the server is started?
+    # ==============================================================================================================
+
+    # Get all of the IDs and names of orgs in the DB and create any new org defined in LDAP that does not exist in the DB
+    existing_orgs = {}
+    for (org_id, org_name) in Organization.objects.all().values_list('id', 'name'):
+        existing_orgs[org_name] = org_id
+
+    # Create any orgs (if needed) for all entries in the org and team maps
+    for org_name in set(list(org_map.keys()) + [item.get('organization', None) for item in team_map.values()]):
+        if org_name and org_name not in existing_orgs:
+            logger.info("LDAP adapter is creating org {}".format(org_name))
+            try:
+                new_org = Organization.objects.create(name=org_name)
+            except IntegrityError:
+                # Another thread must have created this org before we did so now we need to get it
+                new_org = Organization.objects.get(name=org_name)
+            # Add the org name to the existing orgs since we created it and we may need it to build the teams below
+            existing_orgs[org_name] = new_org.id
+
+    # Do the same for teams
+    existing_team_names = list(Team.objects.all().values_list('name', flat=True))
+    for team_name, team_opts in team_map.items():
+        if not team_opts.get('organization', None):
+            # You can't save the LDAP config in the UI w/o an org (or '' or null as the org) so if we somehow got this condition it's an error
+            logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))
+            continue
+        if team_name not in existing_team_names:
+            try:
+                Team.objects.create(name=team_name, organization_id=existing_orgs[team_opts['organization']])
+            except IntegrityError:
+                # If another process got here before us that is ok because we don't need the ID from this team or anything
+                pass
+    # End move some day
+    # ==============================================================================================================
+
+    # Compute in memory what the state is of the different LDAP orgs
+    org_roles_and_ldap_attributes = {'admin_role': 'admins', 'auditor_role': 'auditors', 'member_role': 'users'}
+    desired_org_states = {}
+    for org_name, org_opts in org_map.items():
+        remove = bool(org_opts.get('remove', True))
+        desired_org_states[org_name] = {}
+        for org_role_name in org_roles_and_ldap_attributes.keys():
+            ldap_name = org_roles_and_ldap_attributes[org_role_name]
+            opts = org_opts.get(ldap_name, None)
+            remove = bool(org_opts.get('remove_{}'.format(ldap_name), remove))
+            desired_org_states[org_name][org_role_name] = _update_m2m_from_groups(ldap_user, opts, remove)

+        # If everything returned None (because there was no configuration) we can remove this org from our map
+        # This will prevent us from loading the org in the next query
+        if all(desired_org_states[org_name][org_role_name] is None for org_role_name in org_roles_and_ldap_attributes.keys()):
+            del desired_org_states[org_name]

+    # Compute in memory what the state is of the different LDAP teams
+    desired_team_states = {}
     for team_name, team_opts in team_map.items():
         if 'organization' not in team_opts:
             continue
         org, created = Organization.objects.get_or_create(name=team_opts['organization'])
         team, created = Team.objects.get_or_create(name=team_name, organization=org)
         users_opts = team_opts.get('users', None)
         remove = bool(team_opts.get('remove', True))
-        _update_m2m_from_groups(user, ldap_user, team.member_role.members, users_opts, remove)
+        state = _update_m2m_from_groups(ldap_user, users_opts, remove)
+        if state is not None:
+            desired_team_states[team_name] = {'member_role': state}

     # Check if user.profile is available, otherwise force user.save()
     try:

@@ -423,3 +468,62 @@ def on_populate_user(sender, **kwargs):
     if profile.ldap_dn != ldap_user.dn:
         profile.ldap_dn = ldap_user.dn
         profile.save()
+
+    reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, 'LDAP')
+
+
+def reconcile_users_org_team_mappings(user, desired_org_states, desired_team_states, source):
+    from awx.main.models import Organization, Team
+
+    content_types = []
+    reconcile_items = []
+    if desired_org_states:
+        content_types.append(ContentType.objects.get_for_model(Organization))
+        reconcile_items.append(('organization', desired_org_states, Organization))
+    if desired_team_states:
+        content_types.append(ContentType.objects.get_for_model(Team))
+        reconcile_items.append(('team', desired_team_states, Team))
+
+    if not content_types:
+        # If both desired states were empty we can simply return because there is nothing to reconcile
+        return
+
+    # users_roles is a flat set of IDs
+    users_roles = set(user.roles.filter(content_type__in=content_types).values_list('pk', flat=True))
+
+    for object_type, desired_states, model in reconcile_items:
+        # Get all of the roles in the desired states for efficient DB extraction
+        roles = []
+        for sub_dict in desired_states.values():
+            for role_name in sub_dict:
+                if sub_dict[role_name] is None:
+                    continue
+                if role_name not in roles:
+                    roles.append(role_name)
+
+        # Get a set of named tuples for the org/team name plus all of the roles we got above
+        model_roles = model.objects.filter(name__in=desired_states.keys()).values_list('name', *roles, named=True)
+        for row in model_roles:
+            for role_name in roles:
+                desired_state = desired_states.get(row.name, {})
+                if desired_state[role_name] is None:
+                    # The mapping was not defined for this [org/team]/role so we can just pass
+                    pass
+
+                # If somehow the auth adapter knows about an item's role but that role is not defined in the DB we are going to print a pretty error
+                # This is your classic safety net that we should never hit; but here you are reading this comment... good luck and Godspeed.
+                role_id = getattr(row, role_name, None)
+                if role_id is None:
+                    logger.error("{} adapter wanted to manage role {} of {} {} but that role is not defined".format(source, role_name, object_type, row.name))
+                    continue
+
+                if desired_state[role_name]:
+                    # The desired state was the user mapped into the object_type, if the user was not mapped in map them in
+                    if role_id not in users_roles:
+                        logger.debug("{} adapter adding user {} to {} {} as {}".format(source, user.username, object_type, row.name, role_name))
+                        user.roles.add(role_id)
+                else:
+                    # The desired state was the user was not mapped into the org, if the user has the permission remove it
+                    if role_id in users_roles:
+                        logger.debug("{} adapter removing user {} permission of {} from {} {}".format(source, user.username, role_name, object_type, row.name))
+                        user.roles.remove(role_id)

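The reconcile pass relies on `values_list(..., named=True)`: naming a foreign key field such as a role yields that row's related primary key on the namedtuple, which is why `getattr(row, role_name)` above produces an ID that can be handed to `user.roles.add()` or `.remove()`. A minimal Django-style sketch (assuming `admin_role` is a FK; values illustrative):

rows = Organization.objects.filter(name__in=['Engineering']).values_list('name', 'admin_role', named=True)
for row in rows:
    print(row.name, row.admin_role)  # e.g. Engineering 42 (the Role pk)
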
@@ -53,7 +53,7 @@ SOCIAL_AUTH_ORGANIZATION_MAP_HELP_TEXT = _(
    '''\
Mapping to organization admins/users from social auth accounts. This setting
controls which users are placed into which organizations based on their
-username and email address. Configuration details are available in the 
+username and email address. Configuration details are available in the
documentation.\
'''
)

@@ -6,7 +6,7 @@ _values_to_change = ['is_superuser_value', 'is_superuser_role', 'is_system_audit

 def _get_setting():
     with connection.cursor() as cursor:
-        cursor.execute(f'SELECT value FROM conf_setting WHERE key= %s', ['SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'])
+        cursor.execute('SELECT value FROM conf_setting WHERE key= %s', ['SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'])
         row = cursor.fetchone()
         if row == None:
             return {}

@@ -24,7 +24,7 @@ def _get_setting():

 def _set_setting(value):
     with connection.cursor() as cursor:
-        cursor.execute(f'UPDATE conf_setting SET value = %s WHERE key = %s', [json.dumps(value), 'SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'])
+        cursor.execute('UPDATE conf_setting SET value = %s WHERE key = %s', [json.dumps(value), 'SOCIAL_AUTH_SAML_USER_FLAGS_BY_ATTR'])


 def forwards(app, schema_editor):

@@ -163,9 +163,9 @@ class TestSAMLAttr:
                 'PersonImmutableID': [],
             },
         },
-        #'social': <UserSocialAuth: cmeyers@redhat.com>,
+        # 'social': <UserSocialAuth: cmeyers@redhat.com>,
         'social': None,
-        #'strategy': <awx.sso.strategies.django_strategy.AWXDjangoStrategy object at 0x8523a10>,
+        # 'strategy': <awx.sso.strategies.django_strategy.AWXDjangoStrategy object at 0x8523a10>,
         'strategy': None,
         'new_association': False,
     }

@@ -11,8 +11,6 @@ from django.http import HttpResponse
 from django.views.generic import View
 from django.views.generic.base import RedirectView
 from django.utils.encoding import smart_str
-from awx.api.serializers import UserSerializer
-from rest_framework.renderers import JSONRenderer
 from django.conf import settings

 logger = logging.getLogger('awx.sso.views')
@@ -42,9 +40,6 @@ class CompleteView(BaseRedirectView):
         if self.request.user and self.request.user.is_authenticated:
             logger.info(smart_str(u"User {} logged in".format(self.request.user.username)))
             response.set_cookie('userLoggedIn', 'true')
-            current_user = UserSerializer(self.request.user)
-            current_user = smart_str(JSONRenderer().render(current_user.data))
-            current_user = urllib.parse.quote('%s' % current_user, '')
             response.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
         return response
awx/ui/package-lock.json  (generated; 206 changed lines)
@@ -7,15 +7,15 @@
       "name": "ui",
       "dependencies": {
         "@lingui/react": "3.14.0",
-        "@patternfly/patternfly": "4.210.2",
-        "@patternfly/react-core": "^4.221.3",
-        "@patternfly/react-icons": "4.75.1",
-        "@patternfly/react-table": "4.100.8",
+        "@patternfly/patternfly": "4.217.1",
+        "@patternfly/react-core": "^4.250.1",
+        "@patternfly/react-icons": "4.92.10",
+        "@patternfly/react-table": "4.108.0",
         "ace-builds": "^1.10.1",
         "ansi-to-html": "0.7.2",
         "axios": "0.27.2",
         "codemirror": "^6.0.1",
-        "d3": "7.4.4",
+        "d3": "7.6.1",
         "dagre": "^0.8.4",
         "dompurify": "2.4.0",
         "formik": "2.2.9",
@@ -31,7 +31,7 @@
         "react-router-dom": "^5.3.3",
         "react-virtualized": "^9.21.1",
         "rrule": "2.7.1",
-        "styled-components": "5.3.5"
+        "styled-components": "5.3.6"
       },
       "devDependencies": {
         "@babel/core": "^7.16.10",
@@ -3747,35 +3747,26 @@
       "dev": true
     },
     "node_modules/@patternfly/patternfly": {
-      "version": "4.210.2",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.210.2.tgz",
-      "integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
+      "version": "4.217.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
+      "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
     },
     "node_modules/@patternfly/react-core": {
-      "version": "4.231.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.231.8.tgz",
-      "integrity": "sha512-2ClqlYCvSADppMfVfkUGIA/8XlO6jX8batoClXLxZDwqGoOfr61XyUgQ6SSlE4w60czoNeX4Nf6cfQKUH4RIKw==",
+      "version": "4.250.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
+      "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
       "dependencies": {
-        "@patternfly/react-icons": "^4.82.8",
-        "@patternfly/react-styles": "^4.81.8",
-        "@patternfly/react-tokens": "^4.83.8",
+        "@patternfly/react-icons": "^4.92.6",
+        "@patternfly/react-styles": "^4.91.6",
+        "@patternfly/react-tokens": "^4.93.6",
         "focus-trap": "6.9.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
         "tslib": "^2.0.0"
       },
       "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
-      }
-    },
-    "node_modules/@patternfly/react-core/node_modules/@patternfly/react-icons": {
-      "version": "4.82.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.82.8.tgz",
-      "integrity": "sha512-cKixprTiMLZRe/+kmdZ5suvYb9ly9p1f/HjlcNiWBfsiA8ZDEPmxJnVdend/YsafelC8YC9QGcQf97ay5PNhcw==",
-      "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
+        "react": "^16.8 || ^17 || ^18",
+        "react-dom": "^16.8 || ^17 || ^18"
       }
     },
     "node_modules/@patternfly/react-core/node_modules/tslib": {
@@ -3784,28 +3775,28 @@
       "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
     },
     "node_modules/@patternfly/react-icons": {
-      "version": "4.75.1",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.75.1.tgz",
-      "integrity": "sha512-1ly8SVi/kcc0zkiViOjUd8D5BEr7GeqWGmDPuDSBtD60l1dYf3hZc44IWFVkRM/oHZML/musdrJkLfh4MDqX9w==",
+      "version": "4.92.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
+      "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
       "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
+        "react": "^16.8 || ^17 || ^18",
+        "react-dom": "^16.8 || ^17 || ^18"
       }
     },
     "node_modules/@patternfly/react-styles": {
-      "version": "4.81.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.81.8.tgz",
-      "integrity": "sha512-Q5FiureSSCMIuz+KLMcEm1317TzbXcwmg2q5iNDRKyf/K+5CT6tJp0Wbtk3FlfRvzli4u/7YfXipahia5TL+tA=="
+      "version": "4.91.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
+      "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
     },
     "node_modules/@patternfly/react-table": {
-      "version": "4.100.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.100.8.tgz",
-      "integrity": "sha512-80XZCZzoYN9gsoufNdXUB/dk33SuWF9lUnOJs7ilezD6noTSD7ARqO1h532eaEPIbPBp4uIVkEUdfGSHd0HJtg==",
+      "version": "4.108.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.108.0.tgz",
+      "integrity": "sha512-EUvd3rlkE1UXobAm7L6JHgNE3TW8IYTaVwwH/px4Mkn5mBayDO6f+w6QM3OeoDQVZcXK6IYFe7QQaYd/vWIJCQ==",
       "dependencies": {
-        "@patternfly/react-core": "^4.231.8",
-        "@patternfly/react-icons": "^4.82.8",
-        "@patternfly/react-styles": "^4.81.8",
-        "@patternfly/react-tokens": "^4.83.8",
+        "@patternfly/react-core": "^4.239.0",
+        "@patternfly/react-icons": "^4.90.0",
+        "@patternfly/react-styles": "^4.89.0",
+        "@patternfly/react-tokens": "^4.91.0",
         "lodash": "^4.17.19",
         "tslib": "^2.0.0"
       },
@@ -3814,24 +3805,15 @@
         "react-dom": "^16.8.0 || ^17.0.0"
       }
     },
-    "node_modules/@patternfly/react-table/node_modules/@patternfly/react-icons": {
-      "version": "4.82.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.82.8.tgz",
-      "integrity": "sha512-cKixprTiMLZRe/+kmdZ5suvYb9ly9p1f/HjlcNiWBfsiA8ZDEPmxJnVdend/YsafelC8YC9QGcQf97ay5PNhcw==",
-      "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
-      }
-    },
     "node_modules/@patternfly/react-table/node_modules/tslib": {
       "version": "2.4.0",
       "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
       "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
     },
     "node_modules/@patternfly/react-tokens": {
-      "version": "4.83.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.83.8.tgz",
-      "integrity": "sha512-Z/MHXNY8PQOuBFGUar2yzPVbz3BNJuhB+Dnk5RJcc/iIn3S+VlSru7g6v5jqoV/+a5wLqZtLGEBp8uhCZ7Xkig=="
+      "version": "4.93.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
+      "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
    },
    "node_modules/@pmmmwh/react-refresh-webpack-plugin": {
      "version": "0.5.4",
@@ -7482,16 +7464,16 @@
       "integrity": "sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw=="
     },
     "node_modules/d3": {
-      "version": "7.4.4",
-      "resolved": "https://registry.npmjs.org/d3/-/d3-7.4.4.tgz",
-      "integrity": "sha512-97FE+MYdAlV3R9P74+R3Uar7wUKkIFu89UWMjEaDhiJ9VxKvqaMxauImy8PC2DdBkdM2BxJOIoLxPrcZUyrKoQ==",
+      "version": "7.6.1",
+      "resolved": "https://registry.npmjs.org/d3/-/d3-7.6.1.tgz",
+      "integrity": "sha512-txMTdIHFbcpLx+8a0IFhZsbp+PfBBPt8yfbmukZTQFroKuFqIwqswF0qE5JXWefylaAVpSXFoKm3yP+jpNLFLw==",
       "dependencies": {
         "d3-array": "3",
         "d3-axis": "3",
         "d3-brush": "3",
         "d3-chord": "3",
         "d3-color": "3",
-        "d3-contour": "3",
+        "d3-contour": "4",
         "d3-delaunay": "6",
         "d3-dispatch": "3",
         "d3-drag": "3",
@@ -7522,9 +7504,9 @@
       }
     },
     "node_modules/d3-array": {
-      "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.1.1.tgz",
-      "integrity": "sha512-33qQ+ZoZlli19IFiQx4QEpf2CBEayMRzhlisJHSCsSUbDXv6ZishqS1x7uFVClKG4Wr7rZVHvaAttoLow6GqdQ==",
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz",
+      "integrity": "sha512-3yXFQo0oG3QCxbF06rMPFyGRMGJNS7NvsV1+2joOjbBE+9xvWQ8+GcMJAjRCzw06zQ3/arXeJgbPYcjUCuC+3g==",
       "dependencies": {
         "internmap": "1 - 2"
       },
@@ -7575,11 +7557,11 @@
       }
     },
     "node_modules/d3-contour": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-3.0.1.tgz",
-      "integrity": "sha512-0Oc4D0KyhwhM7ZL0RMnfGycLN7hxHB8CMmwZ3+H26PWAG0ozNuYG5hXSDNgmP1SgJkQMrlG6cP20HoaSbvcJTQ==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.0.tgz",
+      "integrity": "sha512-7aQo0QHUTu/Ko3cP9YK9yUTxtoDEiDGwnBHyLxG5M4vqlBkO/uixMRele3nfsfj6UXOcuReVpVXzAboGraYIJw==",
       "dependencies": {
-        "d3-array": "2 - 3"
+        "d3-array": "^3.2.0"
       },
       "engines": {
         "node": ">=12"
@@ -20234,9 +20216,9 @@
       "integrity": "sha512-OPhtyEjyyN9x3nhPsu76f52yUGXiZcgvsrFVtvTkyGRQJ0XK+GPc6ov1z+lRpbeabka+MYEQxOYRnt5nF30aMw=="
     },
     "node_modules/styled-components": {
-      "version": "5.3.5",
-      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.5.tgz",
-      "integrity": "sha512-ndETJ9RKaaL6q41B69WudeqLzOpY1A/ET/glXkNZ2T7dPjPqpPCXXQjDFYZWwNnE5co0wX+gTCqx9mfxTmSIPg==",
+      "version": "5.3.6",
+      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz",
+      "integrity": "sha512-hGTZquGAaTqhGWldX7hhfzjnIYBZ0IXQXkCYdvF1Sq3DsUaLx6+NTHC5Jj1ooM2F68sBiVz3lvhfwQs/S3l6qg==",
       "hasInstallScript": true,
       "dependencies": {
         "@babel/helper-module-imports": "^7.0.0",
@@ -25107,30 +25089,24 @@
       "dev": true
     },
     "@patternfly/patternfly": {
-      "version": "4.210.2",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.210.2.tgz",
-      "integrity": "sha512-aZiW24Bxi6uVmk5RyNTp+6q6ThtlJZotNRJfWVeGuwu1UlbBuV4DFa1bpjA6jfTZpfEpX2YL5+R+4ZVSCFAVdw=="
+      "version": "4.217.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
+      "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
     },
     "@patternfly/react-core": {
-      "version": "4.231.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.231.8.tgz",
-      "integrity": "sha512-2ClqlYCvSADppMfVfkUGIA/8XlO6jX8batoClXLxZDwqGoOfr61XyUgQ6SSlE4w60czoNeX4Nf6cfQKUH4RIKw==",
+      "version": "4.250.1",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.250.1.tgz",
+      "integrity": "sha512-vAOZPQdZzYXl/vkHnHMIt1eC3nrPDdsuuErPatkNPwmSvilXuXmWP5wxoJ36FbSNRRURkprFwx52zMmWS3iHJA==",
       "requires": {
-        "@patternfly/react-icons": "^4.82.8",
-        "@patternfly/react-styles": "^4.81.8",
-        "@patternfly/react-tokens": "^4.83.8",
+        "@patternfly/react-icons": "^4.92.6",
+        "@patternfly/react-styles": "^4.91.6",
+        "@patternfly/react-tokens": "^4.93.6",
         "focus-trap": "6.9.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
         "tslib": "^2.0.0"
       },
       "dependencies": {
-        "@patternfly/react-icons": {
-          "version": "4.82.8",
-          "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.82.8.tgz",
-          "integrity": "sha512-cKixprTiMLZRe/+kmdZ5suvYb9ly9p1f/HjlcNiWBfsiA8ZDEPmxJnVdend/YsafelC8YC9QGcQf97ay5PNhcw==",
-          "requires": {}
-        },
         "tslib": {
           "version": "2.3.1",
           "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
@@ -25139,35 +25115,29 @@
         }
       },
     "@patternfly/react-icons": {
-      "version": "4.75.1",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.75.1.tgz",
-      "integrity": "sha512-1ly8SVi/kcc0zkiViOjUd8D5BEr7GeqWGmDPuDSBtD60l1dYf3hZc44IWFVkRM/oHZML/musdrJkLfh4MDqX9w==",
+      "version": "4.92.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
+      "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
       "requires": {}
     },
     "@patternfly/react-styles": {
-      "version": "4.81.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.81.8.tgz",
-      "integrity": "sha512-Q5FiureSSCMIuz+KLMcEm1317TzbXcwmg2q5iNDRKyf/K+5CT6tJp0Wbtk3FlfRvzli4u/7YfXipahia5TL+tA=="
+      "version": "4.91.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.91.10.tgz",
+      "integrity": "sha512-fAG4Vjp63ohiR92F4e/Gkw5q1DSSckHKqdnEF75KUpSSBORzYP0EKMpupSd6ItpQFJw3iWs3MJi3/KIAAfU1Jw=="
     },
     "@patternfly/react-table": {
-      "version": "4.100.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.100.8.tgz",
-      "integrity": "sha512-80XZCZzoYN9gsoufNdXUB/dk33SuWF9lUnOJs7ilezD6noTSD7ARqO1h532eaEPIbPBp4uIVkEUdfGSHd0HJtg==",
+      "version": "4.108.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.108.0.tgz",
+      "integrity": "sha512-EUvd3rlkE1UXobAm7L6JHgNE3TW8IYTaVwwH/px4Mkn5mBayDO6f+w6QM3OeoDQVZcXK6IYFe7QQaYd/vWIJCQ==",
       "requires": {
-        "@patternfly/react-core": "^4.231.8",
-        "@patternfly/react-icons": "^4.82.8",
-        "@patternfly/react-styles": "^4.81.8",
-        "@patternfly/react-tokens": "^4.83.8",
+        "@patternfly/react-core": "^4.239.0",
+        "@patternfly/react-icons": "^4.90.0",
+        "@patternfly/react-styles": "^4.89.0",
+        "@patternfly/react-tokens": "^4.91.0",
         "lodash": "^4.17.19",
         "tslib": "^2.0.0"
       },
       "dependencies": {
-        "@patternfly/react-icons": {
-          "version": "4.82.8",
-          "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.82.8.tgz",
-          "integrity": "sha512-cKixprTiMLZRe/+kmdZ5suvYb9ly9p1f/HjlcNiWBfsiA8ZDEPmxJnVdend/YsafelC8YC9QGcQf97ay5PNhcw==",
-          "requires": {}
-        },
         "tslib": {
           "version": "2.4.0",
           "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
@@ -25176,9 +25146,9 @@
         }
       },
     "@patternfly/react-tokens": {
-      "version": "4.83.8",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.83.8.tgz",
-      "integrity": "sha512-Z/MHXNY8PQOuBFGUar2yzPVbz3BNJuhB+Dnk5RJcc/iIn3S+VlSru7g6v5jqoV/+a5wLqZtLGEBp8uhCZ7Xkig=="
+      "version": "4.93.10",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.93.10.tgz",
+      "integrity": "sha512-F+j1irDc9M6zvY6qNtDryhbpnHz3R8ymHRdGelNHQzPTIK88YSWEnT1c9iUI+uM/iuZol7sJmO5STtg2aPIDRQ=="
     },
     "@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -28082,16 +28052,16 @@
       "integrity": "sha512-jXKhWqXPmlUeoQnF/EhTtTl4C9SnrxSH/jZUih3jmO6lBKr99rP3/+FmrMj4EFpOXzMtXHAZkd3x0E6h6Fgflw=="
     },
     "d3": {
-      "version": "7.4.4",
-      "resolved": "https://registry.npmjs.org/d3/-/d3-7.4.4.tgz",
-      "integrity": "sha512-97FE+MYdAlV3R9P74+R3Uar7wUKkIFu89UWMjEaDhiJ9VxKvqaMxauImy8PC2DdBkdM2BxJOIoLxPrcZUyrKoQ==",
+      "version": "7.6.1",
+      "resolved": "https://registry.npmjs.org/d3/-/d3-7.6.1.tgz",
+      "integrity": "sha512-txMTdIHFbcpLx+8a0IFhZsbp+PfBBPt8yfbmukZTQFroKuFqIwqswF0qE5JXWefylaAVpSXFoKm3yP+jpNLFLw==",
       "requires": {
         "d3-array": "3",
         "d3-axis": "3",
         "d3-brush": "3",
         "d3-chord": "3",
         "d3-color": "3",
-        "d3-contour": "3",
+        "d3-contour": "4",
         "d3-delaunay": "6",
         "d3-dispatch": "3",
         "d3-drag": "3",
@@ -28119,9 +28089,9 @@
       }
     },
     "d3-array": {
-      "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.1.1.tgz",
-      "integrity": "sha512-33qQ+ZoZlli19IFiQx4QEpf2CBEayMRzhlisJHSCsSUbDXv6ZishqS1x7uFVClKG4Wr7rZVHvaAttoLow6GqdQ==",
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz",
+      "integrity": "sha512-3yXFQo0oG3QCxbF06rMPFyGRMGJNS7NvsV1+2joOjbBE+9xvWQ8+GcMJAjRCzw06zQ3/arXeJgbPYcjUCuC+3g==",
       "requires": {
         "internmap": "1 - 2"
       }
@@ -28157,11 +28127,11 @@
       "integrity": "sha512-6/SlHkDOBLyQSJ1j1Ghs82OIUXpKWlR0hCsw0XrLSQhuUPuCSmLQ1QPH98vpnQxMUQM2/gfAkUEWsupVpd9JGw=="
     },
     "d3-contour": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-3.0.1.tgz",
-      "integrity": "sha512-0Oc4D0KyhwhM7ZL0RMnfGycLN7hxHB8CMmwZ3+H26PWAG0ozNuYG5hXSDNgmP1SgJkQMrlG6cP20HoaSbvcJTQ==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.0.tgz",
+      "integrity": "sha512-7aQo0QHUTu/Ko3cP9YK9yUTxtoDEiDGwnBHyLxG5M4vqlBkO/uixMRele3nfsfj6UXOcuReVpVXzAboGraYIJw==",
       "requires": {
-        "d3-array": "2 - 3"
+        "d3-array": "^3.2.0"
       }
     },
     "d3-delaunay": {
@@ -37705,9 +37675,9 @@
       "integrity": "sha512-OPhtyEjyyN9x3nhPsu76f52yUGXiZcgvsrFVtvTkyGRQJ0XK+GPc6ov1z+lRpbeabka+MYEQxOYRnt5nF30aMw=="
     },
     "styled-components": {
-      "version": "5.3.5",
-      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.5.tgz",
-      "integrity": "sha512-ndETJ9RKaaL6q41B69WudeqLzOpY1A/ET/glXkNZ2T7dPjPqpPCXXQjDFYZWwNnE5co0wX+gTCqx9mfxTmSIPg==",
+      "version": "5.3.6",
+      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz",
+      "integrity": "sha512-hGTZquGAaTqhGWldX7hhfzjnIYBZ0IXQXkCYdvF1Sq3DsUaLx6+NTHC5Jj1ooM2F68sBiVz3lvhfwQs/S3l6qg==",
       "requires": {
         "@babel/helper-module-imports": "^7.0.0",
         "@babel/traverse": "^7.4.5",
@@ -7,15 +7,15 @@
   },
   "dependencies": {
     "@lingui/react": "3.14.0",
-    "@patternfly/patternfly": "4.210.2",
-    "@patternfly/react-core": "^4.221.3",
-    "@patternfly/react-icons": "4.75.1",
-    "@patternfly/react-table": "4.100.8",
+    "@patternfly/patternfly": "4.217.1",
+    "@patternfly/react-core": "^4.250.1",
+    "@patternfly/react-icons": "4.92.10",
+    "@patternfly/react-table": "4.108.0",
     "ace-builds": "^1.10.1",
     "ansi-to-html": "0.7.2",
     "axios": "0.27.2",
     "codemirror": "^6.0.1",
-    "d3": "7.4.4",
+    "d3": "7.6.1",
     "dagre": "^0.8.4",
     "dompurify": "2.4.0",
     "formik": "2.2.9",
@@ -31,7 +31,7 @@
     "react-router-dom": "^5.3.3",
     "react-virtualized": "^9.21.1",
     "rrule": "2.7.1",
-    "styled-components": "5.3.5"
+    "styled-components": "5.3.6"
   },
   "devDependencies": {
     "@babel/core": "^7.16.10",
@@ -3,7 +3,12 @@ import { Plural, t } from '@lingui/macro';
 import { Button, DropdownItem, Tooltip } from '@patternfly/react-core';
 import { useKebabifiedMenu } from 'contexts/Kebabified';

-function HealthCheckButton({ isDisabled, onClick, selectedItems }) {
+function HealthCheckButton({
+  isDisabled,
+  onClick,
+  selectedItems,
+  healthCheckPending,
+}) {
   const { isKebabified } = useKebabifiedMenu();

   const selectedItemsCount = selectedItems.length;
@@ -28,8 +33,10 @@ function HealthCheckButton({ isDisabled, onClick, selectedItems }) {
         component="button"
         onClick={onClick}
         ouiaId="health-check"
+        isLoading={healthCheckPending}
+        spinnerAriaLabel={t`Running health check`}
       >
-        {t`Run health check`}
+        {healthCheckPending ? t`Running health check` : t`Run health check`}
       </DropdownItem>
     </Tooltip>
   );
@@ -42,7 +49,11 @@ function HealthCheckButton({ isDisabled, onClick, selectedItems }) {
           variant="secondary"
           ouiaId="health-check"
           onClick={onClick}
-        >{t`Run health check`}</Button>
+          isLoading={healthCheckPending}
+          spinnerAriaLabel={t`Running health check`}
+        >
+          {healthCheckPending ? t`Running health check` : t`Run health check`}
+        </Button>
       </div>
     </Tooltip>
   );
@@ -107,6 +107,17 @@ function LaunchButton({ resource, children }) {
       jobPromise = JobsAPI.relaunch(resource.id, params || {});
     } else if (resource.type === 'workflow_job') {
       jobPromise = WorkflowJobsAPI.relaunch(resource.id, params || {});
+    } else if (resource.type === 'ad_hoc_command') {
+      if (params?.credential_passwords) {
+        // The api expects the passwords at the top level of the object instead of nested
+        // in credential_passwords like the other relaunch endpoints
+        Object.keys(params.credential_passwords).forEach((key) => {
+          params[key] = params.credential_passwords[key];
+        });
+
+        delete params.credential_passwords;
+      }
+      jobPromise = AdHocCommandsAPI.relaunch(resource.id, params || {});
     }

     const { data: job } = await jobPromise;
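The same flattening the comment describes, sketched in Python (not part of the diff; the endpoint path matches AWX's ad hoc command relaunch route, but the values are made up):

params = {'credential_passwords': {'ssh_password': 'hunter2'}}

# The ad hoc relaunch endpoint wants each password as a top-level field,
# so hoist the nested entries and drop the wrapper key.
params.update(params.pop('credential_passwords'))

print(params)  # {'ssh_password': 'hunter2'}
# e.g. requests.post(f'{awx_url}/api/v2/ad_hoc_commands/{cmd_id}/relaunch/', json=params, ...)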
@@ -129,7 +129,7 @@ function PromptModalForm({
       }}
       title={t`Launch | ${resource.name}`}
       description={
-        resource.description.length > 512 ? (
+        resource.description?.length > 512 ? (
           <ExpandableSection
             toggleText={
               showDescription ? t`Hide description` : t`Show description`
@@ -67,14 +67,14 @@ function ScheduleForm({
       if (schedule.id) {
         if (
           resource.type === 'job_template' &&
-          launchConfig.ask_credential_on_launch
+          launchConfig?.ask_credential_on_launch
         ) {
           const {
             data: { results },
           } = await SchedulesAPI.readCredentials(schedule.id);
           creds = results;
         }
-        if (launchConfig.ask_labels_on_launch) {
+        if (launchConfig?.ask_labels_on_launch) {
           const {
             data: { results },
           } = await SchedulesAPI.readAllLabels(schedule.id);
@@ -82,7 +82,7 @@ function ScheduleForm({
         }
         if (
           resource.type === 'job_template' &&
-          launchConfig.ask_instance_groups_on_launch
+          launchConfig?.ask_instance_groups_on_launch
         ) {
           const {
             data: { results },
@@ -91,7 +91,7 @@ function ScheduleForm({
         }
       } else {
         if (resource.type === 'job_template') {
-          if (launchConfig.ask_labels_on_launch) {
+          if (launchConfig?.ask_labels_on_launch) {
            const {
              data: { results },
            } = await JobTemplatesAPI.readAllLabels(resource.id);
@@ -100,7 +100,7 @@ function ScheduleForm({
         }
         if (
           resource.type === 'workflow_job_template' &&
-          launchConfig.ask_labels_on_launch
+          launchConfig?.ask_labels_on_launch
         ) {
           const {
             data: { results },
@@ -123,14 +123,7 @@ function ScheduleForm({
         zoneLinks: data.links,
         credentials: creds,
       };
-    }, [
-      schedule,
-      resource.id,
-      resource.type,
-      launchConfig.ask_labels_on_launch,
-      launchConfig.ask_instance_groups_on_launch,
-      launchConfig.ask_credential_on_launch,
-    ]),
+    }, [schedule, resource.id, resource.type, launchConfig]),
     {
       zonesOptions: [],
       zoneLinks: {},
@@ -146,7 +139,7 @@ function ScheduleForm({
   const missingRequiredInventory = useCallback(() => {
     let missingInventory = false;
     if (
-      launchConfig.inventory_needed_to_start &&
+      launchConfig?.inventory_needed_to_start &&
       !schedule?.summary_fields?.inventory?.id
     ) {
       missingInventory = true;
@@ -423,8 +416,14 @@ function ScheduleForm({

   if (options.end === 'onDate') {
     if (
-      DateTime.fromISO(values.startDate) >=
-      DateTime.fromISO(options.endDate)
+      DateTime.fromFormat(
+        `${values.startDate} ${values.startTime}`,
+        'yyyy-LL-dd h:mm a'
+      ).toMillis() >=
+      DateTime.fromFormat(
+        `${options.endDate} ${options.endTime}`,
+        'yyyy-LL-dd h:mm a'
+      ).toMillis()
     ) {
       freqErrors.endDate = t`Please select an end date/time that comes after the start date/time.`;
     }
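Why the validation above changed (aside, not part of the diff): comparing bare dates rejects a schedule that starts and ends on the same day even when the end time is later; comparing the combined date and time does not. A quick Python analogue with invented values:

from datetime import date, datetime

start_d, end_d = date(2022, 10, 4), date(2022, 10, 4)
start_dt = datetime(2022, 10, 4, 9, 0)
end_dt = datetime(2022, 10, 4, 10, 0)

print(start_d >= end_d)    # True  -> the old date-only check wrongly flags an error
print(start_dt >= end_dt)  # False -> the combined date+time comparison passes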
@@ -900,6 +900,36 @@ describe('<ScheduleForm />', () => {
     );
   });

+  test('should create schedule with the same start and end date provided that the end date is at a later time', async () => {
+    const today = DateTime.now().toFormat('yyyy-LL-dd');
+    const laterTime = DateTime.now().plus({ hours: 1 }).toFormat('h:mm a');
+    await act(async () => {
+      wrapper.find('DatePicker[aria-label="End date"]').prop('onChange')(
+        today,
+        new Date(today)
+      );
+    });
+    wrapper.update();
+    expect(
+      wrapper
+        .find('FormGroup[data-cy="schedule-End date/time"]')
+        .prop('helperTextInvalid')
+    ).toBe(
+      'Please select an end date/time that comes after the start date/time.'
+    );
+    await act(async () => {
+      wrapper.find('TimePicker[aria-label="End time"]').prop('onChange')(
+        laterTime
+      );
+    });
+    wrapper.update();
+    expect(
+      wrapper
+        .find('FormGroup[data-cy="schedule-End date/time"]')
+        .prop('helperTextInvalid')
+    ).toBe(undefined);
+  });
+
   test('error shown when on day number is not between 1 and 31', async () => {
     await act(async () => {
       wrapper.find('FrequencySelect#schedule-frequency').invoke('onChange')([
New translation catalogs (diffs suppressed because the files are too large):
  awx/ui/src/locales/translations/es/django.po    (6241 lines)
  awx/ui/src/locales/translations/es/messages.po  (10833 lines)
  awx/ui/src/locales/translations/fr/django.po    (6243 lines)
  awx/ui/src/locales/translations/fr/messages.po  (10713 lines)
  awx/ui/src/locales/translations/ja/django.po    (6240 lines)
  awx/ui/src/locales/translations/ja/messages.po  (10739 lines)
  awx/ui/src/locales/translations/ko/django.po    (6240 lines)
  awx/ui/src/locales/translations/ko/messages.po  (10700 lines)
  awx/ui/src/locales/translations/nl/django.po    (6241 lines)
  awx/ui/src/locales/translations/nl/messages.po  (10725 lines)
  awx/ui/src/locales/translations/zh/django.po    (6242 lines)
  awx/ui/src/locales/translations/zh/messages.po  (10698 lines)
@@ -282,7 +282,7 @@ const mockInputSources = {
       summary_fields: {
         source_credential: {
           id: 20,
-          name: 'CyberArk Conjur Secret Lookup',
+          name: 'CyberArk Conjur Secrets Manager Lookup',
           description: '',
           kind: 'conjur',
           cloud: false,
@@ -301,7 +301,7 @@ const mockInputSources = {
       summary_fields: {
         source_credential: {
           id: 20,
-          name: 'CyberArk Conjur Secret Lookup',
+          name: 'CyberArk Conjur Secrets Manager Lookup',
           description: '',
           kind: 'conjur',
           cloud: false,
@@ -36,14 +36,14 @@ const mockCredentialTypeDetail = {
   url: '/api/v2/credential_types/20/',
   related: {
     named_url:
-      '/api/v2/credential_types/CyberArk Conjur Secret Lookup+external/',
+      '/api/v2/credential_types/CyberArk Conjur Secrets Manager Lookup+external/',
     credentials: '/api/v2/credential_types/20/credentials/',
     activity_stream: '/api/v2/credential_types/20/activity_stream/',
   },
   summary_fields: { user_capabilities: { edit: false, delete: false } },
   created: '2020-05-18T21:53:35.398260Z',
   modified: '2020-05-18T21:54:05.451444Z',
-  name: 'CyberArk Conjur Secret Lookup',
+  name: 'CyberArk Conjur Secrets Manager Lookup',
   description: '',
   kind: 'external',
   namespace: 'conjur',
@@ -465,7 +465,7 @@
     },
     "created": "2020-05-18T21:53:35.370730Z",
     "modified": "2020-05-18T21:54:05.436400Z",
-    "name": "CyberArk AIM Central Credential Provider Lookup",
+    "name": "CyberArk Central Credential Provider Lookup",
     "description": "",
     "kind": "external",
     "namespace": "aim",
@@ -546,7 +546,7 @@
     },
     "created": "2020-05-18T21:53:35.398260Z",
     "modified": "2020-05-18T21:54:05.451444Z",
-    "name": "CyberArk Conjur Secret Lookup",
+    "name": "CyberArk Conjur Secrets Manager Lookup",
     "description": "",
     "kind": "external",
     "namespace": "conjur",
@@ -3,7 +3,7 @@
   "type": "credential",
   "url": "/api/v2/credentials/1/",
   "related": {
-    "named_url": "/api/v2/credentials/CyberArk Conjur Secret Lookup++CyberArk Conjur Secret Lookup+external++/",
+    "named_url": "/api/v2/credentials/CyberArk Conjur Secrets Manager Lookup+external++/",
    "created_by": "/api/v2/users/1/",
    "modified_by": "/api/v2/users/1/",
    "activity_stream": "/api/v2/credentials/1/activity_stream/",
@@ -19,7 +19,7 @@
   "summary_fields": {
     "credential_type": {
       "id": 20,
-      "name": "CyberArk Conjur Secret Lookup",
+      "name": "CyberArk Conjur Secrets Manager Lookup",
       "description": ""
     },
     "created_by": {
@@ -69,7 +69,7 @@
   },
   "created": "2020-05-19T12:51:36.956029Z",
   "modified": "2020-05-19T12:51:36.956086Z",
-  "name": "CyberArk Conjur Secret Lookup",
+  "name": "CyberArk Conjur Secrets Manager Lookup",
   "description": "",
   "organization": null,
   "credential_type": 20,
@@ -12,7 +12,7 @@ import {
   Tooltip,
   Slider,
 } from '@patternfly/react-core';
-import { CaretLeftIcon } from '@patternfly/react-icons';
+import { CaretLeftIcon, OutlinedClockIcon } from '@patternfly/react-icons';
 import styled from 'styled-components';

 import { useConfig } from 'contexts/Config';
@@ -23,6 +23,7 @@ import ErrorDetail from 'components/ErrorDetail';
 import DisassociateButton from 'components/DisassociateButton';
 import InstanceToggle from 'components/InstanceToggle';
 import { CardBody, CardActionsRow } from 'components/Card';
+import getDocsBaseUrl from 'util/getDocsBaseUrl';
 import { formatDateString } from 'util/dates';
 import RoutedTabs from 'components/RoutedTabs';
 import ContentError from 'components/ContentError';
@@ -62,7 +63,7 @@ function computeForks(memCapacity, cpuCapacity, selectedCapacityAdjustment) {
 }

 function InstanceDetails({ setBreadcrumb, instanceGroup }) {
-  const { me = {} } = useConfig();
+  const config = useConfig();
   const { id, instanceId } = useParams();
   const history = useHistory();

@@ -115,15 +116,9 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
   useEffect(() => {
     fetchDetails();
   }, [fetchDetails]);
-  const {
-    error: healthCheckError,
-    isLoading: isRunningHealthCheck,
-    request: fetchHealthCheck,
-  } = useRequest(
+  const { error: healthCheckError, request: fetchHealthCheck } = useRequest(
     useCallback(async () => {
       const { status } = await InstancesAPI.healthCheck(instanceId);
-      const { data } = await InstancesAPI.readHealthCheckDetail(instanceId);
-      setHealthCheck(data);
       if (status === 200) {
         setShowHealthCheckAlert(true);
       }
@@ -161,6 +156,18 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
     debounceUpdateInstance({ capacity_adjustment: roundedValue });
   };

+  const formatHealthCheckTimeStamp = (last) => (
+    <>
+      {formatDateString(last)}
+      {instance.health_check_pending ? (
+        <>
+          {' '}
+          <OutlinedClockIcon />
+        </>
+      ) : null}
+    </>
+  );
+
   const { error, dismissError } = useDismissableError(
     disassociateError || updateInstanceError || healthCheckError
   );
@@ -189,6 +196,8 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
     return <ContentLoading />;
   }

+  const isExecutionNode = instance.node_type === 'execution';
+
   return (
     <>
       <RoutedTabs tabsArray={tabsArray} />
@@ -218,7 +227,22 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
           <Detail label={t`Total Jobs`} value={instance.jobs_total} />
           <Detail
             label={t`Last Health Check`}
-            value={formatDateString(healthCheck?.last_health_check)}
+            helpText={
+              <>
+                {t`Health checks are asynchronous tasks. See the`}{' '}
+                <a
+                  href={`${getDocsBaseUrl(
+                    config
+                  )}/html/administration/instances.html#health-check`}
+                  target="_blank"
+                  rel="noopener noreferrer"
+                >
+                  {t`documentation`}
+                </a>{' '}
+                {t`for more info.`}
+              </>
+            }
+            value={formatHealthCheckTimeStamp(instance.last_health_check)}
           />
           <Detail label={t`Node Type`} value={instance.node_type} />
           <Detail
@@ -237,7 +261,7 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
             step={0.1}
             value={instance.capacity_adjustment}
             onChange={handleChangeValue}
-            isDisabled={!me?.is_superuser || !instance.enabled}
+            isDisabled={!config?.me?.is_superuser || !instance.enabled}
             data-cy="slider"
           />
         </SliderForks>
@@ -274,19 +298,25 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
         )}
       </DetailList>
       <CardActionsRow>
-        <Tooltip content={t`Run a health check on the instance`}>
-          <Button
-            isDisabled={!me.is_superuser || isRunningHealthCheck}
-            variant="primary"
-            ouiaId="health-check-button"
-            onClick={fetchHealthCheck}
-            isLoading={isRunningHealthCheck}
-            spinnerAriaLabel={t`Running health check`}
-          >
-            {t`Run health check`}
-          </Button>
-        </Tooltip>
-        {me.is_superuser && instance.node_type !== 'control' && (
+        {isExecutionNode && (
+          <Tooltip content={t`Run a health check on the instance`}>
+            <Button
+              isDisabled={
+                !config?.me?.is_superuser || instance.health_check_pending
+              }
+              variant="primary"
+              ouiaId="health-check-button"
+              onClick={fetchHealthCheck}
+              isLoading={instance.health_check_pending}
+              spinnerAriaLabel={t`Running health check`}
+            >
+              {instance.health_check_pending
+                ? t`Running health check`
+                : t`Run health check`}
+            </Button>
+          </Tooltip>
+        )}
+        {config?.me?.is_superuser && instance.node_type !== 'control' && (
          <DisassociateButton
            verifyCannotDisassociate={instanceGroup.name === 'controlplane'}
            key="disassociate"
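For context (not part of the diff): with health checks now asynchronous and surfaced through health_check_pending, an API client would trigger a check and poll the instance until it clears. A hedged Python sketch, assuming AWX's instance health-check endpoint and a bearer token; the URL, token, and instance id are placeholders:

import time
import requests

AWX_URL = 'https://awx.example.com'   # hypothetical host
HEADERS = {'Authorization': 'Bearer TOKEN'}  # hypothetical token
instance_id = 1

# Kick off the asynchronous health check...
requests.post(f'{AWX_URL}/api/v2/instances/{instance_id}/health_check/', headers=HEADERS)

# ...then poll the instance record until it stops reporting the check as pending.
while True:
    detail = requests.get(f'{AWX_URL}/api/v2/instances/{instance_id}/', headers=HEADERS).json()
    if not detail.get('health_check_pending'):
        break
    time.sleep(5)

print(detail.get('last_health_check'), detail.get('errors'))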
Some files were not shown because too many files have changed in this diff.