Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 12:04:44 -03:30)
Compare commits
89 Commits
| SHA1 |
|---|
| cb5a8aa194 |
| 8b49f910c7 |
| a4f808df34 |
| 82abd18927 |
| 5e9d514e5e |
| 4a34ee1f1e |
| 3624fe2cac |
| 0f96d9aca2 |
| 989b80e771 |
| cc64be937d |
| 94183d602c |
| ac4ef141bf |
| 86f6b54eec |
| bd8108b27c |
| aed96fb365 |
| fe2da52eec |
| 974465e46a |
| c736986023 |
| 6b381aa79e |
| 755e55ec70 |
| 255c2e4172 |
| aa8437fd77 |
| 66f14bfe8f |
| 721a2002dc |
| af39b2cd3f |
| cdd48dd7cd |
| d3de884baf |
| fa8968b95b |
| 897a19e127 |
| 4bae961b5f |
| 900c4fd8f1 |
| 4d5bbd7065 |
| fb8fadc7f9 |
| ba99ddfd82 |
| 9676a95e05 |
| 36d6ed9cac |
| 875f1a82e4 |
| db71b63829 |
| cd4d83acb7 |
| 7e25a694f3 |
| baca43ee62 |
| 3b69552260 |
| f9bd780d62 |
| a665d96026 |
| e47d30974c |
| 2b8ed66f3e |
| dfe8b3b16b |
| c738d0788e |
| 0c2d589109 |
| a47bbb5479 |
| 4b4b73c02a |
| d1d08fe499 |
| 7e7a9f541c |
| 98d67e2133 |
| 7a36041bf2 |
| b96564da55 |
| 044d6bf97c |
| d357c1162f |
| 3c22fc9242 |
| 8c86092bf5 |
| 081206965c |
| 036f85cd80 |
| 6976ac9273 |
| 9009a21a32 |
| aafd4df288 |
| 844666df4c |
| 0ae720244c |
| b70fa88b78 |
| fbaeb90268 |
| 2a549c0b23 |
| 2c320cb16d |
| 434595481c |
| 444d05447e |
| fbe202bdbf |
| d89cad0d9e |
| bdfd6f47ff |
| ae7be2eea1 |
| 8957a84738 |
| bac124004f |
| f46c7452d1 |
| 098861d906 |
| daf39dc77e |
| 00d8291d40 |
| 88d1a484fa |
| 5afdfb1135 |
| 2f15cc5170 |
| f15d40286c |
| f58c44590d |
| ef99770383 |
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 1 change)

```diff
@@ -44,6 +44,7 @@ body:
       label: Select the relevant components
       options:
         - label: UI
+        - label: UI (tech preview)
         - label: API
         - label: Docs
        - label: Collection
```
.github/dependabot.yml (vendored, 19 changes)

```diff
@@ -1,19 +0,0 @@
-version: 2
-updates:
-  - package-ecosystem: "npm"
-    directory: "/awx/ui"
-    schedule:
-      interval: "monthly"
-    open-pull-requests-limit: 5
-    allow:
-      - dependency-type: "production"
-    reviewers:
-      - "AlexSCorey"
-      - "keithjgrant"
-      - "kialam"
-      - "mabashian"
-      - "marshmalien"
-    labels:
-      - "component:ui"
-      - "dependencies"
-    target-branch: "devel"
```
.github/issue_labeler.yml (vendored, 2 changes)

```diff
@@ -6,6 +6,8 @@ needs_triage:
   - "Feature Summary"
 "component:ui":
   - "\\[X\\] UI"
+"component:ui_next":
+  - "\\[X\\] UI \\(tech preview\\)"
 "component:api":
   - "\\[X\\] API"
 "component:docs":
```
.github/workflows/label_issue.yml (vendored, 8 changes)

```diff
@@ -6,9 +6,9 @@ on:
     - opened
     - reopened

-permissions:
-  contents: read # to fetch code
-  issues: write # to label issues
+permissions:
+  contents: write # to fetch code
+  issues: write # to label issues

 jobs:
   triage:
@@ -17,7 +17,7 @@ jobs:

     steps:
       - name: Label Issue
-        uses: github/issue-labeler@v2.4.1
+        uses: github/issue-labeler@v3.1
         with:
           repo-token: "${{ secrets.GITHUB_TOKEN }}"
           not-before: 2021-12-07T07:00:00Z
```
.github/workflows/label_pr.yml (vendored, 2 changes)

```diff
@@ -8,7 +8,7 @@ on:
     - synchronize

 permissions:
-  contents: read # to determine modified files (actions/labeler)
+  contents: write # to determine modified files (actions/labeler)
   pull-requests: write # to add labels to PRs (actions/labeler)

 jobs:
```
```diff
@@ -31,7 +31,7 @@ If your issue isn't considered high priority, then please be patient as it may t

 `state:needs_info` The issue needs more information. This could be more debug output, more specifics out the system such as version information. Any detail that is currently preventing this issue from moving forward. This should be considered a blocked state.

-`state:needs_review` The issue/pull request needs to be reviewed by other maintainers and contributors. This is usually used when there is a question out to another maintainer or when a person is less familar with an area of the code base the issue is for.
+`state:needs_review` The issue/pull request needs to be reviewed by other maintainers and contributors. This is usually used when there is a question out to another maintainer or when a person is less familiar with an area of the code base the issue is for.

 `state:needs_revision` More commonly used on pull requests, this state represents that there are changes that are being waited on.
```
Makefile (19 changes)

```diff
@@ -8,7 +8,7 @@ NPM_BIN ?= npm
 CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
-VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)
+VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py)

 # ansible-test requires semver compatable version, so we allow overrides to hack it
 COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
@@ -27,6 +27,8 @@ COLLECTION_TEMPLATE_VERSION ?= false
 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
 MAIN_NODE_TYPE ?= hybrid
+# If set to true docker-compose will also start a pgbouncer instance and use it
+PGBOUNCER ?= false
 # If set to true docker-compose will also start a keycloak instance
 KEYCLOAK ?= false
 # If set to true docker-compose will also start an ldap instance
@@ -37,6 +39,8 @@ SPLUNK ?= false
 PROMETHEUS ?= false
 # If set to true docker-compose will also start a grafana instance
 GRAFANA ?= false
+# If set to true docker-compose will also start a hashicorp vault instance
+VAULT ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
@@ -52,7 +56,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel

 # Python packages to install only from source (not from binary wheels)
 # Comma separated list
-SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
+SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
 VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4
@@ -267,11 +271,11 @@ run-wsrelay:
 	$(PYTHON) manage.py run_wsrelay

 ## Start the heartbeat process in background in development environment.
-run-heartbeet:
+run-ws-heartbeat:
 	@if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	$(PYTHON) manage.py run_heartbeet
+	$(PYTHON) manage.py run_ws_heartbeat

 reports:
 	mkdir -p $@
@@ -520,15 +524,20 @@ docker-compose-sources: .git/hooks/pre-commit
 	    -e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \
 	    -e execution_node_count=$(EXECUTION_NODE_COUNT) \
 	    -e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \
+	    -e enable_pgbouncer=$(PGBOUNCER) \
 	    -e enable_keycloak=$(KEYCLOAK) \
 	    -e enable_ldap=$(LDAP) \
 	    -e enable_splunk=$(SPLUNK) \
 	    -e enable_prometheus=$(PROMETHEUS) \
 	    -e enable_grafana=$(GRAFANA) \
+	    -e enable_vault=$(VAULT) \
+	    -e enable_tacacs=$(TACACS) \
 	    $(EXTRA_SOURCES_ANSIBLE_OPTS)

 docker-compose: awx/projects docker-compose-sources
 	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
+	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
+	    -e enable_vault=$(VAULT);
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans

 docker-compose-credential-plugins: awx/projects docker-compose-sources
@@ -580,7 +589,7 @@ docker-clean:
 	-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)

 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
-	docker volume rm -f tools_awx_db tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
+	docker volume rm -f tools_awx_db tools_vault_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)

 docker-refresh: docker-clean docker-compose
```
awx/api/serializers.py (path inferred)

```diff
@@ -1629,8 +1629,8 @@ class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):
         fields = ('*', 'host_status_counts', 'playbook_counts')

     def get_playbook_counts(self, obj):
-        task_count = obj.project_update_events.filter(event='playbook_on_task_start').count()
-        play_count = obj.project_update_events.filter(event='playbook_on_play_start').count()
+        task_count = obj.get_event_queryset().filter(event='playbook_on_task_start').count()
+        play_count = obj.get_event_queryset().filter(event='playbook_on_play_start').count()

         data = {'play_count': play_count, 'task_count': task_count}
@@ -4686,12 +4686,11 @@ class BulkJobNodeSerializer(WorkflowJobNodeSerializer):
     # many-to-many fields
     credentials = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)
     labels = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)
-    # TODO: Use instance group role added via PR 13584(once merged), for now everything related to instance group is commented
-    # instance_groups = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)
+    instance_groups = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)

     class Meta:
         model = WorkflowJobNode
-        fields = ('*', 'credentials', 'labels')  # m2m fields are not canonical for WJ nodes, TODO: add instance_groups once supported
+        fields = ('*', 'credentials', 'labels', 'instance_groups')  # m2m fields are not canonical for WJ nodes

     def validate(self, attrs):
         return super(LaunchConfigurationBaseSerializer, self).validate(attrs)
@@ -4751,21 +4750,21 @@ class BulkJobLaunchSerializer(serializers.Serializer):
         requested_use_execution_environments = {job['execution_environment'] for job in attrs['jobs'] if 'execution_environment' in job}
         requested_use_credentials = set()
         requested_use_labels = set()
-        # requested_use_instance_groups = set()
+        requested_use_instance_groups = set()
         for job in attrs['jobs']:
             for cred in job.get('credentials', []):
                 requested_use_credentials.add(cred)
             for label in job.get('labels', []):
                 requested_use_labels.add(label)
-            # for instance_group in job.get('instance_groups', []):
-            #     requested_use_instance_groups.add(instance_group)
+            for instance_group in job.get('instance_groups', []):
+                requested_use_instance_groups.add(instance_group)

         key_to_obj_map = {
             "unified_job_template": {obj.id: obj for obj in UnifiedJobTemplate.objects.filter(id__in=requested_ujts)},
             "inventory": {obj.id: obj for obj in Inventory.objects.filter(id__in=requested_use_inventories)},
             "credentials": {obj.id: obj for obj in Credential.objects.filter(id__in=requested_use_credentials)},
             "labels": {obj.id: obj for obj in Label.objects.filter(id__in=requested_use_labels)},
-            # "instance_groups": {obj.id: obj for obj in InstanceGroup.objects.filter(id__in=requested_use_instance_groups)},
+            "instance_groups": {obj.id: obj for obj in InstanceGroup.objects.filter(id__in=requested_use_instance_groups)},
             "execution_environment": {obj.id: obj for obj in ExecutionEnvironment.objects.filter(id__in=requested_use_execution_environments)},
         }
@@ -4792,7 +4791,7 @@ class BulkJobLaunchSerializer(serializers.Serializer):

         self.check_list_permission(Credential, requested_use_credentials, 'use_role')
         self.check_list_permission(Label, requested_use_labels)
-        # self.check_list_permission(InstanceGroup, requested_use_instance_groups)  # TODO: change to use_role for conflict
+        self.check_list_permission(InstanceGroup, requested_use_instance_groups)  # TODO: change to use_role for conflict
         self.check_list_permission(ExecutionEnvironment, requested_use_execution_environments)  # TODO: change if roles introduced

         jobs_object = self.get_objectified_jobs(attrs, key_to_obj_map)
@@ -4839,7 +4838,7 @@ class BulkJobLaunchSerializer(serializers.Serializer):
         node_m2m_object_types_to_through_model = {
             'credentials': WorkflowJobNode.credentials.through,
             'labels': WorkflowJobNode.labels.through,
-            # 'instance_groups': WorkflowJobNode.instance_groups.through,
+            'instance_groups': WorkflowJobNode.instance_groups.through,
         }
         node_deferred_attr_names = (
             'limit',
@@ -4892,9 +4891,9 @@ class BulkJobLaunchSerializer(serializers.Serializer):
             if field_name in node_m2m_objects[node_identifier] and field_name == 'labels':
                 for label in node_m2m_objects[node_identifier][field_name]:
                     through_model_objects.append(through_model(label=label, workflowjobnode=node_m2m_objects[node_identifier]['node']))
-            # if obj_type in node_m2m_objects[node_identifier] and obj_type == 'instance_groups':
-            #     for instance_group in node_m2m_objects[node_identifier][obj_type]:
-            #         through_model_objects.append(through_model(instancegroup=instance_group, workflowjobnode=node_m2m_objects[node_identifier]['node']))
+            if field_name in node_m2m_objects[node_identifier] and field_name == 'instance_groups':
+                for instance_group in node_m2m_objects[node_identifier][field_name]:
+                    through_model_objects.append(through_model(instancegroup=instance_group, workflowjobnode=node_m2m_objects[node_identifier]['node']))
             if through_model_objects:
                 through_model.objects.bulk_create(through_model_objects)
@@ -5436,7 +5435,7 @@ class InstanceSerializer(BaseSerializer):
         res = super(InstanceSerializer, self).get_related(obj)
         res['jobs'] = self.reverse('api:instance_unified_jobs_list', kwargs={'pk': obj.pk})
         res['instance_groups'] = self.reverse('api:instance_instance_groups_list', kwargs={'pk': obj.pk})
-        if settings.IS_K8S and obj.node_type in (Instance.Types.EXECUTION,):
+        if obj.node_type in [Instance.Types.EXECUTION, Instance.Types.HOP]:
             res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
         res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
         if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
```
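The bulk-launch hunks above un-comment instance_groups end to end: the per-job serializer field, the permission check, and the through-model bulk create. As a rough illustration (not from this changeset, and with placeholder IDs), a request body accepted by the bulk job launch serializer can now carry instance_groups per job:

```python
# Placeholder IDs throughout; the field names follow the serializer code above.
payload = {
    "jobs": [
        {"unified_job_template": 7, "credentials": [1], "labels": [2], "instance_groups": [3]},
        {"unified_job_template": 8, "instance_groups": [3]},
    ],
}
```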
awx/api/swagger.py (path inferred from the urls.py import below)

```diff
@@ -1,16 +1,10 @@
-import json
-import warnings
-
-from coreapi.document import Object, Link
-
 from rest_framework import exceptions
 from rest_framework.permissions import AllowAny
-from rest_framework.renderers import CoreJSONRenderer
 from rest_framework.response import Response
 from rest_framework.schemas import SchemaGenerator, AutoSchema as DRFAuthSchema
 from rest_framework.views import APIView

-from rest_framework_swagger import renderers
+from drf_yasg.views import get_schema_view
+from drf_yasg import openapi


 class SuperUserSchemaGenerator(SchemaGenerator):
@@ -55,43 +49,15 @@ class AutoSchema(DRFAuthSchema):
         return description


-class SwaggerSchemaView(APIView):
-    _ignore_model_permissions = True
-    exclude_from_schema = True
-    permission_classes = [AllowAny]
-    renderer_classes = [CoreJSONRenderer, renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer]
-
-    def get(self, request):
-        generator = SuperUserSchemaGenerator(title='Ansible Automation Platform controller API', patterns=None, urlconf=None)
-        schema = generator.get_schema(request=request)
-        # python core-api doesn't support the deprecation yet, so track it
-        # ourselves and return it in a response header
-        _deprecated = []
-
-        # By default, DRF OpenAPI serialization places all endpoints in
-        # a single node based on their root path (/api). Instead, we want to
-        # group them by topic/tag so that they're categorized in the rendered
-        # output
-        document = schema._data.pop('api')
-        for path, node in document.items():
-            if isinstance(node, Object):
-                for action in node.values():
-                    topic = getattr(action, 'topic', None)
-                    if topic:
-                        schema._data.setdefault(topic, Object())
-                        schema._data[topic]._data[path] = node
-
-                    if isinstance(action, Object):
-                        for link in action.links.values():
-                            if link.deprecated:
-                                _deprecated.append(link.url)
-            elif isinstance(node, Link):
-                topic = getattr(node, 'topic', None)
-                if topic:
-                    schema._data.setdefault(topic, Object())
-                    schema._data[topic]._data[path] = node
-
-        if not schema:
-            raise exceptions.ValidationError('The schema generator did not return a schema Document')
-
-        return Response(schema, headers={'X-Deprecated-Paths': json.dumps(_deprecated)})
+schema_view = get_schema_view(
+    openapi.Info(
+        title="Snippets API",
+        default_version='v1',
+        description="Test description",
+        terms_of_service="https://www.google.com/policies/terms/",
+        contact=openapi.Contact(email="contact@snippets.local"),
+        license=openapi.License(name="BSD License"),
+    ),
+    public=True,
+    permission_classes=[AllowAny],
+)
```
```diff
@@ -9,10 +9,10 @@ receptor_work_commands:
     params: worker
     allowruntimeparams: true
     verifysignature: true
-custom_worksign_public_keyfile: receptor/work-public-key.pem
+custom_worksign_public_keyfile: receptor/work_public_key.pem
 custom_tls_certfile: receptor/tls/receptor.crt
 custom_tls_keyfile: receptor/tls/receptor.key
-custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
+custom_ca_certfile: receptor/tls/ca/mesh-CA.crt
 receptor_protocol: 'tcp'
 receptor_listener: true
 receptor_port: {{ instance.listener_port }}
```
awx/api/urls/urls.py (path inferred)

```diff
@@ -167,10 +167,13 @@ urlpatterns = [
 ]
 if MODE == 'development':
     # Only include these if we are in the development environment
-    from awx.api.swagger import SwaggerSchemaView
-
-    urlpatterns += [re_path(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]
+    from awx.api.swagger import schema_view

     from awx.api.urls.debug import urls as debug_urls

     urlpatterns += [re_path(r'^debug/', include(debug_urls))]
+    urlpatterns += [
+        re_path(r'^swagger(?P<format>\.json|\.yaml)/$', schema_view.without_ui(cache_timeout=0), name='schema-json'),
+        re_path(r'^swagger/$', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
+        re_path(r'^redoc/$', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
+    ]
```
```diff
@@ -1,5 +1,7 @@
 from collections import OrderedDict

+from django.utils.translation import gettext_lazy as _
+
 from rest_framework.permissions import IsAuthenticated
 from rest_framework.renderers import JSONRenderer
 from rest_framework.reverse import reverse
@@ -18,6 +20,9 @@ from awx.api import (


 class BulkView(APIView):
+    name = _('Bulk')
+    swagger_topic = 'Bulk'
+
     permission_classes = [IsAuthenticated]
     renderer_classes = [
         renderers.BrowsableAPIRenderer,
```
awx/api/views/instance_install_bundle.py (path inferred)

```diff
@@ -57,13 +57,11 @@ class InstanceInstallBundle(GenericAPIView):

         with io.BytesIO() as f:
             with tarfile.open(fileobj=f, mode='w:gz') as tar:
-                # copy /etc/receptor/tls/ca/receptor-ca.crt to receptor/tls/ca in the tar file
-                tar.add(
-                    os.path.realpath('/etc/receptor/tls/ca/receptor-ca.crt'), arcname=f"{instance_obj.hostname}_install_bundle/receptor/tls/ca/receptor-ca.crt"
-                )
+                # copy /etc/receptor/tls/ca/mesh-CA.crt to receptor/tls/ca in the tar file
+                tar.add(os.path.realpath('/etc/receptor/tls/ca/mesh-CA.crt'), arcname=f"{instance_obj.hostname}_install_bundle/receptor/tls/ca/mesh-CA.crt")

-                # copy /etc/receptor/signing/work-public-key.pem to receptor/work-public-key.pem
-                tar.add('/etc/receptor/signing/work-public-key.pem', arcname=f"{instance_obj.hostname}_install_bundle/receptor/work-public-key.pem")
+                # copy /etc/receptor/work_public_key.pem to receptor/work_public_key.pem
+                tar.add('/etc/receptor/work_public_key.pem', arcname=f"{instance_obj.hostname}_install_bundle/receptor/work_public_key.pem")

                 # generate and write the receptor key to receptor/tls/receptor.key in the tar file
                 key, cert = generate_receptor_tls(instance_obj)
@@ -161,14 +159,14 @@ def generate_receptor_tls(instance_obj):
         .sign(key, hashes.SHA256())
     )

-    # sign csr with the receptor ca key from /etc/receptor/ca/receptor-ca.key
-    with open('/etc/receptor/tls/ca/receptor-ca.key', 'rb') as f:
+    # sign csr with the receptor ca key from /etc/receptor/ca/mesh-CA.key
+    with open('/etc/receptor/tls/ca/mesh-CA.key', 'rb') as f:
         ca_key = serialization.load_pem_private_key(
             f.read(),
             password=None,
         )

-    with open('/etc/receptor/tls/ca/receptor-ca.crt', 'rb') as f:
+    with open('/etc/receptor/tls/ca/mesh-CA.crt', 'rb') as f:
         ca_cert = x509.load_pem_x509_certificate(f.read())

     cert = (
```
awx/api/views/root.py (path inferred)

```diff
@@ -20,6 +20,7 @@ from rest_framework import status

 import requests

+from awx import MODE
 from awx.api.generics import APIView
 from awx.conf.registry import settings_registry
 from awx.main.analytics import all_collectors
@@ -54,6 +55,8 @@ class ApiRootView(APIView):
         data['custom_logo'] = settings.CUSTOM_LOGO
         data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
         data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE
+        if MODE == 'development':
+            data['swagger'] = drf_reverse('api:schema-swagger-ui')
         return Response(data)
```
awx/conf/apps.py (path inferred)

```diff
@@ -14,7 +14,7 @@ class ConfConfig(AppConfig):
     def ready(self):
         self.module.autodiscover()

-        if not set(sys.argv) & {'migrate', 'check_migrations'}:
+        if not set(sys.argv) & {'migrate', 'check_migrations', 'showmigrations'}:
             from .settings import SettingsWrapper

             SettingsWrapper.initialize()
```
awx/conf/migrations/0010_change_to_JSONField.py (new file, 17 lines)

```diff
@@ -0,0 +1,17 @@
+# Generated by Django 4.2 on 2023-06-09 19:51
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('conf', '0009_rename_proot_settings'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='setting',
+            name='value',
+            field=models.JSONField(null=True),
+        ),
+    ]
```
awx/conf/models.py (path inferred)

```diff
@@ -8,7 +8,6 @@ import json
 from django.db import models

 # AWX
-from awx.main.fields import JSONBlob
 from awx.main.models.base import CreatedModifiedModel, prevent_search
 from awx.main.utils import encrypt_field
 from awx.conf import settings_registry
@@ -18,7 +17,7 @@ __all__ = ['Setting']

 class Setting(CreatedModifiedModel):
     key = models.CharField(max_length=255)
-    value = JSONBlob(null=True)
+    value = models.JSONField(null=True)
     user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))

     def __str__(self):
```
awx/main/analytics/collectors.py (path inferred)

```diff
@@ -399,7 +399,10 @@ def _copy_table(table, query, path):
     file_path = os.path.join(path, table + '_table.csv')
     file = FileSplitter(filespec=file_path)
     with connection.cursor() as cursor:
-        cursor.copy_expert(query, file)
+        with cursor.copy(query) as copy:
+            while data := copy.read():
+                byte_data = bytes(data)
+                file.write(byte_data.decode())
     return file.file_list()
```
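For context, the psycopg 3 Copy API used above replaces psycopg2's file-oriented copy_expert(). A minimal standalone sketch of the same read loop (the connection string and query are illustrative, not from this changeset):

```python
import io

import psycopg


def copy_query_to_text(conninfo: str, copy_sql: str) -> str:
    """Stream a COPY ... TO STDOUT statement into an in-memory buffer."""
    buf = io.StringIO()
    with psycopg.connect(conninfo) as conn:
        with conn.cursor() as cursor:
            with cursor.copy(copy_sql) as copy:
                # read() yields raw bytes; an empty buffer signals end of stream
                while data := copy.read():
                    buf.write(bytes(data).decode())
    return buf.getvalue()
```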
awx/main/analytics/subsystem_metrics.py (path inferred)

```diff
@@ -209,6 +209,11 @@ class Metrics:
             SetFloatM('workflow_manager_recorded_timestamp', 'Unix timestamp when metrics were last recorded'),
             SetFloatM('workflow_manager_spawn_workflow_graph_jobs_seconds', 'Time spent spawning workflow tasks'),
             SetFloatM('workflow_manager_get_tasks_seconds', 'Time spent loading workflow tasks from db'),
+            # dispatcher subsystem metrics
+            SetIntM('dispatcher_pool_scale_up_events', 'Number of times local dispatcher scaled up a worker since startup'),
+            SetIntM('dispatcher_pool_active_task_count', 'Number of active tasks in the worker pool when last task was submitted'),
+            SetIntM('dispatcher_pool_max_worker_count', 'Highest number of workers in worker pool in last collection interval, about 20s'),
+            SetFloatM('dispatcher_availability', 'Fraction of time (in last collection interval) dispatcher was able to receive messages'),
         ]
         # turn metric list into dictionary with the metric name as a key
         self.METRICS = {}
```
awx/main/conf.py (path inferred)

```diff
@@ -94,6 +94,20 @@ register(
     category_slug='system',
 )

+register(
+    'CSRF_TRUSTED_ORIGINS',
+    default=[],
+    field_class=fields.StringListField,
+    label=_('CSRF Trusted Origins List'),
+    help_text=_(
+        "If the service is behind a reverse proxy/load balancer, use this setting "
+        "to configure the schema://addresses from which the service should trust "
+        "Origin header values. "
+    ),
+    category=_('System'),
+    category_slug='system',
+)
+
 register(
     'LICENSE',
     field_class=fields.DictField,
@@ -684,11 +698,28 @@ register(
     field_class=fields.IntegerField,
     default=1,
     min_value=1,
-    label=_('Maximum disk persistance for external log aggregation (in GB)'),
+    label=_('Maximum disk persistence for external log aggregation (in GB)'),
     help_text=_(
         'Amount of data to store (in gigabytes) during an outage of '
         'the external log aggregator (defaults to 1). '
-        'Equivalent to the rsyslogd queue.maxdiskspace setting.'
+        'Equivalent to the rsyslogd queue.maxdiskspace setting for main_queue. '
+        'Notably, this is used for the rsyslogd main queue (for input messages).'
     ),
     category=_('Logging'),
     category_slug='logging',
 )
+register(
+    'LOG_AGGREGATOR_ACTION_MAX_DISK_USAGE_GB',
+    field_class=fields.IntegerField,
+    default=1,
+    min_value=1,
+    label=_('Maximum disk persistence for rsyslogd action queuing (in GB)'),
+    help_text=_(
+        'Amount of data to store (in gigabytes) if an rsyslog action takes time '
+        'to process an incoming message (defaults to 1). '
+        'Equivalent to the rsyslogd queue.maxdiskspace setting on the action (e.g. omhttp). '
+        'Like LOG_AGGREGATOR_MAX_DISK_USAGE_GB, it stores files in the directory specified '
+        'by LOG_AGGREGATOR_MAX_DISK_USAGE_PATH.'
+    ),
+    category=_('Logging'),
+    category_slug='logging',
@@ -831,6 +862,46 @@ register(
     category_slug='system',
 )

+register(
+    'AWX_CLEANUP_PATHS',
+    field_class=fields.BooleanField,
+    label=_('Enable or Disable tmp dir cleanup'),
+    default=True,
+    help_text=_('Enable or Disable TMP Dir cleanup'),
+    category=('Debug'),
+    category_slug='debug',
+)
+
+register(
+    'AWX_REQUEST_PROFILE',
+    field_class=fields.BooleanField,
+    label=_('Debug Web Requests'),
+    default=False,
+    help_text=_('Debug web request python timing'),
+    category=('Debug'),
+    category_slug='debug',
+)
+
+register(
+    'DEFAULT_CONTAINER_RUN_OPTIONS',
+    field_class=fields.StringListField,
+    label=_('Container Run Options'),
+    default=['--network', 'slirp4netns:enable_ipv6=true'],
+    help_text=_("List of options to pass to podman run example: ['--network', 'slirp4netns:enable_ipv6=true', '--log-level', 'debug']"),
+    category=('Jobs'),
+    category_slug='jobs',
+)
+
+register(
+    'RECEPTOR_RELEASE_WORK',
+    field_class=fields.BooleanField,
+    label=_('Release Receptor Work'),
+    default=True,
+    help_text=_('Release receptor work'),
+    category=('Debug'),
+    category_slug='debug',
+)
+

 def logging_validate(serializer, attrs):
     if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
```
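Once registered, these settings resolve through the normal Django settings interface (awx.conf's SettingsWrapper patches them in at startup, per the apps.py diff earlier). A minimal read, assuming a configured AWX environment:

```python
from django.conf import settings

# Both resolve through the awx.conf registry entries added above.
print(settings.CSRF_TRUSTED_ORIGINS)                     # default: []
print(settings.LOG_AGGREGATOR_ACTION_MAX_DISK_USAGE_GB)  # default: 1
```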
awx/main/credential_plugins/aws_secretsmanager.py (new file, 65 lines)

```diff
@@ -0,0 +1,65 @@
+import boto3
+from botocore.exceptions import ClientError
+
+from .plugin import CredentialPlugin
+from django.utils.translation import gettext_lazy as _
+
+
+secrets_manager_inputs = {
+    'fields': [
+        {
+            'id': 'aws_access_key',
+            'label': _('AWS Access Key'),
+            'type': 'string',
+        },
+        {
+            'id': 'aws_secret_key',
+            'label': _('AWS Secret Key'),
+            'type': 'string',
+            'secret': True,
+        },
+    ],
+    'metadata': [
+        {
+            'id': 'region_name',
+            'label': _('AWS Secrets Manager Region'),
+            'type': 'string',
+            'help_text': _('Region which the secrets manager is located'),
+        },
+        {
+            'id': 'secret_name',
+            'label': _('AWS Secret Name'),
+            'type': 'string',
+        },
+    ],
+    'required': ['aws_access_key', 'aws_secret_key', 'region_name', 'secret_name'],
+}
+
+
+def aws_secretsmanager_backend(**kwargs):
+    secret_name = kwargs['secret_name']
+    region_name = kwargs['region_name']
+    aws_secret_access_key = kwargs['aws_secret_key']
+    aws_access_key_id = kwargs['aws_access_key']
+
+    session = boto3.session.Session()
+    client = session.client(
+        service_name='secretsmanager', region_name=region_name, aws_secret_access_key=aws_secret_access_key, aws_access_key_id=aws_access_key_id
+    )
+
+    try:
+        get_secret_value_response = client.get_secret_value(SecretId=secret_name)
+    except ClientError as e:
+        raise e
+    # Secrets Manager decrypts the secret value using the associated KMS CMK
+    # Depending on whether the secret was a string or binary, only one of these fields will be populated
+    if 'SecretString' in get_secret_value_response:
+        secret = get_secret_value_response['SecretString']
+
+    else:
+        secret = get_secret_value_response['SecretBinary']
+
+    return secret
+
+
+aws_secretmanager_plugin = CredentialPlugin('AWS Secrets Manager lookup', inputs=secrets_manager_inputs, backend=aws_secretsmanager_backend)
```
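A hypothetical smoke test for the new backend; the keys, region, and secret name below are placeholders, and a real call needs valid AWS credentials:

```python
from awx.main.credential_plugins.aws_secretsmanager import aws_secretsmanager_backend

secret = aws_secretsmanager_backend(
    aws_access_key='AKIA...',   # placeholder
    aws_secret_key='wJalr...',  # placeholder
    region_name='us-east-1',
    secret_name='my-app/db-password',
)
print(secret)  # SecretString, or SecretBinary for binary secrets
```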
awx/main/credential_plugins/hashivault.py (path inferred)

```diff
@@ -265,6 +265,8 @@ def kv_backend(**kwargs):

     if secret_key:
         try:
+            if (secret_key != 'data') and (secret_key not in json['data']) and ('data' in json['data']):
+                return json['data']['data'][secret_key]
             return json['data'][secret_key]
         except KeyError:
             raise RuntimeError('{} is not present at {}'.format(secret_key, secret_path))
```
awx/main/credential_plugins/tss.py (path inferred)

```diff
@@ -50,7 +50,7 @@ tss_inputs = {


 def tss_backend(**kwargs):
-    if 'domain' in kwargs:
+    if kwargs.get("domain"):
         authorizer = DomainPasswordGrantAuthorizer(kwargs['server_url'], kwargs['username'], kwargs['password'], kwargs['domain'])
     else:
         authorizer = PasswordGrantAuthorizer(kwargs['server_url'], kwargs['username'], kwargs['password'])
```
awx/main/dispatch/__init__.py (path inferred)

```diff
@@ -1,5 +1,5 @@
 import os
-import psycopg2
+import psycopg
 import select

 from contextlib import contextmanager
@@ -64,9 +64,9 @@ class PubSub(object):
             if yield_timeouts:
                 yield None
             else:
-                self.conn.poll()
-                while self.conn.notifies:
-                    yield self.conn.notifies.pop(0)
+                notification_generator = self.conn.notifies()
+                for notification in notification_generator:
+                    yield notification

     def close(self):
         self.conn.close()
@@ -89,9 +89,8 @@ def pg_bus_conn(new_connection=False):
         conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
         # Modify the application name to distinguish from other connections the process might use
         conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
-        conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf['OPTIONS'])
-        # Django connection.cursor().connection doesn't have autocommit=True on by default
-        conn.set_session(autocommit=True)
+        connection_data = f"dbname={conf['NAME']} host={conf['HOST']} user={conf['USER']} password={conf['PASSWORD']} port={conf['PORT']}"
+        conn = psycopg.connect(connection_data, autocommit=True, **conf['OPTIONS'])
     else:
         if pg_connection.connection is None:
             pg_connection.connect()
```
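The new notifies() loop leans on psycopg 3's generator-based LISTEN/NOTIFY API rather than psycopg2's poll() plus notifies list. A standalone sketch (connection string and channel are illustrative):

```python
import psycopg

# autocommit=True is required so LISTEN takes effect outside a transaction
with psycopg.connect("dbname=awx host=localhost", autocommit=True) as conn:
    conn.execute("LISTEN my_channel")
    for notification in conn.notifies():  # blocks, yielding Notify objects
        print(notification.channel, notification.payload)
        break  # demo: handle a single message and stop
```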
awx/main/dispatch/pool.py (path inferred)

```diff
@@ -339,6 +339,17 @@ class AutoscalePool(WorkerPool):
         # but if the task takes longer than the time defined here, we will force it to stop here
         self.task_manager_timeout = settings.TASK_MANAGER_TIMEOUT + settings.TASK_MANAGER_TIMEOUT_GRACE_PERIOD

+        # initialize some things for subsystem metrics periodic gathering
+        # the AutoscalePool class does not save these to redis directly, but reports via produce_subsystem_metrics
+        self.scale_up_ct = 0
+        self.worker_count_max = 0
+
+    def produce_subsystem_metrics(self, metrics_object):
+        metrics_object.set('dispatcher_pool_scale_up_events', self.scale_up_ct)
+        metrics_object.set('dispatcher_pool_active_task_count', sum(len(w.managed_tasks) for w in self.workers))
+        metrics_object.set('dispatcher_pool_max_worker_count', self.worker_count_max)
+        self.worker_count_max = len(self.workers)
+
     @property
     def should_grow(self):
         if len(self.workers) < self.min_workers:
@@ -443,7 +454,12 @@ class AutoscalePool(WorkerPool):
             idx = random.choice(range(len(self.workers)))
             return idx, self.workers[idx]
         else:
-            return super(AutoscalePool, self).up()
+            self.scale_up_ct += 1
+            ret = super(AutoscalePool, self).up()
+            new_worker_ct = len(self.workers)
+            if new_worker_ct > self.worker_count_max:
+                self.worker_count_max = new_worker_ct
+            return ret

     def write(self, preferred_queue, body):
         if 'guid' in body:
```
awx/main/dispatch/worker/base.py (path inferred)

```diff
@@ -7,7 +7,7 @@ import signal
 import sys
 import redis
 import json
-import psycopg2
+import psycopg
 import time
 from uuid import UUID
 from queue import Empty as QueueEmpty
@@ -19,6 +19,7 @@ from awx.main.dispatch.pool import WorkerPool
 from awx.main.dispatch import pg_bus_conn
 from awx.main.utils.common import log_excess_runtime
 from awx.main.utils.db import set_connection_name
+import awx.main.analytics.subsystem_metrics as s_metrics

 if 'run_callback_receiver' in sys.argv:
     logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -142,9 +143,10 @@ class AWXConsumerRedis(AWXConsumerBase):
     def run(self, *args, **kwargs):
         super(AWXConsumerRedis, self).run(*args, **kwargs)
         self.worker.on_start()
         logger.info(f'Callback receiver started with pid={os.getpid()}')
+        db.connection.close()  # logs use database, so close connection

         while True:
             logger.debug(f'{os.getpid()} is alive')
             time.sleep(60)
@@ -154,17 +156,30 @@ class AWXConsumerPG(AWXConsumerBase):
         self.pg_max_wait = settings.DISPATCHER_DB_DOWNTOWN_TOLLERANCE
         # if no successful loops have ran since startup, then we should fail right away
         self.pg_is_down = True  # set so that we fail if we get database errors on startup
-        self.pg_down_time = time.time() - self.pg_max_wait  # allow no grace period
-        self.last_cleanup = time.time()
+        init_time = time.time()
+        self.pg_down_time = init_time - self.pg_max_wait  # allow no grace period
+        self.last_cleanup = init_time
+        self.subsystem_metrics = s_metrics.Metrics(auto_pipe_execute=False)
+        self.last_metrics_gather = init_time
+        self.listen_cumulative_time = 0.0

     def run_periodic_tasks(self):
         self.record_statistics()  # maintains time buffer in method

-        if time.time() - self.last_cleanup > 60:  # same as cluster_node_heartbeat
+        current_time = time.time()
+        if current_time - self.last_cleanup > 60:  # same as cluster_node_heartbeat
             # NOTE: if we run out of database connections, it is important to still run cleanup
             # so that we scale down workers and free up connections
             self.pool.cleanup()
-            self.last_cleanup = time.time()
+            self.last_cleanup = current_time
+
+        # record subsystem metrics for the dispatcher
+        if current_time - self.last_metrics_gather > 20:
+            self.pool.produce_subsystem_metrics(self.subsystem_metrics)
+            self.subsystem_metrics.set('dispatcher_availability', self.listen_cumulative_time / (current_time - self.last_metrics_gather))
+            self.subsystem_metrics.pipe_execute()
+            self.listen_cumulative_time = 0.0
+            self.last_metrics_gather = current_time

     def run(self, *args, **kwargs):
         super(AWXConsumerPG, self).run(*args, **kwargs)
@@ -180,17 +195,20 @@ class AWXConsumerPG(AWXConsumerBase):
                 if init is False:
                     self.worker.on_start()
                     init = True
+                self.listen_start = time.time()
                 for e in conn.events(yield_timeouts=True):
+                    self.listen_cumulative_time += time.time() - self.listen_start
                     if e is not None:
                         self.process_task(json.loads(e.payload))
                     self.run_periodic_tasks()
                     self.pg_is_down = False
+                    self.listen_start = time.time()
                     if self.should_stop:
                         return
-        except psycopg2.InterfaceError:
+        except psycopg.InterfaceError:
             logger.warning("Stale Postgres message bus connection, reconnecting")
             continue
-        except (db.DatabaseError, psycopg2.OperationalError):
+        except (db.DatabaseError, psycopg.OperationalError):
             # If we have attained stady state operation, tolerate short-term database hickups
             if not self.pg_is_down:
                 logger.exception(f"Error consuming new events from postgres, will retry for {self.pg_max_wait} s")
```
awx/main/dispatch/worker/callback.py (path inferred)

```diff
@@ -191,7 +191,9 @@ class CallbackBrokerWorker(BaseWorker):
                     e._retry_count = retry_count

                     # special sanitization logic for postgres treatment of NUL 0x00 char
-                    if (retry_count == 1) and isinstance(exc_indv, ValueError) and ("\x00" in e.stdout):
+                    # This used to check the class of the exception but on the postgres3 upgrade it could appear
+                    # as either DataError or ValueError, so now lets just try if its there.
+                    if (retry_count == 1) and ("\x00" in e.stdout):
                         e.stdout = e.stdout.replace("\x00", "")

                     if retry_count >= self.INDIVIDUAL_EVENT_RETRIES:
```
awx/main/fields.py (path inferred from the logger name below)

```diff
@@ -67,10 +67,60 @@ def __enum_validate__(validator, enums, instance, schema):
 Draft4Validator.VALIDATORS['enum'] = __enum_validate__


+import logging
+
+logger = logging.getLogger('awx.main.fields')
+
+
 class JSONBlob(JSONField):
+    # Cringe... a JSONField that is back ended with a TextField.
+    # This field was a legacy custom field type that tl;dr; was a TextField
+    # Over the years, with Django upgrades, we were able to go to a JSONField instead of the custom field
+    # However, we didn't want to have large customers with millions of events to update from text to json during an upgrade
+    # So we keep this field type as backended with TextField.
     def get_internal_type(self):
         return "TextField"

+    # postgres uses a Jsonb field as the default backend
+    # with psycopg2 it was using a psycopg2._json.Json class internally
+    # with psycopg3 it uses a psycopg.types.json.Jsonb class internally
+    # The binary class was not compatible with a text field, so we are going to override these next two methods and ensure we are using a string
+
+    def from_db_value(self, value, expression, connection):
+        if value is None:
+            return value
+
+        if isinstance(value, str):
+            try:
+                return json.loads(value)
+            except Exception as e:
+                logger.error(f"Failed to load JSONField {self.name}: {e}")
+
+        return value
+
+    def get_db_prep_value(self, value, connection, prepared=False):
+        if not prepared:
+            value = self.get_prep_value(value)
+        try:
+            # Null characters are not allowed in text fields and JSONBlobs are JSON data but saved as text
+            # So we want to make sure we strip out any null characters also note, these "should" be escaped by the dumps process:
+            # >>> my_obj = { 'test': '\x00' }
+            # >>> import json
+            # >>> json.dumps(my_obj)
+            # '{"test": "\\u0000"}'
+            # But just to be safe, lets remove them if they are there. \x00 and \u0000 are the same:
+            # >>> string = "\x00"
+            # >>> "\u0000" in string
+            # True
+            dumped_value = json.dumps(value)
+            if "\x00" in dumped_value:
+                dumped_value = dumped_value.replace("\x00", '')
+            return dumped_value
+        except Exception as e:
+            logger.error(f"Failed to dump JSONField {self.name}: {e} value: {value}")
+
+        return value
+

 # Based on AutoOneToOneField from django-annoying:
 # https://bitbucket.org/offline/django-annoying/src/a0de8b294db3/annoying/fields.py
```
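The doctest-style comments in get_db_prep_value() can be checked directly; json.dumps() escapes a NUL into the six-character sequence \u0000, so the replace() is purely defensive against raw NUL bytes, which PostgreSQL text columns reject:

```python
import json

dumped = json.dumps({'stdout': 'a\x00b'})
assert '\x00' not in dumped   # the raw NUL byte is gone after dumps()
assert '\\u0000' in dumped    # ...escaped into the JSON text instead
```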
```diff
@@ -17,10 +17,7 @@ from django.utils.timezone import now

 # AWX
 from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob, WorkflowJob, Notification
-
-
-def unified_job_class_to_event_table_name(job_class):
-    return f'main_{job_class().event_class.__name__.lower()}'
+from awx.main.utils import unified_job_class_to_event_table_name


 def partition_table_name(job_class, dt):
```
awx/main/management/commands/precreate_partitions.py (new file, 27 lines)

```diff
@@ -0,0 +1,27 @@
+from django.utils.timezone import now
+from django.core.management.base import BaseCommand, CommandParser
+from datetime import timedelta
+from awx.main.utils.common import create_partition, unified_job_class_to_event_table_name
+from awx.main.models import Job, SystemJob, ProjectUpdate, InventoryUpdate, AdHocCommand
+
+
+class Command(BaseCommand):
+    """Command used to precreate database partitions to avoid pg_dump locks"""
+
+    def add_arguments(self, parser: CommandParser) -> None:
+        parser.add_argument('--count', dest='count', action='store', help='The amount of hours of partitions to create', type=int, default=1)
+
+    def _create_partitioned_tables(self, count):
+        tables = list()
+        for model in (Job, SystemJob, ProjectUpdate, InventoryUpdate, AdHocCommand):
+            tables.append(unified_job_class_to_event_table_name(model))
+        start = now()
+        while count > 0:
+            for table in tables:
+                create_partition(table, start)
+                print(f'Created partitions for {table} {start}')
+            start = start + timedelta(hours=1)
+            count -= 1
+
+    def handle(self, **options):
+        self._create_partitioned_tables(count=options.get('count'))
```
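The command can be driven from Python as well as via awx-manage precreate_partitions --count N; a minimal invocation (the count value is arbitrary):

```python
from django.core.management import call_command

# Pre-create 24 one-hour partitions for each event table, starting from now()
call_command('precreate_partitions', count=24)
```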
awx/main/management/commands/provision_instance.py (path inferred)

```diff
@@ -35,7 +35,7 @@ class Command(BaseCommand):

             from awx.main.management.commands.register_queue import RegisterQueue

-            (changed, instance) = Instance.objects.register(ip_address=os.environ.get('MY_POD_IP'), node_type='control', uuid=settings.SYSTEM_UUID)
+            (changed, instance) = Instance.objects.register(ip_address=os.environ.get('MY_POD_IP'), node_type='control', node_uuid=settings.SYSTEM_UUID)
             RegisterQueue(settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME, 100, 0, [], is_container_group=False).register()
             RegisterQueue(
                 settings.DEFAULT_EXECUTION_QUEUE_NAME,
@@ -48,7 +48,7 @@ class Command(BaseCommand):
                 max_concurrent_jobs=settings.DEFAULT_EXECUTION_QUEUE_MAX_CONCURRENT_JOBS,
             ).register()
         else:
-            (changed, instance) = Instance.objects.register(hostname=hostname, node_type=node_type, uuid=uuid)
+            (changed, instance) = Instance.objects.register(hostname=hostname, node_type=node_type, node_uuid=uuid)
         if changed:
             print("Successfully registered instance {}".format(hostname))
         else:
```
awx/main/management/commands/run_cache_clear.py (path inferred)

```diff
@@ -2,6 +2,7 @@ import logging
 import json

 from django.core.management.base import BaseCommand
+
 from awx.main.dispatch import pg_bus_conn
 from awx.main.dispatch.worker.task import TaskWorker

@@ -18,7 +19,7 @@ class Command(BaseCommand):

     def handle(self, *arg, **options):
         try:
-            with pg_bus_conn(new_connection=True) as conn:
+            with pg_bus_conn() as conn:
                 conn.listen("tower_settings_change")
                 for e in conn.events(yield_timeouts=True):
                     if e is not None:
```
awx/main/management/commands/run_dispatcher.py (path inferred)

```diff
@@ -3,7 +3,6 @@
 import logging
 import yaml

 from django.conf import settings
-from django.core.cache import cache as django_cache
 from django.core.management.base import BaseCommand
 from django.db import connection as django_connection
@@ -17,10 +16,6 @@ from awx.main.dispatch import periodic
 logger = logging.getLogger('awx.main.dispatch')


-def construct_bcast_queue_name(common_name):
-    return common_name + '_' + settings.CLUSTER_HOST_ID
-
-
 class Command(BaseCommand):
     help = 'Launch the task dispatcher'
```
awx/main/management/commands/run_heartbeet.py (deleted, 74 lines; path inferred from the logger name)

```diff
@@ -1,74 +0,0 @@
-import json
-import logging
-import os
-import time
-import signal
-import sys
-
-from django.core.management.base import BaseCommand
-from django.conf import settings
-
-from awx.main.dispatch import pg_bus_conn
-
-logger = logging.getLogger('awx.main.commands.run_heartbeet')
-
-
-class Command(BaseCommand):
-    help = 'Launch the web server beacon (heartbeet)'
-
-    def print_banner(self):
-        heartbeet = r"""
-          **********     **********
-        *************   *************
-        *****************************
-         ***********HEART***********
-           *************************
-             *******************
-               ***************        _._
-                 ***********        /`._ `'.    __
-                   *******          \ .\|   \ _'` `)
-                     ***       (``_) \|  ).'`  /`- /
-                      *         `\ `;\_  `\\//`-'` /
-                                  \ `'.'.|  /  __/`
-                                   `'--v_|/`'`
-                                    __||-._
-                                  /'` `-`` `'\\
-                                 /    .'`     )
-                                 \  BEET  '   )
-                                  \.         /
-                                   '.       /'`
-                                     `)    |
-                                      //
-                                     '(.
-                                      `\`.
-                                        ``"""
-        print(heartbeet)
-
-    def construct_payload(self, action='online'):
-        payload = {
-            'hostname': settings.CLUSTER_HOST_ID,
-            'ip': os.environ.get('MY_POD_IP'),
-            'action': action,
-        }
-        return json.dumps(payload)
-
-    def notify_listener_and_exit(self, *args):
-        with pg_bus_conn(new_connection=False) as conn:
-            conn.notify('web_heartbeet', self.construct_payload(action='offline'))
-        sys.exit(0)
-
-    def do_hearbeat_loop(self):
-        with pg_bus_conn(new_connection=True) as conn:
-            while True:
-                logger.debug('Sending heartbeat')
-                conn.notify('web_heartbeet', self.construct_payload())
-                time.sleep(settings.BROADCAST_WEBSOCKET_BEACON_FROM_WEB_RATE_SECONDS)
-
-    def handle(self, *arg, **options):
-        self.print_banner()
-        signal.signal(signal.SIGTERM, self.notify_listener_and_exit)
-        signal.signal(signal.SIGINT, self.notify_listener_and_exit)
-
-        # Note: We don't really try any reconnect logic to pg_notify here,
-        # just let supervisor restart if we fail.
-        self.do_hearbeat_loop()
```
awx/main/management/commands/run_rsyslog_configurer.py (path inferred)

```diff
@@ -22,7 +22,7 @@ class Command(BaseCommand):

     def handle(self, *arg, **options):
         try:
-            with pg_bus_conn(new_connection=True) as conn:
+            with pg_bus_conn() as conn:
                 conn.listen("rsyslog_configurer")
                 # reconfigure rsyslog on start up
                 reconfigure_rsyslog()
```
awx/main/management/commands/run_ws_heartbeat.py (new file, 45 lines)

```diff
@@ -0,0 +1,45 @@
+import json
+import logging
+import os
+import time
+import signal
+import sys
+
+from django.core.management.base import BaseCommand
+from django.conf import settings
+
+from awx.main.dispatch import pg_bus_conn
+
+logger = logging.getLogger('awx.main.commands.run_ws_heartbeat')
+
+
+class Command(BaseCommand):
+    help = 'Launch the web server beacon (ws_heartbeat)'
+
+    def construct_payload(self, action='online'):
+        payload = {
+            'hostname': settings.CLUSTER_HOST_ID,
+            'ip': os.environ.get('MY_POD_IP'),
+            'action': action,
+        }
+        return json.dumps(payload)
+
+    def notify_listener_and_exit(self, *args):
+        with pg_bus_conn(new_connection=False) as conn:
+            conn.notify('web_ws_heartbeat', self.construct_payload(action='offline'))
+        sys.exit(0)
+
+    def do_heartbeat_loop(self):
+        while True:
+            with pg_bus_conn() as conn:
+                logger.debug('Sending heartbeat')
+                conn.notify('web_ws_heartbeat', self.construct_payload())
+            time.sleep(settings.BROADCAST_WEBSOCKET_BEACON_FROM_WEB_RATE_SECONDS)
+
+    def handle(self, *arg, **options):
+        signal.signal(signal.SIGTERM, self.notify_listener_and_exit)
+        signal.signal(signal.SIGINT, self.notify_listener_and_exit)
+
+        # Note: We don't really try any reconnect logic to pg_notify here,
+        # just let supervisor restart if we fail.
+        self.do_heartbeat_loop()
```
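On the consuming side, anything connected to the same pg_notify bus can subscribe to the channel this command publishes on. A plausible listener built from the helpers shown elsewhere in this changeset (not code from the changeset itself):

```python
import json

from awx.main.dispatch import pg_bus_conn

with pg_bus_conn(new_connection=True) as conn:
    conn.listen('web_ws_heartbeat')
    for event in conn.events(yield_timeouts=True):
        if event is not None:
            beat = json.loads(event.payload)
            print(beat['hostname'], beat['action'])  # e.g. "awx-web-1 online"
```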
awx/main/managers.py (path inferred)

```diff
@@ -2,6 +2,7 @@
 # All Rights Reserved.

 import logging
+import uuid
 from django.db import models
 from django.conf import settings
 from django.db.models.functions import Lower
@@ -114,7 +115,7 @@ class InstanceManager(models.Manager):
                 return node[0]
         raise RuntimeError("No instance found with the current cluster host id")

-    def register(self, uuid=None, hostname=None, ip_address=None, node_type='hybrid', defaults=None):
+    def register(self, node_uuid=None, hostname=None, ip_address=None, node_type='hybrid', defaults=None):
         if not hostname:
             hostname = settings.CLUSTER_HOST_ID

@@ -131,8 +132,8 @@ class InstanceManager(models.Manager):
             logger.warning("IP address {0} conflict detected, ip address unset for host {1}.".format(ip_address, other_hostname))

         # Return existing instance that matches hostname or UUID (default to UUID)
-        if uuid is not None and uuid != UUID_DEFAULT and self.filter(uuid=uuid).exists():
-            instance = self.filter(uuid=uuid)
+        if node_uuid is not None and node_uuid != UUID_DEFAULT and self.filter(uuid=node_uuid).exists():
+            instance = self.filter(uuid=node_uuid)
         else:
             # if instance was not retrieved by uuid and hostname was, use the hostname
             instance = self.filter(hostname=hostname)
@@ -170,9 +171,7 @@ class InstanceManager(models.Manager):
         }
         if defaults is not None:
             create_defaults.update(defaults)
-        uuid_option = {}
-        if uuid is not None:
-            uuid_option = {'uuid': uuid}
+        uuid_option = {'uuid': node_uuid if node_uuid is not None else uuid.uuid4()}
         if node_type == 'execution' and 'version' not in create_defaults:
             create_defaults['version'] = RECEPTOR_PENDING
         instance = self.create(hostname=hostname, ip_address=ip_address, node_type=node_type, **create_defaults, **uuid_option)
```
```diff
@@ -2,9 +2,6 @@
 # Python
 from __future__ import unicode_literals

-# Psycopg2
-from psycopg2.extensions import AsIs
-
 # Django
 from django.db import connection, migrations, models, OperationalError, ProgrammingError
 from django.conf import settings
@@ -136,8 +133,8 @@ class Migration(migrations.Migration):
             ),
         ),
         migrations.RunSQL(
-            [("CREATE INDEX host_ansible_facts_default_gin ON %s USING gin" "(ansible_facts jsonb_path_ops);", [AsIs(Host._meta.db_table)])],
-            [('DROP INDEX host_ansible_facts_default_gin;', None)],
+            sql="CREATE INDEX host_ansible_facts_default_gin ON {} USING gin(ansible_facts jsonb_path_ops);".format(Host._meta.db_table),
+            reverse_sql='DROP INDEX host_ansible_facts_default_gin;',
         ),
         # SCM file-based inventories
         migrations.AddField(
```
```diff
@@ -30,7 +30,7 @@ def migrate_event_data(apps, schema_editor):
     # otherwise, the schema changes we would make on the old jobevents table
     # (namely, dropping the primary key constraint) would cause the migration
     # to suffer a serious performance degradation
-    cursor.execute(f'CREATE TABLE tmp_{tblname} ' f'(LIKE _unpartitioned_{tblname} INCLUDING ALL)')
+    cursor.execute(f'CREATE TABLE tmp_{tblname} (LIKE _unpartitioned_{tblname} INCLUDING ALL)')

     # drop primary key constraint; in a partioned table
     # constraints must include the partition key itself
@@ -48,7 +48,7 @@ def migrate_event_data(apps, schema_editor):
     cursor.execute(f'DROP TABLE tmp_{tblname}')

     # recreate primary key constraint
-    cursor.execute(f'ALTER TABLE ONLY {tblname} ' f'ADD CONSTRAINT {tblname}_pkey_new PRIMARY KEY (id, job_created);')
+    cursor.execute(f'ALTER TABLE ONLY {tblname} ADD CONSTRAINT {tblname}_pkey_new PRIMARY KEY (id, job_created);')

     with connection.cursor() as cursor:
     """
```
142
awx/main/migrations/0185_move_JSONBlob_to_JSONField.py
Normal file
142
awx/main/migrations/0185_move_JSONBlob_to_JSONField.py
Normal file
@@ -0,0 +1,142 @@
|
||||
+# Generated by Django 4.2 on 2023-06-09 19:51
+
+import awx.main.models.notifications
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('main', '0184_django_indexes'),
+        ('conf', '0010_change_to_JSONField'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='activitystream',
+            name='deleted_actor',
+            field=models.JSONField(null=True),
+        ),
+        migrations.AlterField(
+            model_name='activitystream',
+            name='setting',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='instancegroup',
+            name='policy_instance_list',
+            field=models.JSONField(
+                blank=True, default=list, help_text='List of exact-match Instances that will always be automatically assigned to this group'
+            ),
+        ),
+        migrations.AlterField(
+            model_name='job',
+            name='survey_passwords',
+            field=models.JSONField(blank=True, default=dict, editable=False),
+        ),
+        migrations.AlterField(
+            model_name='joblaunchconfig',
+            name='char_prompts',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='joblaunchconfig',
+            name='survey_passwords',
+            field=models.JSONField(blank=True, default=dict, editable=False),
+        ),
+        migrations.AlterField(
+            model_name='jobtemplate',
+            name='survey_spec',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='notification',
+            name='body',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='notificationtemplate',
+            name='messages',
+            field=models.JSONField(
+                blank=True,
+                default=awx.main.models.notifications.NotificationTemplate.default_messages,
+                help_text='Optional custom messages for notification template.',
+                null=True,
+            ),
+        ),
+        migrations.AlterField(
+            model_name='notificationtemplate',
+            name='notification_configuration',
+            field=models.JSONField(default=dict),
+        ),
+        migrations.AlterField(
+            model_name='project',
+            name='inventory_files',
+            field=models.JSONField(
+                blank=True,
+                default=list,
+                editable=False,
+                help_text='Suggested list of content that could be Ansible inventory in the project',
+                verbose_name='Inventory Files',
+            ),
+        ),
+        migrations.AlterField(
+            model_name='project',
+            name='playbook_files',
+            field=models.JSONField(blank=True, default=list, editable=False, help_text='List of playbooks found in the project', verbose_name='Playbook Files'),
+        ),
+        migrations.AlterField(
+            model_name='schedule',
+            name='char_prompts',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='schedule',
+            name='survey_passwords',
+            field=models.JSONField(blank=True, default=dict, editable=False),
+        ),
+        migrations.AlterField(
+            model_name='unifiedjob',
+            name='job_env',
+            field=models.JSONField(blank=True, default=dict, editable=False),
+        ),
+        migrations.AlterField(
+            model_name='workflowjob',
+            name='char_prompts',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='workflowjob',
+            name='survey_passwords',
+            field=models.JSONField(blank=True, default=dict, editable=False),
+        ),
+        migrations.AlterField(
+            model_name='workflowjobnode',
+            name='char_prompts',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='workflowjobnode',
+            name='survey_passwords',
+            field=models.JSONField(blank=True, default=dict, editable=False),
+        ),
+        migrations.AlterField(
+            model_name='workflowjobtemplate',
+            name='char_prompts',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='workflowjobtemplate',
+            name='survey_spec',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='workflowjobtemplatenode',
+            name='char_prompts',
+            field=models.JSONField(blank=True, default=dict),
+        ),
+        migrations.AlterField(
+            model_name='workflowjobtemplatenode',
+            name='survey_passwords',
+            field=models.JSONField(blank=True, default=dict, editable=False),
+        ),
+    ]
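The model hunks that follow mirror each of these `AlterField` operations. As a declaration-level sketch (not an AWX model), the before/after on the Python side looks like this, assuming Django 3.1+ where `models.JSONField` is built in:

    from django.db import models


    class ExampleModel(models.Model):
        # before: char_prompts = JSONBlob(default=dict, blank=True)  (custom AWX field)
        char_prompts = models.JSONField(default=dict, blank=True)
        survey_passwords = models.JSONField(default=dict, blank=True, editable=False)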
@@ -3,7 +3,6 @@

# AWX
from awx.api.versioning import reverse
-from awx.main.fields import JSONBlob
from awx.main.models.base import accepts_json

# Django
@@ -36,7 +35,7 @@ class ActivityStream(models.Model):
    operation = models.CharField(max_length=13, choices=OPERATION_CHOICES)
    timestamp = models.DateTimeField(auto_now_add=True)
    changes = accepts_json(models.TextField(blank=True))
-    deleted_actor = JSONBlob(null=True)
+    deleted_actor = models.JSONField(null=True)
    action_node = models.CharField(
        blank=True,
        default='',
@@ -84,7 +83,7 @@ class ActivityStream(models.Model):
    o_auth2_application = models.ManyToManyField("OAuth2Application", blank=True)
    o_auth2_access_token = models.ManyToManyField("OAuth2AccessToken", blank=True)

-    setting = JSONBlob(default=dict, blank=True)
+    setting = models.JSONField(default=dict, blank=True)

    def __str__(self):
        operation = self.operation if 'operation' in self.__dict__ else '_delayed_'
@@ -4,6 +4,7 @@ import datetime
from datetime import timezone
import logging
from collections import defaultdict
+import time

from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
@@ -383,8 +384,17 @@ class BasePlaybookEvent(CreatedModifiedModel):
                .distinct()
            )  # noqa

-            job.get_event_queryset().filter(uuid__in=changed).update(changed=True)
-            job.get_event_queryset().filter(uuid__in=failed).update(failed=True)
+            # NOTE: we take a set of changed and failed parent uuids because the subquery
+            # complicates the plan with large event tables causing very long query execution time
+            changed_start = time.time()
+            changed_res = job.get_event_queryset().filter(uuid__in=set(changed)).update(changed=True)
+            failed_start = time.time()
+            failed_res = job.get_event_queryset().filter(uuid__in=set(failed)).update(failed=True)
+            logger.debug(
+                f'Event propagation for job {job.id}: '
+                f'marked {changed_res} as changed in {failed_start - changed_start:.4f}s, '
+                f'{failed_res} as failed in {time.time() - failed_start:.4f}s'
+            )

            for field in ('playbook', 'play', 'task', 'role'):
                value = force_str(event_data.get(field, '')).strip()
@@ -20,7 +20,7 @@ from solo.models import SingletonModel
# AWX
from awx import __version__ as awx_application_version
from awx.api.versioning import reverse
-from awx.main.fields import JSONBlob, ImplicitRoleField
+from awx.main.fields import ImplicitRoleField
from awx.main.managers import InstanceManager, UUID_DEFAULT
from awx.main.constants import JOB_FOLDER_PREFIX
from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search
@@ -406,7 +406,7 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin, ResourceMi
    max_forks = models.IntegerField(default=0, help_text=_("Max forks to execute on this group. Zero means no limit."))
    policy_instance_percentage = models.IntegerField(default=0, help_text=_("Percentage of Instances to automatically assign to this group"))
    policy_instance_minimum = models.IntegerField(default=0, help_text=_("Static minimum number of Instances to automatically assign to this group"))
-    policy_instance_list = JSONBlob(
+    policy_instance_list = models.JSONField(
        default=list, blank=True, help_text=_("List of exact-match Instances that will always be automatically assigned to this group")
    )
@@ -1623,6 +1623,7 @@ class rhv(PluginFileInjector):
    collection = 'ovirt'
    downstream_namespace = 'redhat'
    downstream_collection = 'rhv'
+    use_fqcn = True


class satellite6(PluginFileInjector):
@@ -883,7 +883,7 @@ class LaunchTimeConfigBase(BaseModel):
    )
    # All standard fields are stored in this dictionary field
    # This is a solution to the nullable CharField problem, specific to prompting
-    char_prompts = JSONBlob(default=dict, blank=True)
+    char_prompts = models.JSONField(default=dict, blank=True)

    # Define fields that are not really fields, but alias to char_prompts lookups
    limit = NullablePromptPseudoField('limit')
@@ -960,7 +960,7 @@ class LaunchTimeConfig(LaunchTimeConfigBase):
    # Special case prompting fields, even more special than the other ones
    extra_data = JSONBlob(default=dict, blank=True)
    survey_passwords = prevent_search(
-        JSONBlob(
+        models.JSONField(
            default=dict,
            editable=False,
            blank=True,
@@ -24,7 +24,7 @@ from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_lice
from awx.main.utils.execution_environments import get_default_execution_environment
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
from awx.main.utils.polymorphic import build_polymorphic_ctypes_map
-from awx.main.fields import AskForField, JSONBlob
+from awx.main.fields import AskForField
from awx.main.constants import ACTIVE_STATES


@@ -103,7 +103,7 @@ class SurveyJobTemplateMixin(models.Model):
    survey_enabled = models.BooleanField(
        default=False,
    )
-    survey_spec = prevent_search(JSONBlob(default=dict, blank=True))
+    survey_spec = prevent_search(models.JSONField(default=dict, blank=True))

    ask_inventory_on_launch = AskForField(
        blank=True,
@@ -392,7 +392,7 @@ class SurveyJobMixin(models.Model):
        abstract = True

    survey_passwords = prevent_search(
-        JSONBlob(
+        models.JSONField(
            default=dict,
            editable=False,
            blank=True,
@@ -17,7 +17,6 @@ from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

# AWX
from awx.api.versioning import reverse
-from awx.main.fields import JSONBlob
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel, prevent_search
from awx.main.utils import encrypt_field, decrypt_field, set_environ
from awx.main.notifications.email_backend import CustomEmailBackend
@@ -69,12 +68,12 @@ class NotificationTemplate(CommonModelNameNotUnique):
        choices=NOTIFICATION_TYPE_CHOICES,
    )

-    notification_configuration = prevent_search(JSONBlob(default=dict))
+    notification_configuration = prevent_search(models.JSONField(default=dict))

    def default_messages():
        return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}

-    messages = JSONBlob(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.'))
+    messages = models.JSONField(null=True, blank=True, default=default_messages, help_text=_('Optional custom messages for notification template.'))

    def has_message(self, condition):
        potential_template = self.messages.get(condition, {})
@@ -236,7 +235,7 @@ class Notification(CreatedModifiedModel):
        default='',
        editable=False,
    )
-    body = JSONBlob(default=dict, blank=True)
+    body = models.JSONField(default=dict, blank=True)

    def get_absolute_url(self, request=None):
        return reverse('api:notification_detail', kwargs={'pk': self.pk}, request=request)
@@ -33,7 +33,7 @@ from awx.main.models.mixins import ResourceMixin, TaskManagerProjectUpdateMixin,
from awx.main.utils import update_scm_url, polymorphic
from awx.main.utils.ansible import skip_directory, could_be_inventory, could_be_playbook
from awx.main.utils.execution_environments import get_control_plane_execution_environment
-from awx.main.fields import ImplicitRoleField, JSONBlob
+from awx.main.fields import ImplicitRoleField
from awx.main.models.rbac import (
    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    ROLE_SINGLETON_SYSTEM_AUDITOR,
@@ -303,7 +303,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
        help_text=_('The last revision fetched by a project update'),
    )

-    playbook_files = JSONBlob(
+    playbook_files = models.JSONField(
        default=list,
        blank=True,
        editable=False,
@@ -311,7 +311,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
        help_text=_('List of playbooks found in the project'),
    )

-    inventory_files = JSONBlob(
+    inventory_files = models.JSONField(
        default=list,
        blank=True,
        editable=False,
@@ -479,7 +479,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn
        RunProjectUpdate/RunInventoryUpdate.
        """

-        if self.status not in ('error', 'failed'):
+        if self.status not in ('error', 'failed') or self.scm_update_on_launch:
            return None

        latest_update = self.project_updates.last()
@@ -55,7 +55,7 @@ from awx.main.utils import polymorphic
from awx.main.constants import ACTIVE_STATES, CAN_CANCEL, JOB_VARIABLE_PREFIXES
from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.consumers import emit_channel_notification
-from awx.main.fields import AskForField, OrderedManyToManyField, JSONBlob
+from awx.main.fields import AskForField, OrderedManyToManyField

__all__ = ['UnifiedJobTemplate', 'UnifiedJob', 'StdoutMaxBytesExceeded']

@@ -668,7 +668,7 @@ class UnifiedJob(
        editable=False,
    )
    job_env = prevent_search(
-        JSONBlob(
+        models.JSONField(
            default=dict,
            blank=True,
            editable=False,
@@ -1137,11 +1137,6 @@ class UnifiedJob(
        if total > max_supported:
            raise StdoutMaxBytesExceeded(total, max_supported)

-        # psycopg2's copy_expert writes bytes, but callers of this
-        # function assume a str-based fd will be returned; decode
-        # .write() calls on the fly to maintain this interface
-        _write = fd.write
-        fd.write = lambda s: _write(smart_str(s))
        tbl = self._meta.db_table + 'event'
        created_by_cond = ''
        if self.has_unpartitioned_events:
@@ -1150,7 +1145,12 @@ class UnifiedJob(
            created_by_cond = f"job_created='{self.created.isoformat()}' AND "

        sql = f"copy (select stdout from {tbl} where {created_by_cond}{self.event_parent_key}={self.id} and stdout != '' order by start_line) to stdout"  # nosql
-        cursor.copy_expert(sql, fd)
+        # psycopg3's copy writes bytes, but callers of this
+        # function assume a str-based fd will be returned; decode
+        # .write() calls on the fly to maintain this interface
+        with cursor.copy(sql) as copy:
+            while data := copy.read():
+                fd.write(smart_str(bytes(data)))

        if hasattr(fd, 'name'):
            # If we're dealing with a physical file, use `sed` to clean
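The replacement relies on psycopg 3's pull-style COPY protocol: `cursor.copy()` returns a context manager whose `read()` yields byte chunks until an empty result. A minimal sketch under assumed connection parameters (the dsn and table name are placeholders):

    import io

    import psycopg

    with psycopg.connect("dbname=awx") as conn, conn.cursor() as cursor:
        buf = io.StringIO()
        with cursor.copy("COPY (SELECT stdout FROM main_jobevent ORDER BY start_line) TO STDOUT") as copy:
            # read() returns a memoryview of bytes; an empty result ends the stream,
            # which is exactly what the walrus loop in the hunk above relies on
            while data := copy.read():
                buf.write(bytes(data).decode())
        print(buf.getvalue())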
@@ -661,7 +661,11 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio

    @property
    def event_processing_finished(self):
-        return True
+        return True  # workflow jobs do not have events
+
+    @property
+    def has_unpartitioned_events(self):
+        return False  # workflow jobs do not have events

    def _get_parent_field_name(self):
        if self.job_template_id:
@@ -914,7 +918,11 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):

    @property
    def event_processing_finished(self):
-        return True
+        return True  # approval jobs do not have events
+
+    @property
+    def has_unpartitioned_events(self):
+        return False  # approval jobs do not have events

    def send_approval_notification(self, approval_status):
        from awx.main.tasks.system import send_notifications  # avoid circular import
@@ -290,13 +290,6 @@ class BaseTask(object):
        content = safe_dump(vars, safe_dict)
        return self.write_private_data_file(private_data_dir, 'extravars', content, sub_dir='env')

-    def add_awx_venv(self, env):
-        env['VIRTUAL_ENV'] = settings.AWX_VENV_PATH
-        if 'PATH' in env:
-            env['PATH'] = os.path.join(settings.AWX_VENV_PATH, "bin") + ":" + env['PATH']
-        else:
-            env['PATH'] = os.path.join(settings.AWX_VENV_PATH, "bin")
-
    def build_env(self, instance, private_data_dir, private_data_files=None):
        """
        Build environment dictionary for ansible-playbook.
@@ -926,6 +919,7 @@ class RunJob(SourceControlMixin, BaseTask):
        path_vars = (
            ('ANSIBLE_COLLECTIONS_PATHS', 'collections_paths', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
            ('ANSIBLE_ROLES_PATH', 'roles_path', 'requirements_roles', '~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles'),
+            ('ANSIBLE_COLLECTIONS_PATH', 'collections_path', 'requirements_collections', '~/.ansible/collections:/usr/share/ansible/collections'),
        )

        config_values = read_ansible_config(os.path.join(private_data_dir, 'project'), list(map(lambda x: x[1], path_vars)))
@@ -1268,7 +1262,7 @@ class RunProjectUpdate(BaseTask):

        galaxy_creds_are_defined = project_update.project.organization and project_update.project.organization.galaxy_credentials.exists()
        if not galaxy_creds_are_defined and (settings.AWX_ROLES_ENABLED or settings.AWX_COLLECTIONS_ENABLED):
-            logger.warning('Galaxy role/collection syncing is enabled, but no ' f'credentials are configured for {project_update.project.organization}.')
+            logger.warning('Galaxy role/collection syncing is enabled, but no credentials are configured for {project_update.project.organization}.')

        extra_vars.update(
            {
@@ -639,11 +639,11 @@ class AWXReceptorJob:
#
RECEPTOR_CONFIG_STARTER = (
    {'local-only': None},
-    {'log-level': 'info'},
+    {'log-level': settings.RECEPTOR_LOG_LEVEL},
    {'node': {'firewallrules': [{'action': 'reject', 'tonode': settings.CLUSTER_HOST_ID, 'toservice': 'control'}]}},
    {'control-service': {'service': 'control', 'filename': '/var/run/receptor/receptor.sock', 'permissions': '0660'}},
    {'work-command': {'worktype': 'local', 'command': 'ansible-runner', 'params': 'worker', 'allowruntimeparams': True}},
-    {'work-signing': {'privatekey': '/etc/receptor/signing/work-private-key.pem', 'tokenexpiration': '1m'}},
+    {'work-signing': {'privatekey': '/etc/receptor/work_private_key.pem', 'tokenexpiration': '1m'}},
    {
        'work-kubernetes': {
            'worktype': 'kubernetes-runtime-auth',
@@ -665,7 +665,7 @@ RECEPTOR_CONFIG_STARTER = (
    {
        'tls-client': {
            'name': 'tlsclient',
-            'rootcas': '/etc/receptor/tls/ca/receptor-ca.crt',
+            'rootcas': '/etc/receptor/tls/ca/mesh-CA.crt',
            'cert': '/etc/receptor/tls/receptor.crt',
            'key': '/etc/receptor/tls/receptor.key',
            'mintls13': False,
@@ -541,7 +541,7 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
            logger.warning(f'Heartbeat skew - interval={(nowtime - last_last_seen).total_seconds():.4f}, expected={settings.CLUSTER_NODE_HEARTBEAT_PERIOD}')
    else:
        if settings.AWX_AUTO_DEPROVISION_INSTANCES:
-            (changed, this_inst) = Instance.objects.register(ip_address=os.environ.get('MY_POD_IP'), node_type='control', uuid=settings.SYSTEM_UUID)
+            (changed, this_inst) = Instance.objects.register(ip_address=os.environ.get('MY_POD_IP'), node_type='control', node_uuid=settings.SYSTEM_UUID)
            if changed:
                logger.warning(f'Recreated instance record {this_inst.hostname} after unexpected removal')
            this_inst.local_health_check()
@@ -7,7 +7,7 @@ from django.core.serializers.json import DjangoJSONEncoder
from django.utils.functional import Promise
from django.utils.encoding import force_str

-from openapi_codec.encode import generate_swagger_object
+from drf_yasg.codecs import OpenAPICodecJson
import pytest

from awx.api.versioning import drf_reverse
@@ -43,12 +43,12 @@ class TestSwaggerGeneration:
    @pytest.fixture(autouse=True, scope='function')
    def _prepare(self, get, admin):
        if not self.__class__.JSON:
-            url = drf_reverse('api:swagger_view') + '?format=openapi'
+            url = drf_reverse('api:schema-swagger-ui') + '?format=openapi'
            response = get(url, user=admin)
-            data = generate_swagger_object(response.data)
+            codec = OpenAPICodecJson([])
+            data = codec.generate_swagger_object(response.data)
            if response.has_header('X-Deprecated-Paths'):
                data['deprecated_paths'] = json.loads(response['X-Deprecated-Paths'])
            data.update(response.accepted_renderer.get_customizations() or {})

            data['host'] = None
            data['schemes'] = ['https']
@@ -60,12 +60,21 @@ class TestSwaggerGeneration:
            # change {version} in paths to the actual default API version (e.g., v2)
            revised_paths[path.replace('{version}', settings.REST_FRAMEWORK['DEFAULT_VERSION'])] = node
            for method in node:
+                # Ignore any parameters methods, these cause issues because it can come as an array instead of a dict
+                # Which causes issues in the last for loop in here
+                if method == 'parameters':
+                    continue
+
                if path in deprecated_paths:
                    node[method]['deprecated'] = True
                if 'description' in node[method]:
                    # Pop off the first line and use that as the summary
                    lines = node[method]['description'].splitlines()
-                    node[method]['summary'] = lines.pop(0).strip('#:')
+                    # If there was a description then set the summary as the description, otherwise make something up
+                    if lines:
+                        node[method]['summary'] = lines.pop(0).strip('#:')
+                    else:
+                        node[method]['summary'] = f'No Description for {method} on {path}'
                    node[method]['description'] = '\n'.join(lines)

            # remove the required `version` parameter
@@ -90,13 +99,13 @@ class TestSwaggerGeneration:
        # The number of API endpoints changes over time, but let's just check
        # for a reasonable number here; if this test starts failing, raise/lower the bounds
        paths = JSON['paths']
-        assert 250 < len(paths) < 350
-        assert list(paths['/api/'].keys()) == ['get']
-        assert list(paths['/api/v2/'].keys()) == ['get']
-        assert list(sorted(paths['/api/v2/credentials/'].keys())) == ['get', 'post']
-        assert list(sorted(paths['/api/v2/credentials/{id}/'].keys())) == ['delete', 'get', 'patch', 'put']
-        assert list(paths['/api/v2/settings/'].keys()) == ['get']
-        assert list(paths['/api/v2/settings/{category_slug}/'].keys()) == ['get', 'put', 'patch', 'delete']
+        assert 250 < len(paths) < 375
+        assert set(list(paths['/api/'].keys())) == set(['get', 'parameters'])
+        assert set(list(paths['/api/v2/'].keys())) == set(['get', 'parameters'])
+        assert set(list(sorted(paths['/api/v2/credentials/'].keys()))) == set(['get', 'post', 'parameters'])
+        assert set(list(sorted(paths['/api/v2/credentials/{id}/'].keys()))) == set(['delete', 'get', 'patch', 'put', 'parameters'])
+        assert set(list(paths['/api/v2/settings/'].keys())) == set(['get', 'parameters'])
+        assert set(list(paths['/api/v2/settings/{category_slug}/'].keys())) == set(['get', 'put', 'patch', 'delete', 'parameters'])

    @pytest.mark.parametrize(
        'path',
@@ -162,4 +171,8 @@ class TestSwaggerGeneration:
        data = re.sub(r'[0-9]{4}-[0-9]{2}-[0-9]{2}(T|\s)[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]+(Z|\+[0-9]{2}:[0-9]{2})?', r'2018-02-01T08:00:00.000000Z', data)
        data = re.sub(r'''(\s+"client_id": ")([a-zA-Z0-9]{40})("\,\s*)''', r'\1xxxx\3', data)
        data = re.sub(r'"action_node": "[^"]+"', '"action_node": "awx"', data)
+
+        # replace uuids to prevent needless diffs
+        pattern = r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}'
+        data = re.sub(pattern, r'00000000-0000-0000-0000-000000000000', data)
        f.write(data)
@@ -2,8 +2,8 @@ import pytest
import tempfile
import os
import re
-import shutil
+import csv
+from io import StringIO

from django.utils.timezone import now
from datetime import timedelta
@@ -20,15 +20,16 @@ from awx.main.models import (
)


-@pytest.fixture
-def sqlite_copy_expert(request):
-    # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
-    # behavior to test that it writes a file that contains stdout from events
-    path = tempfile.mkdtemp(prefix="copied_tables")
+class MockCopy:
+    headers = None
+    results = None
+    sent_data = False

-    def write_stdout(self, sql, fd):
+    def __init__(self, sql, parent_connection):
        # Would be cool if we instead properly disected the SQL query and verified
        # it that way. But instead, we just take the naive approach here.
+        self.results = None
+        self.headers = None
        sql = sql.strip()
        assert sql.startswith("COPY (")
        assert sql.endswith(") TO STDOUT WITH CSV HEADER")
@@ -51,29 +52,49 @@ def sqlite_copy_expert(request):
        elif not line.endswith(","):
            sql_new[-1] = sql_new[-1].rstrip(",")
        sql = "\n".join(sql_new)
+        parent_connection.execute(sql)
+        self.results = parent_connection.fetchall()
+        self.headers = [i[0] for i in parent_connection.description]

-        self.execute(sql)
-        results = self.fetchall()
-        headers = [i[0] for i in self.description]
+    def read(self):
+        if not self.sent_data:
+            mem_file = StringIO()
+            csv_handle = csv.writer(
+                mem_file,
+                delimiter=",",
+                quoting=csv.QUOTE_ALL,
+                escapechar="\\",
+                lineterminator="\n",
+            )
+            if self.headers:
+                csv_handle.writerow(self.headers)
+            if self.results:
+                csv_handle.writerows(self.results)
+            self.sent_data = True
+            return memoryview((mem_file.getvalue()).encode())
+        return None

-        csv_handle = csv.writer(
-            fd,
-            delimiter=",",
-            quoting=csv.QUOTE_ALL,
-            escapechar="\\",
-            lineterminator="\n",
-        )
-        csv_handle.writerow(headers)
-        csv_handle.writerows(results)
+    def __enter__(self):
+        return self

-    setattr(SQLiteCursorWrapper, "copy_expert", write_stdout)
-    request.addfinalizer(lambda: shutil.rmtree(path))
-    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, "copy_expert"))
-    return path
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass


+@pytest.fixture
+def sqlite_copy(request, mocker):
+    # copy is postgres-specific, and SQLite doesn't support it; mock its
+    # behavior to test that it writes a file that contains stdout from events
+
+    def write_stdout(self, sql):
+        mock_copy = MockCopy(sql, self)
+        return mock_copy
+
+    mocker.patch.object(SQLiteCursorWrapper, 'copy', write_stdout, create=True)


@pytest.mark.django_db
-def test_copy_tables_unified_job_query(sqlite_copy_expert, project, inventory, job_template):
+def test_copy_tables_unified_job_query(sqlite_copy, project, inventory, job_template):
    """
    Ensure that various unified job types are in the output of the query.
    """
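The fixture change tracks the production switch above: `copy_expert` pushed rows into a file descriptor, while psycopg 3's `copy` is a context manager that callers drain by calling `read()` until it returns a falsy value. A stripped-down sketch of that pull-style protocol; `FakeCopy` is a hypothetical stand-in for `MockCopy`:

    class FakeCopy:
        def __init__(self, sql):
            self.chunks = [b"line1\n", b"line2\n"]

        def read(self):
            # hand back one chunk per call, then a falsy value to end the stream
            return self.chunks.pop(0) if self.chunks else None

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            pass


    def test_reads_all_chunks():
        out = b""
        with FakeCopy("COPY (...) TO STDOUT") as copy:
            while data := copy.read():
                out += data
        assert out == b"line1\nline2\n"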
@@ -127,7 +148,7 @@ def workflow_job(states=["new", "new", "new", "new", "new"]):


@pytest.mark.django_db
-def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job):
+def test_copy_tables_workflow_job_node_query(sqlite_copy, workflow_job):
    time_start = now() - timedelta(hours=9)

    with tempfile.TemporaryDirectory() as tmpdir:
@@ -224,7 +224,7 @@ class TestControllerNode:
        return AdHocCommand.objects.create(inventory=inventory)

    @pytest.mark.django_db
-    def test_field_controller_node_exists(self, sqlite_copy_expert, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
+    def test_field_controller_node_exists(self, sqlite_copy, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
        system_job = system_job_factory()

        r = get(reverse('api:unified_job_list') + '?id={}'.format(job.id), admin_user, expect=200)
@@ -57,7 +57,7 @@ def _mk_inventory_update(created=None):
        [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
    ],
)
-def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_text_stdout(sqlite_copy, Parent, Child, relation, view, get, admin):
    job = Parent()
    job.save()
    for i in range(3):
@@ -79,7 +79,7 @@ def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, adm
    ],
)
@pytest.mark.parametrize('download', [True, False])
-def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view, download, get, admin):
+def test_ansi_stdout_filtering(sqlite_copy, Parent, Child, relation, view, download, get, admin):
    job = Parent()
    job.save()
    for i in range(3):
@@ -111,7 +111,7 @@ def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view
        [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
    ],
)
-def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_colorized_html_stdout(sqlite_copy, Parent, Child, relation, view, get, admin):
    job = Parent()
    job.save()
    for i in range(3):
@@ -134,7 +134,7 @@ def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view
        [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
    ],
)
-def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_stdout_line_range(sqlite_copy, Parent, Child, relation, view, get, admin):
    job = Parent()
    job.save()
    for i in range(20):
@@ -146,7 +146,7 @@ def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, ge


@pytest.mark.django_db
-def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):
+def test_text_stdout_from_system_job_events(sqlite_copy, get, admin):
    created = tz_now()
    job = SystemJob(created=created)
    job.save()
@@ -158,7 +158,7 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):


@pytest.mark.django_db
-def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
+def test_text_stdout_with_max_stdout(sqlite_copy, get, admin):
    created = tz_now()
    job = SystemJob(created=created)
    job.save()
@@ -185,7 +185,7 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
)
@pytest.mark.parametrize('fmt', ['txt', 'ansi'])
@mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None}))  # really slow for large strings
-def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
+def test_max_bytes_display(sqlite_copy, Parent, Child, relation, view, fmt, get, admin):
    created = tz_now()
    job = Parent(created=created)
    job.save()
@@ -255,7 +255,7 @@ def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
    ],
)
@pytest.mark.parametrize('fmt', ['txt', 'ansi', 'txt_download', 'ansi_download'])
-def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin, fmt):
+def test_text_with_unicode_stdout(sqlite_copy, Parent, Child, relation, view, get, admin, fmt):
    job = Parent()
    job.save()
    for i in range(3):
@@ -267,7 +267,7 @@ def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, v


@pytest.mark.django_db
-def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
+def test_unicode_with_base64_ansi(sqlite_copy, get, admin):
    created = tz_now()
    job = Job(created=created)
    job.save()
@@ -1,8 +1,6 @@
# Python
import pytest
from unittest import mock
-import tempfile
-import shutil
import urllib.parse
from unittest.mock import PropertyMock
@@ -789,25 +787,43 @@ def oauth_application(admin):
    return Application.objects.create(name='test app', user=admin, client_type='confidential', authorization_grant_type='password')


-@pytest.fixture
-def sqlite_copy_expert(request):
-    # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
-    # behavior to test that it writes a file that contains stdout from events
-    path = tempfile.mkdtemp(prefix='job-event-stdout')
+class MockCopy:
+    events = []
+    index = -1

-    def write_stdout(self, sql, fd):
-        # simulate postgres copy_expert support with ORM code
+    def __init__(self, sql):
+        self.events = []
        parts = sql.split(' ')
        tablename = parts[parts.index('from') + 1]
        for cls in (JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent):
            if cls._meta.db_table == tablename:
                for event in cls.objects.order_by('start_line').all():
-                    fd.write(event.stdout)
+                    self.events.append(event.stdout)

-    setattr(SQLiteCursorWrapper, 'copy_expert', write_stdout)
-    request.addfinalizer(lambda: shutil.rmtree(path))
-    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, 'copy_expert'))
-    return path
+    def read(self):
+        self.index = self.index + 1
+        if self.index < len(self.events):
+            return memoryview(self.events[self.index].encode())
+
+        return None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass


+@pytest.fixture
+def sqlite_copy(request, mocker):
+    # copy is postgres-specific, and SQLite doesn't support it; mock its
+    # behavior to test that it writes a file that contains stdout from events
+
+    def write_stdout(self, sql):
+        mock_copy = MockCopy(sql)
+        return mock_copy
+
+    mocker.patch.object(SQLiteCursorWrapper, 'copy', write_stdout, create=True)


@pytest.fixture
@@ -98,7 +98,7 @@ class TestJobNotificationMixin(object):

    @pytest.mark.django_db
    @pytest.mark.parametrize('JobClass', [AdHocCommand, InventoryUpdate, Job, ProjectUpdate, SystemJob, WorkflowJob])
-    def test_context(self, JobClass, sqlite_copy_expert, project, inventory_source):
+    def test_context(self, JobClass, sqlite_copy, project, inventory_source):
        """The Jinja context defines all of the fields that can be used by a template. Ensure that the context generated
        for each job type has the expected structure."""
        kwargs = {}
@@ -78,6 +78,7 @@ def test_default_cred_types():
    [
        'aim',
        'aws',
+        'aws_secretsmanager_credential',
        'azure_kv',
        'azure_rm',
        'centrify_vault_kv',
@@ -16,8 +16,7 @@ from django.test.utils import override_settings
@pytest.mark.django_db
def test_get_notification_template_list(get, user, notification_template):
    url = reverse('api:notification_template_list')
-    response = get(url, user('admin', True))
-    assert response.status_code == 200
+    response = get(url, user('admin', True), expect=200)
    assert len(response.data['results']) == 1


@@ -35,8 +34,8 @@ def test_basic_parameterization(get, post, user, organization):
            notification_configuration=dict(url="http://localhost", disable_ssl_verification=False, headers={"Test": "Header"}),
        ),
        u,
+        expect=201,
    )
-    assert response.status_code == 201
    url = reverse('api:notification_template_detail', kwargs={'pk': response.data['id']})
    response = get(url, u)
    assert 'related' in response.data
@@ -69,8 +68,8 @@ def test_encrypted_subfields(get, post, user, organization):
            notification_configuration=dict(account_sid="dummy", account_token="shouldhide", from_number="+19999999999", to_numbers=["9998887777"]),
        ),
        u,
+        expect=201,
    )
-    assert response.status_code == 201
    notification_template_actual = NotificationTemplate.objects.get(id=response.data['id'])
    url = reverse('api:notification_template_detail', kwargs={'pk': response.data['id']})
    response = get(url, u)
@@ -96,8 +95,8 @@ def test_inherited_notification_templates(get, post, user, organization, project
            notification_configuration=dict(url="http://localhost", disable_ssl_verification=False, headers={"Test": "Header"}),
        ),
        u,
+        expect=201,
    )
-    assert response.status_code == 201
    notification_templates.append(response.data['id'])
    i = Inventory.objects.create(name='test', organization=organization)
    i.save()
@@ -122,8 +121,7 @@ def test_disallow_delete_when_notifications_pending(delete, user, notification_t
    u = user('superuser', True)
    url = reverse('api:notification_template_detail', kwargs={'pk': notification_template.id})
    Notification.objects.create(notification_template=notification_template, status='pending')
-    response = delete(url, user=u)
-    assert response.status_code == 405
+    delete(url, user=u, expect=405)


@pytest.mark.django_db
@@ -133,9 +131,8 @@ def test_notification_template_list_includes_notification_errors(get, user, noti
    Notification.objects.create(notification_template=notification_template, status='successful')
    url = reverse('api:notification_template_list')
    u = user('superuser', True)
-    response = get(url, user=u)
+    response = get(url, user=u, expect=200)

-    assert response.status_code == 200
    notifications = response.data['results'][0]['summary_fields']['recent_notifications']
    assert len(notifications) == 3
    statuses = [n['status'] for n in notifications]
@@ -163,8 +160,8 @@ def test_custom_environment_injection(post, user, organization):
            notification_configuration=dict(url="https://example.org", disable_ssl_verification=False, http_method="POST", headers={"Test": "Header"}),
        ),
        u,
+        expect=201,
    )
-    assert response.status_code == 201
    template = NotificationTemplate.objects.get(pk=response.data['id'])
    with pytest.raises(ConnectionError), override_settings(AWX_TASK_ENV={'HTTPS_PROXY': '192.168.50.100:1234'}), mock.patch.object(
        HTTPAdapter, 'send'
@@ -4,7 +4,7 @@ from unittest import mock  # noqa
import pytest

from awx.api.versioning import reverse
-from awx.main.models import Project
+from awx.main.models import Project, JobTemplate

from django.core.exceptions import ValidationError

@@ -451,3 +451,19 @@ def test_project_list_ordering_with_duplicate_names(get, order_by, organization_
        results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
        project_ids[x] = [proj['id'] for proj in results]
    assert project_ids[0] == project_ids[1] == project_ids[2] == [1, 2, 3, 4, 5]
+
+
+@pytest.mark.django_db
+def test_project_failed_update(post, project, admin, inventory):
+    """Test to ensure failed projects with update on launch will create launch rather than error"""
+    jt = JobTemplate.objects.create(project=project, inventory=inventory)
+    # set project to update on launch and set status to failed
+    project.update_fields(scm_update_on_launch=True)
+    project.update()
+    project.project_updates.last().update_fields(status='failed')
+    response = post(reverse('api:job_template_launch', kwargs={'pk': jt.pk}), user=admin, expect=201)
+    assert response.status_code == 201
+    # set project to not update on launch and validate still 400's
+    project.update_fields(scm_update_on_launch=False)
+    response = post(reverse('api:job_template_launch', kwargs={'pk': jt.pk}), user=admin, expect=400)
+    assert response.status_code == 400
@@ -47,7 +47,7 @@ data_loggly = {
        '\n'.join(
            [
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
-                'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")',  # noqa
+                'action(type="omhttp" server="logs-01.loggly.com" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" queue.spoolDirectory="/var/lib/awx" queue.filename="awx-external-logger-action-queue" queue.maxdiskspace="1g" queue.type="LinkedList" queue.saveOnShutdown="on" errorfile="/var/log/tower/rsyslog.err" restpath="inputs/1fd38090-2af1-4e1e-8d80-492899da0f71/tag/http/")',  # noqa
            ]
        ),
    ),
@@ -89,7 +89,7 @@ data_loggly = {
        '\n'.join(
            [
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
-                'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
+                'action(type="omhttp" server="yoursplunk" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" queue.spoolDirectory="/var/lib/awx" queue.filename="awx-external-logger-action-queue" queue.maxdiskspace="1g" queue.type="LinkedList" queue.saveOnShutdown="on" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
            ]
        ),
    ),
@@ -103,7 +103,7 @@ data_loggly = {
        '\n'.join(
            [
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
-                'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
+                'action(type="omhttp" server="yoursplunk" serverport="80" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" queue.spoolDirectory="/var/lib/awx" queue.filename="awx-external-logger-action-queue" queue.maxdiskspace="1g" queue.type="LinkedList" queue.saveOnShutdown="on" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
            ]
        ),
    ),
@@ -117,7 +117,7 @@ data_loggly = {
        '\n'.join(
            [
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
-                'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
+                'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" queue.spoolDirectory="/var/lib/awx" queue.filename="awx-external-logger-action-queue" queue.maxdiskspace="1g" queue.type="LinkedList" queue.saveOnShutdown="on" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
            ]
        ),
    ),
@@ -131,7 +131,7 @@ data_loggly = {
        '\n'.join(
            [
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
-                'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
+                'action(type="omhttp" server="yoursplunk" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" queue.spoolDirectory="/var/lib/awx" queue.filename="awx-external-logger-action-queue" queue.maxdiskspace="1g" queue.type="LinkedList" queue.saveOnShutdown="on" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
            ]
        ),
    ),
@@ -145,7 +145,7 @@ data_loggly = {
        '\n'.join(
            [
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
-                'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
+                'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" queue.spoolDirectory="/var/lib/awx" queue.filename="awx-external-logger-action-queue" queue.maxdiskspace="1g" queue.type="LinkedList" queue.saveOnShutdown="on" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
            ]
        ),
    ),
@@ -159,7 +159,7 @@ data_loggly = {
        '\n'.join(
            [
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
-                'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
+                'action(type="omhttp" server="yoursplunk.org" serverport="8088" usehttps="off" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" queue.spoolDirectory="/var/lib/awx" queue.filename="awx-external-logger-action-queue" queue.maxdiskspace="1g" queue.type="LinkedList" queue.saveOnShutdown="on" errorfile="/var/log/tower/rsyslog.err" restpath="services/collector/event")',  # noqa
            ]
        ),
    ),
@@ -173,7 +173,7 @@ data_loggly = {
        '\n'.join(
            [
                'template(name="awx" type="string" string="%rawmsg-after-pri%")\nmodule(load="omhttp")',
-                'action(type="omhttp" server="endpoint5.collection.us2.sumologic.com" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" errorfile="/var/log/tower/rsyslog.err" restpath="receiver/v1/http/ZaVnC4dhaV0qoiETY0MrM3wwLoDgO1jFgjOxE6-39qokkj3LGtOroZ8wNaN2M6DtgYrJZsmSi4-36_Up5TbbN_8hosYonLKHSSOSKY845LuLZBCBwStrHQ==")',  # noqa
+                'action(type="omhttp" server="endpoint5.collection.us2.sumologic.com" serverport="443" usehttps="on" allowunsignedcerts="off" skipverifyhost="off" action.resumeRetryCount="-1" template="awx" action.resumeInterval="5" queue.spoolDirectory="/var/lib/awx" queue.filename="awx-external-logger-action-queue" queue.maxdiskspace="1g" queue.type="LinkedList" queue.saveOnShutdown="on" errorfile="/var/log/tower/rsyslog.err" restpath="receiver/v1/http/ZaVnC4dhaV0qoiETY0MrM3wwLoDgO1jFgjOxE6-39qokkj3LGtOroZ8wNaN2M6DtgYrJZsmSi4-36_Up5TbbN_8hosYonLKHSSOSKY845LuLZBCBwStrHQ==")',  # noqa
            ]
        ),
    ),
@@ -90,6 +90,7 @@ __all__ = [
    'get_event_partition_epoch',
    'cleanup_new_process',
    'log_excess_runtime',
+    'unified_job_class_to_event_table_name',
]

@@ -1219,3 +1220,7 @@ def log_excess_runtime(func_logger, cutoff=5.0, debug_cutoff=5.0, msg=None, add_
        return _new_func

    return log_excess_runtime_decorator
+
+
+def unified_job_class_to_event_table_name(job_class):
+    return f'main_{job_class().event_class.__name__.lower()}'
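The new helper leans on AWX's `main_<event class>` table-naming convention. A self-contained sketch with dummy classes (not the real models) showing why instantiating the job class is enough to find its event table:

    class JobEvent:
        pass


    class Job:
        # on the real models, event_class is resolved per unified job type
        event_class = JobEvent


    def unified_job_class_to_event_table_name(job_class):
        return f'main_{job_class().event_class.__name__.lower()}'


    assert unified_job_class_to_event_table_name(Job) == 'main_jobevent'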
@@ -17,7 +17,8 @@ def construct_rsyslog_conf_template(settings=settings):
    port = getattr(settings, 'LOG_AGGREGATOR_PORT', '')
    protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', '')
    timeout = getattr(settings, 'LOG_AGGREGATOR_TCP_TIMEOUT', 5)
-    max_disk_space = getattr(settings, 'LOG_AGGREGATOR_MAX_DISK_USAGE_GB', 1)
+    max_disk_space_main_queue = getattr(settings, 'LOG_AGGREGATOR_MAX_DISK_USAGE_GB', 1)
+    max_disk_space_action_queue = getattr(settings, 'LOG_AGGREGATOR_ACTION_MAX_DISK_USAGE_GB', 1)
    spool_directory = getattr(settings, 'LOG_AGGREGATOR_MAX_DISK_USAGE_PATH', '/var/lib/awx').rstrip('/')
    error_log_file = getattr(settings, 'LOG_AGGREGATOR_RSYSLOGD_ERROR_LOG_FILE', '')
@@ -32,7 +33,7 @@ def construct_rsyslog_conf_template(settings=settings):
        '$WorkDirectory /var/lib/awx/rsyslog',
        f'$MaxMessageSize {max_bytes}',
        '$IncludeConfig /var/lib/awx/rsyslog/conf.d/*.conf',
-        f'main_queue(queue.spoolDirectory="{spool_directory}" queue.maxdiskspace="{max_disk_space}g" queue.type="Disk" queue.filename="awx-external-logger-backlog")',  # noqa
+        f'main_queue(queue.spoolDirectory="{spool_directory}" queue.maxdiskspace="{max_disk_space_main_queue}g" queue.type="Disk" queue.filename="awx-external-logger-backlog")',  # noqa
        'module(load="imuxsock" SysSock.Use="off")',
        'input(type="imuxsock" Socket="' + settings.LOGGING['handlers']['external_logger']['address'] + '" unlink="on" RateLimit.Burst="0")',
        'template(name="awx" type="string" string="%rawmsg-after-pri%")',
@@ -78,6 +79,11 @@ def construct_rsyslog_conf_template(settings=settings):
            'action.resumeRetryCount="-1"',
            'template="awx"',
            f'action.resumeInterval="{timeout}"',
+            f'queue.spoolDirectory="{spool_directory}"',
+            'queue.filename="awx-external-logger-action-queue"',
+            f'queue.maxdiskspace="{max_disk_space_action_queue}g"',
+            'queue.type="LinkedList"',
+            'queue.saveOnShutdown="on"',
        ]
        if error_log_file:
            params.append(f'errorfile="{error_log_file}"')
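Judging by the expected strings in the rsyslog test data above, the params list ends up joined with single spaces into one action(...) stanza. A small sketch of that assembly step, with illustrative values:

    max_disk_space_action_queue = 1  # illustrative; the real value comes from settings
    spool_directory = "/var/lib/awx"

    params = [
        'type="omhttp"',
        'template="awx"',
        f'queue.spoolDirectory="{spool_directory}"',
        'queue.filename="awx-external-logger-action-queue"',
        f'queue.maxdiskspace="{max_disk_space_action_queue}g"',
        'queue.type="LinkedList"',
        'queue.saveOnShutdown="on"',
    ]
    print('action(' + ' '.join(params) + ')')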
@@ -12,7 +12,7 @@ from channels.layers import get_channel_layer
from django.conf import settings
from django.apps import apps

-import asyncpg
+import psycopg

from awx.main.analytics.broadcast_websocket import (
    RelayWebsocketStats,
@@ -209,53 +209,49 @@ class WebSocketRelayManager(object):
        # hostname -> ip
        self.known_hosts: Dict[str, str] = dict()

-    async def on_heartbeet(self, conn, pid, channel, payload):
-        try:
-            if not payload or channel != "web_heartbeet":
-                return
-
-            try:
-                payload = json.loads(payload)
-            except json.JSONDecodeError:
-                logmsg = "Failed to decode message from pg_notify channel `web_heartbeet`"
-                if logger.isEnabledFor(logging.DEBUG):
-                    logmsg = "{} {}".format(logmsg, payload)
-                logger.warning(logmsg)
-                return
-
-            # Skip if the message comes from the same host we are running on
-            # In this case, we'll be sharing a redis, no need to relay.
-            if payload.get("hostname") == self.local_hostname:
-                return
-
-            if payload.get("action") == "online":
-                hostname = payload.get("hostname")
-                ip = payload.get("ip")
-                if ip is None:
-                    # If we don't get an IP, just try the hostname, maybe it resolves
-                    ip = hostname
-                if ip is None:
-                    logger.warning(f"Received invalid online heartbeet, missing hostname and ip: {payload}")
-                    return
-                self.known_hosts[hostname] = ip
-                logger.debug(f"Web host {hostname} ({ip}) online heartbeat received.")
-            elif payload.get("action") == "offline":
-                hostname = payload.get("hostname")
-                ip = payload.get("ip")
-                if ip is None:
-                    # If we don't get an IP, just try the hostname, maybe it resolves
-                    ip = hostname
-                if ip is None:
-                    logger.warning(f"Received invalid offline heartbeet, missing hostname and ip: {payload}")
-                    return
-                self.cleanup_offline_host(ip)
-                logger.debug(f"Web host {hostname} ({ip}) offline heartbeat received.")
-        except Exception as e:
-            # This catch-all is the same as the one above. asyncio will eat the exception
-            # but we want to know about it.
-            logger.exception(f"on_heartbeet exception: {e}")
-
-    def cleanup_offline_host(self, hostname):
+    async def on_ws_heartbeat(self, conn):
+        await conn.execute("LISTEN web_ws_heartbeat")
+        async for notif in conn.notifies():
+            if notif is None:
+                continue
+            try:
+                if not notif.payload or notif.channel != "web_ws_heartbeat":
+                    return
+
+                try:
+                    payload = json.loads(notif.payload)
+                except json.JSONDecodeError:
+                    logmsg = "Failed to decode message from pg_notify channel `web_ws_heartbeat`"
+                    if logger.isEnabledFor(logging.DEBUG):
+                        logmsg = "{} {}".format(logmsg, payload)
+                    logger.warning(logmsg)
+                    return
+
+                # Skip if the message comes from the same host we are running on
+                # In this case, we'll be sharing a redis, no need to relay.
+                if payload.get("hostname") == self.local_hostname:
+                    return
+
+                action = payload.get("action")
+
+                if action in ("online", "offline"):
+                    hostname = payload.get("hostname")
+                    ip = payload.get("ip") or hostname  # try back to hostname if ip isn't supplied
+                    if ip is None:
+                        logger.warning(f"Received invalid {action} ws_heartbeat, missing hostname and ip: {payload}")
+                        return
+                    logger.debug(f"Web host {hostname} ({ip}) {action} heartbeat received.")
+
+                if action == "online":
+                    self.known_hosts[hostname] = ip
+                elif action == "offline":
+                    await self.cleanup_offline_host(hostname)
+            except Exception as e:
+                # This catch-all is the same as the one above. asyncio will eat the exception
+                # but we want to know about it.
+                logger.exception(f"on_ws_heartbeat exception: {e}")
+
+    async def cleanup_offline_host(self, hostname):
        """
        Given a hostname, try to cancel its task/connection and remove it from
        the list of hosts we know about.
@@ -264,6 +260,19 @@ class WebSocketRelayManager(object):
|
||||
"""
|
||||
if hostname in self.relay_connections:
|
||||
self.relay_connections[hostname].cancel()
|
||||
|
||||
# Wait for the task to actually run its cancel/completion logic
|
||||
# otherwise it might get GC'd too early when we del it below.
|
||||
# Being GC'd too early could generate a scary message in logs:
|
||||
# "Task was destroyed but it is pending!"
|
||||
try:
|
||||
await asyncio.wait_for(self.relay_connections[hostname].async_task, timeout=10)
|
||||
except asyncio.TimeoutError:
|
||||
logger.warning(f"Tried to cancel relay connection for {hostname} but it timed out during cleanup.")
|
||||
except asyncio.CancelledError:
|
||||
# Handle the case where the task was already cancelled by the time we got here.
|
||||
pass
|
||||
|
||||
del self.relay_connections[hostname]
|
||||
|
||||
if hostname in self.known_hosts:
|
||||
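The comment block in the hunk above captures an easy-to-miss asyncio detail: `task.cancel()` only requests cancellation, and dropping the last reference before the task actually finishes can produce the "Task was destroyed but it is pending!" warning. A minimal standalone sketch of the same cancel-then-await pattern (the names here are illustrative, not from the AWX source):

import asyncio

async def worker():
    # stand-in for a long-running relay connection loop
    await asyncio.sleep(3600)

async def shutdown(task):
    task.cancel()  # only *requests* cancellation
    try:
        # wait for the task to really finish its cancel/cleanup logic,
        # bounded so a stuck task cannot hang shutdown forever
        await asyncio.wait_for(task, timeout=10)
    except (asyncio.TimeoutError, asyncio.CancelledError):
        pass  # timed out, or the task was already cancelled

async def main():
    task = asyncio.create_task(worker())
    await asyncio.sleep(0)  # let the worker start
    await shutdown(task)

asyncio.run(main())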
@@ -282,16 +291,16 @@ class WebSocketRelayManager(object):

# Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
database_conf = settings.DATABASES['default']
async_conn = await asyncpg.connect(
    database=database_conf['NAME'],
async_conn = await psycopg.AsyncConnection.connect(
    dbname=database_conf['NAME'],
    host=database_conf['HOST'],
    user=database_conf['USER'],
    password=database_conf['PASSWORD'],
    port=database_conf['PORT'],
    # We cannot include these because asyncpg doesn't allow all the options that psycopg does.
    # **database_conf.get("OPTIONS", {}),
    **database_conf.get("OPTIONS", {}),
)
await async_conn.add_listener("web_heartbeet", self.on_heartbeet)
await async_conn.set_autocommit(True)
event_loop.create_task(self.on_ws_heartbeat(async_conn))

# Establishes a websocket connection to /websocket/relay on all API servers
while True:
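Worth noting on the hunk above: moving from asyncpg to psycopg 3 also changes the notification model. asyncpg registers a per-channel callback with `add_listener`, while psycopg exposes notifications as an async iterator on an autocommit connection, which is why the new code flips on autocommit and hands the connection to a long-lived task. A rough sketch of that consumption pattern, with connection details simplified and only the channel name taken from the hunk:

import json
import psycopg

async def consume_ws_heartbeats(conninfo):
    # LISTEN only takes effect outside a transaction, hence autocommit
    conn = await psycopg.AsyncConnection.connect(conninfo, autocommit=True)
    await conn.execute("LISTEN web_ws_heartbeat")
    # blocks until a NOTIFY arrives on the channel
    async for notify in conn.notifies():
        payload = json.loads(notify.payload)
        print(notify.channel, payload.get("hostname"), payload.get("action"))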
@@ -318,13 +327,11 @@ class WebSocketRelayManager(object):

if deleted_remote_hosts:
    logger.info(f"Removing {deleted_remote_hosts} from websocket broadcast list")
    await asyncio.gather(self.cleanup_offline_host(h) for h in deleted_remote_hosts)

if new_remote_hosts:
    logger.info(f"Adding {new_remote_hosts} to websocket broadcast list")

for h in deleted_remote_hosts:
    await self.cleanup_offline_host(h)

for h in new_remote_hosts:
    stats = self.stats_mgr.new_remote_host_stats(h)
    relay_connection = WebsocketRelayConnection(name=self.local_hostname, stats=stats, remote_host=self.known_hosts[h])

@@ -189,11 +189,12 @@
  connection: local
  name: Install content with ansible-galaxy command if necessary
  vars:
    galaxy_task_env: # configure in settings
    additional_collections_env:
      # These environment variables are used for installing collections, in addition to galaxy_task_env
      # setting the collections paths silences warnings
    galaxy_task_env: # configured in settings
    # additional_galaxy_env contains environment variables that are used for installing roles and collections and will take precedence over items in galaxy_task_env
    additional_galaxy_env:
      # These paths control where ansible-galaxy installs collections and roles on the filesystem
      ANSIBLE_COLLECTIONS_PATHS: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections"
      ANSIBLE_ROLES_PATH: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles"
      # Put the local tmp directory in the same volume as the collection destination,
      # otherwise files cannot be moved across volumes and will cause an error
      ANSIBLE_LOCAL_TEMP: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/tmp"
@@ -212,40 +213,50 @@
    - name: End play due to disabled content sync
      ansible.builtin.meta: end_play

    - name: Fetch galaxy roles from requirements.(yml/yaml)
      ansible.builtin.command: >
        ansible-galaxy role install -r {{ item }}
        --roles-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles
        {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
      args:
        chdir: "{{ project_path | quote }}"
      register: galaxy_result
      with_fileglob:
        - "{{ project_path | quote }}/roles/requirements.yaml"
        - "{{ project_path | quote }}/roles/requirements.yml"
      changed_when: "'was installed successfully' in galaxy_result.stdout"
      environment: "{{ galaxy_task_env }}"
      when: roles_enabled | bool
      tags:
        - install_roles
    - block:
        - name: Fetch galaxy roles from roles/requirements.(yml/yaml)
          ansible.builtin.command:
            cmd: "ansible-galaxy role install -r {{ item }} {{ verbosity }}"
          register: galaxy_result
          with_fileglob:
            - "{{ project_path | quote }}/roles/requirements.yaml"
            - "{{ project_path | quote }}/roles/requirements.yml"
          changed_when: "'was installed successfully' in galaxy_result.stdout"
          when: roles_enabled | bool
          tags:
            - install_roles

    - name: Fetch galaxy collections from collections/requirements.(yml/yaml)
      ansible.builtin.command: >
        ansible-galaxy collection install -r {{ item }}
        --collections-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections
        {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
      args:
        chdir: "{{ project_path | quote }}"
      register: galaxy_collection_result
      with_fileglob:
        - "{{ project_path | quote }}/collections/requirements.yaml"
        - "{{ project_path | quote }}/collections/requirements.yml"
        - "{{ project_path | quote }}/requirements.yaml"
        - "{{ project_path | quote }}/requirements.yml"
      changed_when: "'Installing ' in galaxy_collection_result.stdout"
      environment: "{{ additional_collections_env | combine(galaxy_task_env) }}"
      when:
        - "ansible_version.full is version_compare('2.9', '>=')"
        - collections_enabled | bool
      tags:
        - install_collections
        - name: Fetch galaxy collections from collections/requirements.(yml/yaml)
          ansible.builtin.command:
            cmd: "ansible-galaxy collection install -r {{ item }} {{ verbosity }}"
          register: galaxy_collection_result
          with_fileglob:
            - "{{ project_path | quote }}/collections/requirements.yaml"
            - "{{ project_path | quote }}/collections/requirements.yml"
          changed_when: "'Nothing to do.' not in galaxy_collection_result.stdout"
          when:
            - "ansible_version.full is version_compare('2.9', '>=')"
            - collections_enabled | bool
          tags:
            - install_collections

        - name: Fetch galaxy roles and collections from requirements.(yml/yaml)
          ansible.builtin.command:
            cmd: "ansible-galaxy install -r {{ item }} {{ verbosity }}"
          register: galaxy_combined_result
          with_fileglob:
            - "{{ project_path | quote }}/requirements.yaml"
            - "{{ project_path | quote }}/requirements.yml"
          changed_when: "'Nothing to do.' not in galaxy_combined_result.stdout"
          when:
            - "ansible_version.full is version_compare('2.10', '>=')"
            - collections_enabled | bool
            - roles_enabled | bool
          tags:
            - install_collections
            - install_roles

      # We combine our additional_galaxy_env into galaxy_task_env so that our values are preferred over anything a user would set
      environment: "{{ galaxy_task_env | combine(additional_galaxy_env) }}"
      vars:
        verbosity: "{{ (ansible_verbosity) | ternary('-'+'v'*ansible_verbosity, '') }}"
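The precedence comment in the hunk above is worth spelling out: Jinja's `combine` filter lets the right-hand dictionary win, so keys in `additional_galaxy_env` override same-named keys a user put in `galaxy_task_env`. A small Python illustration of the same merge semantics (the values are made up, not the real settings):

galaxy_task_env = {"HTTP_PROXY": "http://proxy:3128", "ANSIBLE_ROLES_PATH": "/user/roles"}
additional_galaxy_env = {"ANSIBLE_ROLES_PATH": "/awx/cache/stage/requirements_roles"}

# "{{ galaxy_task_env | combine(additional_galaxy_env) }}" behaves like a
# right-biased dict union: keys from the second dict take precedence
merged = {**galaxy_task_env, **additional_galaxy_env}
assert merged["ANSIBLE_ROLES_PATH"] == "/awx/cache/stage/requirements_roles"
assert merged["HTTP_PROXY"] == "http://proxy:3128"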
@@ -114,9 +114,6 @@ JOBOUTPUT_ROOT = '/var/lib/awx/job_status/'
# Absolute filesystem path to the directory to store logs
LOG_ROOT = '/var/log/tower/'

# The heartbeat file for the scheduler
SCHEDULE_METADATA_LOCATION = os.path.join(BASE_DIR, '.tower_cycle')

# Django gettext files path: locale/<lang-code>/LC_MESSAGES/django.po, django.mo
LOCALE_PATHS = (os.path.join(BASE_DIR, 'locale'),)

@@ -161,6 +158,11 @@ REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']
# REMOTE_HOST_HEADERS will be trusted unconditionally')
PROXY_IP_ALLOWED_LIST = []

# If we are behind a reverse proxy/load balancer, use this setting to
# allow the scheme://addresses from which Tower should trust CSRF requests
# If this setting is an empty list (the default), we will only trust ourselves
CSRF_TRUSTED_ORIGINS = []

CUSTOM_VENV_PATHS = []

# Warning: this is a placeholder for a database setting
@@ -422,6 +424,7 @@ TACACSPLUS_PORT = 49
TACACSPLUS_SECRET = ''
TACACSPLUS_SESSION_TIMEOUT = 5
TACACSPLUS_AUTH_PROTOCOL = 'ascii'
TACACSPLUS_REM_ADDR = False

# Enable / Disable HTTP Basic Authentication used in the API browser
# Note: Session limits are not enforced when using HTTP Basic Authentication.
@@ -788,24 +791,18 @@ INSIGHTS_AGENT_MIME = 'application/example'
INSIGHTS_SYSTEM_ID_FILE = '/etc/redhat-access-insights/machine-id'
INSIGHTS_CERT_PATH = "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem"

TOWER_SETTINGS_MANIFEST = {}

# Settings related to external logger configuration
LOG_AGGREGATOR_ENABLED = False
LOG_AGGREGATOR_TCP_TIMEOUT = 5
LOG_AGGREGATOR_VERIFY_CERT = True
LOG_AGGREGATOR_LEVEL = 'INFO'
LOG_AGGREGATOR_MAX_DISK_USAGE_GB = 1
LOG_AGGREGATOR_MAX_DISK_USAGE_GB = 1  # Main queue
LOG_AGGREGATOR_ACTION_MAX_DISK_USAGE_GB = 1  # Action queue
LOG_AGGREGATOR_MAX_DISK_USAGE_PATH = '/var/lib/awx'
LOG_AGGREGATOR_RSYSLOGD_DEBUG = False
LOG_AGGREGATOR_RSYSLOGD_ERROR_LOG_FILE = '/var/log/tower/rsyslog.err'
API_400_ERROR_LOG_FORMAT = 'status {status_code} received by user {user_name} attempting to access {url_path} from {remote_addr}'

# The number of retry attempts for websocket session establishment
# If you're encountering issues establishing websockets in a cluster,
# raising this value can help
CHANNEL_LAYER_RECEIVE_MAX_RETRY = 10

ASGI_APPLICATION = "awx.main.routing.application"

CHANNEL_LAYERS = {
@@ -861,17 +858,17 @@ LOGGING = {
    'awx.conf': {'handlers': ['null'], 'level': 'WARNING'},
    'awx.conf.settings': {'handlers': ['null'], 'level': 'WARNING'},
    'awx.main': {'handlers': ['null']},
    'awx.main.commands.run_callback_receiver': {'handlers': ['callback_receiver']},  # level handled by dynamic_level_filter
    'awx.main.commands.run_callback_receiver': {'handlers': ['callback_receiver'], 'level': 'INFO'},  # very noisy debug-level logs
    'awx.main.dispatch': {'handlers': ['dispatcher']},
    'awx.main.consumers': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'INFO'},
    'awx.main.rsyslog_configurer': {'handlers': ['rsyslog_configurer']},
    'awx.main.cache_clear': {'handlers': ['cache_clear']},
    'awx.main.heartbeet': {'handlers': ['heartbeet']},
    'awx.main.ws_heartbeat': {'handlers': ['ws_heartbeat']},
    'awx.main.wsrelay': {'handlers': ['wsrelay']},
    'awx.main.commands.inventory_import': {'handlers': ['inventory_import'], 'propagate': False},
    'awx.main.tasks': {'handlers': ['task_system', 'external_logger'], 'propagate': False},
    'awx.main.analytics': {'handlers': ['task_system', 'external_logger'], 'level': 'INFO', 'propagate': False},
    'awx.main.scheduler': {'handlers': ['task_system', 'external_logger'], 'propagate': False},
    'awx.main.tasks': {'handlers': ['task_system', 'external_logger', 'console'], 'propagate': False},
    'awx.main.analytics': {'handlers': ['task_system', 'external_logger', 'console'], 'level': 'INFO', 'propagate': False},
    'awx.main.scheduler': {'handlers': ['task_system', 'external_logger', 'console'], 'propagate': False},
    'awx.main.access': {'level': 'INFO'},  # very verbose debug-level logs
    'awx.main.signals': {'level': 'INFO'},  # very verbose debug-level logs
    'awx.api.permissions': {'level': 'INFO'},  # very verbose debug-level logs
@@ -900,7 +897,7 @@ handler_config = {
    'job_lifecycle': {'filename': 'job_lifecycle.log', 'formatter': 'job_lifecycle'},
    'rsyslog_configurer': {'filename': 'rsyslog_configurer.log'},
    'cache_clear': {'filename': 'cache_clear.log'},
    'heartbeet': {'filename': 'heartbeet.log'},
    'ws_heartbeat': {'filename': 'ws_heartbeat.log'},
}

# If running on a VM, we log to files. When running in a container, we log to stdout.
@@ -955,7 +952,8 @@ AWX_CLEANUP_PATHS = True
# Allow ansible-runner to store env folder (may contain sensitive information)
AWX_RUNNER_OMIT_ENV_FILES = True

# Allow ansible-runner to save ansible output (may cause performance issues)
# Allow ansible-runner to save ansible output
# (changing to False may cause performance issues)
AWX_RUNNER_SUPPRESS_OUTPUT_FILE = True

# https://github.com/ansible/ansible-runner/pull/1191/files
@@ -966,6 +964,9 @@ AWX_RUNNER_KEEPALIVE_SECONDS = 0
# Delete completed work units in receptor
RECEPTOR_RELEASE_WORK = True

# K8S only. Use receptor_log_level on AWX spec to set this properly
RECEPTOR_LOG_LEVEL = 'info'

MIDDLEWARE = [
    'django_guid.middleware.guid_middleware',
    'awx.main.middleware.SettingsCacheMiddleware',

@@ -29,8 +29,6 @@ SHELL_PLUS_PRINT_SQL = False
# show colored logs in the dev environment
# to disable this, set `COLOR_LOGS = False` in awx/settings/local_settings.py
LOGGING['handlers']['console']['()'] = 'awx.main.utils.handlers.ColorHandler'  # noqa
# task system does not propagate to AWX, so color log these too
LOGGING['handlers']['task_system'] = LOGGING['handlers']['console'].copy()  # noqa
COLOR_LOGS = True

ALLOWED_HOSTS = ['*']
@@ -51,7 +49,7 @@ INSIGHTS_TRACKING_STATE = False

# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed

INSTALLED_APPS += ['rest_framework_swagger', 'debug_toolbar']  # NOQA
INSTALLED_APPS += ['drf_yasg', 'debug_toolbar']  # NOQA

MIDDLEWARE = ['debug_toolbar.middleware.DebugToolbarMiddleware'] + MIDDLEWARE  # NOQA

@@ -61,8 +59,9 @@ DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
INSTALL_UUID = '00000000-0000-0000-0000-000000000000'

BASE_VENV_PATH = "/var/lib/awx/venv/"
AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx")
# Ansible base virtualenv paths and enablement
# only used for deprecated fields and management commands for them
BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")

CLUSTER_HOST_ID = socket.gethostname()

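On the swagger swap above: `rest_framework_swagger` has been unmaintained for some time, and `drf_yasg` generates both the schema and the browsable UI itself. A minimal, hedged sketch of exposing a drf-yasg swagger view in a dev urlconf (AWX's actual routing may differ; treat the names and title here as assumptions):

from django.urls import path
from drf_yasg import openapi
from drf_yasg.views import get_schema_view
from rest_framework import permissions

schema_view = get_schema_view(
    openapi.Info(title="AWX API", default_version="v2"),  # hypothetical metadata
    public=True,
    permission_classes=[permissions.AllowAny],
)

urlpatterns = [
    # interactive swagger UI rendered from the generated schema
    path("swagger/", schema_view.with_ui("swagger", cache_timeout=0)),
]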
@@ -30,15 +30,10 @@ SECRET_KEY = None
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []

# The heartbeat file for the scheduler
SCHEDULE_METADATA_LOCATION = '/var/lib/awx/.tower_cycle'

# Ansible base virtualenv paths and enablement
# only used for deprecated fields and management commands for them
BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")

# Base virtualenv paths and enablement
AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx")

# Very important that this is editable (not read_only) in the API
AWX_ISOLATION_SHOW_PATHS = [
    '/etc/pki/ca-trust:/etc/pki/ca-trust:O',

@@ -224,12 +224,18 @@ class TACACSPlusBackend(object):
            return None
        try:
            # Upstream TACACS+ client does not accept non-string, so convert if needed.
            auth = tacacs_plus.TACACSClient(
            tacacs_client = tacacs_plus.TACACSClient(
                django_settings.TACACSPLUS_HOST,
                django_settings.TACACSPLUS_PORT,
                django_settings.TACACSPLUS_SECRET,
                timeout=django_settings.TACACSPLUS_SESSION_TIMEOUT,
            ).authenticate(username, password, authen_type=tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL])
            )
            auth_kwargs = {'authen_type': tacacs_plus.TAC_PLUS_AUTHEN_TYPES[django_settings.TACACSPLUS_AUTH_PROTOCOL]}
            if django_settings.TACACSPLUS_REM_ADDR:
                client_ip = self._get_client_ip(request)
                if client_ip:
                    auth_kwargs['rem_addr'] = client_ip
            auth = tacacs_client.authenticate(username, password, **auth_kwargs)
        except Exception as e:
            logger.exception("TACACS+ Authentication Error: %s" % str(e))
            return None
@@ -244,6 +250,17 @@ class TACACSPlusBackend(object):
        except User.DoesNotExist:
            return None

    def _get_client_ip(self, request):
        if not request or not hasattr(request, 'META'):
            return None

        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            ip = x_forwarded_for.split(',')[0]
        else:
            ip = request.META.get('REMOTE_ADDR')
        return ip


class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
    """

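A quick worked example of the `_get_client_ip` helper added above: `X-Forwarded-For` can carry a comma-separated chain of addresses, and the helper trusts the first (left-most, client-side) entry, falling back to `REMOTE_ADDR` when the header is absent. The values mirror the parametrized test further down and are illustrative only:

meta = {"HTTP_X_FORWARDED_FOR": "12.34.56.78, 23.45.67.89"}
ip = meta["HTTP_X_FORWARDED_FOR"].split(",")[0]  # -> "12.34.56.78"

meta = {"REMOTE_ADDR": "12.34.56.78"}
ip = meta.get("HTTP_X_FORWARDED_FOR") or meta.get("REMOTE_ADDR")  # -> "12.34.56.78"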
@@ -554,6 +554,16 @@ register(
    category_slug='tacacsplus',
)

register(
    'TACACSPLUS_REM_ADDR',
    field_class=fields.BooleanField,
    default=True,
    label=_('TACACS+ client address sending enabled'),
    help_text=_('Enable the client address sending by TACACS+ client.'),
    category=_('TACACS+'),
    category_slug='tacacsplus',
)

###############################################################################
# GOOGLE OAUTH2 AUTHENTICATION SETTINGS
###############################################################################

@@ -1,4 +1,5 @@
from unittest import mock
import pytest


def test_empty_host_fails_auth(tacacsplus_backend):
@@ -47,3 +48,69 @@ def test_client_return_valid_passes_auth(tacacsplus_backend):
        settings.TACACSPLUS_AUTH_PROTOCOL = 'ascii'
        ret_user = tacacsplus_backend.authenticate(None, u"user", u"pass")
    assert ret_user == user


@pytest.mark.parametrize(
    "client_ip_header,client_ip_header_value,expected_client_ip",
    [('HTTP_X_FORWARDED_FOR', '12.34.56.78, 23.45.67.89', '12.34.56.78'), ('REMOTE_ADDR', '12.34.56.78', '12.34.56.78')],
)
def test_remote_addr_is_passed_to_client_if_available_and_setting_enabled(tacacsplus_backend, client_ip_header, client_ip_header_value, expected_client_ip):
    auth = mock.MagicMock()
    auth.valid = True
    client = mock.MagicMock()
    client.authenticate.return_value = auth
    user = mock.MagicMock()
    user.has_usable_password = mock.MagicMock(return_value=False)
    request = mock.MagicMock()
    request.META = {
        client_ip_header: client_ip_header_value,
    }
    with mock.patch('awx.sso.backends.django_settings') as settings, mock.patch('tacacs_plus.TACACSClient', return_value=client), mock.patch(
        'awx.sso.backends._get_or_set_enterprise_user', return_value=user
    ):
        settings.TACACSPLUS_HOST = 'localhost'
        settings.TACACSPLUS_AUTH_PROTOCOL = 'ascii'
        settings.TACACSPLUS_REM_ADDR = True
        tacacsplus_backend.authenticate(request, u"user", u"pass")

    client.authenticate.assert_called_once_with('user', 'pass', authen_type=1, rem_addr=expected_client_ip)


def test_remote_addr_is_completely_ignored_in_client_call_if_setting_is_disabled(tacacsplus_backend):
    auth = mock.MagicMock()
    auth.valid = True
    client = mock.MagicMock()
    client.authenticate.return_value = auth
    user = mock.MagicMock()
    user.has_usable_password = mock.MagicMock(return_value=False)
    request = mock.MagicMock()
    request.META = {}
    with mock.patch('awx.sso.backends.django_settings') as settings, mock.patch('tacacs_plus.TACACSClient', return_value=client), mock.patch(
        'awx.sso.backends._get_or_set_enterprise_user', return_value=user
    ):
        settings.TACACSPLUS_HOST = 'localhost'
        settings.TACACSPLUS_AUTH_PROTOCOL = 'ascii'
        settings.TACACSPLUS_REM_ADDR = False
        tacacsplus_backend.authenticate(request, u"user", u"pass")

    client.authenticate.assert_called_once_with('user', 'pass', authen_type=1)


def test_remote_addr_is_completely_ignored_in_client_call_if_unavailable_and_setting_enabled(tacacsplus_backend):
    auth = mock.MagicMock()
    auth.valid = True
    client = mock.MagicMock()
    client.authenticate.return_value = auth
    user = mock.MagicMock()
    user.has_usable_password = mock.MagicMock(return_value=False)
    request = mock.MagicMock()
    request.META = {}
    with mock.patch('awx.sso.backends.django_settings') as settings, mock.patch('tacacs_plus.TACACSClient', return_value=client), mock.patch(
        'awx.sso.backends._get_or_set_enterprise_user', return_value=user
    ):
        settings.TACACSPLUS_HOST = 'localhost'
        settings.TACACSPLUS_AUTH_PROTOCOL = 'ascii'
        settings.TACACSPLUS_REM_ADDR = True
        tacacsplus_backend.authenticate(request, u"user", u"pass")

    client.authenticate.assert_called_once_with('user', 'pass', authen_type=1)

BIN  awx/static/RedHatDisplay-Medium.ttf  (new file)
Binary file not shown.
BIN  awx/static/RedHatDisplay-Regular.ttf  (new file)
Binary file not shown.
383  awx/static/awx-spud-reading.svg  (new file, SVG, 27 KiB)
@@ -0,0 +1,383 @@
[383 lines of SVG markup omitted: an Adobe Illustrator-generated vector illustration of the AWX "spud" mascot reading, referenced by the custom error pages below.]
7  awx/static/custom_404.html  (new file)
@@ -0,0 +1,7 @@
<html>
  <head>
    <title>Redirecting</title>
    <meta http-equiv="refresh" content="0;URL='/#'"/>
  </head>
  <body>Redirecting</body>
</html>
21  awx/static/custom_502.html  (new file)
@@ -0,0 +1,21 @@
<!DOCTYPE html>
<html>
  <head>
    <title>On Break...</title>
    <meta http-equiv="refresh" content="2">
    <link rel="stylesheet" href="/static/custom_error.css">
  </head>
  <body>
    <div class="container">
      <div class="upper_div">
        <img class="main_image" src="/static/awx-spud-reading.svg"/>
        <span class="error_number">502</span>
      </div>
      <div class="message_div">
        <div class="m1">HTTP Response: 502</div>
        <div class="m2">The spud is taking a much needed break...</div>
        <div class="m3">Please check back later.</div>
      </div>
    </div>
  </body>
</html>
21  awx/static/custom_504.html  (new file)
@@ -0,0 +1,21 @@
<!DOCTYPE html>
<html>
  <head>
    <title>On Break...</title>
    <meta http-equiv="refresh" content="2">
    <link rel="stylesheet" href="/static/custom_error.css">
  </head>
  <body>
    <div class="container">
      <div class="upper_div">
        <img class="main_image" src="/static/awx-spud-reading.svg"/>
        <span class="error_number">504</span>
      </div>
      <div class="message_div">
        <div class="m1">HTTP Response: 504</div>
        <div class="m2">The spud is taking a much needed break...</div>
        <div class="m3">Please check back later.</div>
      </div>
    </div>
  </body>
</html>
82  awx/static/custom_error.css  (new file)
@@ -0,0 +1,82 @@
@font-face {
  font-family: redhat-display-medium;
  src: url('./RedHatDisplay-Medium.ttf');
}

@font-face {
  font-family: redhat-display-regular;
  src: url('./RedHatDisplay-Regular.ttf');
}

html, body {
  height: 100%;
  width: 100%;
}

body {
  background-color: #F0F0F0;
}

.container {
  position: absolute;
  top: 24px;
  right: 24px;
  bottom: 24px;
  left: 24px;
}

.upper_div {
  background-color: #F8EBA7;
  justify-content: center;
  align-items: center;
  text-align: center;
  height: 50%;
  align-items: flex-end;
  display: flex;
  min-height: 450px;
  width: 100%;
}

.main_image {
  height: 450px;
  width: auto;
}

.error_number {
  position: absolute;
  top: 23px;
  right: 90px;
  font-size: 200px;
  color: #FDBA48;
  font-family: Impact, Haettenschweiler, "Franklin Gothic Bold", Charcoal, "Helvetica Inserat", "Bitstream Vera Sans Bold", "Arial Black", "sans serif";
}

.message_div {
  background-color: #FFFFFF;
  border: 1px solid #D2D2D2;
  text-align: center;
  height: 50%;
  vertical-align: top;
}

.m1,.m2,.m3 {
  color: #151515;
  width: 100%;
  font-family: redhat-display-medium;
}

.m1 {
  font-size: 24px;
  padding-top: 24px;
}

.m2 {
  font-size: 20px;
  padding-top: 20px;
}

.m3 {
  font-size: 16px;
  padding-top: 20px;
  font-family: redhat-display-regular;
}
@@ -44,6 +44,7 @@
{
  "markupOnly": true,
  "ignoreAttribute": [
    "data-testid",
    "dateFieldName",
    "timeFieldName",
    "to",
@@ -99,7 +100,8 @@
    "fieldName",
    "splitButtonVariant",
    "pageKey",
    "textId"
    "textId",
    "rel"
  ],
  "ignore": [
    "Ansible",

@@ -159,7 +159,7 @@ Inside these folders, the internal structure is:
- **/hooks** - Custom react [hooks](https://reactjs.org/docs/hooks-custom.html)
- **/locales** - [Internationalization](#internationalization) config and source files.
- **/screens** - Based on the various routes of awx.
- **/shared** - Components that are meant to be used specifically by a particular route, but might be sharable across pages of that route. For example, a form component which is used on both add and edit screens.
- **/shared** - Components that are meant to be used specifically by a particular route, but might be shareable across pages of that route. For example, a form component which is used on both add and edit screens.
- **/util** - Stateless helper functions that aren't tied to react.

### Patterns

@@ -280,7 +280,7 @@ All of these functions act on the react-router history using the `pushHistorySta

**a note on sort_columns and search_columns**

We have split out column configuration into separate search and sort column array props--these are passed to the search and sort columns. Both accept an isDefault prop for one of the items in the array to be the default option selected when going to the page. Sort column items can pass an isNumeric boolean in order to chnage the iconography of the sort UI element. Search column items can pass type and if applicable choices, in order to configure the right-hand side of the search bar.
We have split out column configuration into separate search and sort column array props--these are passed to the search and sort columns. Both accept an isDefault prop for one of the items in the array to be the default option selected when going to the page. Sort column items can pass an isNumeric boolean in order to change the iconography of the sort UI element. Search column items can pass type and if applicable choices, in order to configure the right-hand side of the search bar.

### FilterTags component

@@ -301,7 +301,7 @@ For the UI url params, we want to only encode those params that aren't defaults,

#### mergeParams vs. replaceParams

**mergeParams** is used to suppport putting values with the same key
**mergeParams** is used to support putting values with the same key

From a UX perspective, we wanted to be able to support searching on the same key multiple times (i.e. searching for things like `?foo=bar&foo=baz`). We do this by creating an array of all values. i.e.:

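As a rough illustration of the distinction described above (not the actual qs-utils implementation): merging keeps every value seen for a key, while replacing overwrites it.

def merge_params(params, new):
    # keep every value: ?foo=bar merged with ?foo=baz -> ?foo=bar&foo=baz
    out = dict(params)
    for key, value in new.items():
        if key in out:
            existing = out[key] if isinstance(out[key], list) else [out[key]]
            out[key] = existing + [value]
        else:
            out[key] = value
    return out

def replace_params(params, new):
    # overwrite: ?foo=bar replaced with ?foo=baz -> ?foo=baz
    return {**params, **new}

assert merge_params({"foo": "bar"}, {"foo": "baz"}) == {"foo": ["bar", "baz"]}
assert replace_params({"foo": "bar"}, {"foo": "baz"}) == {"foo": "baz"}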
140
awx/ui/package-lock.json
generated
140
awx/ui/package-lock.json
generated
@@ -7,10 +7,10 @@
   "name": "ui",
   "dependencies": {
     "@lingui/react": "3.14.0",
-    "@patternfly/patternfly": "4.217.1",
-    "@patternfly/react-core": "^4.264.0",
-    "@patternfly/react-icons": "4.92.10",
-    "@patternfly/react-table": "4.108.0",
+    "@patternfly/patternfly": "4.224.2",
+    "@patternfly/react-core": "4.276.8",
+    "@patternfly/react-icons": "4.93.6",
+    "@patternfly/react-table": "4.113.0",
     "ace-builds": "^1.10.1",
     "ansi-to-html": "0.7.2",
     "axios": "0.27.2",
@@ -44,6 +44,7 @@
     "@lingui/loader": "3.15.0",
     "@lingui/macro": "^3.7.1",
     "@nteract/mockument": "^1.0.4",
+    "@testing-library/dom": "^8.18.1",
     "@testing-library/jest-dom": "^5.16.2",
     "@testing-library/react": "^12.1.5",
     "@testing-library/user-event": "14.4.3",
@@ -3727,18 +3728,18 @@
       "dev": true
     },
     "node_modules/@patternfly/patternfly": {
-      "version": "4.217.1",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
-      "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
+      "version": "4.224.2",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.224.2.tgz",
+      "integrity": "sha512-HGNV26uyHSIECuhjPg/WGn0mXbAotcs6ODfhAOkfYjIgGylddgiwElxUe1rpEHV5mQJJ2rMn4OdeJIIpzRX61g=="
     },
     "node_modules/@patternfly/react-core": {
-      "version": "4.264.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.264.0.tgz",
-      "integrity": "sha512-tK0BMWxw8nhukev40HZ6q6d02pDnjX7oyA91vHa18aakJUKBWMaerqpG4NZVMoh0tPKX3aLNj+zyCwDALFAZZw==",
+      "version": "4.276.8",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.276.8.tgz",
+      "integrity": "sha512-dn322rEzBeiVztZEuCZMUUittNb8l1hk30h9ZN31FLZLLVtXGlThFNV9ieqOJYA9zrYxYZrHMkTnOxSWVacMZg==",
       "dependencies": {
-        "@patternfly/react-icons": "^4.93.0",
-        "@patternfly/react-styles": "^4.92.0",
-        "@patternfly/react-tokens": "^4.94.0",
+        "@patternfly/react-icons": "^4.93.6",
+        "@patternfly/react-styles": "^4.92.6",
+        "@patternfly/react-tokens": "^4.94.6",
         "focus-trap": "6.9.2",
         "react-dropzone": "9.0.0",
         "tippy.js": "5.1.2",
@@ -3749,49 +3750,40 @@
         "react-dom": "^16.8 || ^17 || ^18"
       }
     },
-    "node_modules/@patternfly/react-core/node_modules/@patternfly/react-icons": {
-      "version": "4.93.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.93.0.tgz",
-      "integrity": "sha512-OH0vORVioL+HLWMEog8/3u8jsiMCeJ0pFpvRKRhy5Uk4CdAe40k1SOBvXJP6opr+O8TLbz0q3bm8Jsh/bPaCuQ==",
-      "peerDependencies": {
-        "react": "^16.8 || ^17 || ^18",
-        "react-dom": "^16.8 || ^17 || ^18"
-      }
-    },
     "node_modules/@patternfly/react-core/node_modules/tslib": {
       "version": "2.3.1",
       "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
       "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
     },
     "node_modules/@patternfly/react-icons": {
-      "version": "4.92.10",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
-      "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
+      "version": "4.93.6",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.93.6.tgz",
+      "integrity": "sha512-ZrXegc/81oiuTIeWvoHb3nG/eZODbB4rYmekBEsrbiysyO7m/sUFoi/RLvgFINrRoh6YCJqL5fj06Jg6d7jX1g==",
       "peerDependencies": {
         "react": "^16.8 || ^17 || ^18",
         "react-dom": "^16.8 || ^17 || ^18"
       }
     },
     "node_modules/@patternfly/react-styles": {
-      "version": "4.92.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.92.0.tgz",
-      "integrity": "sha512-B/f6iyu8UEN1+wRxdC4sLIhvJeyL8SqInDXZmwOIqK8uPJ8Lze7qrbVhkkVzbMF37/oDPVa6dZH8qZFq062LEA=="
+      "version": "4.92.6",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.92.6.tgz",
+      "integrity": "sha512-b8uQdEReMyeoMzjpMri845QxqtupY/tIFFYfVeKoB2neno8gkcW1RvDdDe62LF88q45OktCwAe/8A99ker10Iw=="
     },
     "node_modules/@patternfly/react-table": {
-      "version": "4.108.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.108.0.tgz",
-      "integrity": "sha512-EUvd3rlkE1UXobAm7L6JHgNE3TW8IYTaVwwH/px4Mkn5mBayDO6f+w6QM3OeoDQVZcXK6IYFe7QQaYd/vWIJCQ==",
+      "version": "4.113.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.113.0.tgz",
+      "integrity": "sha512-qxa3NWCdYasqQQL1rqEd8DyNa8oWr6HNveNW5YJRakE7imWZhUPG2Nd6Op60+KYX8kbCSl7gwSmgAZAYMBMZkQ==",
       "dependencies": {
-        "@patternfly/react-core": "^4.239.0",
-        "@patternfly/react-icons": "^4.90.0",
-        "@patternfly/react-styles": "^4.89.0",
-        "@patternfly/react-tokens": "^4.91.0",
+        "@patternfly/react-core": "^4.276.8",
+        "@patternfly/react-icons": "^4.93.6",
+        "@patternfly/react-styles": "^4.92.6",
+        "@patternfly/react-tokens": "^4.94.6",
         "lodash": "^4.17.19",
         "tslib": "^2.0.0"
       },
       "peerDependencies": {
-        "react": "^16.8.0 || ^17.0.0",
-        "react-dom": "^16.8.0 || ^17.0.0"
+        "react": "^16.8 || ^17 || ^18",
+        "react-dom": "^16.8 || ^17 || ^18"
       }
     },
     "node_modules/@patternfly/react-table/node_modules/tslib": {
@@ -3800,9 +3792,9 @@
       "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
     },
     "node_modules/@patternfly/react-tokens": {
-      "version": "4.94.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.94.0.tgz",
-      "integrity": "sha512-fYXxUJZnzpn89K2zzHF0cSncZZVGKrohdb5f5T1wzxwU2NZPVGpvr88xhm+V2Y/fSrrTPwXcP3IIdtNOOtJdZw=="
+      "version": "4.94.6",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.94.6.tgz",
+      "integrity": "sha512-tm7C6nat+uKMr1hrapis7hS3rN9cr41tpcCKhx6cod6FLU8KwF2Yt5KUxakhIOCEcE/M/EhXhAw/qejp8w0r7Q=="
     },
     "node_modules/@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -4232,9 +4224,9 @@
       }
     },
     "node_modules/@testing-library/dom": {
-      "version": "8.11.3",
-      "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.11.3.tgz",
-      "integrity": "sha512-9LId28I+lx70wUiZjLvi1DB/WT2zGOxUh46glrSNMaWVx849kKAluezVzZrXJfTKKoQTmEOutLes/bHg4Bj3aA==",
+      "version": "8.18.1",
+      "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.18.1.tgz",
+      "integrity": "sha512-oEvsm2B/WtcHKE+IcEeeCqNU/ltFGaVyGbpcm4g/2ytuT49jrlH9x5qRKL/H3A6yfM4YAbSbC0ceT5+9CEXnLg==",
       "dev": true,
       "dependencies": {
         "@babel/code-frame": "^7.10.4",
@@ -25162,30 +25154,24 @@
       "dev": true
     },
     "@patternfly/patternfly": {
-      "version": "4.217.1",
-      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.217.1.tgz",
-      "integrity": "sha512-uN7JgfQsyR16YHkuGRCTIcBcnyKIqKjGkB2SGk9x1XXH3yYGenL83kpAavX9Xtozqp17KppOlybJuzcKvZMrgw=="
+      "version": "4.224.2",
+      "resolved": "https://registry.npmjs.org/@patternfly/patternfly/-/patternfly-4.224.2.tgz",
+      "integrity": "sha512-HGNV26uyHSIECuhjPg/WGn0mXbAotcs6ODfhAOkfYjIgGylddgiwElxUe1rpEHV5mQJJ2rMn4OdeJIIpzRX61g=="
     },
     "@patternfly/react-core": {
-      "version": "4.264.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.264.0.tgz",
-      "integrity": "sha512-tK0BMWxw8nhukev40HZ6q6d02pDnjX7oyA91vHa18aakJUKBWMaerqpG4NZVMoh0tPKX3aLNj+zyCwDALFAZZw==",
+      "version": "4.276.8",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-core/-/react-core-4.276.8.tgz",
+      "integrity": "sha512-dn322rEzBeiVztZEuCZMUUittNb8l1hk30h9ZN31FLZLLVtXGlThFNV9ieqOJYA9zrYxYZrHMkTnOxSWVacMZg==",
       "requires": {
-        "@patternfly/react-icons": "^4.93.0",
-        "@patternfly/react-styles": "^4.92.0",
-        "@patternfly/react-tokens": "^4.94.0",
+        "@patternfly/react-icons": "^4.93.6",
+        "@patternfly/react-styles": "^4.92.6",
+        "@patternfly/react-tokens": "^4.94.6",
         "focus-trap": "6.9.2",
        "react-dropzone": "9.0.0",
        "tippy.js": "5.1.2",
        "tslib": "^2.0.0"
      },
      "dependencies": {
-        "@patternfly/react-icons": {
-          "version": "4.93.0",
-          "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.93.0.tgz",
-          "integrity": "sha512-OH0vORVioL+HLWMEog8/3u8jsiMCeJ0pFpvRKRhy5Uk4CdAe40k1SOBvXJP6opr+O8TLbz0q3bm8Jsh/bPaCuQ==",
-          "requires": {}
-        },
        "tslib": {
          "version": "2.3.1",
          "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz",
@@ -25194,25 +25180,25 @@
       }
     },
     "@patternfly/react-icons": {
-      "version": "4.92.10",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.92.10.tgz",
-      "integrity": "sha512-vwCy7b+OyyuvLDSLqLUG2DkJZgMDogjld8tJTdAaG8HiEhC1sJPZac+5wD7AuS3ym/sQolS4vYtNiVDnMEORxA==",
+      "version": "4.93.6",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-icons/-/react-icons-4.93.6.tgz",
+      "integrity": "sha512-ZrXegc/81oiuTIeWvoHb3nG/eZODbB4rYmekBEsrbiysyO7m/sUFoi/RLvgFINrRoh6YCJqL5fj06Jg6d7jX1g==",
       "requires": {}
     },
     "@patternfly/react-styles": {
-      "version": "4.92.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.92.0.tgz",
-      "integrity": "sha512-B/f6iyu8UEN1+wRxdC4sLIhvJeyL8SqInDXZmwOIqK8uPJ8Lze7qrbVhkkVzbMF37/oDPVa6dZH8qZFq062LEA=="
+      "version": "4.92.6",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-styles/-/react-styles-4.92.6.tgz",
+      "integrity": "sha512-b8uQdEReMyeoMzjpMri845QxqtupY/tIFFYfVeKoB2neno8gkcW1RvDdDe62LF88q45OktCwAe/8A99ker10Iw=="
     },
     "@patternfly/react-table": {
-      "version": "4.108.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.108.0.tgz",
-      "integrity": "sha512-EUvd3rlkE1UXobAm7L6JHgNE3TW8IYTaVwwH/px4Mkn5mBayDO6f+w6QM3OeoDQVZcXK6IYFe7QQaYd/vWIJCQ==",
+      "version": "4.113.0",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-table/-/react-table-4.113.0.tgz",
+      "integrity": "sha512-qxa3NWCdYasqQQL1rqEd8DyNa8oWr6HNveNW5YJRakE7imWZhUPG2Nd6Op60+KYX8kbCSl7gwSmgAZAYMBMZkQ==",
       "requires": {
-        "@patternfly/react-core": "^4.239.0",
-        "@patternfly/react-icons": "^4.90.0",
-        "@patternfly/react-styles": "^4.89.0",
-        "@patternfly/react-tokens": "^4.91.0",
+        "@patternfly/react-core": "^4.276.8",
+        "@patternfly/react-icons": "^4.93.6",
+        "@patternfly/react-styles": "^4.92.6",
+        "@patternfly/react-tokens": "^4.94.6",
         "lodash": "^4.17.19",
         "tslib": "^2.0.0"
       },
@@ -25225,9 +25211,9 @@
       }
     },
     "@patternfly/react-tokens": {
-      "version": "4.94.0",
-      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.94.0.tgz",
-      "integrity": "sha512-fYXxUJZnzpn89K2zzHF0cSncZZVGKrohdb5f5T1wzxwU2NZPVGpvr88xhm+V2Y/fSrrTPwXcP3IIdtNOOtJdZw=="
+      "version": "4.94.6",
+      "resolved": "https://registry.npmjs.org/@patternfly/react-tokens/-/react-tokens-4.94.6.tgz",
+      "integrity": "sha512-tm7C6nat+uKMr1hrapis7hS3rN9cr41tpcCKhx6cod6FLU8KwF2Yt5KUxakhIOCEcE/M/EhXhAw/qejp8w0r7Q=="
     },
     "@pmmmwh/react-refresh-webpack-plugin": {
       "version": "0.5.4",
@@ -25528,9 +25514,9 @@
       }
     },
     "@testing-library/dom": {
-      "version": "8.11.3",
-      "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.11.3.tgz",
-      "integrity": "sha512-9LId28I+lx70wUiZjLvi1DB/WT2zGOxUh46glrSNMaWVx849kKAluezVzZrXJfTKKoQTmEOutLes/bHg4Bj3aA==",
+      "version": "8.18.1",
+      "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-8.18.1.tgz",
+      "integrity": "sha512-oEvsm2B/WtcHKE+IcEeeCqNU/ltFGaVyGbpcm4g/2ytuT49jrlH9x5qRKL/H3A6yfM4YAbSbC0ceT5+9CEXnLg==",
       "dev": true,
       "requires": {
         "@babel/code-frame": "^7.10.4",
@@ -7,10 +7,10 @@
   },
   "dependencies": {
     "@lingui/react": "3.14.0",
-    "@patternfly/patternfly": "4.217.1",
-    "@patternfly/react-core": "^4.264.0",
-    "@patternfly/react-icons": "4.92.10",
-    "@patternfly/react-table": "4.108.0",
+    "@patternfly/patternfly": "4.224.2",
+    "@patternfly/react-core": "4.276.8",
+    "@patternfly/react-icons": "4.93.6",
+    "@patternfly/react-table": "4.113.0",
     "ace-builds": "^1.10.1",
     "ansi-to-html": "0.7.2",
     "axios": "0.27.2",
@@ -44,6 +44,7 @@
     "@lingui/loader": "3.15.0",
     "@lingui/macro": "^3.7.1",
     "@nteract/mockument": "^1.0.4",
+    "@testing-library/dom": "^8.18.1",
     "@testing-library/jest-dom": "^5.16.2",
     "@testing-library/react": "^12.1.5",
     "@testing-library/user-event": "14.4.3",
@@ -3,7 +3,7 @@ import Base from '../Base';
 class JobEvents extends Base {
   constructor(http) {
     super(http);
-    this.baseUrl = '/api/v2/job_events/';
+    this.baseUrl = 'api/v2/job_events/';
   }

   readChildren(id, params) {
@@ -3,7 +3,7 @@ import Base from '../Base';
 class Mesh extends Base {
   constructor(http) {
     super(http);
-    this.baseUrl = '/api/v2/mesh_visualizer/';
+    this.baseUrl = 'api/v2/mesh_visualizer/';
   }
 }
 export default Mesh;
@@ -4,7 +4,7 @@ class Root extends Base {
   constructor(http) {
     super(http);
     this.baseUrl = 'api/';
-    this.redirectURL = '/api/v2/config/';
+    this.redirectURL = 'api/v2/config/';
   }

   async login(username, password, redirect = this.redirectURL) {
@@ -35,7 +35,7 @@ describe('RootAPI', () => {

     expect(mockHttp.post).toHaveBeenCalledTimes(2);
     expect(mockHttp.post.mock.calls[0]).toContainEqual(
-      'username=foo&password=bar&next=%2Fapi%2Fv2%2Fconfig%2F'
+      'username=foo&password=bar&next=api%2Fv2%2Fconfig%2F'
     );
     expect(mockHttp.post.mock.calls[1]).toContainEqual(
       'username=foo&password=bar&next=baz'
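The three hunks above drop the leading slash from hard-coded API paths, and the login test's expected `next` parameter changes to match; presumably this lets requests resolve against wherever the UI is mounted rather than the server root, e.g. when the app is served under a URL prefix. A minimal sketch of the resolution difference using standard WHATWG `URL` semantics (the host and prefix are illustrative):

// Absolute path: the mount prefix is discarded.
new URL('/api/v2/mesh_visualizer/', 'https://awx.example.com/prefix/').href;
// => 'https://awx.example.com/api/v2/mesh_visualizer/'

// Relative path: the mount prefix is preserved.
new URL('api/v2/mesh_visualizer/', 'https://awx.example.com/prefix/').href;
// => 'https://awx.example.com/prefix/api/v2/mesh_visualizer/'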
@@ -50,7 +50,7 @@ const userSortColumns = [
 const teamSearchColumns = [
   {
     name: t`Name`,
-    key: 'name',
+    key: 'name__icontains',
     isDefault: true,
   },
   {
@@ -112,6 +112,7 @@ function PageHeaderToolbar({
       target="_blank"
       href={`${getDocsBaseUrl(config)}/html/userguide/index.html`}
       ouiaId="help-dropdown-item"
+      rel="noopener noreferrer"
     >
       {t`Help`}
     </DropdownItem>,
@@ -27,6 +27,7 @@ function AssociateModal({
   isModalOpen = false,
   displayKey = 'name',
   ouiaId,
+  modalNote,
 }) {
   const history = useHistory();
   const { selected, handleSelect } = useSelected([]);
@@ -120,6 +121,7 @@ function AssociateModal({
         </Button>,
       ]}
     >
+      {modalNote}
       <OptionsList
         displayKey={displayKey}
         contentError={contentError}
@@ -1,5 +1,13 @@
 import React, { useState, useEffect, useContext } from 'react';
-import { arrayOf, func, shape, string, oneOfType, number } from 'prop-types';
+import {
+  arrayOf,
+  func,
+  shape,
+  string,
+  oneOfType,
+  number,
+  node,
+} from 'prop-types';

 import { t } from '@lingui/macro';
 import { Button, Tooltip, DropdownItem } from '@patternfly/react-core';
@@ -180,7 +188,7 @@ DisassociateButton.propTypes = {
       })
     ),
   ]),
-  modalNote: string,
+  modalNote: node,
   modalTitle: string,
   onDisassociate: func.isRequired,
 };
@@ -94,7 +94,7 @@ export default function FrequencyDetails({
         value={getRunEveryLabel()}
         dataCy={`${prefix}-run-every`}
       />
-      {type === 'week' ? (
+      {type === 'week' && options.daysOfWeek ? (
         <Detail
           label={t`On days`}
           value={options.daysOfWeek
@@ -24,10 +24,10 @@ function DateTimePicker({ dateFieldName, timeFieldName, label }) {
     validate: combine([required(null), validateTime()]),
   });

-  const onDateChange = (inputDate, newDate) => {
+  const onDateChange = (_, dateString, date) => {
     dateHelpers.setTouched();
-    if (isValidDate(newDate) && inputDate === yyyyMMddFormat(newDate)) {
-      dateHelpers.setValue(inputDate);
+    if (isValidDate(date) && dateString === yyyyMMddFormat(date)) {
+      dateHelpers.setValue(dateString);
     }
   };

@@ -62,7 +62,7 @@ function DateTimePicker({ dateFieldName, timeFieldName, label }) {
         }
         time={timeField.value}
         {...timeField}
-        onChange={(time) => timeHelpers.setValue(time)}
+        onChange={(_, time) => timeHelpers.setValue(time)}
       />
     </DateTimeGroup>
   </FormGroup>
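The handler rewrites above track a callback change that arrives with the `@patternfly/react-core` bump earlier in this diff: judging by these hunks and the test updates that follow, `DatePicker` and `TimePicker` now pass the triggering event as the first `onChange` argument. A minimal standalone sketch of the new shape (local state only; importing `isValidDate` from `@patternfly/react-core` is an assumption, since this file's import lines aren't shown):

import React, { useState } from 'react';
import { DatePicker, TimePicker, isValidDate } from '@patternfly/react-core';

// Both handlers skip the leading event argument and keep only the value.
export default function PickerExample() {
  const [date, setDate] = useState('');
  const [time, setTime] = useState('');
  return (
    <>
      <DatePicker
        value={date}
        onChange={(_event, dateString, newDate) => {
          if (isValidDate(newDate)) setDate(dateString);
        }}
      />
      <TimePicker time={time} onChange={(_event, newTime) => setTime(newTime)} />
    </>
  );
}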
@@ -43,10 +43,11 @@ describe('<DateTimePicker/>', () => {

     await act(async () => {
       wrapper.find('DatePicker').prop('onChange')(
+        null,
         '2021-05-29',
         new Date('Sat May 29 2021 00:00:00 GMT-0400 (Eastern Daylight Time)')
       );
-      wrapper.find('TimePicker').prop('onChange')('7:15 PM');
+      wrapper.find('TimePicker').prop('onChange')(null, '7:15 PM');
     });
     wrapper.update();
     expect(wrapper.find('DatePicker').prop('value')).toBe('2021-05-29');
@@ -885,6 +885,7 @@ describe('<ScheduleForm />', () => {
     ).toBe(true);
     await act(async () => {
       wrapper.find('DatePicker[aria-label="End date"]').prop('onChange')(
+        null,
         '2020-03-14',
         new Date('2020-03-14')
       );
@@ -905,6 +906,7 @@ describe('<ScheduleForm />', () => {
     const laterTime = DateTime.now().plus({ hours: 1 }).toFormat('h:mm a');
     await act(async () => {
       wrapper.find('DatePicker[aria-label="End date"]').prop('onChange')(
+        null,
         today,
         new Date(today)
       );
@@ -919,6 +921,7 @@ describe('<ScheduleForm />', () => {
     );
     await act(async () => {
       wrapper.find('TimePicker[aria-label="End time"]').prop('onChange')(
+        null,
         laterTime
       );
     });
@@ -322,6 +322,7 @@ function AdvancedSearch({
           variant="plain"
           target="_blank"
           href={`${getDocsBaseUrl(config)}/html/userguide/search_sort.html`}
+          rel="noopener noreferrer"
         >
           <QuestionCircleIcon />
         </Button>
@@ -1,7 +1,7 @@
 import React, { useCallback, useEffect, useState } from 'react';

 import { useParams, useHistory } from 'react-router-dom';
-import { t, Plural } from '@lingui/macro';
+import { t, Trans, Plural } from '@lingui/macro';
 import {
   Button,
   Progress,
@@ -71,6 +71,10 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
   const [showHealthCheckAlert, setShowHealthCheckAlert] = useState(false);
   const [forks, setForks] = useState();

+  const policyRulesDocsLink = `${getDocsBaseUrl(
+    config
+  )}/html/administration/containers_instance_groups.html#ag-instance-group-policies`;
+
   const {
     isLoading,
     error: contentError,
@@ -319,6 +323,23 @@ function InstanceDetails({ setBreadcrumb, instanceGroup }) {
           itemsToDisassociate={[instance]}
           isProtectedInstanceGroup={instanceGroup.name === 'controlplane'}
           modalTitle={t`Disassociate instance from instance group?`}
+          modalNote={
+            instance.managed_by_policy ? (
+              <Trans>
+                <b>
+                  Note: This instance may be re-associated with this
+                  instance group if it is managed by{' '}
+                  <a
+                    href={policyRulesDocsLink}
+                    target="_blank"
+                    rel="noopener noreferrer"
+                  >
+                    policy rules.
+                  </a>
+                </b>
+              </Trans>
+            ) : null
+          }
         />
       )}
       <InstanceToggle
@@ -1,5 +1,5 @@
 import React, { useCallback, useEffect, useState } from 'react';
-import { t } from '@lingui/macro';
+import { t, Trans } from '@lingui/macro';
 import { useLocation, useParams } from 'react-router-dom';
 import 'styled-components/macro';

@@ -22,6 +22,8 @@ import useRequest, {
 import useSelected from 'hooks/useSelected';
 import { InstanceGroupsAPI, InstancesAPI } from 'api';
 import { getQSConfig, parseQueryString, mergeParams } from 'util/qs';
+import getDocsBaseUrl from 'util/getDocsBaseUrl';
+import { useConfig } from 'contexts/Config';
 import HealthCheckButton from 'components/HealthCheckButton/HealthCheckButton';
 import HealthCheckAlert from 'components/HealthCheckAlert';
 import InstanceListItem from './InstanceListItem';
@@ -33,6 +35,7 @@ const QS_CONFIG = getQSConfig('instance', {
 });

 function InstanceList({ instanceGroup }) {
+  const config = useConfig();
   const [isModalOpen, setIsModalOpen] = useState(false);
   const [showHealthCheckAlert, setShowHealthCheckAlert] = useState(false);
   const [pendingHealthCheck, setPendingHealthCheck] = useState(false);
@@ -40,6 +43,10 @@ function InstanceList({ instanceGroup }) {
   const location = useLocation();
   const { id: instanceGroupId } = useParams();

+  const policyRulesDocsLink = `${getDocsBaseUrl(
+    config
+  )}/html/administration/containers_instance_groups.html#ag-instance-group-policies`;
+
   const {
     result: {
       instances,
@@ -262,6 +269,25 @@ function InstanceList({ instanceGroup }) {
           itemsToDisassociate={selected}
           modalTitle={t`Disassociate instance from instance group?`}
           isProtectedInstanceGroup={instanceGroup.name === 'controlplane'}
+          modalNote={
+            selected.some(
+              (instance) => instance.managed_by_policy === true
+            ) ? (
+              <Trans>
+                <b>
+                  Note: Instances may be re-associated with this instance
+                  group if they are managed by{' '}
+                  <a
+                    href={policyRulesDocsLink}
+                    target="_blank"
+                    rel="noopener noreferrer"
+                  >
+                    policy rules.
+                  </a>
+                </b>
+              </Trans>
+            ) : null
+          }
         />,
         <HealthCheckButton
           isDisabled={!canAdd || !canRunHealthCheck}
@@ -321,6 +347,24 @@ function InstanceList({ instanceGroup }) {
             { key: 'hostname', name: t`Name` },
             { key: 'node_type', name: t`Node Type` },
           ]}
+          modalNote={
+            <b>
+              <Trans>
+                <b>
+                  Note: Manually associated instances may be automatically
+                  disassociated from an instance group if the instance is
+                  managed by{' '}
+                  <a
+                    href={policyRulesDocsLink}
+                    target="_blank"
+                    rel="noopener noreferrer"
+                  >
+                    policy rules.
+                  </a>
+                </b>
+              </Trans>
+            </b>
+          }
         />
       )}
       {error && (
Some files were not shown because too many files have changed in this diff.