Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 19:44:43 -03:30)
Compare commits
154 Commits
```text
45c13c25a4
ba0e9831d2
92dce85468
77139e4138
b28e14c630
bf5594e338
f012a69c93
0fb334e372
b7c5cbac3f
eb7407593f
287596234c
ee7b3470da
0faa1c8a24
77175d2862
22464a5838
3919ea6270
9d9f650051
66a3cb6b09
d282393035
6ea3b20912
3025ef0dfa
397d58c459
d739a4a90a
3fe64ad101
919d1e5d40
7fda4b0675
d8af19d169
1821e540f7
77be6c7495
baed869d93
b87ff45c07
7acc0067f5
0a13762f11
2c673c8f1f
8c187c74fc
2ce9440bab
765487390f
086722149c
c10ada6f44
b350cd053d
d0acb1c53f
f61b73010a
adb89cd48f
3e509b3d55
f0badea9d3
6a1ec0dc89
329fb88bbb
177f8cb7b2
b43107a5e9
4857685e1c
8ba1a2bcf7
e7c80fe1e8
33f1c35292
ba899324f2
9c236eb8dd
36559a4539
7a4b3ed139
cd5cc64d6a
71a11ea3ad
cfbbc4cb92
592920ee51
b75b84e282
f4b80c70e3
9870187af5
bbb436ddbb
abf915fafe
481814991e
e94ee8f8d7
e660f62a59
a2a04002b6
93117c8264
b8118ac86a
c08f1ddcaa
d57f549a4c
93e6f974f6
32f7dfece1
68b32b9b4f
886ba1ea7f
b128f05a37
36c9c9cdc4
342e9197b8
2205664fb4
7cdf471894
8719648ff5
c1455ee125
11d5e5c7d4
fba4e06c50
12a4c301b8
8a1cdf859e
2f68317e5f
0f4bac7aed
e42461d96f
9b716235a2
eb704dbaad
105609ec20
9b390a624f
0046ce5e69
b80d0ae85b
1c0142f75c
1ea6d15ee3
3cd5d59d87
d32a5905e8
e53a5da91e
1a56272eaf
3975028bd4
1c51ef8a69
6b0fe8d137
4a3d437b32
23f3ab6a66
ffa3cd1fff
236de7e209
4e5cce8d15
184719e9f2
6c9e2502a5
0b1b866128
80ebe13841
328880609b
71c307ab8a
3ce68ced1e
20817789bd
2b63b55b34
64923e12fc
6d4f92e1e8
fff6fa7d7a
44db4587be
dc0958150a
9f27436c75
e60869e653
51e19d9d0b
0fea29ad4d
0a40b758c3
1191458d80
c0491a7b10
14e613bc92
98e37383c2
9e336d55e4
0e68caf0f7
c9c150b5a6
f97605430b
454f31f6a4
f62bf6a4c3
a0dafbfd8c
b5c052b2e6
1e690fcd7f
479d0c2b12
ede185504c
b0c416334f
7c4aedf716
76f03b9adc
b3bda415da
21291b53fd
3eb748ff1f
6d2c10ad02
ede9d961da
```
.github/workflows/ci.yml (vendored): 2 changes
```diff
@@ -3,7 +3,7 @@ name: CI
 env:
   LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
   CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  DEV_DOCKER_TAG_BASE: ghcr.io/${{ github.repository_owner }}
+  DEV_DOCKER_OWNER: ${{ github.repository_owner }}
   COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
 on:
   pull_request:
```
.github/workflows/label_issue.yml (vendored): 4 changes
```diff
@@ -6,6 +6,10 @@ on:
       - opened
       - reopened
 
+permissions:
+  contents: read # to fetch code
+  issues: write # to label issues
+
 jobs:
   triage:
     runs-on: ubuntu-latest
```
.github/workflows/label_pr.yml (vendored): 4 changes
```diff
@@ -7,6 +7,10 @@ on:
       - reopened
       - synchronize
 
+permissions:
+  contents: read # to determine modified files (actions/labeler)
+  pull-requests: write # to add labels to PRs (actions/labeler)
+
 jobs:
   triage:
     runs-on: ubuntu-latest
```
.github/workflows/promote.yml (vendored): 3 changes
```diff
@@ -8,6 +8,9 @@ on:
   release:
     types: [published]
 
+permissions:
+  contents: read # to fetch code (actions/checkout)
+
 jobs:
   promote:
     if: endsWith(github.repository, '/awx')
```
.gitignore (vendored): 3 changes
```diff
@@ -157,9 +157,10 @@ use_dev_supervisor.txt
 *.unison.tmp
 *.#
 /awx/ui/.ui-built
-/Dockerfile
 /_build/
 /_build_kube_dev/
 /Dockerfile
+/Dockerfile.dev
+/Dockerfile.kube-dev
 
 awx/ui_next/src
```
Makefile: 86 changes
```diff
@@ -1,6 +1,6 @@
 -include awx/ui_next/Makefile
 
-PYTHON ?= python3.9
+PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
 DOCKER_COMPOSE ?= docker-compose
 OFFICIAL ?= no
 NODE ?= node
@@ -37,10 +37,15 @@ SPLUNK ?= false
 PROMETHEUS ?= false
 # If set to true docker-compose will also start a grafana instance
 GRAFANA ?= false
+# If set to true docker-compose will also start a tacacs+ instance
+TACACS ?= false
 
 VENV_BASE ?= /var/lib/awx/venv
 
-DEV_DOCKER_TAG_BASE ?= ghcr.io/ansible
+DEV_DOCKER_OWNER ?= ansible
+# Docker will only accept lowercase, so github names like Paul need to be paul
+DEV_DOCKER_OWNER_LOWER = $(shell echo $(DEV_DOCKER_OWNER) | tr A-Z a-z)
+DEV_DOCKER_TAG_BASE ?= ghcr.io/$(DEV_DOCKER_OWNER_LOWER)
 DEVEL_IMAGE_NAME ?= $(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG)
 
 RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
@@ -294,13 +299,13 @@ swagger: reports
 check: black
 
 api-lint:
-	BLACK_ARGS="--check" make black
+	BLACK_ARGS="--check" $(MAKE) black
 	flake8 awx
 	yamllint -s .
 
 ## Run egg_info_dev to generate awx.egg-info for development.
 awx-link:
 	[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/tools/scripts/egg_info_dev
 	cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/$(PYTHON)/site-packages/awx.egg-link
 
 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
 PYTEST_ARGS ?= -n auto
@@ -319,7 +324,7 @@ github_ci_setup:
 	# CI_GITHUB_TOKEN is defined in .github files
 	echo $(CI_GITHUB_TOKEN) | docker login ghcr.io -u $(GITHUB_ACTOR) --password-stdin
 	docker pull $(DEVEL_IMAGE_NAME) || : # Pre-pull image to warm build cache
-	make docker-compose-build
+	$(MAKE) docker-compose-build
 
 ## Runs AWX_DOCKER_CMD inside a new docker container.
 docker-runner:
@@ -369,7 +374,7 @@ test_collection_sanity:
 	rm -rf $(COLLECTION_INSTALL)
 	if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
 	ansible --version
-	COLLECTION_VERSION=1.0.0 make install_collection
+	COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
 	cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)
 
 test_collection_integration: install_collection
@@ -519,7 +524,9 @@ docker-compose-sources: .git/hooks/pre-commit
 	    -e enable_ldap=$(LDAP) \
 	    -e enable_splunk=$(SPLUNK) \
 	    -e enable_prometheus=$(PROMETHEUS) \
-	    -e enable_grafana=$(GRAFANA) $(EXTRA_SOURCES_ANSIBLE_OPTS)
+	    -e enable_grafana=$(GRAFANA) \
+	    -e enable_tacacs=$(TACACS) \
+	    $(EXTRA_SOURCES_ANSIBLE_OPTS)
 
 docker-compose: awx/projects docker-compose-sources
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
@@ -552,12 +559,21 @@ docker-compose-container-group-clean:
 	fi
 	rm -rf tools/docker-compose-minikube/_sources/
 
-## Base development image build
-docker-compose-build:
-	ansible-playbook tools/ansible/dockerfile.yml -e build_dev=True -e receptor_image=$(RECEPTOR_IMAGE)
-	DOCKER_BUILDKIT=1 docker build -t $(DEVEL_IMAGE_NAME) \
-	    --build-arg BUILDKIT_INLINE_CACHE=1 \
-	    --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
+.PHONY: Dockerfile.dev
+## Generate Dockerfile.dev for awx_devel image
+Dockerfile.dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
+	ansible-playbook tools/ansible/dockerfile.yml \
+	    -e dockerfile_name=Dockerfile.dev \
+	    -e build_dev=True \
+	    -e receptor_image=$(RECEPTOR_IMAGE)
+
+## Build awx_devel image for docker compose development environment
+docker-compose-build: Dockerfile.dev
+	DOCKER_BUILDKIT=1 docker build \
+	    -f Dockerfile.dev \
+	    -t $(DEVEL_IMAGE_NAME) \
+	    --build-arg BUILDKIT_INLINE_CACHE=1 \
+	    --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
 
 docker-clean:
 	-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
@@ -576,7 +592,7 @@ docker-compose-cluster-elk: awx/projects docker-compose-sources
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
 
 docker-compose-container-group:
-	MINIKUBE_CONTAINER_GROUP=true make docker-compose
+	MINIKUBE_CONTAINER_GROUP=true $(MAKE) docker-compose
 
 clean-elk:
 	docker stop tools_kibana_1
@@ -593,12 +609,36 @@ VERSION:
 	@echo "awx: $(VERSION)"
 
 PYTHON_VERSION:
-	@echo "$(PYTHON)" | sed 's:python::'
+	@echo "$(subst python,,$(PYTHON))"
+
+.PHONY: version-for-buildyml
+version-for-buildyml:
+	@echo $(firstword $(subst +, ,$(VERSION)))
+# version-for-buildyml prints a special version string for build.yml,
+# chopping off the sha after the '+' sign.
+# tools/ansible/build.yml was doing this: make print-VERSION | cut -d + -f -1
+# This does the same thing in native make without
+# the pipe or the extra processes, and now the pb does `make version-for-buildyml`
+# Example:
+#   22.1.1.dev38+g523c0d9781 becomes 22.1.1.dev38
 
 .PHONY: Dockerfile
 ## Generate Dockerfile for awx image
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
-	ansible-playbook tools/ansible/dockerfile.yml -e receptor_image=$(RECEPTOR_IMAGE)
+	ansible-playbook tools/ansible/dockerfile.yml \
+	    -e receptor_image=$(RECEPTOR_IMAGE) \
+	    -e headless=$(HEADLESS)
+
+## Build awx image for deployment on Kubernetes environment.
+awx-kube-build: Dockerfile
+	DOCKER_BUILDKIT=1 docker build -f Dockerfile \
+	    --build-arg VERSION=$(VERSION) \
+	    --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
+	    --build-arg HEADLESS=$(HEADLESS) \
+	    -t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
 
 .PHONY: Dockerfile.kube-dev
 ## Generate Docker.kube-dev for awx_kube_devel image
 Dockerfile.kube-dev: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 	ansible-playbook tools/ansible/dockerfile.yml \
 	    -e dockerfile_name=Dockerfile.kube-dev \
@@ -613,13 +653,6 @@ awx-kube-dev-build: Dockerfile.kube-dev
 	    --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) \
 	    -t $(DEV_DOCKER_TAG_BASE)/awx_kube_devel:$(COMPOSE_TAG) .
 
-## Build awx image for deployment on Kubernetes environment.
-awx-kube-build: Dockerfile
-	DOCKER_BUILDKIT=1 docker build -f Dockerfile \
-	    --build-arg VERSION=$(VERSION) \
-	    --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=$(VERSION) \
-	    --build-arg HEADLESS=$(HEADLESS) \
-	    -t $(DEV_DOCKER_TAG_BASE)/awx:$(COMPOSE_TAG) .
 
 # Translation TASKS
 # --------------------------------------
@@ -639,6 +672,7 @@ messages:
 	fi; \
 	$(PYTHON) manage.py makemessages -l en_us --keep-pot
 
+.PHONY: print-%
 print-%:
 	@echo $($*)
 
@@ -650,12 +684,12 @@ HELP_FILTER=.PHONY
 ## Display help targets
 help:
 	@printf "Available targets:\n"
-	@make -s help/generate | grep -vE "\w($(HELP_FILTER))"
+	@$(MAKE) -s help/generate | grep -vE "\w($(HELP_FILTER))"
 
 ## Display help for all targets
 help/all:
 	@printf "Available targets:\n"
-	@make -s help/generate
+	@$(MAKE) -s help/generate
 
 ## Generate help output from MAKEFILE_LIST
 help/generate:
@@ -679,4 +713,4 @@ help/generate:
 
 ## Display help for ui-next targets
 help/ui-next:
-	@make -s help MAKEFILE_LIST="awx/ui_next/Makefile"
+	@$(MAKE) -s help MAKEFILE_LIST="awx/ui_next/Makefile"
```
```diff
@@ -5,13 +5,11 @@
 import inspect
 import logging
 import time
-import uuid
 
 # Django
 from django.conf import settings
 from django.contrib.auth import views as auth_views
 from django.contrib.contenttypes.models import ContentType
-from django.core.cache import cache
 from django.core.exceptions import FieldDoesNotExist
 from django.db import connection, transaction
 from django.db.models.fields.related import OneToOneRel
@@ -35,7 +33,7 @@ from rest_framework.negotiation import DefaultContentNegotiation
 # AWX
 from awx.api.filters import FieldLookupBackend
 from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
-from awx.main.access import access_registry
+from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
 from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
@@ -364,12 +362,7 @@ class GenericAPIView(generics.GenericAPIView, APIView):
             return self.queryset._clone()
         elif self.model is not None:
             qs = self.model._default_manager
-            if self.model in access_registry:
-                access_class = access_registry[self.model]
-                if access_class.select_related:
-                    qs = qs.select_related(*access_class.select_related)
-                if access_class.prefetch_related:
-                    qs = qs.prefetch_related(*access_class.prefetch_related)
+            qs = optimize_queryset(qs)
             return qs
         else:
             return super(GenericAPIView, self).get_queryset()
@@ -512,6 +505,9 @@ class SubListAPIView(ParentMixin, ListAPIView):
     # And optionally (user must have given access permission on parent object
     # to view sublist):
     #   parent_access = 'read'
+    # filter_read_permission sets whether or not to override the default intersection behavior
+    # implemented here
+    filter_read_permission = True
 
     def get_description_context(self):
         d = super(SubListAPIView, self).get_description_context()
@@ -526,12 +522,16 @@ class SubListAPIView(ParentMixin, ListAPIView):
     def get_queryset(self):
         parent = self.get_parent_object()
         self.check_parent_access(parent)
-        qs = self.request.user.get_queryset(self.model).distinct()
-        sublist_qs = self.get_sublist_queryset(parent)
-        return qs & sublist_qs
+        if not self.filter_read_permission:
+            return optimize_queryset(self.get_sublist_queryset(parent))
+        qs = self.request.user.get_queryset(self.model)
+        if hasattr(self, 'parent_key'):
+            # This is vastly preferable for ReverseForeignKey relationships
+            return qs.filter(**{self.parent_key: parent})
+        return qs.distinct() & self.get_sublist_queryset(parent).distinct()
 
     def get_sublist_queryset(self, parent):
-        return getattrd(parent, self.relationship).distinct()
+        return getattrd(parent, self.relationship)
 
 
 class DestroyAPIView(generics.DestroyAPIView):
@@ -580,15 +580,6 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
         d.update({'parent_key': getattr(self, 'parent_key', None)})
         return d
 
-    def get_queryset(self):
-        if hasattr(self, 'parent_key'):
-            # Prefer this filtering because ForeignKey allows us more assumptions
-            parent = self.get_parent_object()
-            self.check_parent_access(parent)
-            qs = self.request.user.get_queryset(self.model)
-            return qs.filter(**{self.parent_key: parent})
-        return super(SubListCreateAPIView, self).get_queryset()
-
     def create(self, request, *args, **kwargs):
         # If the object ID was not specified, it probably doesn't exist in the
         # DB yet. We want to see if we can create it. The URL may choose to
@@ -967,16 +958,11 @@ class CopyAPIView(GenericAPIView):
         if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
             new_obj.admin_role.members.add(request.user)
         if sub_objs:
-            # store the copied object dict into cache, because it's
-            # often too large for postgres' notification bus
-            # (which has a default maximum message size of 8k)
-            key = 'deep-copy-{}'.format(str(uuid.uuid4()))
-            cache.set(key, sub_objs, timeout=3600)
             permission_check_func = None
             if hasattr(type(self), 'deep_copy_permission_check_func'):
                 permission_check_func = (type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func')
             trigger_delayed_deep_copy(
-                self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, key, permission_check_func=permission_check_func
+                self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, permission_check_func=permission_check_func
             )
         serializer = self._get_copy_return_serializer(new_obj)
         headers = {'Location': new_obj.get_absolute_url(request=request)}
```
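The filter_read_permission flag introduced above switches SubListAPIView between two paths: intersecting the sublist with the user's readable queryset, or trusting the parent-access check alone. A minimal standalone sketch of that choice, with plain sets standing in for querysets (illustrative, not AWX code):

```python
# 'visible' models the user's read-permission filter; 'parent_items' models
# the related objects hanging off an already access-checked parent.
def sublist_queryset(parent_items, visible, filter_read_permission=True):
    if not filter_read_permission:
        # trust the parent-access check alone; skip the intersection
        return parent_items
    # default behavior: intersect the sublist with what the user may read
    return parent_items & visible

parent_items = {1, 2, 3}
visible = {2, 3, 4}
assert sublist_queryset(parent_items, visible) == {2, 3}
assert sublist_queryset(parent_items, visible, filter_read_permission=False) == {1, 2, 3}
```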
```diff
@@ -25,6 +25,7 @@ __all__ = [
     'UserPermission',
     'IsSystemAdminOrAuditor',
     'WorkflowApprovalPermission',
+    'AnalyticsPermission',
 ]
 
 
@@ -250,3 +251,16 @@ class IsSystemAdminOrAuditor(permissions.BasePermission):
 class WebhookKeyPermission(permissions.BasePermission):
     def has_object_permission(self, request, view, obj):
         return request.user.can_access(view.model, 'admin', obj, request.data)
+
+
+class AnalyticsPermission(permissions.BasePermission):
+    """
+    Allows GET/POST/OPTIONS to system admins and system auditors.
+    """
+
+    def has_permission(self, request, view):
+        if not (request.user and request.user.is_authenticated):
+            return False
+        if request.method in ["GET", "POST", "OPTIONS"]:
+            return request.user.is_superuser or request.user.is_system_auditor
+        return request.user.is_superuser
```
```diff
@@ -954,7 +954,7 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
 
 
 class UserSerializer(BaseSerializer):
-    password = serializers.CharField(required=False, default='', write_only=True, help_text=_('Write-only field used to change the password.'))
+    password = serializers.CharField(required=False, default='', help_text=_('Field used to change the password.'))
     ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
     external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service'))
     is_system_auditor = serializers.BooleanField(default=False)
@@ -981,7 +981,12 @@ class UserSerializer(BaseSerializer):
 
     def to_representation(self, obj):
        ret = super(UserSerializer, self).to_representation(obj)
-        ret.pop('password', None)
+        if self.get_external_account(obj):
+            # If this is an external account it shouldn't have a password field
+            ret.pop('password', None)
+        else:
+            # If its an internal account lets assume there is a password and return $encrypted$ to the user
+            ret['password'] = '$encrypted$'
         if obj and type(self) is UserSerializer:
             ret['auth'] = obj.social_auth.values('provider', 'uid')
         return ret
@@ -995,13 +1000,31 @@ class UserSerializer(BaseSerializer):
             django_validate_password(value)
         if not self.instance and value in (None, ''):
             raise serializers.ValidationError(_('Password required for new User.'))
 
+        # Check if a password is too long
+        password_max_length = User._meta.get_field('password').max_length
+        if len(value) > password_max_length:
+            raise serializers.ValidationError(_('Password max length is {}'.format(password_max_length)))
+        if getattr(settings, 'LOCAL_PASSWORD_MIN_LENGTH', 0) and len(value) < getattr(settings, 'LOCAL_PASSWORD_MIN_LENGTH'):
+            raise serializers.ValidationError(_('Password must be at least {} characters long.'.format(getattr(settings, 'LOCAL_PASSWORD_MIN_LENGTH'))))
+        if getattr(settings, 'LOCAL_PASSWORD_MIN_DIGITS', 0) and sum(c.isdigit() for c in value) < getattr(settings, 'LOCAL_PASSWORD_MIN_DIGITS'):
+            raise serializers.ValidationError(_('Password must contain at least {} digits.'.format(getattr(settings, 'LOCAL_PASSWORD_MIN_DIGITS'))))
+        if getattr(settings, 'LOCAL_PASSWORD_MIN_UPPER', 0) and sum(c.isupper() for c in value) < getattr(settings, 'LOCAL_PASSWORD_MIN_UPPER'):
+            raise serializers.ValidationError(
+                _('Password must contain at least {} uppercase characters.'.format(getattr(settings, 'LOCAL_PASSWORD_MIN_UPPER')))
+            )
+        if getattr(settings, 'LOCAL_PASSWORD_MIN_SPECIAL', 0) and sum(not c.isalnum() for c in value) < getattr(settings, 'LOCAL_PASSWORD_MIN_SPECIAL'):
+            raise serializers.ValidationError(
+                _('Password must contain at least {} special characters.'.format(getattr(settings, 'LOCAL_PASSWORD_MIN_SPECIAL')))
+            )
+
         return value
 
     def _update_password(self, obj, new_password):
         # For now we're not raising an error, just not saving password for
         # users managed by LDAP who already have an unusable password set.
-        if new_password and not self.get_external_account(obj):
+        # Get external password will return something like ldap or enterprise or None if the user isn't external. We only want to allow a password update for a None option
+        if new_password and new_password != '$encrypted$' and not self.get_external_account(obj):
             obj.set_password(new_password)
             obj.save(update_fields=['password'])
@@ -2167,7 +2190,7 @@ class BulkHostCreateSerializer(serializers.Serializer):
         host_data = []
         for r in result:
             item = {k: getattr(r, k) for k in return_keys}
-            if not settings.IS_TESTING_MODE:
+            if settings.DATABASES and ('sqlite3' not in settings.DATABASES.get('default', {}).get('ENGINE')):
                 # sqlite acts different with bulk_create -- it doesn't return the id of the objects
                 # to get it, you have to do an additional query, which is not useful for our tests
                 item['url'] = reverse('api:host_detail', kwargs={'pk': r.id})
```
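The serializer changes above replace the write-only password field with a '$encrypted$' sentinel: internal accounts always show the sentinel instead of any real value, and echoing the sentinel back must never overwrite the stored hash. A minimal sketch of that round trip, with illustrative stand-ins (SENTINEL, FakeUser, update_password) rather than the AWX classes:

```python
# Standalone sketch of the '$encrypted$' sentinel pattern; not AWX code.
SENTINEL = '$encrypted$'

class FakeUser:
    def __init__(self):
        self.password_hash = 'pbkdf2$...'

    def set_password(self, raw):
        self.password_hash = f'hashed({raw})'

def show_password(is_external):
    # to_representation: external accounts omit the field entirely,
    # internal accounts get the sentinel instead of any real value
    return None if is_external else SENTINEL

def update_password(user, submitted, is_external):
    # _update_password: echoing the sentinel back must not overwrite the hash
    if submitted and submitted != SENTINEL and not is_external:
        user.set_password(submitted)

user = FakeUser()
update_password(user, SENTINEL, is_external=False)   # sentinel round trip is a no-op
assert user.password_hash == 'pbkdf2$...'
update_password(user, 'new-pass-123', is_external=False)
assert user.password_hash == 'hashed(new-pass-123)'
```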
awx/api/urls/analytics.py (new file, 31 lines):
```python
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

from django.urls import re_path

import awx.api.views.analytics as analytics


urls = [
    re_path(r'^$', analytics.AnalyticsRootView.as_view(), name='analytics_root_view'),
    re_path(r'^authorized/$', analytics.AnalyticsAuthorizedView.as_view(), name='analytics_authorized'),
    re_path(r'^reports/$', analytics.AnalyticsReportsList.as_view(), name='analytics_reports_list'),
    re_path(r'^report/(?P<slug>[\w-]+)/$', analytics.AnalyticsReportDetail.as_view(), name='analytics_report_detail'),
    re_path(r'^report_options/$', analytics.AnalyticsReportOptionsList.as_view(), name='analytics_report_options_list'),
    re_path(r'^adoption_rate/$', analytics.AnalyticsAdoptionRateList.as_view(), name='analytics_adoption_rate'),
    re_path(r'^adoption_rate_options/$', analytics.AnalyticsAdoptionRateList.as_view(), name='analytics_adoption_rate_options'),
    re_path(r'^event_explorer/$', analytics.AnalyticsEventExplorerList.as_view(), name='analytics_event_explorer'),
    re_path(r'^event_explorer_options/$', analytics.AnalyticsEventExplorerList.as_view(), name='analytics_event_explorer_options'),
    re_path(r'^host_explorer/$', analytics.AnalyticsHostExplorerList.as_view(), name='analytics_host_explorer'),
    re_path(r'^host_explorer_options/$', analytics.AnalyticsHostExplorerList.as_view(), name='analytics_host_explorer_options'),
    re_path(r'^job_explorer/$', analytics.AnalyticsJobExplorerList.as_view(), name='analytics_job_explorer'),
    re_path(r'^job_explorer_options/$', analytics.AnalyticsJobExplorerList.as_view(), name='analytics_job_explorer_options'),
    re_path(r'^probe_templates/$', analytics.AnalyticsProbeTemplatesList.as_view(), name='analytics_probe_templates_explorer'),
    re_path(r'^probe_templates_options/$', analytics.AnalyticsProbeTemplatesList.as_view(), name='analytics_probe_templates_options'),
    re_path(r'^probe_template_for_hosts/$', analytics.AnalyticsProbeTemplateForHostsList.as_view(), name='analytics_probe_template_for_hosts_explorer'),
    re_path(r'^probe_template_for_hosts_options/$', analytics.AnalyticsProbeTemplateForHostsList.as_view(), name='analytics_probe_template_for_hosts_options'),
    re_path(r'^roi_templates/$', analytics.AnalyticsRoiTemplatesList.as_view(), name='analytics_roi_templates_explorer'),
    re_path(r'^roi_templates_options/$', analytics.AnalyticsRoiTemplatesList.as_view(), name='analytics_roi_templates_options'),
]

__all__ = ['urls']
```
```diff
@@ -42,6 +42,7 @@ from awx.api.views.bulk import (
 from awx.api.views.mesh_visualizer import MeshVisualizer
 
 from awx.api.views.metrics import MetricsView
+from awx.api.views.analytics import AWX_ANALYTICS_API_PREFIX
 
 from .organization import urls as organization_urls
 from .user import urls as user_urls
@@ -82,7 +83,7 @@ from .oauth2 import urls as oauth2_urls
 from .oauth2_root import urls as oauth2_root_urls
 from .workflow_approval_template import urls as workflow_approval_template_urls
 from .workflow_approval import urls as workflow_approval_urls
-
+from .analytics import urls as analytics_urls
 
 v2_urls = [
     re_path(r'^$', ApiV2RootView.as_view(), name='api_v2_root_view'),
@@ -147,6 +148,7 @@ v2_urls = [
     re_path(r'^unified_job_templates/$', UnifiedJobTemplateList.as_view(), name='unified_job_template_list'),
     re_path(r'^unified_jobs/$', UnifiedJobList.as_view(), name='unified_job_list'),
     re_path(r'^activity_stream/', include(activity_stream_urls)),
+    re_path(rf'^{AWX_ANALYTICS_API_PREFIX}/', include(analytics_urls)),
     re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)),
     re_path(r'^workflow_approvals/', include(workflow_approval_urls)),
     re_path(r'^bulk/$', BulkView.as_view(), name='bulk'),
```
```diff
@@ -62,7 +62,7 @@ from wsgiref.util import FileWrapper
 
 # AWX
 from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
-from awx.main.access import get_user_queryset, HostAccess
+from awx.main.access import get_user_queryset
 from awx.api.generics import (
     APIView,
     BaseUsersList,
@@ -794,13 +794,7 @@ class ExecutionEnvironmentActivityStreamList(SubListAPIView):
     parent_model = models.ExecutionEnvironment
     relationship = 'activitystream_set'
     search_fields = ('changes',)
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-
-        qs = self.request.user.get_queryset(self.model)
-        return qs.filter(execution_environment=parent)
+    filter_read_permission = False
 
 
 class ProjectList(ListCreateAPIView):
@@ -1634,13 +1628,7 @@ class InventoryHostsList(HostRelatedSearchMixin, SubListCreateAttachDetachAPIVie
     parent_model = models.Inventory
     relationship = 'hosts'
     parent_key = 'inventory'
-
-    def get_queryset(self):
-        inventory = self.get_parent_object()
-        qs = getattrd(inventory, self.relationship).all()
-        # Apply queryset optimizations
-        qs = qs.select_related(*HostAccess.select_related).prefetch_related(*HostAccess.prefetch_related)
-        return qs
+    filter_read_permission = False
 
 
 class HostGroupsList(SubListCreateAttachDetachAPIView):
@@ -2581,16 +2569,7 @@ class JobTemplateCredentialsList(SubListCreateAttachDetachAPIView):
     serializer_class = serializers.CredentialSerializer
     parent_model = models.JobTemplate
     relationship = 'credentials'
-
-    def get_queryset(self):
-        # Return the full list of credentials
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        sublist_qs = getattrd(parent, self.relationship)
-        sublist_qs = sublist_qs.prefetch_related(
-            'created_by', 'modified_by', 'admin_role', 'use_role', 'read_role', 'admin_role__parents', 'admin_role__members'
-        )
-        return sublist_qs
+    filter_read_permission = False
 
     def is_valid_relation(self, parent, sub, created=False):
         if sub.unique_hash() in [cred.unique_hash() for cred in parent.credentials.all()]:
@@ -2692,7 +2671,10 @@ class JobTemplateCallback(GenericAPIView):
         # Permission class should have already validated host_config_key.
         job_template = self.get_object()
         # Attempt to find matching hosts based on remote address.
-        matching_hosts = self.find_matching_hosts()
+        if job_template.inventory:
+            matching_hosts = self.find_matching_hosts()
+        else:
+            return Response({"msg": _("Cannot start automatically, an inventory is required.")}, status=status.HTTP_400_BAD_REQUEST)
         # If the host is not found, update the inventory before trying to
         # match again.
         inventory_sources_already_updated = []
@@ -2777,6 +2759,7 @@ class JobTemplateInstanceGroupsList(SubListAttachDetachAPIView):
     serializer_class = serializers.InstanceGroupSerializer
     parent_model = models.JobTemplate
     relationship = 'instance_groups'
+    filter_read_permission = False
 
 
 class JobTemplateAccessList(ResourceAccessList):
@@ -2867,16 +2850,7 @@ class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, Su
     relationship = ''
     enforce_parent_relationship = 'workflow_job_template'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    '''
-    Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by
-    'relationship'
-    '''
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).all()
+    filter_read_permission = False
 
     def is_valid_relation(self, parent, sub, created=False):
         if created:
@@ -2951,14 +2925,7 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
     parent_model = models.WorkflowJobNode
     relationship = ''
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    #
-    # Limit the set of WorkflowJobNodes to the related nodes of specified by self.relationship
-    #
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).all()
+    filter_read_permission = False
 
 
 class WorkflowJobNodeSuccessNodesList(WorkflowJobNodeChildrenBaseList):
@@ -3137,11 +3104,8 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
     relationship = 'workflow_job_template_nodes'
     parent_key = 'workflow_job_template'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).order_by('id')
+    ordering = ('id',)  # assure ordering by id for consistency
+    filter_read_permission = False
 
 
 class WorkflowJobTemplateJobsList(SubListAPIView):
@@ -3233,11 +3197,8 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
     relationship = 'workflow_job_nodes'
     parent_key = 'workflow_job'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).order_by('id')
+    ordering = ('id',)  # assure ordering by id for consistency
+    filter_read_permission = False
 
 
 class WorkflowJobCancel(GenericCancelView):
@@ -3551,11 +3512,7 @@ class BaseJobHostSummariesList(SubListAPIView):
     relationship = 'job_host_summaries'
     name = _('Job Host Summaries List')
     search_fields = ('host_name',)
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).select_related('job', 'job__job_template', 'host')
+    filter_read_permission = False
 
 
 class HostJobHostSummariesList(BaseJobHostSummariesList):
```
awx/api/views/analytics.py (new file, 296 lines):
```python
import requests
import logging
import urllib.parse as urlparse

from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.utils import translation

from awx.api.generics import APIView, Response
from awx.api.permissions import AnalyticsPermission
from awx.api.versioning import reverse
from awx.main.utils import get_awx_version
from rest_framework import status

from collections import OrderedDict

AUTOMATION_ANALYTICS_API_URL_PATH = "/api/tower-analytics/v1"
AWX_ANALYTICS_API_PREFIX = 'analytics'

ERROR_UPLOAD_NOT_ENABLED = "analytics-upload-not-enabled"
ERROR_MISSING_URL = "missing-url"
ERROR_MISSING_USER = "missing-user"
ERROR_MISSING_PASSWORD = "missing-password"
ERROR_NO_DATA_OR_ENTITLEMENT = "no-data-or-entitlement"
ERROR_NOT_FOUND = "not-found"
ERROR_UNAUTHORIZED = "unauthorized"
ERROR_UNKNOWN = "unknown"
ERROR_UNSUPPORTED_METHOD = "unsupported-method"

logger = logging.getLogger('awx.api.views.analytics')


class MissingSettings(Exception):
    """Settings are not correct Exception"""

    pass


class GetNotAllowedMixin(object):
    def get(self, request, format=None):
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)


class AnalyticsRootView(APIView):
    permission_classes = (AnalyticsPermission,)
    name = _('Automation Analytics')
    swagger_topic = 'Automation Analytics'

    def get(self, request, format=None):
        data = OrderedDict()
        data['authorized'] = reverse('api:analytics_authorized')
        data['reports'] = reverse('api:analytics_reports_list')
        data['report_options'] = reverse('api:analytics_report_options_list')
        data['adoption_rate'] = reverse('api:analytics_adoption_rate')
        data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options')
        data['event_explorer'] = reverse('api:analytics_event_explorer')
        data['event_explorer_options'] = reverse('api:analytics_event_explorer_options')
        data['host_explorer'] = reverse('api:analytics_host_explorer')
        data['host_explorer_options'] = reverse('api:analytics_host_explorer_options')
        data['job_explorer'] = reverse('api:analytics_job_explorer')
        data['job_explorer_options'] = reverse('api:analytics_job_explorer_options')
        data['probe_templates'] = reverse('api:analytics_probe_templates_explorer')
        data['probe_templates_options'] = reverse('api:analytics_probe_templates_options')
        data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer')
        data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options')
        data['roi_templates'] = reverse('api:analytics_roi_templates_explorer')
        data['roi_templates_options'] = reverse('api:analytics_roi_templates_options')
        return Response(data)


class AnalyticsGenericView(APIView):
    """
    Example:
        headers = {
            'Content-Type': 'application/json',
        }

        params = {
            'limit': '20',
            'offset': '0',
            'sort_by': 'name:asc',
        }

        json_data = {
            'limit': '20',
            'offset': '0',
            'sort_options': 'name',
            'sort_order': 'asc',
            'tags': [],
            'slug': [],
            'name': [],
            'description': '',
        }

        response = requests.post(f'{AUTOMATION_ANALYTICS_API_URL}/reports/', params=params,
                                 headers=headers, json=json_data)

        return Response(response.json(), status=response.status_code)
    """

    permission_classes = (AnalyticsPermission,)

    @staticmethod
    def _request_headers(request):
        headers = {}
        for header in ['Content-Type', 'Content-Length', 'Accept-Encoding', 'User-Agent', 'Accept']:
            if request.headers.get(header, None):
                headers[header] = request.headers.get(header)
        headers['X-Rh-Analytics-Source'] = 'controller'
        headers['X-Rh-Analytics-Source-Version'] = get_awx_version()
        headers['Accept-Language'] = translation.get_language()

        return headers

    @staticmethod
    def _get_analytics_path(request_path):
        parts = request_path.split(f'{AWX_ANALYTICS_API_PREFIX}/')
        path_specific = parts[-1]
        return f"{AUTOMATION_ANALYTICS_API_URL_PATH}/{path_specific}"

    def _get_analytics_url(self, request_path):
        analytics_path = self._get_analytics_path(request_path)
        url = getattr(settings, 'AUTOMATION_ANALYTICS_URL', None)
        if not url:
            raise MissingSettings(ERROR_MISSING_URL)
        url_parts = urlparse.urlsplit(url)
        analytics_url = urlparse.urlunsplit([url_parts.scheme, url_parts.netloc, analytics_path, url_parts.query, url_parts.fragment])
        return analytics_url

    @staticmethod
    def _get_setting(setting_name, default, error_message):
        setting = getattr(settings, setting_name, default)
        if not setting:
            raise MissingSettings(error_message)
        return setting

    @staticmethod
    def _error_response(keyword, message=None, remote=True, remote_status_code=None, status_code=status.HTTP_403_FORBIDDEN):
        text = {"error": {"remote": remote, "remote_status": remote_status_code, "keyword": keyword}}
        if message:
            text["error"]["message"] = message
        return Response(text, status=status_code)

    def _error_response_404(self, response):
        try:
            json_response = response.json()
            # Subscription/entitlement problem or missing tenant data in AA db => HTTP 403
            message = json_response.get('error', None)
            if message:
                return self._error_response(ERROR_NO_DATA_OR_ENTITLEMENT, message, remote=True, remote_status_code=response.status_code)

            # Standard 404 problem => HTTP 404
            message = json_response.get('detail', None) or response.text
        except requests.exceptions.JSONDecodeError:
            # Unexpected text => still HTTP 404
            message = response.text

        return self._error_response(ERROR_NOT_FOUND, message, remote=True, remote_status_code=status.HTTP_404_NOT_FOUND, status_code=status.HTTP_404_NOT_FOUND)

    @staticmethod
    def _update_response_links(json_response):
        if not json_response.get('links', None):
            return

        for key, value in json_response['links'].items():
            if value:
                json_response['links'][key] = value.replace(AUTOMATION_ANALYTICS_API_URL_PATH, f"/api/v2/{AWX_ANALYTICS_API_PREFIX}")

    def _forward_response(self, response):
        try:
            content_type = response.headers.get('content-type', '')
            if content_type.find('application/json') != -1:
                json_response = response.json()
                self._update_response_links(json_response)

                return Response(json_response, status=response.status_code)
        except Exception as e:
            logger.error(f"Analytics API: Response error: {e}")

        return Response(response.content, status=response.status_code)

    def _send_to_analytics(self, request, method):
        try:
            headers = self._request_headers(request)

            self._get_setting('INSIGHTS_TRACKING_STATE', False, ERROR_UPLOAD_NOT_ENABLED)
            url = self._get_analytics_url(request.path)
            rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
            rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)

            if method not in ["GET", "POST", "OPTIONS"]:
                return self._error_response(ERROR_UNSUPPORTED_METHOD, method, remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
            else:
                response = requests.request(
                    method,
                    url,
                    auth=(rh_user, rh_password),
                    verify=settings.INSIGHTS_CERT_PATH,
                    params=request.query_params,
                    headers=headers,
                    json=request.data,
                    timeout=(31, 31),
                )
                #
                # Missing or wrong user/pass
                #
                if response.status_code == status.HTTP_401_UNAUTHORIZED:
                    text = (response.text or '').rstrip("\n")
                    return self._error_response(ERROR_UNAUTHORIZED, text, remote=True, remote_status_code=response.status_code)
                #
                # Not found, No entitlement or No data in Analytics
                #
                elif response.status_code == status.HTTP_404_NOT_FOUND:
                    return self._error_response_404(response)
                #
                # Success or not a 401/404 errors are just forwarded
                #
                else:
                    return self._forward_response(response)

        except MissingSettings as e:
            logger.warning(f"Analytics API: Setting missing: {e.args[0]}")
            return self._error_response(e.args[0], remote=False)
        except requests.exceptions.RequestException as e:
            logger.error(f"Analytics API: Request error: {e}")
            return self._error_response(ERROR_UNKNOWN, str(e), remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)
        except Exception as e:
            logger.error(f"Analytics API: Error: {e}")
            return self._error_response(ERROR_UNKNOWN, str(e), remote=False, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR)


class AnalyticsGenericListView(AnalyticsGenericView):
    def get(self, request, format=None):
        return self._send_to_analytics(request, method="GET")

    def post(self, request, format=None):
        return self._send_to_analytics(request, method="POST")

    def options(self, request, format=None):
        return self._send_to_analytics(request, method="OPTIONS")


class AnalyticsGenericDetailView(AnalyticsGenericView):
    def get(self, request, slug, format=None):
        return self._send_to_analytics(request, method="GET")

    def post(self, request, slug, format=None):
        return self._send_to_analytics(request, method="POST")

    def options(self, request, slug, format=None):
        return self._send_to_analytics(request, method="OPTIONS")


class AnalyticsAuthorizedView(AnalyticsGenericListView):
    name = _("Authorized")


class AnalyticsReportsList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Reports")
    swagger_topic = "Automation Analytics"


class AnalyticsReportDetail(AnalyticsGenericDetailView):
    name = _("Report")


class AnalyticsReportOptionsList(AnalyticsGenericListView):
    name = _("Report Options")


class AnalyticsAdoptionRateList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Adoption Rate")


class AnalyticsEventExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Event Explorer")


class AnalyticsHostExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Host Explorer")


class AnalyticsJobExplorerList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Job Explorer")


class AnalyticsProbeTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Probe Templates")


class AnalyticsProbeTemplateForHostsList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("Probe Template For Hosts")


class AnalyticsRoiTemplatesList(GetNotAllowedMixin, AnalyticsGenericListView):
    name = _("ROI Templates")
```
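The proxy above rests on two path mappings: incoming controller paths are rewritten onto the remote Analytics API prefix by _get_analytics_path/_get_analytics_url, and pagination links in responses are rewritten back by _update_response_links so clients keep talking to the controller. A runnable sketch of just that mapping; the base URL 'https://example.redhat.com' is an illustrative placeholder, not a real AUTOMATION_ANALYTICS_URL value:

```python
import urllib.parse as urlparse

AUTOMATION_ANALYTICS_API_URL_PATH = "/api/tower-analytics/v1"
AWX_ANALYTICS_API_PREFIX = 'analytics'

def to_remote(request_path, base_url):
    # keep everything after 'analytics/' and graft it onto the remote prefix
    path_specific = request_path.split(f'{AWX_ANALYTICS_API_PREFIX}/')[-1]
    parts = urlparse.urlsplit(base_url)
    return urlparse.urlunsplit(
        [parts.scheme, parts.netloc, f"{AUTOMATION_ANALYTICS_API_URL_PATH}/{path_specific}", parts.query, parts.fragment]
    )

def to_local(link):
    # map remote pagination links back under the controller's /api/v2/analytics prefix
    return link.replace(AUTOMATION_ANALYTICS_API_URL_PATH, f"/api/v2/{AWX_ANALYTICS_API_PREFIX}")

assert to_remote('/api/v2/analytics/reports/', 'https://example.redhat.com') == \
    'https://example.redhat.com/api/tower-analytics/v1/reports/'
assert to_local('/api/tower-analytics/v1/reports/?offset=20') == '/api/v2/analytics/reports/?offset=20'
```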
```diff
@@ -61,12 +61,6 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     model = Organization
     serializer_class = OrganizationSerializer
-
-    def get_queryset(self):
-        qs = Organization.accessible_objects(self.request.user, 'read_role')
-        qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
-        qs = qs.prefetch_related('created_by', 'modified_by')
-        return qs
 
 
 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = Organization
@@ -207,6 +201,7 @@ class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
     serializer_class = InstanceGroupSerializer
     parent_model = Organization
     relationship = 'instance_groups'
+    filter_read_permission = False
 
 
 class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
@@ -214,6 +209,7 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
     serializer_class = CredentialSerializer
     parent_model = Organization
     relationship = 'galaxy_credentials'
+    filter_read_permission = False
 
     def is_valid_relation(self, parent, sub, created=False):
         if sub.kind != 'galaxy_api_token':
```
```diff
@@ -126,6 +126,7 @@ class ApiVersionRootView(APIView):
         data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
         data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
         data['bulk'] = reverse('api:bulk', request=request)
+        data['analytics'] = reverse('api:analytics_root_view', request=request)
         return Response(data)
 
 
```
```diff
@@ -2952,3 +2952,19 @@ class WorkflowApprovalTemplateAccess(BaseAccess):
 for cls in BaseAccess.__subclasses__():
     access_registry[cls.model] = cls
 access_registry[UnpartitionedJobEvent] = UnpartitionedJobEventAccess
+
+
+def optimize_queryset(queryset):
+    """
+    A utility method in case you already have a queryset and just want to
+    apply the standard optimizations for that model.
+    In other words, use if you do not want to start from filtered_queryset for some reason.
+    """
+    if not queryset.model or queryset.model not in access_registry:
+        return queryset
+    access_class = access_registry[queryset.model]
+    if access_class.select_related:
+        queryset = queryset.select_related(*access_class.select_related)
+    if access_class.prefetch_related:
+        queryset = queryset.prefetch_related(*access_class.prefetch_related)
+    return queryset
```
```diff
@@ -6,7 +6,7 @@ import platform
 import distro
 
 from django.db import connection
-from django.db.models import Count
+from django.db.models import Count, Min
 from django.conf import settings
 from django.contrib.sessions.models import Session
 from django.utils.timezone import now, timedelta
@@ -35,7 +35,7 @@ data _since_ the last report date - i.e., new data in the last 24 hours)
 """
 
 
-def trivial_slicing(key, since, until, last_gather):
+def trivial_slicing(key, since, until, last_gather, **kwargs):
     if since is not None:
         return [(since, until)]
 
@@ -48,7 +48,7 @@ def trivial_slicing(key, since, until, last_gather):
     return [(last_entry, until)]
 
 
-def four_hour_slicing(key, since, until, last_gather):
+def four_hour_slicing(key, since, until, last_gather, **kwargs):
     if since is not None:
         last_entry = since
     else:
@@ -69,6 +69,54 @@ def four_hour_slicing(key, since, until, last_gather):
         start = end
 
 
+def host_metric_slicing(key, since, until, last_gather, **kwargs):
+    """
+    Slicing doesn't start 4 weeks ago, but sends whole table monthly or first time
+    """
+    from awx.main.models.inventory import HostMetric
+
+    if since is not None:
+        return [(since, until)]
+
+    from awx.conf.models import Setting
+
+    # Check if full sync should be done
+    full_sync_enabled = kwargs.get('full_sync_enabled', False)
+    last_entry = None
+    if not full_sync_enabled:
+        #
+        # If not, try incremental sync first
+        #
+        last_entries = Setting.objects.filter(key='AUTOMATION_ANALYTICS_LAST_ENTRIES').first()
+        last_entries = json.loads((last_entries.value if last_entries is not None else '') or '{}', object_hook=datetime_hook)
+        last_entry = last_entries.get(key)
+        if not last_entry:
+            #
+            # If not done before, switch to full sync
+            #
+            full_sync_enabled = True
+
+    if full_sync_enabled:
+        #
+        # Find the lowest date for full sync
+        #
+        min_dates = HostMetric.objects.aggregate(min_last_automation=Min('last_automation'), min_last_deleted=Min('last_deleted'))
+        if min_dates['min_last_automation'] and min_dates['min_last_deleted']:
+            last_entry = min(min_dates['min_last_automation'], min_dates['min_last_deleted'])
+        elif min_dates['min_last_automation'] or min_dates['min_last_deleted']:
+            last_entry = min_dates['min_last_automation'] or min_dates['min_last_deleted']
+
+    if not last_entry:
+        # empty table
+        return []
+
+    start, end = last_entry, None
+    while start < until:
+        end = min(start + timedelta(days=30), until)
+        yield (start, end)
+        start = end
+
+
 def _identify_lower(key, since, until, last_gather):
     from awx.conf.models import Setting
 
@@ -83,7 +131,7 @@ def _identify_lower(key, since, until, last_gather):
     return lower, last_entries
 
 
-@register('config', '1.5', description=_('General platform configuration.'))
+@register('config', '1.6', description=_('General platform configuration.'))
 def config(since, **kwargs):
     license_info = get_license()
     install_type = 'traditional'
@@ -107,10 +155,13 @@ def config(since, **kwargs):
         'subscription_name': license_info.get('subscription_name'),
         'sku': license_info.get('sku'),
         'support_level': license_info.get('support_level'),
+        'usage': license_info.get('usage'),
         'product_name': license_info.get('product_name'),
         'valid_key': license_info.get('valid_key'),
         'satellite': license_info.get('satellite'),
         'pool_id': license_info.get('pool_id'),
         'subscription_id': license_info.get('subscription_id'),
+        'account_number': license_info.get('account_number'),
+        'current_instances': license_info.get('current_instances'),
         'automated_instances': license_info.get('automated_instances'),
         'automated_since': license_info.get('automated_since'),
@@ -537,3 +588,25 @@ def workflow_job_template_node_table(since, full_path, **kwargs):
     ) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id
     ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
     return _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path)
+
+
+@register(
+    'host_metric_table', '1.0', format='csv', description=_('Host Metric data, incremental/full sync'), expensive=host_metric_slicing, full_sync_interval=30
+)
+def host_metric_table(since, full_path, until, **kwargs):
+    host_metric_query = '''COPY (SELECT main_hostmetric.id,
+                                        main_hostmetric.hostname,
+                                        main_hostmetric.first_automation,
+                                        main_hostmetric.last_automation,
+                                        main_hostmetric.last_deleted,
+                                        main_hostmetric.deleted,
+                                        main_hostmetric.automated_counter,
+                                        main_hostmetric.deleted_counter,
+                                        main_hostmetric.used_in_inventories
+                                 FROM main_hostmetric
+                                 WHERE (main_hostmetric.last_automation > '{}' AND main_hostmetric.last_automation <= '{}') OR
+                                       (main_hostmetric.last_deleted > '{}' AND main_hostmetric.last_deleted <= '{}')
+                                 ORDER BY main_hostmetric.id ASC) TO STDOUT WITH CSV HEADER'''.format(
+        since.isoformat(), until.isoformat(), since.isoformat(), until.isoformat()
+    )
+    return _copy_table(table='host_metric', query=host_metric_query, path=full_path)
```
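host_metric_slicing above ships the HostMetric table in windows of at most 30 days rather than one payload. A standalone sketch of that windowing, with plain datetimes in place of the model aggregates (illustrative, not AWX code):

```python
from datetime import datetime, timedelta

def month_slices(oldest, until, days=30):
    # yield consecutive (start, end) windows of at most `days` until `until`
    start = oldest
    while start < until:
        end = min(start + timedelta(days=days), until)
        yield (start, end)
        start = end

slices = list(month_slices(datetime(2023, 1, 1), datetime(2023, 3, 15)))
assert slices[0] == (datetime(2023, 1, 1), datetime(2023, 1, 31))
assert slices[-1][1] == datetime(2023, 3, 15)  # final window is clipped to 'until'
```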
@@ -52,7 +52,7 @@ def all_collectors():
|
||||
}
|
||||
|
||||
|
||||
def register(key, version, description=None, format='json', expensive=None):
|
||||
def register(key, version, description=None, format='json', expensive=None, full_sync_interval=None):
|
||||
"""
|
||||
A decorator used to register a function as a metric collector.
|
||||
|
||||
@@ -71,6 +71,7 @@ def register(key, version, description=None, format='json', expensive=None):
|
||||
f.__awx_analytics_description__ = description
|
||||
f.__awx_analytics_type__ = format
|
||||
f.__awx_expensive__ = expensive
|
||||
f.__awx_full_sync_interval__ = full_sync_interval
|
||||
return f
|
||||
|
||||
return decorate
|
||||
@@ -259,10 +260,19 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
|
||||
# These slicer functions may return a generator. The `since` parameter is
|
||||
# allowed to be None, and will fall back to LAST_ENTRIES[key] or to
|
||||
# LAST_GATHER (truncated appropriately to match the 4-week limit).
|
||||
#
|
||||
# Or it can force full table sync if interval is given
|
||||
kwargs = dict()
|
||||
full_sync_enabled = False
|
||||
if func.__awx_full_sync_interval__:
|
||||
last_full_sync = last_entries.get(f"{key}_full")
|
||||
full_sync_enabled = not last_full_sync or last_full_sync < now() - timedelta(days=func.__awx_full_sync_interval__)
|
||||
|
||||
kwargs['full_sync_enabled'] = full_sync_enabled
|
||||
if func.__awx_expensive__:
|
||||
slices = func.__awx_expensive__(key, since, until, last_gather)
|
||||
slices = func.__awx_expensive__(key, since, until, last_gather, **kwargs)
|
||||
else:
|
||||
slices = collectors.trivial_slicing(key, since, until, last_gather)
|
||||
slices = collectors.trivial_slicing(key, since, until, last_gather, **kwargs)
|
||||
|
||||
for start, end in slices:
|
||||
files = func(start, full_path=gather_dir, until=end)
|
||||
@@ -301,6 +311,12 @@ def gather(dest=None, module=None, subset=None, since=None, until=None, collecti
                        succeeded = False
                        logger.exception("Could not generate metric {}".format(filename))

            # update full sync timestamp if successfully shipped
            if full_sync_enabled and collection_type != 'dry-run' and succeeded:
                with disable_activity_stream():
                    last_entries[f"{key}_full"] = now()
                    settings.AUTOMATION_ANALYTICS_LAST_ENTRIES = json.dumps(last_entries, cls=DjangoJSONEncoder)

        if collection_type != 'dry-run':
            if succeeded:
                for fpath in tarfiles:
@@ -359,9 +375,7 @@ def ship(path):
        s.headers = get_awx_http_client_headers()
        s.headers.pop('Content-Type')
        with set_environ(**settings.AWX_TASK_ENV):
            response = s.post(
                url, files=files, verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31)
            )
            response = s.post(url, files=files, verify=settings.INSIGHTS_CERT_PATH, auth=(rh_user, rh_password), headers=s.headers, timeout=(31, 31))
        # Accept 2XX status_codes
        if response.status_code >= 300:
            logger.error('Upload failed with status {}, {}'.format(response.status_code, response.text))

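With the hard-coded CA bundle gone, upload verification follows `INSIGHTS_CERT_PATH` (whose default, added later in this set, is the same tls-ca-bundle.pem). Deployments fronted by a different CA can now point the setting elsewhere; a hypothetical local override:

    # /etc/tower/conf.d/insights.py -- illustrative override file, not part of this change
    INSIGHTS_CERT_PATH = "/etc/pki/tls/certs/custom-ca-bundle.pem"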
@@ -298,11 +298,13 @@ class Metrics:
        try:
            current_time = time.time()
            if current_time - self.previous_send_metrics.decode(self.conn) > self.send_metrics_interval:
                serialized_metrics = self.serialize_local_metrics()
                payload = {
                    'instance': self.instance_name,
                    'metrics': self.serialize_local_metrics(),
                    'metrics': serialized_metrics,
                }

                # store the serialized data locally as well, so that load_other_metrics will read it
                self.conn.set(root_key + '_instance_' + self.instance_name, serialized_metrics)
                emit_channel_notification("metrics", payload)

                self.previous_send_metrics.set(current_time)

@@ -822,6 +822,15 @@ register(
    category_slug='system',
)

register(
    'CLEANUP_HOST_METRICS_LAST_TS',
    field_class=fields.DateTimeField,
    label=_('Last cleanup date for HostMetrics'),
    allow_null=True,
    category=_('System'),
    category_slug='system',
)


def logging_validate(serializer, attrs):
    if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):

@@ -54,6 +54,12 @@ aim_inputs = {
            'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
        },
        {'id': 'object_query_format', 'label': _('Object Query Format'), 'type': 'string', 'default': 'Exact', 'choices': ['Exact', 'Regexp']},
        {
            'id': 'object_property',
            'label': _('Object Property'),
            'type': 'string',
            'help_text': _('The property of the object to return. Default: Content Ex: Username, Address, etc.'),
        },
        {
            'id': 'reason',
            'label': _('Reason'),
@@ -74,6 +80,7 @@ def aim_backend(**kwargs):
    app_id = kwargs['app_id']
    object_query = kwargs['object_query']
    object_query_format = kwargs['object_query_format']
    object_property = kwargs.get('object_property', '')
    reason = kwargs.get('reason', None)
    if webservice_id == '':
        webservice_id = 'AIMWebService'
@@ -98,7 +105,18 @@ def aim_backend(**kwargs):
        allow_redirects=False,
    )
    raise_for_status(res)
    return res.json()['Content']
    # CCP returns the property name capitalized, username is camel case
    # so we need to handle that case
    if object_property == '':
        object_property = 'Content'
    elif object_property.lower() == 'username':
        object_property = 'UserName'
    elif object_property not in res:
        raise KeyError('Property {} not found in object'.format(object_property))
    else:
        object_property = object_property.capitalize()

    return res.json()[object_property]


aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)

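The property handling above reduces to a small resolution table; this hypothetical helper mirrors the rules in `aim_backend` (including the capitalize-by-default branch) against an already-decoded CCP payload:

    def resolve_ccp_property(object_property, payload):
        """Mirror of the lookup rules above; `payload` stands in for the CCP JSON body."""
        if object_property == '':
            object_property = 'Content'  # default property
        elif object_property.lower() == 'username':
            object_property = 'UserName'  # CCP camel-cases this key
        elif object_property not in payload:
            raise KeyError('Property {} not found in object'.format(object_property))
        else:
            object_property = object_property.capitalize()
        return payload[object_property]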
@@ -35,8 +35,14 @@ dsv_inputs = {
            'type': 'string',
            'help_text': _('The secret path e.g. /test/secret1'),
        },
        {
            'id': 'secret_field',
            'label': _('Secret Field'),
            'help_text': _('The field to extract from the secret'),
            'type': 'string',
        },
    ],
    'required': ['tenant', 'client_id', 'client_secret', 'path'],
    'required': ['tenant', 'client_id', 'client_secret', 'path', 'secret_field'],
}

if settings.DEBUG:
@@ -52,5 +58,5 @@ if settings.DEBUG:
dsv_plugin = CredentialPlugin(
    'Thycotic DevOps Secrets Vault',
    dsv_inputs,
    lambda **kwargs: SecretsVault(**{k: v for (k, v) in kwargs.items() if k in [field['id'] for field in dsv_inputs['fields']]}).get_secret(kwargs['path']),
    lambda **kwargs: SecretsVault(**{k: v for (k, v) in kwargs.items() if k in [field['id'] for field in dsv_inputs['fields']]}).get_secret(kwargs['path'])['data'][kwargs['secret_field']],  # fmt: skip
)

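The practical effect: the DSV backend no longer hands back the whole secret body; it indexes into `['data'][secret_field]`. A hedged usage sketch (field ids taken from `dsv_inputs` above; all values are placeholders):

    # Illustrative lookup only
    value = dsv_plugin.backend(
        tenant='mytenant',
        client_id='example-client',
        client_secret='example-secret',
        path='/test/secret1',
        secret_field='password',
    )
    # ...equivalent to SecretsVault(...).get_secret('/test/secret1')['data']['password']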
@@ -1,7 +1,7 @@
from .plugin import CredentialPlugin
from django.utils.translation import gettext_lazy as _

from thycotic.secrets.server import PasswordGrantAuthorizer, SecretServer, ServerSecret
from thycotic.secrets.server import DomainPasswordGrantAuthorizer, PasswordGrantAuthorizer, SecretServer, ServerSecret

tss_inputs = {
    'fields': [
@@ -17,6 +17,12 @@ tss_inputs = {
            'help_text': _('The (Application) user username'),
            'type': 'string',
        },
        {
            'id': 'domain',
            'label': _('Domain'),
            'help_text': _('The (Application) user domain'),
            'type': 'string',
        },
        {
            'id': 'password',
            'label': _('Password'),
@@ -44,7 +50,10 @@ tss_inputs = {


def tss_backend(**kwargs):
    authorizer = PasswordGrantAuthorizer(kwargs['server_url'], kwargs['username'], kwargs['password'])
    if 'domain' in kwargs:
        authorizer = DomainPasswordGrantAuthorizer(kwargs['server_url'], kwargs['username'], kwargs['password'], kwargs['domain'])
    else:
        authorizer = PasswordGrantAuthorizer(kwargs['server_url'], kwargs['username'], kwargs['password'])
    secret_server = SecretServer(kwargs['server_url'], authorizer)
    secret_dict = secret_server.get_secret(kwargs['secret_id'])
    secret = ServerSecret(**secret_dict)

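The presence check on 'domain' selects the authorizer, so domain-joined Secret Server accounts work without disturbing the non-domain path. A sketch mirroring the call above (argument order as used in `tss_backend`; server URL and account names are placeholders):

    inputs = {'server_url': 'https://tss.example.com', 'username': 'svc_user', 'password': 's3cret', 'domain': 'EXAMPLE'}
    if 'domain' in inputs:
        authorizer = DomainPasswordGrantAuthorizer(inputs['server_url'], inputs['username'], inputs['password'], inputs['domain'])
    else:
        authorizer = PasswordGrantAuthorizer(inputs['server_url'], inputs['username'], inputs['password'])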
@@ -4,6 +4,8 @@ import select

from contextlib import contextmanager

from awx.settings.application_name import get_application_name

from django.conf import settings
from django.db import connection as pg_connection

@@ -83,10 +85,11 @@ def pg_bus_conn(new_connection=False):
    '''

    if new_connection:
        conf = settings.DATABASES['default']
        conn = psycopg2.connect(
            dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf.get("OPTIONS", {})
        )
        conf = settings.DATABASES['default'].copy()
        conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
        # Modify the application name to distinguish from other connections the process might use
        conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
        conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf['OPTIONS'])
        # Django connection.cursor().connection doesn't have autocommit=True on by default
        conn.set_session(autocommit=True)
    else:
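Copying the DATABASES entry (and its OPTIONS dict) before mutating it matters: without the `.copy()` calls, the listener would rewrite `application_name` for every other connection built from the same settings object. The resulting name is easy to inspect (output shape is illustrative; it depends on pid, argv, and host):

    from awx.settings.application_name import get_application_name

    # e.g. 'awx-4242-dispatcher_listener-awx-host-1', truncated to 63 chars
    print(get_application_name('awx-host-1', function='listener'))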
@@ -10,6 +10,7 @@ from django_guid import set_guid
from django_guid.utils import generate_guid

from awx.main.dispatch.worker import TaskWorker
from awx.main.utils.db import set_connection_name

logger = logging.getLogger('awx.main.dispatch.periodic')

@@ -21,6 +22,9 @@ class Scheduler(Scheduler):
def run():
    ppid = os.getppid()
    logger.warning('periodic beat started')

    set_connection_name('periodic')  # set application_name to distinguish from other dispatcher processes

    while True:
        if os.getppid() != ppid:
            # if the parent PID changes, this process has been orphaned

@@ -18,6 +18,7 @@ from django.conf import settings
from awx.main.dispatch.pool import WorkerPool
from awx.main.dispatch import pg_bus_conn
from awx.main.utils.common import log_excess_runtime
from awx.main.utils.db import set_connection_name

if 'run_callback_receiver' in sys.argv:
    logger = logging.getLogger('awx.main.commands.run_callback_receiver')
@@ -219,6 +220,7 @@ class BaseWorker(object):
    def work_loop(self, queue, finished, idx, *args):
        ppid = os.getppid()
        signal_handler = WorkerSignalHandler()
        set_connection_name('worker')  # set application_name to distinguish from other dispatcher processes
        while not signal_handler.kill_now:
            # if the parent PID changes, this process has been orphaned
            # via e.g., segfault or sigkill, we should exit too

22  awx/main/management/commands/cleanup_host_metrics.py  Normal file
@@ -0,0 +1,22 @@
from awx.main.models import HostMetric
from django.core.management.base import BaseCommand
from django.conf import settings


class Command(BaseCommand):
    """
    Run soft-deleting of HostMetrics
    """

    help = 'Run soft-deleting of HostMetrics'

    def add_arguments(self, parser):
        parser.add_argument('--months-ago', type=int, dest='months-ago', action='store', help='Threshold in months for soft-deleting')

    def handle(self, *args, **options):
        months_ago = options.get('months-ago') or None

        if not months_ago:
            months_ago = getattr(settings, 'CLEANUP_HOST_METRICS_THRESHOLD', 12)

        HostMetric.cleanup_task(months_ago)
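The command defers to the model, so the same cleanup can be driven from cron (`awx-manage cleanup_host_metrics --months-ago 18`) or exercised directly; a minimal sketch:

    from awx.main.models import HostMetric

    # Soft-delete metrics for hosts not automated in the last 18 months.
    HostMetric.cleanup_task(18)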
@@ -44,16 +44,18 @@ class Command(BaseCommand):

        for x in ig.instances.all():
            color = '\033[92m'
            end_color = '\033[0m'
            if x.capacity == 0 and x.node_type != 'hop':
                color = '\033[91m'
            if not x.enabled:
                color = '\033[90m[DISABLED] '
            if no_color:
                color = ''
                end_color = ''

            capacity = f' capacity={x.capacity}' if x.node_type != 'hop' else ''
            version = f" version={x.version or '?'}" if x.node_type != 'hop' else ''
            heartbeat = f' heartbeat="{x.last_seen:%Y-%m-%d %H:%M:%S}"' if x.capacity or x.node_type == 'hop' else ''
            print(f'\t{color}{x.hostname}{capacity} node_type={x.node_type}{version}{heartbeat}\033[0m')
            print(f'\t{color}{x.hostname}{capacity} node_type={x.node_type}{version}{heartbeat}{end_color}')

        print()

@@ -2,6 +2,8 @@ import json
import logging
import os
import time
import signal
import sys

from django.core.management.base import BaseCommand
from django.conf import settings
@@ -50,6 +52,11 @@ class Command(BaseCommand):
        }
        return json.dumps(payload)

    def notify_listener_and_exit(self, *args):
        with pg_bus_conn(new_connection=False) as conn:
            conn.notify('web_heartbeet', self.construct_payload(action='offline'))
        sys.exit(0)

    def do_hearbeat_loop(self):
        with pg_bus_conn(new_connection=True) as conn:
            while True:
@@ -57,10 +64,10 @@ class Command(BaseCommand):
                conn.notify('web_heartbeet', self.construct_payload())
                time.sleep(settings.BROADCAST_WEBSOCKET_BEACON_FROM_WEB_RATE_SECONDS)

    # TODO: Send a message with action=offline if we notice a SIGTERM or SIGINT
    # (wsrelay can use this to remove the node quicker)
    def handle(self, *arg, **options):
        self.print_banner()
        signal.signal(signal.SIGTERM, self.notify_listener_and_exit)
        signal.signal(signal.SIGINT, self.notify_listener_and_exit)

        # Note: We don't really try any reconnect logic to pg_notify here,
        # just let supervisor restart if we fail.

@@ -98,6 +98,7 @@ class Command(BaseCommand):
        try:
            executor = MigrationExecutor(connection)
            migrating = bool(executor.migration_plan(executor.loader.graph.leaf_nodes()))
            connection.close()  # Because of async nature, main loop will use new connection, so close this
        except Exception as exc:
            logger.warning(f'Error on startup of run_wsrelay (error: {exc}), retry in 10s...')
            time.sleep(10)

@@ -9,6 +9,8 @@ import re
import copy
import os.path
from urllib.parse import urljoin

import dateutil.relativedelta
import yaml

# Django
@@ -17,6 +19,7 @@ from django.db import models, connection
from django.utils.translation import gettext_lazy as _
from django.db import transaction
from django.core.exceptions import ValidationError
from django.urls import resolve
from django.utils.timezone import now
from django.db.models import Q

@@ -206,8 +209,14 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
    )

    def get_absolute_url(self, request=None):
        if self.kind == 'constructed':
            return reverse('api:constructed_inventory_detail', kwargs={'pk': self.pk}, request=request)
        if request is not None:
            # circular import
            from awx.api.urls.inventory import constructed_inventory_urls

            route = resolve(request.path_info)
            if any(route.url_name == url.name for url in constructed_inventory_urls):
                return reverse('api:constructed_inventory_detail', kwargs={'pk': self.pk}, request=request)

        return reverse('api:inventory_detail', kwargs={'pk': self.pk}, request=request)

    variables_dict = VarsDictProperty('variables')
@@ -881,6 +890,23 @@ class HostMetric(models.Model):
        self.deleted = False
        self.save(update_fields=['deleted'])

    @classmethod
    def cleanup_task(cls, months_ago):
        try:
            months_ago = int(months_ago)
            if months_ago <= 0:
                raise ValueError()

            last_automation_before = now() - dateutil.relativedelta.relativedelta(months=months_ago)

            logger.info(f'Cleanup [HostMetric]: soft-deleting records last automated before {last_automation_before}')
            HostMetric.active_objects.filter(last_automation__lt=last_automation_before).update(
                deleted=True, deleted_counter=models.F('deleted_counter') + 1, last_deleted=now()
            )
            settings.CLEANUP_HOST_METRICS_LAST_TS = now()
        except (TypeError, ValueError):
            logger.error(f"Cleanup [HostMetric]: months_ago({months_ago}) has to be a positive integer value")


class HostMetricSummaryMonthly(models.Model):
    """
@@ -1453,8 +1479,6 @@ class PluginFileInjector(object):
    def build_env(self, inventory_update, env, private_data_dir, private_data_files):
        injector_env = self.get_plugin_env(inventory_update, private_data_dir, private_data_files)
        env.update(injector_env)
        # Preserves current behavior for Ansible change in default planned for 2.10
        env['ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS'] = 'never'
        # All CLOUD_PROVIDERS sources implement as inventory plugin from collection
        env['ANSIBLE_INVENTORY_ENABLED'] = 'auto'
        return env

@@ -284,7 +284,7 @@ class JobNotificationMixin(object):
            'workflow_url',
            'scm_branch',
            'artifacts',
            {'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark' 'processed', 'rescued', 'ignored']},
            {'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark', 'processed', 'rescued', 'ignored']},
            {
                'summary_fields': [
                    {

@@ -639,7 +639,7 @@ class AWXReceptorJob:
    #
    RECEPTOR_CONFIG_STARTER = (
        {'local-only': None},
        {'log-level': 'debug'},
        {'log-level': 'info'},
        {'node': {'firewallrules': [{'action': 'reject', 'tonode': settings.CLUSTER_HOST_ID, 'toservice': 'control'}]}},
        {'control-service': {'service': 'control', 'filename': '/var/run/receptor/receptor.sock', 'permissions': '0660'}},
        {'work-command': {'worktype': 'local', 'command': 'ansible-runner', 'params': 'worker', 'allowruntimeparams': True}},

@@ -47,6 +47,7 @@ from awx.main.models import (
    Inventory,
    SmartInventoryMembership,
    Job,
    HostMetric,
)
from awx.main.constants import ACTIVE_STATES
from awx.main.dispatch.publish import task
@@ -378,6 +379,20 @@ def cleanup_images_and_files():
    _cleanup_images_and_files()


@task(queue=get_task_queuename)
def cleanup_host_metrics():
    from awx.conf.models import Setting
    from rest_framework.fields import DateTimeField

    last_cleanup = Setting.objects.filter(key='CLEANUP_HOST_METRICS_LAST_TS').first()
    last_time = DateTimeField().to_internal_value(last_cleanup.value) if last_cleanup and last_cleanup.value else None

    cleanup_interval_secs = getattr(settings, 'CLEANUP_HOST_METRICS_INTERVAL', 30) * 86400
    if not last_time or ((now() - last_time).total_seconds() > cleanup_interval_secs):
        months_ago = getattr(settings, 'CLEANUP_HOST_METRICS_THRESHOLD', 12)
        HostMetric.cleanup_task(months_ago)

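The beat entry added later in this set fires daily, but real work is gated on the stored timestamp: 30 days x 86400 s = 2,592,000 seconds between runs. The guard restated on its own, assuming a timezone-aware `last_time`:

    from datetime import datetime, timedelta, timezone

    def is_cleanup_due(last_time, interval_days=30):
        """Illustrative version of the check in cleanup_host_metrics()."""
        if last_time is None:
            return True
        return datetime.now(timezone.utc) - last_time > timedelta(days=interval_days)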
@task(queue=get_task_queuename)
def cluster_node_health_check(node):
    """
@@ -878,15 +893,8 @@ def _reconstruct_relationships(copy_mapping):


@task(queue=get_task_queuename)
def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, uuid, permission_check_func=None):
    sub_obj_list = cache.get(uuid)
    if sub_obj_list is None:
        logger.error('Deep copy {} from {} to {} failed unexpectedly.'.format(model_name, obj_pk, new_obj_pk))
        return

def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, permission_check_func=None):
    logger.debug('Deep copy {} from {} to {}.'.format(model_name, obj_pk, new_obj_pk))
    from awx.api.generics import CopyAPIView
    from awx.main.signals import disable_activity_stream

    model = getattr(importlib.import_module(model_module), model_name, None)
    if model is None:
@@ -898,6 +906,28 @@ def deep_copy_model_obj(model_module, model_name, obj_pk, new_obj_pk, user_pk, u
    except ObjectDoesNotExist:
        logger.warning("Object or user no longer exists.")
        return

    o2m_to_preserve = {}
    fields_to_preserve = set(getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []))

    for field in model._meta.get_fields():
        if field.name in fields_to_preserve:
            if field.one_to_many:
                try:
                    field_val = getattr(obj, field.name)
                except AttributeError:
                    continue
                o2m_to_preserve[field.name] = field_val

    sub_obj_list = []
    for o2m in o2m_to_preserve:
        for sub_obj in o2m_to_preserve[o2m].all():
            sub_model = type(sub_obj)
            sub_obj_list.append((sub_model.__module__, sub_model.__name__, sub_obj.pk))

    from awx.api.generics import CopyAPIView
    from awx.main.signals import disable_activity_stream

    with transaction.atomic(), ignore_inventory_computed_fields(), disable_activity_stream():
        copy_mapping = {}
        for sub_obj_setup in sub_obj_list:
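Net effect of the rework: the task no longer depends on a `cache.get(uuid)` handoff (which could silently expire); it rebuilds `sub_obj_list` from `FIELDS_TO_PRESERVE_AT_COPY` itself. The discovery step, restated as a standalone sketch:

    def collect_preserved_sub_objects(model, obj):
        """Illustrative restatement of the traversal above."""
        preserved = set(getattr(model, 'FIELDS_TO_PRESERVE_AT_COPY', []))
        sub_obj_list = []
        for field in model._meta.get_fields():
            if field.name in preserved and field.one_to_many:
                related = getattr(obj, field.name, None)
                if related is None:
                    continue
                for sub_obj in related.all():
                    sub_model = type(sub_obj)
                    sub_obj_list.append((sub_model.__module__, sub_model.__name__, sub_obj.pk))
        return sub_obj_list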
@@ -1,9 +1,8 @@
{
  "ANSIBLE_JINJA2_NATIVE": "True",
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "AZURE_CLIENT_ID": "fooo",
  "AZURE_CLOUD_ENVIRONMENT": "fooo",
  "AZURE_SECRET": "fooo",
  "AZURE_SUBSCRIPTION_ID": "fooo",
  "AZURE_TENANT": "fooo"
}

@@ -1,5 +1,4 @@
{
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "TOWER_HOST": "https://foo.invalid",
  "TOWER_PASSWORD": "fooo",
  "TOWER_USERNAME": "fooo",
@@ -10,4 +9,4 @@
  "CONTROLLER_USERNAME": "fooo",
  "CONTROLLER_OAUTH_TOKEN": "",
  "CONTROLLER_VERIFY_SSL": "False"
}

@@ -1,8 +1,7 @@
{
  "ANSIBLE_JINJA2_NATIVE": "True",
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "AWS_ACCESS_KEY_ID": "fooo",
  "AWS_SECRET_ACCESS_KEY": "fooo",
  "AWS_SECURITY_TOKEN": "fooo",
  "AWS_SESSION_TOKEN": "fooo"
}

@@ -1,6 +1,5 @@
{
  "ANSIBLE_JINJA2_NATIVE": "True",
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "GCE_CREDENTIALS_FILE_PATH": "{{ file_reference }}",
  "GOOGLE_APPLICATION_CREDENTIALS": "{{ file_reference }}",
  "GCP_AUTH_KIND": "serviceaccount",

@@ -1,5 +1,4 @@
{
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "INSIGHTS_USER": "fooo",
  "INSIGHTS_PASSWORD": "fooo"
}

@@ -1,4 +1,3 @@
{
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "OS_CLIENT_CONFIG_FILE": "{{ file_reference }}"
}

@@ -1,7 +1,6 @@
{
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "OVIRT_INI_PATH": "{{ file_reference }}",
  "OVIRT_PASSWORD": "fooo",
  "OVIRT_URL": "https://foo.invalid",
  "OVIRT_USERNAME": "fooo"
}

@@ -1,6 +1,5 @@
{
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "FOREMAN_PASSWORD": "fooo",
  "FOREMAN_SERVER": "https://foo.invalid",
  "FOREMAN_USER": "fooo"
}

@@ -1,7 +1,6 @@
{
  "ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
  "VMWARE_HOST": "https://foo.invalid",
  "VMWARE_PASSWORD": "fooo",
  "VMWARE_USER": "fooo",
  "VMWARE_VALIDATE_CERTS": "False"
}

86  awx/main/tests/functional/api/test_analytics.py  Normal file
@@ -0,0 +1,86 @@
import pytest
import requests

from awx.api.views.analytics import AnalyticsGenericView, MissingSettings, AUTOMATION_ANALYTICS_API_URL_PATH
from django.test.utils import override_settings

from awx.main.utils import get_awx_version
from django.utils import translation


class TestAnalyticsGenericView:
    @pytest.mark.parametrize(
        "existing_headers,expected_headers",
        [
            ({}, {}),
            ({'Hey': 'There'}, {}),  # We don't forward just any headers
            ({'Content-Type': 'text/html', 'Content-Length': '12'}, {'Content-Type': 'text/html', 'Content-Length': '12'}),
            # Requests will auto-add the following headers (so we don't need to test them): 'Accept-Encoding', 'User-Agent', 'Accept'
        ],
    )
    def test__request_headers(self, existing_headers, expected_headers):
        expected_headers['X-Rh-Analytics-Source'] = 'controller'
        expected_headers['X-Rh-Analytics-Source-Version'] = get_awx_version()
        expected_headers['Accept-Language'] = translation.get_language()

        request = requests.session()
        request.headers.update(existing_headers)
        assert set(expected_headers.items()).issubset(set(AnalyticsGenericView._request_headers(request).items()))

    @pytest.mark.parametrize(
        "path,expected_path",
        [
            ('A/B', f'{AUTOMATION_ANALYTICS_API_URL_PATH}/A/B'),
            ('B', f'{AUTOMATION_ANALYTICS_API_URL_PATH}/B'),
            ('/a/b/c/analytics/reports/my_slug', f'{AUTOMATION_ANALYTICS_API_URL_PATH}/reports/my_slug'),
            ('/a/b/c/analytics/', f'{AUTOMATION_ANALYTICS_API_URL_PATH}/'),
            ('/a/b/c/analytics', f'{AUTOMATION_ANALYTICS_API_URL_PATH}//a/b/c/analytics'),  # Because there is no ending / on analytics we get a weird condition
            ('/a/b/c/analytics/', f'{AUTOMATION_ANALYTICS_API_URL_PATH}/'),
        ],
    )
    @pytest.mark.django_db
    def test__get_analytics_path(self, path, expected_path):
        assert AnalyticsGenericView._get_analytics_path(path) == expected_path

    @pytest.mark.django_db
    def test__get_analytics_url_no_url(self):
        with override_settings(AUTOMATION_ANALYTICS_URL=None):
            with pytest.raises(MissingSettings):
                agw = AnalyticsGenericView()
                agw._get_analytics_url('A')

    @pytest.mark.parametrize(
        "request_path,ending_url",
        [
            ('A', 'A'),
            ('A/B', 'A/B'),
            ('A/B/analytics/', ''),  # we split on analytics but because there is nothing after
            ('A/B/analytics/report', 'report'),
            ('A/B/analytics/report/slug', 'report/slug'),
        ],
    )
    @pytest.mark.django_db
    def test__get_analytics_url(self, request_path, ending_url):
        base_url = 'http://testing'
        with override_settings(AUTOMATION_ANALYTICS_URL=base_url):
            agw = AnalyticsGenericView()
            assert agw._get_analytics_url(request_path) == f'{base_url}{AUTOMATION_ANALYTICS_API_URL_PATH}/{ending_url}'

    @pytest.mark.parametrize(
        "setting_name,setting_value,raises",
        [
            ('INSIGHTS_TRACKING_STATE', None, True),
            ('INSIGHTS_TRACKING_STATE', False, True),
            ('INSIGHTS_TRACKING_STATE', True, False),
            ('INSIGHTS_TRACKING_STATE', 'Steve', False),
            ('INSIGHTS_TRACKING_STATE', 1, False),
            ('INSIGHTS_TRACKING_STATE', '', True),
        ],
    )
    @pytest.mark.django_db
    def test__get_setting(self, setting_name, setting_value, raises):
        with override_settings(**{setting_name: setting_value}):
            if raises:
                with pytest.raises(MissingSettings):
                    AnalyticsGenericView._get_setting(setting_name, False, None)
            else:
                assert AnalyticsGenericView._get_setting(setting_name, False, None) == setting_value
23  awx/main/tests/functional/api/test_application_name.py  Normal file
@@ -0,0 +1,23 @@
import pytest
from awx.settings.application_name import get_service_name, set_application_name


@pytest.mark.parametrize(
    'argv,result',
    (
        ([], None),
        (['-m'], None),
        (['-m', 'python'], None),
        (['-m', 'python', 'manage'], None),
        (['-m', 'python', 'manage', 'a'], 'a'),
        (['-m', 'python', 'manage', 'b', 'a'], 'b'),
        (['-m', 'python', 'manage', 'run_something', 'b', 'a'], 'something'),
    ),
)
def test_get_service_name(argv, result):
    assert get_service_name(argv) == result


@pytest.mark.parametrize('DATABASES,CLUSTER_ID,function', (({}, 12, ''), ({'default': {'ENGINE': 'sqllite3'}}, 12, '')))
def test_set_application_name(DATABASES, CLUSTER_ID, function):
    set_application_name(DATABASES, CLUSTER_ID, function)
@@ -680,3 +680,22 @@ class TestConstructedInventory:
        inv_src = constructed_inventory.inventory_sources.first()
        assert inv_src.update_cache_timeout == 55
        assert inv_src.limit == 'foobar'

    def test_get_absolute_url_for_constructed_inventory(self, constructed_inventory, admin_user, get):
        """
        If we are using the normal inventory API endpoint to look at a
        constructed inventory, then we should get a normal inventory API route
        back. If we are accessing it via the special constructed inventory
        endpoint, then we should get that back.
        """

        url_const = reverse('api:constructed_inventory_detail', kwargs={'pk': constructed_inventory.pk})
        url_inv = reverse('api:inventory_detail', kwargs={'pk': constructed_inventory.pk})

        const_r = get(url=url_const, user=admin_user, expect=200)
        inv_r = get(url=url_inv, user=admin_user, expect=200)
        assert const_r.data['url'] == url_const
        assert inv_r.data['url'] == url_inv
        assert inv_r.data['url'] != const_r.data['url']
        assert inv_r.data['related']['constructed_url'] == url_const
        assert const_r.data['related']['constructed_url'] == url_const

@@ -3,7 +3,7 @@ import pytest
# AWX
from awx.api.serializers import JobTemplateSerializer
from awx.api.versioning import reverse
from awx.main.models import Job, JobTemplate, CredentialType, WorkflowJobTemplate, Organization, Project
from awx.main.models import Job, JobTemplate, CredentialType, WorkflowJobTemplate, Organization, Project, Inventory
from awx.main.migrations import _save_password_keys as save_password_keys

# Django
@@ -353,3 +353,19 @@ def test_job_template_branch_prompt_error(project, inventory, post, admin_user):
        expect=400,
    )
    assert 'Project does not allow overriding branch' in str(r.data['ask_scm_branch_on_launch'])


@pytest.mark.django_db
def test_job_template_missing_inventory(project, inventory, admin_user, post):
    jt = JobTemplate.objects.create(
        name='test-jt', inventory=inventory, ask_inventory_on_launch=True, project=project, playbook='helloworld.yml', host_config_key='abcd'
    )
    Inventory.objects.get(pk=inventory.pk).delete()
    r = post(
        url=reverse('api:job_template_callback', kwargs={'pk': jt.pk}),
        data={'host_config_key': 'abcd'},
        user=admin_user,
        expect=400,
    )
    assert r.status_code == 400
    assert "Cannot start automatically, an inventory is required." in str(r.data)

@@ -153,3 +153,13 @@ def test_post_org_approval_notification(get, post, admin, notification_template,
    response = get(url, admin)
    assert response.status_code == 200
    assert len(response.data['results']) == 1


@pytest.mark.django_db
def test_post_wfj_notification(get, post, admin, workflow_job, notification):
    workflow_job.notifications.add(notification)
    workflow_job.save()
    url = reverse("api:workflow_job_notifications_list", kwargs={'pk': workflow_job.pk})
    response = get(url, admin)
    assert response.status_code == 200
    assert len(response.data['results']) == 1

@@ -329,3 +329,21 @@ def test_galaxy_credential_association(alice, admin, organization, post, get):
        'Public Galaxy 4',
        'Public Galaxy 5',
    ]


@pytest.mark.django_db
def test_org_admin_credential_count(org_admin, admin, organization, post, get):
    galaxy = CredentialType.defaults['galaxy_api_token']()
    galaxy.save()

    for i in range(3):
        cred = Credential.objects.create(credential_type=galaxy, name=f'test_{i}', inputs={'url': 'https://galaxy.ansible.com/'})
        url = reverse('api:organization_galaxy_credentials_list', kwargs={'pk': organization.pk})
        post(url, {'associate': True, 'id': cred.pk}, user=admin, expect=204)
    # org admin should see all associated galaxy credentials
    resp = get(url, user=org_admin)
    assert resp.data['count'] == 3
    # removing one to validate new count
    post(url, {'disassociate': True, 'id': Credential.objects.get(name='test_1').pk}, user=admin, expect=204)
    resp_new = get(url, user=org_admin)
    assert resp_new.data['count'] == 2

75  awx/main/tests/functional/api/test_serializers.py  Normal file
@@ -0,0 +1,75 @@
import pytest

from django.test.utils import override_settings

from rest_framework.serializers import ValidationError

from awx.api.serializers import UserSerializer
from django.contrib.auth.models import User


@pytest.mark.parametrize(
    "password,min_length,min_digits,min_upper,min_special,expect_error",
    [
        # Test length
        ("a", 1, 0, 0, 0, False),
        ("a", 2, 0, 0, 0, True),
        ("aa", 2, 0, 0, 0, False),
        ("aaabcDEF123$%^", 2, 0, 0, 0, False),
        # Test digits
        ("a", 0, 1, 0, 0, True),
        ("1", 0, 1, 0, 0, False),
        ("1", 0, 2, 0, 0, True),
        ("12", 0, 2, 0, 0, False),
        ("12abcDEF123$%^", 0, 2, 0, 0, False),
        # Test upper
        ("a", 0, 0, 1, 0, True),
        ("A", 0, 0, 1, 0, False),
        ("A", 0, 0, 2, 0, True),
        ("AB", 0, 0, 2, 0, False),
        ("ABabcDEF123$%^", 0, 0, 2, 0, False),
        # Test special
        ("a", 0, 0, 0, 1, True),
        ("!", 0, 0, 0, 1, False),
        ("!", 0, 0, 0, 2, True),
        ("!@", 0, 0, 0, 2, False),
        ("!@abcDEF123$%^", 0, 0, 0, 2, False),
    ],
)
@pytest.mark.django_db
def test_validate_password_rules(password, min_length, min_digits, min_upper, min_special, expect_error):
    user_serializer = UserSerializer()

    # First test password with no params, this should always pass
    try:
        user_serializer.validate_password(password)
    except ValidationError:
        assert False, f"Password {password} should not have validation issue if no params are used"

    with override_settings(
        LOCAL_PASSWORD_MIN_LENGTH=min_length, LOCAL_PASSWORD_MIN_DIGITS=min_digits, LOCAL_PASSWORD_MIN_UPPER=min_upper, LOCAL_PASSWORD_MIN_SPECIAL=min_special
    ):
        if expect_error:
            with pytest.raises(ValidationError):
                user_serializer.validate_password(password)
        else:
            try:
                user_serializer.validate_password(password)
            except ValidationError:
                assert False, "validate_password raised an unexpected exception"


@pytest.mark.django_db
def test_validate_password_too_long():
    password_max_length = User._meta.get_field('password').max_length
    password = "x" * password_max_length

    user_serializer = UserSerializer()
    try:
        user_serializer.validate_password(password)
    except ValidationError:
        assert False, f"Password {password} should not have validation"

    password = f"{password}x"
    with pytest.raises(ValidationError):
        user_serializer.validate_password(password)
54  awx/main/tests/functional/api/test_workflow_job.py  Normal file
@@ -0,0 +1,54 @@
import pytest


from awx.api.versioning import reverse


@pytest.mark.django_db
@pytest.mark.parametrize(
    "is_admin, status",
    [
        [True, 201],
        [False, 403],
    ],  # if they're a WFJ admin, they get a 201 # if they're not a WFJ *nor* org admin, they get a 403
)
def test_workflow_job_relaunch(workflow_job, post, admin_user, alice, is_admin, status):
    url = reverse("api:workflow_job_relaunch", kwargs={'pk': workflow_job.pk})
    if is_admin:
        post(url, user=admin_user, expect=status)
    else:
        post(url, user=alice, expect=status)


@pytest.mark.django_db
def test_workflow_job_relaunch_failure(workflow_job, post, admin_user):
    workflow_job.is_sliced_job = True
    workflow_job.job_template = None
    workflow_job.save()
    url = reverse("api:workflow_job_relaunch", kwargs={'pk': workflow_job.pk})
    post(url, user=admin_user, expect=400)


@pytest.mark.django_db
def test_workflow_job_relaunch_not_inventory_failure(workflow_job, post, admin_user):
    workflow_job.is_sliced_job = True
    workflow_job.inventory = None
    workflow_job.save()
    url = reverse("api:workflow_job_relaunch", kwargs={'pk': workflow_job.pk})
    post(url, user=admin_user, expect=400)


@pytest.mark.django_db
@pytest.mark.parametrize(
    "is_admin, status",
    [
        [True, 202],
        [False, 403],
    ],  # if they're a WFJ admin, they get a 202 # if they're not a WFJ *nor* org admin, they get a 403
)
def test_workflow_job_cancel(workflow_job, post, admin_user, alice, is_admin, status):
    url = reverse("api:workflow_job_cancel", kwargs={'pk': workflow_job.pk})
    if is_admin:
        post(url, user=admin_user, expect=status)
    else:
        post(url, user=alice, expect=status)
@@ -743,6 +743,30 @@ def system_job_factory(system_job_template, admin):
    return factory


@pytest.fixture
def wfjt(workflow_job_template_factory, organization):
    objects = workflow_job_template_factory('test_workflow', organization=organization, persisted=True)
    return objects.workflow_job_template


@pytest.fixture
def wfjt_with_nodes(workflow_job_template_factory, organization, job_template):
    objects = workflow_job_template_factory(
        'test_workflow', organization=organization, workflow_job_template_nodes=[{'unified_job_template': job_template}], persisted=True
    )
    return objects.workflow_job_template


@pytest.fixture
def wfjt_node(wfjt_with_nodes):
    return wfjt_with_nodes.workflow_job_template_nodes.all()[0]


@pytest.fixture
def workflow_job(wfjt):
    return wfjt.workflow_jobs.create(name='test_workflow')


def dumps(value):
    return DjangoJSONEncoder().encode(value)

@@ -123,6 +123,24 @@ def test_inventory_copy(inventory, group_factory, post, get, alice, organization
    assert set(group_2_2_copy.hosts.all()) == set()


@pytest.mark.django_db
@pytest.mark.parametrize(
    "is_admin, can_copy, status",
    [
        [True, True, 200],
        [False, False, 200],
    ],
)
def test_workflow_job_template_copy_access(get, admin_user, alice, workflow_job_template, is_admin, can_copy, status):
    url = reverse('api:workflow_job_template_copy', kwargs={'pk': workflow_job_template.pk})
    if is_admin:
        response = get(url, user=admin_user, expect=status)
    else:
        workflow_job_template.organization.auditor_role.members.add(alice)
        response = get(url, user=alice, expect=status)
    assert response.data['can_copy'] == can_copy


@pytest.mark.django_db
def test_workflow_job_template_copy(workflow_job_template, post, get, admin, organization):
    '''

@@ -26,12 +26,12 @@ def test_python_and_js_licenses():
        return (is_gpl, is_lgpl)

    def find_embedded_source_version(path, name):
        for entry in os.listdir(path):
            # Check variations of '-' and '_' in filenames due to python
            for fname in [name, name.replace('-', '_')]:
                if entry.startswith(fname) and entry.endswith('.tar.gz'):
                    v = entry.split(name + '-')[1].split('.tar.gz')[0]
                    return v
        files = os.listdir(path)
        tgz_files = [f for f in files if f.endswith('.tar.gz')]
        for tgz in tgz_files:
            pkg_name = tgz.split('-')[0].split('_')[0]
            if pkg_name == name:
                return tgz.split('-')[1].split('.tar.gz')[0]
        return None

    list = {}

@@ -218,3 +218,31 @@ def test_webhook_notification_pointed_to_a_redirect_launch_endpoint(post, admin,
    )

    assert n1.send("", n1.messages.get("success").get("body")) == 1


@pytest.mark.django_db
def test_update_notification_template(admin, notification_template):
    notification_template.messages['workflow_approval'] = {
        "running": {
            "message": None,
            "body": None,
        }
    }
    notification_template.save()

    workflow_approval_message = {
        "approved": {
            "message": None,
            "body": None,
        },
        "running": {
            "message": "test-message",
            "body": None,
        },
    }
    notification_template.messages['workflow_approval'] = workflow_approval_message
    notification_template.save()

    subevents = sorted(notification_template.messages["workflow_approval"].keys())
    assert subevents == ["approved", "running"]
    assert notification_template.messages['workflow_approval'] == workflow_approval_message

@@ -13,30 +13,6 @@ from rest_framework.exceptions import PermissionDenied
from awx.main.models import InventorySource, JobLaunchConfig


@pytest.fixture
def wfjt(workflow_job_template_factory, organization):
    objects = workflow_job_template_factory('test_workflow', organization=organization, persisted=True)
    return objects.workflow_job_template


@pytest.fixture
def wfjt_with_nodes(workflow_job_template_factory, organization, job_template):
    objects = workflow_job_template_factory(
        'test_workflow', organization=organization, workflow_job_template_nodes=[{'unified_job_template': job_template}], persisted=True
    )
    return objects.workflow_job_template


@pytest.fixture
def wfjt_node(wfjt_with_nodes):
    return wfjt_with_nodes.workflow_job_template_nodes.all()[0]


@pytest.fixture
def workflow_job(wfjt):
    return wfjt.workflow_jobs.create(name='test_workflow')


@pytest.mark.django_db
class TestWorkflowJobTemplateAccess:
    def test_random_user_no_edit(self, wfjt, rando):

28  awx/main/tests/settings_for_test.py  Normal file
@@ -0,0 +1,28 @@
# Python
from unittest import mock
import uuid

# patch python-ldap
with mock.patch('__main__.__builtins__.dir', return_value=[]):
    import ldap  # NOQA

# Load development settings for base variables.
from awx.settings.development import *  # NOQA

# Some things make decisions based on settings.SETTINGS_MODULE, so this is done for that
SETTINGS_MODULE = 'awx.settings.development'

# Use SQLite for unit tests instead of PostgreSQL. If the lines below are
# commented out, Django will create the test_awx-dev database in PostgreSQL to
# run unit tests.
CACHES = {'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-{}'.format(str(uuid.uuid4()))}}
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'),  # noqa
        'TEST': {
            # Test database cannot be :memory: for inventory tests.
            'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3')  # noqa
        },
    }
}
@@ -50,6 +50,7 @@ class TestApiRootView:
            'activity_stream',
            'workflow_job_templates',
            'workflow_jobs',
            'analytics',
        ]
        view = ApiVersionRootView()
        ret = view.get(mocker.MagicMock())

@@ -1,8 +1,32 @@
from split_settings.tools import include


LOCAL_SETTINGS = (
    'ALLOWED_HOSTS',
    'BROADCAST_WEBSOCKET_PORT',
    'BROADCAST_WEBSOCKET_VERIFY_CERT',
    'BROADCAST_WEBSOCKET_PROTOCOL',
    'BROADCAST_WEBSOCKET_SECRET',
    'DATABASES',
    'CACHES',
    'DEBUG',
    'NAMED_URL_GRAPH',
)


def test_postprocess_auth_basic_enabled():
    locals().update({'__file__': __file__})

    include('../../../settings/defaults.py', scope=locals())
    assert 'awx.api.authentication.LoggedBasicAuthentication' in locals()['REST_FRAMEWORK']['DEFAULT_AUTHENTICATION_CLASSES']


def test_default_settings():
    from django.conf import settings

    for k in dir(settings):
        if k not in settings.DEFAULTS_SNAPSHOT or k in LOCAL_SETTINGS:
            continue
        default_val = getattr(settings.default_settings, k, None)
        snapshot_val = settings.DEFAULTS_SNAPSHOT[k]
        assert default_val == snapshot_val, f'Setting for {k} does not match shapshot:\nsnapshot: {snapshot_val}\ndefault: {default_val}'

@@ -3,6 +3,9 @@

from itertools import chain

from awx.settings.application_name import set_application_name
from django.conf import settings


def get_all_field_names(model):
    # Implements compatibility with _meta.get_all_field_names
@@ -18,3 +21,7 @@ def get_all_field_names(model):
            )
        )
    )


def set_connection_name(function):
    set_application_name(settings.DATABASES, settings.CLUSTER_HOST_ID, function=function)

@@ -170,6 +170,8 @@ class Licenser(object):

                license.setdefault('sku', sub['pool']['productId'])
                license.setdefault('subscription_name', sub['pool']['productName'])
                license.setdefault('subscription_id', sub['pool']['subscriptionId'])
                license.setdefault('account_number', sub['pool']['accountNumber'])
                license.setdefault('pool_id', sub['pool']['id'])
                license.setdefault('product_name', sub['pool']['productName'])
                license.setdefault('valid_key', True)
@@ -185,6 +187,14 @@ class Licenser(object):
                license['instance_count'] = license.get('instance_count', 0) + instances
                license['subscription_name'] = re.sub(r'[\d]* Managed Nodes', '%d Managed Nodes' % license['instance_count'], license['subscription_name'])

                license['support_level'] = ''
                license['usage'] = ''
                for attr in sub['pool'].get('productAttributes', []):
                    if attr.get('name') == 'support_level':
                        license['support_level'] = attr.get('value')
                    elif attr.get('name') == 'usage':
                        license['usage'] = attr.get('value')

        if not license:
            logger.error("No valid subscriptions found in manifest")
        self._attrs.update(license)
@@ -277,7 +287,10 @@ class Licenser(object):
            license['productId'] = sub['product_id']
            license['quantity'] = int(sub['quantity'])
            license['support_level'] = sub['support_level']
            license['usage'] = sub['usage']
            license['subscription_name'] = sub['name']
            license['subscriptionId'] = sub['subscription_id']
            license['accountNumber'] = sub['account_number']
            license['id'] = sub['upstream_pool_id']
            license['endDate'] = sub['end_date']
            license['productName'] = "Red Hat Ansible Automation"
@@ -304,7 +317,7 @@ class Licenser(object):
    def generate_license_options_from_entitlements(self, json):
        from dateutil.parser import parse

        ValidSub = collections.namedtuple('ValidSub', 'sku name support_level end_date trial quantity pool_id satellite')
        ValidSub = collections.namedtuple('ValidSub', 'sku name support_level end_date trial quantity pool_id satellite subscription_id account_number usage')
        valid_subs = []
        for sub in json:
            satellite = sub.get('satellite')
@@ -333,15 +346,23 @@ class Licenser(object):
            sku = sub['productId']
            trial = sku.startswith('S')  # i.e.,, SER/SVC
            support_level = ''
            usage = ''
            pool_id = sub['id']
            subscription_id = sub['subscriptionId']
            account_number = sub['accountNumber']
            if satellite:
                support_level = sub['support_level']
                usage = sub['usage']
            else:
                for attr in sub.get('productAttributes', []):
                    if attr.get('name') == 'support_level':
                        support_level = attr.get('value')
                    elif attr.get('name') == 'usage':
                        usage = attr.get('value')

            valid_subs.append(ValidSub(sku, sub['productName'], support_level, end_date, trial, quantity, pool_id, satellite))
            valid_subs.append(
                ValidSub(sku, sub['productName'], support_level, end_date, trial, quantity, pool_id, satellite, subscription_id, account_number, usage)
            )

        if valid_subs:
            licenses = []
@@ -350,6 +371,7 @@ class Licenser(object):
                license._attrs['instance_count'] = int(sub.quantity)
                license._attrs['sku'] = sub.sku
                license._attrs['support_level'] = sub.support_level
                license._attrs['usage'] = sub.usage
                license._attrs['license_type'] = 'enterprise'
                if sub.trial:
                    license._attrs['trial'] = True
@@ -364,6 +386,8 @@ class Licenser(object):
                license._attrs['valid_key'] = True
                license.update(license_date=int(sub.end_date.strftime('%s')))
                license.update(pool_id=sub.pool_id)
                license.update(subscription_id=sub.subscription_id)
                license.update(account_number=sub.account_number)
                licenses.append(license._attrs.copy())
            return licenses

@@ -388,9 +412,13 @@ class Licenser(object):
        if subscription_model == SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS:
            automated_instances = HostMetric.active_objects.count()
            first_host = HostMetric.active_objects.only('first_automation').order_by('first_automation').first()
            attrs['deleted_instances'] = HostMetric.objects.filter(deleted=True).count()
            attrs['reactivated_instances'] = HostMetric.active_objects.filter(deleted_counter__gte=1).count()
        else:
            automated_instances = HostMetric.objects.count()
            automated_instances = 0
            first_host = HostMetric.objects.only('first_automation').order_by('first_automation').first()
            attrs['deleted_instances'] = 0
            attrs['reactivated_instances'] = 0

        if first_host:
            automated_since = int(first_host.first_automation.timestamp())

35  awx/settings/application_name.py  Normal file
@@ -0,0 +1,35 @@
import os
import sys


def get_service_name(argv):
    '''
    Return best-effort guess as to the name of this service
    '''
    for arg in argv:
        if arg == '-m':
            continue
        if 'python' in arg:
            continue
        if 'manage' in arg:
            continue
        if arg.startswith('run_'):
            return arg[len('run_') :]
        return arg


def get_application_name(CLUSTER_HOST_ID, function=''):
    if function:
        function = f'_{function}'
    return f'awx-{os.getpid()}-{get_service_name(sys.argv)}{function}-{CLUSTER_HOST_ID}'[:63]


def set_application_name(DATABASES, CLUSTER_HOST_ID, function=''):
    # If settings files were not properly passed DATABASES could be {} at which point we don't need to set the app name.
    if not DATABASES or 'default' not in DATABASES:
        return

    if 'sqlite3' in DATABASES['default']['ENGINE']:
        return
    options_dict = DATABASES['default'].setdefault('OPTIONS', dict())
    options_dict['application_name'] = get_application_name(CLUSTER_HOST_ID, function)
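The heuristic skips interpreter noise ('-m', anything containing 'python' or 'manage') and strips a 'run_' prefix, matching the parametrized tests earlier in this set. For example:

    import sys
    from awx.settings.application_name import get_service_name

    get_service_name(['-m', 'python', 'manage', 'run_dispatcher'])  # -> 'dispatcher'
    get_service_name(sys.argv)  # best-effort name for the current process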
@@ -1,24 +1,16 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
import base64
import os
import re  # noqa
import sys
import tempfile
import socket
from datetime import timedelta


if "pytest" in sys.modules:
    IS_TESTING_MODE = True
    from unittest import mock

    with mock.patch('__main__.__builtins__.dir', return_value=[]):
        import ldap
else:
    IS_TESTING_MODE = False
    import ldap
# python-ldap
import ldap


DEBUG = True
@@ -475,6 +467,7 @@ CELERYBEAT_SCHEDULE = {
    'receptor_reaper': {'task': 'awx.main.tasks.system.awx_receptor_workunit_reaper', 'schedule': timedelta(seconds=60)},
    'send_subsystem_metrics': {'task': 'awx.main.analytics.analytics_tasks.send_subsystem_metrics', 'schedule': timedelta(seconds=20)},
    'cleanup_images': {'task': 'awx.main.tasks.system.cleanup_images_and_files', 'schedule': timedelta(hours=3)},
    'cleanup_host_metrics': {'task': 'awx.main.tasks.system.cleanup_host_metrics', 'schedule': timedelta(days=1)},
}

# Django Caching Configuration
@@ -733,10 +726,10 @@ CONTROLLER_INSTANCE_ID_VAR = 'remote_tower_id'
# ---------------------
# ----- Foreman -----
# ---------------------
SATELLITE6_ENABLED_VAR = 'foreman_enabled'
SATELLITE6_ENABLED_VAR = 'foreman_enabled,foreman.enabled'
SATELLITE6_ENABLED_VALUE = 'True'
SATELLITE6_EXCLUDE_EMPTY_GROUPS = True
SATELLITE6_INSTANCE_ID_VAR = 'foreman_id'
SATELLITE6_INSTANCE_ID_VAR = 'foreman_id,foreman.id'
# SATELLITE6_GROUP_PREFIX and SATELLITE6_GROUP_PATTERNS defined in source vars

# ----------------
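Both Satellite settings now carry a comma-separated fallback list, with the dotted form presumably resolved as a nested fact lookup. This resolution sketch is an assumption about how the inventory importer consumes the list, not code from this change:

    def first_present(csv_names, facts):
        """Hypothetical: return the first variable from an 'a,b.c' style list found in `facts`."""
        for name in csv_names.split(','):
            value = facts
            for part in name.split('.'):
                if not isinstance(value, dict) or part not in value:
                    value = None
                    break
                value = value[part]
            if value is not None:
                return value
        return None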
@@ -792,6 +785,7 @@ INSIGHTS_URL_BASE = "https://example.org"
INSIGHTS_AGENT_MIME = 'application/example'
# See https://github.com/ansible/awx-facts-playbooks
INSIGHTS_SYSTEM_ID_FILE = '/etc/redhat-access-insights/machine-id'
INSIGHTS_CERT_PATH = "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem"

TOWER_SETTINGS_MANIFEST = {}

@@ -871,6 +865,7 @@ LOGGING = {
    'awx.main.consumers': {'handlers': ['console', 'file', 'tower_warnings'], 'level': 'INFO'},
    'awx.main.rsyslog_configurer': {'handlers': ['rsyslog_configurer']},
    'awx.main.cache_clear': {'handlers': ['cache_clear']},
    'awx.main.heartbeet': {'handlers': ['heartbeet']},
    'awx.main.wsrelay': {'handlers': ['wsrelay']},
    'awx.main.commands.inventory_import': {'handlers': ['inventory_import'], 'propagate': False},
    'awx.main.tasks': {'handlers': ['task_system', 'external_logger'], 'propagate': False},
@@ -904,6 +899,7 @@ handler_config = {
    'job_lifecycle': {'filename': 'job_lifecycle.log', 'formatter': 'job_lifecycle'},
    'rsyslog_configurer': {'filename': 'rsyslog_configurer.log'},
    'cache_clear': {'filename': 'cache_clear.log'},
    'heartbeet': {'filename': 'heartbeet.log'},
}

# If running on a VM, we log to files. When running in a container, we log to stdout.
@@ -1050,3 +1046,10 @@ UI_NEXT = True
# - '': No model - Subscription not counted from Host Metrics
# - 'unique_managed_hosts': Compliant = automated - deleted hosts (using /api/v2/host_metrics/)
SUBSCRIPTION_USAGE_MODEL = ''

# Host metrics cleanup - last time of the cleanup run (soft-deleting records)
CLEANUP_HOST_METRICS_LAST_TS = None
# Host metrics cleanup - minimal interval between two cleanups in days
CLEANUP_HOST_METRICS_INTERVAL = 30  # days
# Host metrics cleanup - soft-delete HostMetric records with last_automation < [threshold] (in months)
CLEANUP_HOST_METRICS_THRESHOLD = 12  # months

@@ -9,7 +9,6 @@ import socket
import copy
import sys
import traceback
import uuid

# Centos-7 doesn't include the svg mime type
# /usr/lib64/python/mimetypes.py
@@ -62,38 +61,9 @@ DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
INSTALL_UUID = '00000000-0000-0000-0000-000000000000'

# Store a snapshot of default settings at this point before loading any
# customizable config files.
DEFAULTS_SNAPSHOT = {}
this_module = sys.modules[__name__]
for setting in dir(this_module):
    if setting == setting.upper():
        DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))

# If there is an `/etc/tower/settings.py`, include it.
# If there is a `/etc/tower/conf.d/*.py`, include them.
include(optional('/etc/tower/settings.py'), scope=locals())
include(optional('/etc/tower/conf.d/*.py'), scope=locals())

BASE_VENV_PATH = "/var/lib/awx/venv/"
AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx")

# Use SQLite for unit tests instead of PostgreSQL. If the lines below are
# commented out, Django will create the test_awx-dev database in PostgreSQL to
# run unit tests.
if "pytest" in sys.modules:
    CACHES = {'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-{}'.format(str(uuid.uuid4()))}}
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'),  # noqa
            'TEST': {
                # Test database cannot be :memory: for inventory tests.
                'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3')  # noqa
            },
        }
    }

CLUSTER_HOST_ID = socket.gethostname()

AWX_CALLBACK_PROFILE = True

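The `if "pytest" in sys.modules` guard above needs no environment variable: by the time Django evaluates the settings module under a pytest run, the pytest package has already been imported into the interpreter. The same detection trick in isolation, as a sketch:

    import sys

    def running_under_pytest():
        # True whenever pytest has been imported into this interpreter,
        # which is the case for any test session driven by pytest.
        return "pytest" in sys.modules

Settings code can branch on this to swap PostgreSQL for a throwaway SQLite database, exactly as the hunk above does.
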
@@ -105,8 +75,28 @@ AWX_CALLBACK_PROFILE = True
AWX_DISABLE_TASK_MANAGERS = False
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================

if 'sqlite3' not in DATABASES['default']['ENGINE']:  # noqa
    DATABASES['default'].setdefault('OPTIONS', dict()).setdefault('application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63])  # noqa
# Store a snapshot of default settings at this point before loading any
# customizable config files.
this_module = sys.modules[__name__]
local_vars = dir(this_module)
DEFAULTS_SNAPSHOT = {}  # define after we save local_vars so we do not snapshot the snapshot
for setting in local_vars:
    if setting.isupper():
        DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))

del local_vars  # avoid temporary variables from showing up in dir(settings)
del this_module
#
###############################################################################################
#
# Any settings defined after this point will be marked as a read_only database setting
#
################################################################################################

# If there is an `/etc/tower/settings.py`, include it.
# If there is a `/etc/tower/conf.d/*.py`, include them.
include(optional('/etc/tower/settings.py'), scope=locals())
include(optional('/etc/tower/conf.d/*.py'), scope=locals())

# If any local_*.py files are present in awx/settings/, use them to override
# default settings for development. If not present, we can still run using
@@ -120,3 +110,11 @@ try:
except ImportError:
    traceback.print_exc()
    sys.exit(1)

# The below runs AFTER all of the custom settings are imported
# because conf.d files will define DATABASES and this should modify that
from .application_name import set_application_name

set_application_name(DATABASES, CLUSTER_HOST_ID)  # NOQA

del set_application_name

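The reshuffle above is about ordering: the snapshot of UPPERCASE settings must be taken before `include()` pulls in the `/etc/tower/` overrides, so that anything defined after this point can be treated as a read-only database setting. The snapshot idiom itself, isolated as a runnable sketch grounded in the hunk above:

    import copy
    import sys

    this_module = sys.modules[__name__]
    local_vars = dir(this_module)  # capture names before DEFAULTS_SNAPSHOT exists
    DEFAULTS_SNAPSHOT = {}         # so the snapshot can never contain itself
    for name in local_vars:
        if name.isupper():         # Django settings are UPPERCASE by convention
            DEFAULTS_SNAPSHOT[name] = copy.deepcopy(getattr(this_module, name))
    del local_vars, this_module    # keep dir(settings) free of temporaries

Note the replacement of `setting == setting.upper()` with `setting.isupper()`: the two differ for names that contain no cased characters, and `isupper()` states the intent directly.
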
@@ -47,17 +47,21 @@ AWX_ISOLATION_SHOW_PATHS = [

# Store a snapshot of default settings at this point before loading any
# customizable config files.
this_module = sys.modules[__name__]
local_vars = dir(this_module)
DEFAULTS_SNAPSHOT = {}  # define after we save local_vars so we do not snapshot the snapshot
for setting in local_vars:
    if setting.isupper():
        DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))

del local_vars  # avoid temporary variables from showing up in dir(settings)
del this_module
#
###############################################################################################
#
# Any settings defined after this point will be marked as a read_only database setting
#
################################################################################################
DEFAULTS_SNAPSHOT = {}
this_module = sys.modules[__name__]
for setting in dir(this_module):
    if setting == setting.upper():
        DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))

# Load settings from any .py files in the global conf.d directory specified in
# the environment, defaulting to /etc/tower/conf.d/.
@@ -98,8 +102,10 @@ except IOError:
else:
    raise

# The below runs AFTER all of the custom settings are imported.
# The below runs AFTER all of the custom settings are imported
# because conf.d files will define DATABASES and this should modify that
from .application_name import set_application_name

DATABASES.setdefault('default', dict()).setdefault('OPTIONS', dict()).setdefault(
    'application_name', f'{CLUSTER_HOST_ID}-{os.getpid()}-{" ".join(sys.argv)}'[:63]  # NOQA
)  # noqa
set_application_name(DATABASES, CLUSTER_HOST_ID)  # NOQA

del set_application_name

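Both settings files now delegate to a shared `set_application_name` helper (imported from `awx/settings/application_name.py`, which this diff does not show) instead of open-coding the `setdefault` chain. The `[:63]` truncation matters because PostgreSQL caps identifiers such as `application_name` at NAMEDATALEN - 1, 63 bytes by default; anything longer is silently cut. A sketch of what such a helper plausibly does, assuming its only job is stamping a recognizable connection name:

    import os
    import sys

    def set_application_name(databases, cluster_host_id):
        # A sketch, not the real helper: tag the default PostgreSQL
        # connection with host, pid, and command line, 63-char safe.
        conn = databases.get('default', {})
        if 'sqlite3' in conn.get('ENGINE', ''):
            return  # SQLite has no application_name concept
        name = f'{cluster_host_id}-{os.getpid()}-{" ".join(sys.argv)}'[:63]
        conn.setdefault('OPTIONS', {}).setdefault('application_name', name)
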
@@ -1603,6 +1603,50 @@ register(
    ],
)

register(
    'LOCAL_PASSWORD_MIN_LENGTH',
    field_class=fields.IntegerField,
    min_value=0,
    default=0,
    label=_('Minimum number of characters in local password'),
    help_text=_('Minimum number of characters required in a local password. 0 means no minimum'),
    category=_('Authentication'),
    category_slug='authentication',
)

register(
    'LOCAL_PASSWORD_MIN_DIGITS',
    field_class=fields.IntegerField,
    min_value=0,
    default=0,
    label=_('Minimum number of digit characters in local password'),
    help_text=_('Minimum number of digit characters required in a local password. 0 means no minimum'),
    category=_('Authentication'),
    category_slug='authentication',
)

register(
    'LOCAL_PASSWORD_MIN_UPPER',
    field_class=fields.IntegerField,
    min_value=0,
    default=0,
    label=_('Minimum number of uppercase characters in local password'),
    help_text=_('Minimum number of uppercase characters required in a local password. 0 means no minimum'),
    category=_('Authentication'),
    category_slug='authentication',
)

register(
    'LOCAL_PASSWORD_MIN_SPECIAL',
    field_class=fields.IntegerField,
    min_value=0,
    default=0,
    label=_('Minimum number of special characters in local password'),
    help_text=_('Minimum number of special characters required in a local password. 0 means no minimum'),
    category=_('Authentication'),
    category_slug='authentication',
)


def tacacs_validate(serializer, attrs):
    if not serializer.instance or not hasattr(serializer.instance, 'TACACSPLUS_HOST') or not hasattr(serializer.instance, 'TACACSPLUS_SECRET'):

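The four LOCAL_PASSWORD_MIN_* settings registered above only define the knobs; enforcement happens in the password-change path, which this diff does not show. One plausible validator shape that honors all four counters (illustrative only, not AWX's actual code):

    import string

    def validate_local_password(password, min_length=0, min_digits=0,
                                min_upper=0, min_special=0):
        # A counter of 0 means "no minimum", matching the settings' help text.
        problems = []
        if len(password) < min_length:
            problems.append(f'at least {min_length} characters')
        if sum(c.isdigit() for c in password) < min_digits:
            problems.append(f'at least {min_digits} digits')
        if sum(c.isupper() for c in password) < min_upper:
            problems.append(f'at least {min_upper} uppercase characters')
        if sum(c in string.punctuation for c in password) < min_special:
            problems.append(f'at least {min_special} special characters')
        return problems  # an empty list means the password passes
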
awx/ui/package-lock.json (generated, 247 changes)
@@ -14,7 +14,6 @@
        "ace-builds": "^1.10.1",
        "ansi-to-html": "0.7.2",
        "axios": "0.27.2",
        "codemirror": "^6.0.1",
        "d3": "7.6.1",
        "dagre": "^0.8.4",
        "dompurify": "2.4.0",
@@ -1944,82 +1943,6 @@
      "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==",
      "dev": true
    },
    "node_modules/@codemirror/autocomplete": {
      "version": "6.0.2",
      "resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.0.2.tgz",
      "integrity": "sha512-9PDjnllmXan/7Uax87KGORbxerDJ/cu10SB+n4Jz0zXMEvIh3+TGgZxhIvDOtaQ4jDBQEM7kHYW4vLdQB0DGZQ==",
      "dependencies": {
        "@codemirror/language": "^6.0.0",
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "@lezer/common": "^1.0.0"
      },
      "peerDependencies": {
        "@codemirror/language": "^6.0.0",
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "@lezer/common": "^1.0.0"
      }
    },
    "node_modules/@codemirror/commands": {
      "version": "6.0.1",
      "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.0.1.tgz",
      "integrity": "sha512-iNHDByicYqQjs0Wo1MKGfqNbMYMyhS9WV6EwMVwsHXImlFemgEUC+c5X22bXKBStN3qnwg4fArNZM+gkv22baQ==",
      "dependencies": {
        "@codemirror/language": "^6.0.0",
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "@lezer/common": "^1.0.0"
      }
    },
    "node_modules/@codemirror/language": {
      "version": "6.2.0",
      "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.2.0.tgz",
      "integrity": "sha512-tabB0Ef/BflwoEmTB4a//WZ9P90UQyne9qWB9YFsmeS4bnEqSys7UpGk/da1URMXhyfuzWCwp+AQNMhvu8SfnA==",
      "dependencies": {
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "@lezer/common": "^1.0.0",
        "@lezer/highlight": "^1.0.0",
        "@lezer/lr": "^1.0.0",
        "style-mod": "^4.0.0"
      }
    },
    "node_modules/@codemirror/lint": {
      "version": "6.0.0",
      "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.0.0.tgz",
      "integrity": "sha512-nUUXcJW1Xp54kNs+a1ToPLK8MadO0rMTnJB8Zk4Z8gBdrN0kqV7uvUraU/T2yqg+grDNR38Vmy/MrhQN/RgwiA==",
      "dependencies": {
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "crelt": "^1.0.5"
      }
    },
    "node_modules/@codemirror/search": {
      "version": "6.0.0",
      "resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.0.0.tgz",
      "integrity": "sha512-rL0rd3AhI0TAsaJPUaEwC63KHLO7KL0Z/dYozXj6E7L3wNHRyx7RfE0/j5HsIf912EE5n2PCb4Vg0rGYmDv4UQ==",
      "dependencies": {
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "crelt": "^1.0.5"
      }
    },
    "node_modules/@codemirror/state": {
      "version": "6.1.0",
      "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.1.0.tgz",
      "integrity": "sha512-qbUr94DZTe6/V1VS7LDLz11rM/1t/nJxR1El4I6UaxDEdc0aZZvq6JCLJWiRmUf95NRAnDH6fhXn+PWp9wGCIg=="
    },
    "node_modules/@codemirror/view": {
      "version": "6.0.2",
      "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.0.2.tgz",
      "integrity": "sha512-mnVT/q1JvKPjpmjXJNeCi/xHyaJ3abGJsumIVpdQ1nE1MXAyHf7GHWt8QpWMUvDiqF0j+inkhVR2OviTdFFX7Q==",
      "dependencies": {
        "@codemirror/state": "^6.0.0",
        "style-mod": "^4.0.0",
        "w3c-keyname": "^2.2.4"
      }
    },
    "node_modules/@cspotcode/source-map-support": {
      "version": "0.8.1",
      "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
@@ -3470,27 +3393,6 @@
        "@jridgewell/sourcemap-codec": "1.4.14"
      }
    },
    "node_modules/@lezer/common": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.0.0.tgz",
      "integrity": "sha512-ohydQe+Hb+w4oMDvXzs8uuJd2NoA3D8YDcLiuDsLqH+yflDTPEpgCsWI3/6rH5C3BAedtH1/R51dxENldQceEA=="
    },
    "node_modules/@lezer/highlight": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.0.0.tgz",
      "integrity": "sha512-nsCnNtim90UKsB5YxoX65v3GEIw3iCHw9RM2DtdgkiqAbKh9pCdvi8AWNwkYf10Lu6fxNhXPpkpHbW6mihhvJA==",
      "dependencies": {
        "@lezer/common": "^1.0.0"
      }
    },
    "node_modules/@lezer/lr": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.1.0.tgz",
      "integrity": "sha512-Iad04uVwk1PvSnj25mqj7zEEIRAsasbsTRmVzI0AUTs/+1Dz1//iYAaoLr7A+Xa7bZDfql5MKTxZmSlkYZD3Dg==",
      "dependencies": {
        "@lezer/common": "^1.0.0"
      }
    },
    "node_modules/@lingui/babel-plugin-extract-messages": {
      "version": "3.15.0",
      "resolved": "https://registry.npmjs.org/@lingui/babel-plugin-extract-messages/-/babel-plugin-extract-messages-3.15.0.tgz",
@@ -6998,20 +6900,6 @@
        "node": ">= 4.0"
      }
    },
    "node_modules/codemirror": {
      "version": "6.0.1",
      "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.1.tgz",
      "integrity": "sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==",
      "dependencies": {
        "@codemirror/autocomplete": "^6.0.0",
        "@codemirror/commands": "^6.0.0",
        "@codemirror/language": "^6.0.0",
        "@codemirror/lint": "^6.0.0",
        "@codemirror/search": "^6.0.0",
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0"
      }
    },
    "node_modules/collect-v8-coverage": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz",
@@ -7294,11 +7182,6 @@
      "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
      "dev": true
    },
    "node_modules/crelt": {
      "version": "1.0.5",
      "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.5.tgz",
      "integrity": "sha512-+BO9wPPi+DWTDcNYhr/W90myha8ptzftZT+LwcmUbbok0rcP/fequmFYCw8NMoH7pkAZQzU78b3kYrlua5a9eA=="
    },
    "node_modules/cross-spawn": {
      "version": "7.0.3",
      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
@@ -20388,11 +20271,6 @@
        "webpack": "^5.0.0"
      }
    },
    "node_modules/style-mod": {
      "version": "4.0.0",
      "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.0.0.tgz",
      "integrity": "sha512-OPhtyEjyyN9x3nhPsu76f52yUGXiZcgvsrFVtvTkyGRQJ0XK+GPc6ov1z+lRpbeabka+MYEQxOYRnt5nF30aMw=="
    },
    "node_modules/styled-components": {
      "version": "5.3.6",
      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz",
@@ -21515,11 +21393,6 @@
        "browser-process-hrtime": "^1.0.0"
      }
    },
    "node_modules/w3c-keyname": {
      "version": "2.2.4",
      "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.4.tgz",
      "integrity": "sha512-tOhfEwEzFLJzf6d1ZPkYfGj+FWhIpBux9ppoP3rlclw3Z0BZv3N7b7030Z1kYth+6rDuAsXUFr+d0VE6Ed1ikw=="
    },
    "node_modules/w3c-xmlserializer": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz",
@@ -23890,76 +23763,6 @@
      "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==",
      "dev": true
    },
    "@codemirror/autocomplete": {
      "version": "6.0.2",
      "resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.0.2.tgz",
      "integrity": "sha512-9PDjnllmXan/7Uax87KGORbxerDJ/cu10SB+n4Jz0zXMEvIh3+TGgZxhIvDOtaQ4jDBQEM7kHYW4vLdQB0DGZQ==",
      "requires": {
        "@codemirror/language": "^6.0.0",
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "@lezer/common": "^1.0.0"
      }
    },
    "@codemirror/commands": {
      "version": "6.0.1",
      "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.0.1.tgz",
      "integrity": "sha512-iNHDByicYqQjs0Wo1MKGfqNbMYMyhS9WV6EwMVwsHXImlFemgEUC+c5X22bXKBStN3qnwg4fArNZM+gkv22baQ==",
      "requires": {
        "@codemirror/language": "^6.0.0",
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "@lezer/common": "^1.0.0"
      }
    },
    "@codemirror/language": {
      "version": "6.2.0",
      "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.2.0.tgz",
      "integrity": "sha512-tabB0Ef/BflwoEmTB4a//WZ9P90UQyne9qWB9YFsmeS4bnEqSys7UpGk/da1URMXhyfuzWCwp+AQNMhvu8SfnA==",
      "requires": {
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "@lezer/common": "^1.0.0",
        "@lezer/highlight": "^1.0.0",
        "@lezer/lr": "^1.0.0",
        "style-mod": "^4.0.0"
      }
    },
    "@codemirror/lint": {
      "version": "6.0.0",
      "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.0.0.tgz",
      "integrity": "sha512-nUUXcJW1Xp54kNs+a1ToPLK8MadO0rMTnJB8Zk4Z8gBdrN0kqV7uvUraU/T2yqg+grDNR38Vmy/MrhQN/RgwiA==",
      "requires": {
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "crelt": "^1.0.5"
      }
    },
    "@codemirror/search": {
      "version": "6.0.0",
      "resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.0.0.tgz",
      "integrity": "sha512-rL0rd3AhI0TAsaJPUaEwC63KHLO7KL0Z/dYozXj6E7L3wNHRyx7RfE0/j5HsIf912EE5n2PCb4Vg0rGYmDv4UQ==",
      "requires": {
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0",
        "crelt": "^1.0.5"
      }
    },
    "@codemirror/state": {
      "version": "6.1.0",
      "resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.1.0.tgz",
      "integrity": "sha512-qbUr94DZTe6/V1VS7LDLz11rM/1t/nJxR1El4I6UaxDEdc0aZZvq6JCLJWiRmUf95NRAnDH6fhXn+PWp9wGCIg=="
    },
    "@codemirror/view": {
      "version": "6.0.2",
      "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.0.2.tgz",
      "integrity": "sha512-mnVT/q1JvKPjpmjXJNeCi/xHyaJ3abGJsumIVpdQ1nE1MXAyHf7GHWt8QpWMUvDiqF0j+inkhVR2OviTdFFX7Q==",
      "requires": {
        "@codemirror/state": "^6.0.0",
        "style-mod": "^4.0.0",
        "w3c-keyname": "^2.2.4"
      }
    },
    "@cspotcode/source-map-support": {
      "version": "0.8.1",
      "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
@@ -25101,27 +24904,6 @@
        "@jridgewell/sourcemap-codec": "1.4.14"
      }
    },
    "@lezer/common": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.0.0.tgz",
      "integrity": "sha512-ohydQe+Hb+w4oMDvXzs8uuJd2NoA3D8YDcLiuDsLqH+yflDTPEpgCsWI3/6rH5C3BAedtH1/R51dxENldQceEA=="
    },
    "@lezer/highlight": {
      "version": "1.0.0",
      "resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.0.0.tgz",
      "integrity": "sha512-nsCnNtim90UKsB5YxoX65v3GEIw3iCHw9RM2DtdgkiqAbKh9pCdvi8AWNwkYf10Lu6fxNhXPpkpHbW6mihhvJA==",
      "requires": {
        "@lezer/common": "^1.0.0"
      }
    },
    "@lezer/lr": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.1.0.tgz",
      "integrity": "sha512-Iad04uVwk1PvSnj25mqj7zEEIRAsasbsTRmVzI0AUTs/+1Dz1//iYAaoLr7A+Xa7bZDfql5MKTxZmSlkYZD3Dg==",
      "requires": {
        "@lezer/common": "^1.0.0"
      }
    },
    "@lingui/babel-plugin-extract-messages": {
      "version": "3.15.0",
      "resolved": "https://registry.npmjs.org/@lingui/babel-plugin-extract-messages/-/babel-plugin-extract-messages-3.15.0.tgz",
@@ -27900,20 +27682,6 @@
        "q": "^1.1.2"
      }
    },
    "codemirror": {
      "version": "6.0.1",
      "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.1.tgz",
      "integrity": "sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==",
      "requires": {
        "@codemirror/autocomplete": "^6.0.0",
        "@codemirror/commands": "^6.0.0",
        "@codemirror/language": "^6.0.0",
        "@codemirror/lint": "^6.0.0",
        "@codemirror/search": "^6.0.0",
        "@codemirror/state": "^6.0.0",
        "@codemirror/view": "^6.0.0"
      }
    },
    "collect-v8-coverage": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz",
@@ -28141,11 +27909,6 @@
      "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
      "dev": true
    },
    "crelt": {
      "version": "1.0.5",
      "resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.5.tgz",
      "integrity": "sha512-+BO9wPPi+DWTDcNYhr/W90myha8ptzftZT+LwcmUbbok0rcP/fequmFYCw8NMoH7pkAZQzU78b3kYrlua5a9eA=="
    },
    "cross-spawn": {
      "version": "7.0.3",
      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
@@ -38040,11 +37803,6 @@
      "dev": true,
      "requires": {}
    },
    "style-mod": {
      "version": "4.0.0",
      "resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.0.0.tgz",
      "integrity": "sha512-OPhtyEjyyN9x3nhPsu76f52yUGXiZcgvsrFVtvTkyGRQJ0XK+GPc6ov1z+lRpbeabka+MYEQxOYRnt5nF30aMw=="
    },
    "styled-components": {
      "version": "5.3.6",
      "resolved": "https://registry.npmjs.org/styled-components/-/styled-components-5.3.6.tgz",
@@ -38886,11 +38644,6 @@
        "browser-process-hrtime": "^1.0.0"
      }
    },
    "w3c-keyname": {
      "version": "2.2.4",
      "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.4.tgz",
      "integrity": "sha512-tOhfEwEzFLJzf6d1ZPkYfGj+FWhIpBux9ppoP3rlclw3Z0BZv3N7b7030Z1kYth+6rDuAsXUFr+d0VE6Ed1ikw=="
    },
    "w3c-xmlserializer": {
      "version": "2.0.0",
      "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-2.0.0.tgz",

@@ -14,7 +14,6 @@
    "ace-builds": "^1.10.1",
    "ansi-to-html": "0.7.2",
    "axios": "0.27.2",
    "codemirror": "^6.0.1",
    "d3": "7.6.1",
    "dagre": "^0.8.4",
    "dompurify": "2.4.0",

@@ -28,7 +28,7 @@ import { getLanguageWithoutRegionCode } from 'util/language';
import Metrics from 'screens/Metrics';
import SubscriptionEdit from 'screens/Setting/Subscription/SubscriptionEdit';
import useTitle from 'hooks/useTitle';
import { dynamicActivate } from './i18nLoader';
import { dynamicActivate, locales } from './i18nLoader';
import getRouteConfig from './routeConfig';
import { SESSION_REDIRECT_URL } from './constants';

@@ -142,9 +142,15 @@ function App() {
  const searchParams = Object.fromEntries(new URLSearchParams(search));
  const pseudolocalization =
    searchParams.pseudolocalization === 'true' || false;
  const language =
  let language =
    searchParams.lang || getLanguageWithoutRegionCode(navigator) || 'en';

  if (!Object.keys(locales).includes(language)) {
    // If there isn't a string catalog available for the browser's
    // preferred language, default to one that has strings.
    language = 'en';
  }

  useEffect(() => {
    dynamicActivate(language, pseudolocalization);
  }, [language, pseudolocalization]);

@@ -115,16 +115,16 @@ function AdHocCredentialStep({ credentialTypeId }) {
  searchColumns={[
    {
      name: t`Name`,
      key: 'name',
      key: 'name__icontains',
      isDefault: true,
    },
    {
      name: t`Created By (Username)`,
      key: 'created_by__username',
      key: 'created_by__username__icontains',
    },
    {
      name: t`Modified By (Username)`,
      key: 'modified_by__username',
      key: 'modified_by__username__icontains',
    },
  ]}
  sortColumns={[

@@ -43,6 +43,7 @@ function LaunchButton({ resource, children }) {
  const [surveyConfig, setSurveyConfig] = useState(null);
  const [labels, setLabels] = useState([]);
  const [isLaunching, setIsLaunching] = useState(false);
  const [resourceCredentials, setResourceCredentials] = useState([]);
  const [error, setError] = useState(null);

  const handleLaunch = async () => {
@@ -83,6 +84,13 @@ function LaunchButton({ resource, children }) {
      setLabels(allLabels);
    }

    if (launch.ask_credential_on_launch) {
      const {
        data: { results: templateCredentials },
      } = await JobTemplatesAPI.readCredentials(resource.id);
      setResourceCredentials(templateCredentials);
    }

    if (canLaunchWithoutPrompt(launch)) {
      await launchWithParams({});
    } else {
@@ -208,6 +216,7 @@ function LaunchButton({ resource, children }) {
          labels={labels}
          onLaunch={launchWithParams}
          onCancel={() => setShowLaunchPrompt(false)}
          resourceDefaultCredentials={resourceCredentials}
        />
      )}
    </>

@@ -47,6 +47,12 @@ describe('LaunchButton', () => {
        variables_needed_to_start: [],
      },
    });
    JobTemplatesAPI.readCredentials.mockResolvedValue({
      data: {
        count: 0,
        results: [],
      },
    });
  });

  afterEach(() => jest.clearAllMocks());

@@ -19,6 +19,7 @@ function PromptModalForm({
  labels,
  surveyConfig,
  instanceGroups,
  resourceDefaultCredentials,
}) {
  const { setFieldTouched, values } = useFormikContext();
  const [showDescription, setShowDescription] = useState(false);
@@ -35,9 +36,9 @@
    surveyConfig,
    resource,
    labels,
    instanceGroups
    instanceGroups,
    resourceDefaultCredentials
  );

  const handleSubmit = async () => {
    const postValues = {};
    const setValue = (key, value) => {

@@ -69,6 +69,20 @@ describe('LaunchPrompt', () => {
        spec: [{ type: 'text', variable: 'foo' }],
      },
    });
    JobTemplatesAPI.readCredentials.mockResolvedValue({
      data: {
        results: [
          {
            id: 5,
            name: 'cred that prompts',
            credential_type: 1,
            inputs: {
              password: 'ASK',
            },
          },
        ],
      },
    });
    InstanceGroupsAPI.read.mockResolvedValue({
      data: {
        results: [
@@ -212,6 +226,16 @@ describe('LaunchPrompt', () => {
            ],
          },
        }}
        resourceDefaultCredentials={[
          {
            id: 5,
            name: 'cred that prompts',
            credential_type: 1,
            inputs: {
              password: 'ASK',
            },
          },
        ]}
        onLaunch={noop}
        onCancel={noop}
        surveyConfig={{
@@ -289,6 +313,16 @@ describe('LaunchPrompt', () => {
        resource={resource}
        onLaunch={noop}
        onCancel={noop}
        resourceDefaultCredentials={[
          {
            id: 5,
            name: 'cred that prompts',
            credential_type: 1,
            inputs: {
              password: 'ASK',
            },
          },
        ]}
      />
    );
  });

@@ -1,6 +1,6 @@
import 'styled-components/macro';
import React, { useState, useCallback, useEffect } from 'react';
import { useHistory } from 'react-router-dom';
import { useHistory, useLocation } from 'react-router-dom';

import { t } from '@lingui/macro';
import { useField } from 'formik';
@@ -8,7 +8,7 @@ import styled from 'styled-components';
import { Alert, ToolbarItem } from '@patternfly/react-core';
import { CredentialsAPI, CredentialTypesAPI } from 'api';
import { getSearchableKeys } from 'components/PaginatedTable';
import { getQSConfig, parseQueryString } from 'util/qs';
import { getQSConfig, parseQueryString, updateQueryString } from 'util/qs';
import useRequest from 'hooks/useRequest';
import AnsibleSelect from '../../AnsibleSelect';
import OptionsList from '../../OptionsList';
@@ -31,18 +31,18 @@ function CredentialsStep({
  allowCredentialsWithPasswords,
  defaultCredentials = [],
}) {
  const history = useHistory();
  const location = useLocation();
  const [field, meta, helpers] = useField({
    name: 'credentials',
    validate: (val) =>
      credentialsValidator(
        allowCredentialsWithPasswords,
        val,
        defaultCredentials
        defaultCredentials ?? []
      ),
  });
  const [selectedType, setSelectedType] = useState(null);
  const history = useHistory();

  const {
    result: types,
    error: typesError,
@@ -104,12 +104,32 @@ function CredentialsStep({
      credentialsValidator(
        allowCredentialsWithPasswords,
        field.value,
        defaultCredentials
        defaultCredentials ?? []
      )
    );
    /* eslint-disable-next-line react-hooks/exhaustive-deps */
  }, []);

  const removeAllSearchTerms = (qsConfig) => {
    const oldParams = parseQueryString(qsConfig, location.search);
    Object.keys(oldParams).forEach((key) => {
      oldParams[key] = null;
    });
    const defaultParams = {
      ...oldParams,
      page: 1,
      page_size: 5,
      order_by: 'name',
    };
    const qs = updateQueryString(qsConfig, location.search, defaultParams);
    pushHistoryState(qs);
  };

  const pushHistoryState = (qs) => {
    const { pathname } = history.location;
    history.push(qs ? `${pathname}?${qs}` : pathname);
  };

  if (isTypesLoading) {
    return <ContentLoading />;
  }
@@ -154,9 +174,7 @@ function CredentialsStep({
  value={selectedType && selectedType.id}
  onChange={(e, id) => {
    // Reset query params when the category of credentials is changed
    history.replace({
      search: '',
    });
    removeAllSearchTerms(QS_CONFIG);
    setSelectedType(types.find((o) => o.id === parseInt(id, 10)));
  }}
/>

@@ -168,7 +168,9 @@ describe('CredentialsStep', () => {
  test('should reset query params (credential.page) when selected credential type is changed', async () => {
    let wrapper;
    const history = createMemoryHistory({
      initialEntries: ['?credential.page=2'],
      initialEntries: [
        '?credential.page=2&credential.page_size=5&credential.order_by=name',
      ],
    });
    await act(async () => {
      wrapper = mountWithContexts(

@@ -46,7 +46,8 @@ export default function useLaunchSteps(
  surveyConfig,
  resource,
  labels,
  instanceGroups
  instanceGroups,
  resourceDefaultCredentials
) {
  const [visited, setVisited] = useState({});
  const [isReady, setIsReady] = useState(false);
@@ -56,7 +57,7 @@ export default function useLaunchSteps(
    useCredentialsStep(
      launchConfig,
      resource,
      resource.summary_fields.credentials || [],
      resourceDefaultCredentials,
      true
    ),
    useCredentialPasswordsStep(

@@ -1,25 +1,9 @@
import { useEffect } from 'react';
import { useLocation, useHistory } from 'react-router';
import { useLocation } from 'react-router';
import { PERSISTENT_FILTER_KEY } from '../../constants';

export default function PersistentFilters({ pageKey, children }) {
  const location = useLocation();
  const history = useHistory();

  useEffect(() => {
    if (!location.search.includes('restoreFilters=true')) {
      return;
    }

    const filterString = sessionStorage.getItem(PERSISTENT_FILTER_KEY);
    const filter = filterString ? JSON.parse(filterString) : { qs: '' };

    if (filter.pageKey === pageKey) {
      history.replace(`${location.pathname}${filter.qs}`);
    } else {
      history.replace(location.pathname);
    }
  }, [history, location, pageKey]);

  useEffect(() => {
    const filter = {
@@ -31,3 +15,13 @@ export default function PersistentFilters({ pageKey, children }) {

  return children;
}

export function getPersistentFilters(key) {
  const filterString = sessionStorage.getItem(PERSISTENT_FILTER_KEY);
  const filter = filterString ? JSON.parse(filterString) : { qs: '' };

  if (filter.pageKey === key) {
    return filter.qs;
  }
  return '';
}

@@ -24,28 +24,6 @@ describe('PersistentFilters', () => {
    });
  });

  test('should restore filters from sessionStorage', () => {
    expect(
      sessionStorage.setItem(
        KEY,
        JSON.stringify({
          pageKey: 'templates',
          qs: '?page=2&name=foo',
        })
      )
    );
    const history = createMemoryHistory({
      initialEntries: ['/templates?restoreFilters=true'],
    });
    render(
      <Router history={history}>
        <PersistentFilters pageKey="templates">test</PersistentFilters>
      </Router>
    );

    expect(history.location.search).toEqual('?page=2&name=foo');
  });

  test('should not restore filters without restoreFilters query param', () => {
    expect(
      sessionStorage.setItem(
@@ -68,28 +46,6 @@ describe('PersistentFilters', () => {
    expect(history.location.search).toEqual('');
  });

  test("should not restore filters if page key doesn't match", () => {
    expect(
      sessionStorage.setItem(
        KEY,
        JSON.stringify({
          pageKey: 'projects',
          qs: '?page=2&name=foo',
        })
      )
    );
    const history = createMemoryHistory({
      initialEntries: ['/templates?restoreFilters=true'],
    });
    render(
      <Router history={history}>
        <PersistentFilters pageKey="templates">test</PersistentFilters>
      </Router>
    );

    expect(history.location.search).toEqual('');
  });

  test('should update stored filters when qs changes', async () => {
    const history = createMemoryHistory({
      initialEntries: ['/templates'],

@@ -1 +1,2 @@
export { default } from './PersistentFilters';
export { getPersistentFilters } from './PersistentFilters';

@@ -7,6 +7,7 @@ import {
} from '@patternfly/react-core';
import { useHistory, useLocation } from 'react-router-dom';
import styled from 'styled-components';
import { getPersistentFilters } from 'components/PersistentFilters';

const Tabs = styled(PFTabs)`
  & > ul {
@@ -40,8 +41,8 @@ function RoutedTabs({ tabsArray }) {
    const match = tabsArray.find((tab) => tab.id === eventKey);
    if (match) {
      event.preventDefault();
      const link = match.isBackButton
        ? `${match.link}?restoreFilters=true`
      const link = match.persistentFilterKey
        ? `${match.link}${getPersistentFilters(match.persistentFilterKey)}`
        : match.link;
      history.push(link);
    }

@@ -122,6 +122,18 @@ function sortWeekday(a, b) {
}

function RunOnDetail({ type, options, prefix }) {
  const weekdays = {
    sunday: t`Sunday`,
    monday: t`Monday`,
    tuesday: t`Tuesday`,
    wednesday: t`Wednesday`,
    thursday: t`Thursday`,
    friday: t`Friday`,
    saturday: t`Saturday`,
    day: t`day`,
    weekday: t`weekday`,
    weekendDay: t`weekend day`,
  };
  if (type === 'month') {
    if (options.runOn === 'day') {
      return (
@@ -132,16 +144,16 @@ function RunOnDetail({ type, options, prefix }) {
        />
      );
    }
    const dayOfWeek = options.runOnTheDay;
    const dayOfWeek = weekdays[options.runOnTheDay];
    return (
      <Detail
        label={t`Run on`}
        value={
          options.runOnDayNumber === -1 ? (
          options.runOnTheOccurrence === -1 ? (
            t`The last ${dayOfWeek}`
          ) : (
            <SelectOrdinal
              value={options.runOnDayNumber}
              value={options.runOnTheOccurrence}
              one={`The first ${dayOfWeek}`}
              two={`The second ${dayOfWeek}`}
              _3={`The third ${dayOfWeek}`}
@@ -178,18 +190,6 @@ function RunOnDetail({ type, options, prefix }) {
        />
      );
    }
    const weekdays = {
      sunday: t`Sunday`,
      monday: t`Monday`,
      tuesday: t`Tuesday`,
      wednesday: t`Wednesday`,
      thursday: t`Thursday`,
      friday: t`Friday`,
      saturday: t`Saturday`,
      day: t`day`,
      weekday: t`weekday`,
      weekendDay: t`weekend day`,
    };
    const weekday = weekdays[options.runOnTheDay];
    const month = months[options.runOnTheMonth];
    return (

@@ -11,7 +11,8 @@ import { JobTemplatesAPI, SchedulesAPI, WorkflowJobTemplatesAPI } from 'api';
import { parseVariableField, jsonToYaml } from 'util/yaml';
import { useConfig } from 'contexts/Config';
import InstanceGroupLabels from 'components/InstanceGroupLabels';
import parseRuleObj from '../shared/parseRuleObj';
import parseRuleObj, { UnsupportedRRuleError } from '../shared/parseRuleObj';
import UnsupportedRRuleAlert from '../shared/UnsupportedRRuleAlert';
import FrequencyDetails from './FrequencyDetails';
import AlertModal from '../../AlertModal';
import { CardBody, CardActionsRow } from '../../Card';
@@ -182,8 +183,20 @@ function ScheduleDetail({ hasDaysToKeepField, schedule, surveyConfig }) {
    month: t`Month`,
    year: t`Year`,
  };
  const { frequency, frequencyOptions, exceptionFrequency, exceptionOptions } =
    parseRuleObj(schedule);
  let rruleError;
  let frequency = [];
  let frequencyOptions = {};
  let exceptionFrequency = [];
  let exceptionOptions = {};
  try {
    ({ frequency, frequencyOptions, exceptionFrequency, exceptionOptions } =
      parseRuleObj(schedule));
  } catch (parseRuleError) {
    if (parseRuleError instanceof UnsupportedRRuleError) {
      rruleError = parseRuleError;
    }
  }

  const repeatFrequency = frequency.length
    ? frequency.map((f) => frequencies[f]).join(', ')
    : t`None (Run Once)`;
@@ -602,6 +615,7 @@ function ScheduleDetail({ hasDaysToKeepField, schedule, surveyConfig }) {
        </PromptDetailList>
      </>
    )}
    {rruleError && <UnsupportedRRuleAlert schedule={schedule} />}
    <CardActionsRow>
      {summary_fields?.user_capabilities?.edit && (
        <Button

@@ -587,4 +587,31 @@ describe('<ScheduleDetail />', () => {
      (el) => el.prop('isDisabled') === true
    );
  });
  test('should display warning for unsupported recurrence rules', async () => {
    const unsupportedSchedule = {
      ...schedule,
      rrule:
        'DTSTART:20221220T161500Z RRULE:FREQ=HOURLY;INTERVAL=1 EXRULE:FREQ=HOURLY;INTERVAL=1;BYDAY=TU;BYMONTHDAY=1,2,3,4,5,6,7 EXRULE:FREQ=HOURLY;INTERVAL=1;BYDAY=WE;BYMONTHDAY=2,3,4,5,6,7,8',
    };
    await act(async () => {
      wrapper = mountWithContexts(
        <Route
          path="/templates/job_template/:id/schedules/:scheduleId"
          component={() => <ScheduleDetail schedule={unsupportedSchedule} />}
        />,
        {
          context: {
            router: {
              history,
              route: {
                location: history.location,
                match: { params: { id: 1 } },
              },
            },
          },
        }
      );
    });
    expect(wrapper.find('UnsupportedRRuleAlert').length).toBe(1);
  });
});

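The rrule string in this test stores DTSTART, the RRULE, and both EXRULEs as a single space-separated value, which is why the alert component above renders it with `split(' ').join('\n')`. The UI parses these strings with the JavaScript rrule package; purely as an illustration under that same split, an equivalent recurrence can be inspected with Python's dateutil:

    from dateutil.rrule import rrulestr

    rule = (
        'DTSTART:20221220T161500Z '
        'RRULE:FREQ=HOURLY;INTERVAL=1 '
        'EXRULE:FREQ=HOURLY;INTERVAL=1;BYDAY=TU;BYMONTHDAY=1,2,3,4,5,6,7'
    )
    # forceset=True returns an rruleset that applies the EXRULE exclusions;
    # the space-to-newline split mirrors what the UI does before display.
    ruleset = rrulestr(rule.replace(' ', '\n'), forceset=True)
    print(ruleset[:3])  # first three occurrences after DTSTART

Two EXRULEs of the same frequency are exactly the "complex rules" case the new UnsupportedRRuleAlert component reports, since the form-based editor cannot represent them.
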
@@ -113,6 +113,9 @@ function ScheduleEdit({
        days: values.daysToKeep,
      });
    } else {
      if (typeof requestData.extra_data === 'string') {
        requestData.extra_data = JSON.parse(requestData.extra_data);
      }
      requestData.extra_data.days = values.daysToKeep;
    }
  }

@@ -0,0 +1,32 @@
import React from 'react';
import styled from 'styled-components';
import { t } from '@lingui/macro';
import { Alert } from '@patternfly/react-core';

const AlertWrapper = styled.div`
  margin-top: var(--pf-global--spacer--lg);
  margin-bottom: var(--pf-global--spacer--lg);
`;
const RulesTitle = styled.p`
  margin-top: var(--pf-global--spacer--lg);
  margin-bottom: var(--pf-global--spacer--lg);
  font-weight: var(--pf-global--FontWeight--bold);
`;

export default function UnsupportedRRuleAlert({ schedule }) {
  return (
    <AlertWrapper>
      <Alert
        isInline
        variant="danger"
        ouiaId="schedule-warning"
        title={t`This schedule uses complex rules that are not supported in the
        UI. Please use the API to manage this schedule.`}
      />
      <RulesTitle>{t`Schedule Rules`}:</RulesTitle>
      <pre css="white-space: pre; font-family: var(--pf-global--FontFamily--monospace)">
        {schedule.rrule.split(' ').join('\n')}
      </pre>
    </AlertWrapper>
  );
}

@@ -82,11 +82,7 @@ const frequencyTypes = {
};

function parseRrule(rruleString, schedule, values) {
  const { frequency, options } = parseRule(
    rruleString,
    schedule,
    values.exceptionFrequency
  );
  const { frequency, options } = parseRule(rruleString, schedule);

  if (values.frequencyOptions[frequency]) {
    throw new UnsupportedRRuleError(
@@ -105,11 +101,7 @@ function parseRrule(rruleString, schedule, values) {
}

function parseExRule(exruleString, schedule, values) {
  const { frequency, options } = parseRule(
    exruleString,
    schedule,
    values.exceptionFrequency
  );
  const { frequency, options } = parseRule(exruleString, schedule);

  if (values.exceptionOptions[frequency]) {
    throw new UnsupportedRRuleError(
@@ -129,7 +121,7 @@ function parseExRule(exruleString, schedule, values) {
  };
}

function parseRule(ruleString, schedule, frequencies) {
function parseRule(ruleString, schedule) {
  const {
    origOptions: {
      bymonth,
@@ -178,9 +170,6 @@ function parseRule(ruleString, schedule, frequencies) {
    throw new Error(`Unexpected rrule frequency: ${freq}`);
  }
  const frequency = frequencyTypes[freq];
  if (frequencies.includes(frequency)) {
    throw new Error(`Duplicate frequency types not supported (${frequency})`);
  }

  if (freq === RRule.WEEKLY && byweekday) {
    options.daysOfWeek = byweekday;

@@ -195,9 +195,9 @@ function getRouteConfig(userProfile = {}) {
  deleteRoute('host_metrics');
  deleteRouteGroup('settings');
  deleteRoute('management_jobs');
  if (userProfile?.isOrgAdmin) return routeConfig;
  deleteRoute('topology_view');
  deleteRoute('instances');
  if (userProfile?.isOrgAdmin) return routeConfig;
  if (!userProfile?.isNotificationAdmin) deleteRoute('notification_templates');

  return routeConfig;

@@ -101,10 +101,8 @@ describe('getRouteConfig', () => {
      '/credential_types',
      '/notification_templates',
      '/instance_groups',
      '/instances',
      '/applications',
      '/execution_environments',
      '/topology_view',
    ]);
  });

@@ -237,10 +235,8 @@ describe('getRouteConfig', () => {
      '/credential_types',
      '/notification_templates',
      '/instance_groups',
      '/instances',
      '/applications',
      '/execution_environments',
      '/topology_view',
    ]);
  });

@@ -268,10 +264,8 @@ describe('getRouteConfig', () => {
      '/credential_types',
      '/notification_templates',
      '/instance_groups',
      '/instances',
      '/applications',
      '/execution_environments',
      '/topology_view',
    ]);
  });
});

@@ -74,7 +74,7 @@ function Application({ setBreadcrumb }) {
      ),
      link: '/applications',
      id: 0,
      isBackButton: true,
      persistentFilterKey: 'applications',
    },
    { name: t`Details`, link: `/applications/${id}/details`, id: 1 },
    { name: t`Tokens`, link: `/applications/${id}/tokens`, id: 2 },

@@ -82,7 +82,7 @@ function Credential({ setBreadcrumb }) {
      ),
      link: `/credentials`,
      id: 99,
      isBackButton: true,
      persistentFilterKey: 'credentials',
    },
    { name: t`Details`, link: `/credentials/${id}/details`, id: 0 },
    {

@@ -91,6 +91,11 @@ function CredentialEdit({ credential }) {
        modifiedData.user = me.id;
      }
    }

    if (credential.kind === 'vault' && !credential.inputs?.vault_id) {
      delete modifiedData.inputs.vault_id;
    }

    const [{ data }] = await Promise.all([
      CredentialsAPI.update(credId, modifiedData),
      ...destroyInputSources(),
@@ -100,7 +105,7 @@ function CredentialEdit({ credential }) {

      return data;
    },
    [me, credId]
    [me, credId, credential]
  )
);


@@ -57,7 +57,7 @@ function CredentialType({ setBreadcrumb }) {
      ),
      link: '/credential_types',
      id: 99,
      isBackButton: true,
      persistentFilterKey: 'credentialTypes',
    },
    {
      name: t`Details`,

@@ -59,7 +59,7 @@ function ExecutionEnvironment({ setBreadcrumb }) {
      ),
      link: '/execution_environments',
      id: 99,
      isBackButton: true,
      persistentFilterKey: 'executionEnvironments',
    },
    {
      name: t`Details`,

@@ -52,7 +52,7 @@ function Host({ setBreadcrumb }) {
      ),
      link: `/hosts`,
      id: 99,
      isBackButton: true,
      persistentFilterKey: 'hosts',
    },
    {
      name: t`Details`,

@@ -131,12 +131,6 @@ function HostMetrics() {
            >
              {t`Automation`}
            </HeaderCell>
            <HeaderCell
              sortKey="used_in_inventories"
              tooltip={t`How many inventories is the host in, recomputed on a weekly schedule`}
            >
              {t`Inventories`}
            </HeaderCell>
            <HeaderCell
              sortKey="deleted_counter"
              tooltip={t`How many times was the host deleted`}

@@ -21,7 +21,6 @@ function HostMetricsListItem({ item, isSelected, onSelect, rowIndex }) {
        {formatDateString(item.last_automation)}
      </Td>
      <Td dataLabel={t`Automation`}>{item.automated_counter}</Td>
      <Td dataLabel={t`Inventories`}>{item.used_in_inventories || 0}</Td>
      <Td dataLabel={t`Deleted`}>{item.deleted_counter}</Td>
    </Tr>
  );

@@ -63,7 +63,7 @@ function InstanceGroup({ setBreadcrumb }) {
      ),
      link: '/instance_groups',
      id: 99,
      isBackButton: true,
      persistentFilterKey: 'instanceGroups',
    },
    {
      name: t`Details`,

@@ -4,6 +4,7 @@ import { t } from '@lingui/macro';
import { Switch, Route, Redirect, Link, useRouteMatch } from 'react-router-dom';
import { CaretLeftIcon } from '@patternfly/react-icons';
import { Card, PageSection } from '@patternfly/react-core';
import { useConfig } from 'contexts/Config';
import ContentError from 'components/ContentError';
import RoutedTabs from 'components/RoutedTabs';
import useRequest from 'hooks/useRequest';
@@ -13,6 +14,9 @@ import InstanceDetail from './InstanceDetail';
import InstancePeerList from './InstancePeers';

function Instance({ setBreadcrumb }) {
  const { me } = useConfig();
  const canReadSettings = me.is_superuser || me.is_system_auditor;

  const match = useRouteMatch();
  const tabsArray = [
    {
@@ -24,25 +28,27 @@ function Instance({ setBreadcrumb }) {
      ),
      link: `/instances`,
      id: 99,
      isBackButton: true,
      persistentFilterKey: 'instances',
    },
    { name: t`Details`, link: `${match.url}/details`, id: 0 },
  ];

  const {
    result: { isK8s },
    result: isK8s,
    error,
    isLoading,
    request,
  } = useRequest(
    useCallback(async () => {
      if (!canReadSettings) {
        return false;
      }
      const { data } = await SettingsAPI.readCategory('system');
      return {
        isK8s: data.IS_K8S,
      };
    }, []),
      return data?.IS_K8S ?? false;
    }, [canReadSettings]),
    { isK8s: false, isLoading: true }
  );

  useEffect(() => {
    request();
  }, [request]);

@@ -36,6 +36,7 @@ const QS_CONFIG = getQSConfig('instance', {
function InstanceList() {
  const location = useLocation();
  const { me } = useConfig();
  const canReadSettings = me.is_superuser || me.is_system_auditor;
  const [showHealthCheckAlert, setShowHealthCheckAlert] = useState(false);
  const [pendingHealthCheck, setPendingHealthCheck] = useState(false);
  const [canRunHealthCheck, setCanRunHealthCheck] = useState(true);
@@ -48,18 +49,24 @@ function InstanceList() {
  } = useRequest(
    useCallback(async () => {
      const params = parseQueryString(QS_CONFIG, location.search);
      const [response, responseActions, sysSettings] = await Promise.all([

      const [response, responseActions] = await Promise.all([
        InstancesAPI.read(params),
        InstancesAPI.readOptions(),
        SettingsAPI.readCategory('system'),
      ]);

      let sysSettings = {};
      if (canReadSettings) {
        sysSettings = await SettingsAPI.readCategory('system');
      }

      const isPending = response.data.results.some(
        (i) => i.health_check_pending === true
      );
      setPendingHealthCheck(isPending);
      return {
        instances: response.data.results,
        isK8s: sysSettings.data.IS_K8S,
        isK8s: sysSettings?.data?.IS_K8S ?? false,
        count: response.data.count,
        actions: responseActions.data.actions,
        relatedSearchableKeys: (
@@ -67,7 +74,7 @@ function InstanceList() {
      ).map((val) => val.slice(0, -8)),
      searchableKeys: getSearchableKeys(responseActions.data.actions?.GET),
    };
  }, [location.search]),
  }, [location.search, canReadSettings]),
  {
    instances: [],
    count: 0,

Some files were not shown because too many files have changed in this diff.