# Compare commits

163 commits between `constructe...` and `21.14.0`.
Author and date columns did not survive extraction; the commit SHAs are:

| SHA1 |
|---|
| 32a5186eea |
| bf98f62654 |
| 1f9925cf51 |
| 4bf8366687 |
| 21b4755587 |
| b4163dd00f |
| 6908f415a1 |
| 746cd4bf77 |
| 39ea162aa9 |
| 5bd00adb59 |
| 28b1c62275 |
| f3cdf368df |
| 4302348e8e |
| cd6cb3352e |
| d1895bb92e |
| 8d47644659 |
| 46227f14a1 |
| 2d114a4d16 |
| 7deddabea6 |
| e15f4de0dd |
| f558957538 |
| fa3920d3a3 |
| 48a04bff5a |
| c30760aaa9 |
| 3636c5e95e |
| ae0d868681 |
| edbed92c95 |
| b75b098ee9 |
| 4f2f345e23 |
| 41a4551c91 |
| 229dbe0905 |
| d137086870 |
| f53aa2d26b |
| 42c848b57b |
| 3e6e0463b9 |
| ededc61a71 |
| 3747f5b097 |
| 64b0e09e87 |
| 790ccd984c |
| 5d0849d746 |
| 7f1750324f |
| a63067da38 |
| 7a45048463 |
| 97a5e87448 |
| 11475590e7 |
| 7e88a735ad |
| 2f3e65d4ef |
| cc18c1220a |
| d2aa1b94e3 |
| a97c1b46c0 |
| 6a3282a689 |
| be27d89895 |
| 160508c907 |
| 5a3900a927 |
| f2bfaf7aca |
| d1cf7245f7 |
| 0de7551477 |
| ac99708952 |
| 47b7bbeda7 |
| bca0f2dd47 |
| 3efc7d5bc4 |
| 4b9ca3deee |
| f622d3a1e6 |
| ede1b9af92 |
| 2becc5dda9 |
| 7aad16964c |
| b1af27c4f6 |
| 7cb16ef91d |
| 9358d59f20 |
| 9e037f1a02 |
| 266ebe5501 |
| ce5270434c |
| 34834252ff |
| 861ba8a727 |
| 02e5ba5f94 |
| 81ba6c0234 |
| 5c47c24e28 |
| 752289e175 |
| a24aaba6bc |
| 349785550c |
| ab6511a833 |
| a7b4c03188 |
| a5f9506f49 |
| 8e6f4fae80 |
| a952ab0a75 |
| 7cca6c4cd9 |
| 3945db60eb |
| 252b0dda9f |
| 0a2f1622f6 |
| 00817d6b89 |
| 06808ef4c4 |
| 3aba5b5a04 |
| 5c19efdc32 |
| f0c967c1b2 |
| 2ca0b7bc01 |
| 217dc57c24 |
| 1411d11a0e |
| 2fe1ea94bd |
| a47cfc55ab |
| 0eb9de02f3 |
| 39ee4285ce |
| 2dcda04a9e |
| 52d46c88e4 |
| c2df22e0f0 |
| 90f54b98cd |
| b143df3183 |
| 6fa22f5be2 |
| d5de1f9d11 |
| 7cca39d069 |
| cf21eab7f4 |
| 98b2f51c18 |
| 327352feaf |
| ccaace8b30 |
| 2902b40084 |
| 9669b9dd2f |
| a6a9d3427c |
| d27aada817 |
| 2fca07ee4c |
| 335ac636b5 |
| f4bcc03ac7 |
| 3051384f95 |
| 811ecb8673 |
| 5e28f5dca1 |
| d088d36448 |
| 89e41597a6 |
| 283adc30a8 |
| 019e6a52fe |
| 35e5610642 |
| 3a303875bb |
| 4499a50019 |
| 3fe46e2e27 |
| 6d3f39fe92 |
| a3233b5fdd |
| fe3aa6ce2b |
| 77ec46f6cf |
| b5f240ce70 |
| fb2647ff7b |
| 35fbb94aa6 |
| f2ab8d637c |
| 166b586591 |
| d1c608a281 |
| b4803ca894 |
| ce7f597c7e |
| 23a34c5dc9 |
| bef3da6fb2 |
| 7f50679e68 |
| 52d071f9d1 |
| 26a888547d |
| 951eee944c |
| 4630757f5f |
| f0c91bb1f3 |
| b1dceefac3 |
| bb65945b4f |
| 1b8f6630bf |
| 5157838d83 |
| ebabea54e1 |
| 0eaa7816e9 |
| eb9431ee1f |
| fd6605932a |
| 83149519f8 |
| 5d96ee084d |
| e2cee10767 |
| 31c2e1a450 |
### .github/workflows/devel_images.yml (22 changes, vendored)

```diff
@@ -7,6 +7,7 @@ on:
     branches:
       - devel
       - release_*
+      - feature_*
 jobs:
   push:
     if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
@@ -20,6 +21,12 @@ jobs:
       - name: Get python version from Makefile
        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
 
+      - name: Set lower case owner name
+        run: |
+          echo "OWNER_LC=${OWNER,,}" >>${GITHUB_ENV}
+        env:
+          OWNER: '${{ github.repository_owner }}'
+
       - name: Install python ${{ env.py_version }}
         uses: actions/setup-python@v2
         with:
@@ -31,15 +38,18 @@ jobs:
 
       - name: Pre-pull image to warm build cache
         run: |
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
-          docker pull ghcr.io/${{ github.repository_owner }}/awx_kube_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/} || :
+          docker pull ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/} || :
 
       - name: Build images
         run: |
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
-          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make docker-compose-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-dev-build
+          DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER_LC} COMPOSE_TAG=${GITHUB_REF##*/} make awx-kube-build
 
       - name: Push image
         run: |
-          docker push ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/}
-          docker push ghcr.io/${{ github.repository_owner }}/awx_kube_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx_kube_devel:${GITHUB_REF##*/}
+          docker push ghcr.io/${OWNER_LC}/awx:${GITHUB_REF##*/}
```
### .github/workflows/promote.yml (1 change, vendored)

```diff
@@ -10,6 +10,7 @@ on:
 
 jobs:
   promote:
+    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
     steps:
       - name: Checkout awx
```
### .github/workflows/stage.yml (1 change, vendored)

```diff
@@ -21,6 +21,7 @@ on:
 
 jobs:
   stage:
+    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
     permissions:
       packages: write
```
### .gitignore (3 changes, vendored)

```diff
@@ -161,3 +161,6 @@ use_dev_supervisor.txt
 /_build/
 /_build_kube_dev/
 /Dockerfile.kube-dev
+
+awx/ui_next/src
+awx/ui_next/build
```
### MANIFEST.in

```diff
@@ -6,6 +6,7 @@ recursive-include awx/templates *.html
 recursive-include awx/api/templates *.md *.html *.yml
 recursive-include awx/ui/build *.html
 recursive-include awx/ui/build *
+recursive-include awx/ui_next/build *
 recursive-include awx/playbooks *.yml
 recursive-include awx/lib/site-packages *
 recursive-include awx/plugins *.ps1
```
### Makefile (64 changes)

```diff
@@ -1,4 +1,7 @@
+-include awx/ui_next/Makefile
+
 PYTHON ?= python3.9
+DOCKER_COMPOSE ?= docker-compose
 OFFICIAL ?= no
 NODE ?= node
 NPM_BIN ?= npm
@@ -203,19 +206,7 @@ uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	uwsgi -b 32768 \
-	    --socket 127.0.0.1:8050 \
-	    --module=awx.wsgi:application \
-	    --home=/var/lib/awx/venv/awx \
-	    --chdir=/awx_devel/ \
-	    --vacuum \
-	    --processes=5 \
-	    --harakiri=120 --master \
-	    --no-orphans \
-	    --max-requests=1000 \
-	    --stats /tmp/stats.socket \
-	    --lazy-apps \
-	    --logformat "%(addr) %(method) %(uri) - %(proto) %(status)"
+	uwsgi /etc/tower/uwsgi.ini
 
 awx-autoreload:
 	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
@@ -422,12 +413,14 @@ ui-release: $(UI_BUILD_FLAG_FILE)
 
 ui-devel: awx/ui/node_modules
 	@$(MAKE) -B $(UI_BUILD_FLAG_FILE)
-	mkdir -p /var/lib/awx/public/static/css
-	mkdir -p /var/lib/awx/public/static/js
-	mkdir -p /var/lib/awx/public/static/media
-	cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css
-	cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js
-	cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media
+	@if [ -d "/var/lib/awx" ] ; then \
+	    mkdir -p /var/lib/awx/public/static/css; \
+	    mkdir -p /var/lib/awx/public/static/js; \
+	    mkdir -p /var/lib/awx/public/static/media; \
+	    cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css; \
+	    cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js; \
+	    cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media; \
+	fi
 
 ui-devel-instrumented: awx/ui/node_modules
 	$(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
@@ -454,11 +447,12 @@ ui-test-general:
 	$(NPM_BIN) run --prefix awx/ui pretest
 	$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
 
+# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
 HEADLESS ?= no
 ifeq ($(HEADLESS), yes)
 dist/$(SDIST_TAR_FILE):
 else
-dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE)
+dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
 endif
 	$(PYTHON) -m build -s
 	ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
@@ -506,23 +500,21 @@ docker-compose-sources: .git/hooks/pre-commit
 	    -e enable_prometheus=$(PROMETHEUS) \
 	    -e enable_grafana=$(GRAFANA) $(EXTRA_SOURCES_ANSIBLE_OPTS)
 
-
-
 docker-compose: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
 
 docker-compose-credential-plugins: awx/projects docker-compose-sources
 	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
 
 docker-compose-test: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash
 
 docker-compose-runtest: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
 
 docker-compose-build-swagger: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
 
 SCHEMA_DIFF_BASE_BRANCH ?= devel
 detect-schema-change: genschema
@@ -531,7 +523,7 @@ detect-schema-change: genschema
 	diff -u -b reference-schema.json schema.json
 
 docker-compose-clean: awx/projects
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf
 
 docker-compose-container-group-clean:
 	@if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \
@@ -547,10 +539,8 @@ docker-compose-build:
 	    --cache-from=$(DEV_DOCKER_TAG_BASE)/awx_devel:$(COMPOSE_TAG) .
 
 docker-clean:
-	$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
-	if [ "$(shell docker images | grep awx_devel)" ]; then \
-	    docker images | grep awx_devel | awk '{print $$3}' | xargs docker rmi --force; \
-	fi
+	-$(foreach container_id,$(shell docker ps -f name=tools_awx -aq && docker ps -f name=tools_receptor -aq),docker stop $(container_id); docker rm -f $(container_id);)
+	-$(foreach image_id,$(shell docker images --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
 
 docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
 	docker volume rm -f tools_awx_db tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
@@ -559,10 +549,10 @@ docker-refresh: docker-clean docker-compose
 
 ## Docker Development Environment with Elastic Stack Connected
 docker-compose-elk: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
 
 docker-compose-cluster-elk: awx/projects docker-compose-sources
-	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
+	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
 
 docker-compose-container-group:
 	MINIKUBE_CONTAINER_GROUP=true make docker-compose
@@ -584,6 +574,7 @@ VERSION:
 PYTHON_VERSION:
 	@echo "$(PYTHON)" | sed 's:python::'
 
+.PHONY: Dockerfile
 Dockerfile: tools/ansible/roles/dockerfile/templates/Dockerfile.j2
 	ansible-playbook tools/ansible/dockerfile.yml -e receptor_image=$(RECEPTOR_IMAGE)
 
@@ -664,3 +655,8 @@ help/generate:
 	} \
 	{ lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
 	@printf "\n"
+
+## Display help for ui-next targets
+help/ui-next:
+	@make -s help MAKEFILE_LIST="awx/ui_next/Makefile"
+
```
### awx/api/conf.py

```diff
@@ -1,5 +1,4 @@
 # Django
-from django.conf import settings
 from django.utils.translation import gettext_lazy as _
 
 # Django REST Framework
@@ -9,6 +8,7 @@ from rest_framework import serializers
 from awx.conf import fields, register, register_validate
 from awx.api.fields import OAuth2ProviderField
 from oauth2_provider.settings import oauth2_settings
+from awx.sso.common import is_remote_auth_enabled
 
 
 register(
@@ -108,19 +108,8 @@ register(
 
 
 def authentication_validate(serializer, attrs):
-    remote_auth_settings = [
-        'AUTH_LDAP_SERVER_URI',
-        'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
-        'SOCIAL_AUTH_GITHUB_KEY',
-        'SOCIAL_AUTH_GITHUB_ORG_KEY',
-        'SOCIAL_AUTH_GITHUB_TEAM_KEY',
-        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
-        'RADIUS_SERVER',
-        'TACACSPLUS_HOST',
-    ]
-    if attrs.get('DISABLE_LOCAL_AUTH', False):
-        if not any(getattr(settings, s, None) for s in remote_auth_settings):
-            raise serializers.ValidationError(_("There are no remote authentication systems configured."))
+    if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
+        raise serializers.ValidationError(_("There are no remote authentication systems configured."))
     return attrs
```
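The validator now delegates the question "is any remote auth backend configured?" to `awx.sso.common.is_remote_auth_enabled` instead of keeping a settings list inline. A minimal sketch of how such a helper can be written, assuming it scans roughly the same settings the deleted list named (the real body lives in `awx/sso/common.py` and may differ):

```python
# Sketch only: the shipped helper is awx.sso.common.is_remote_auth_enabled;
# this illustration assumes it checks a list of remote-auth settings like the
# one the hunk above removes.
from django.conf import settings

REMOTE_AUTH_SETTINGS = (
    'AUTH_LDAP_SERVER_URI',
    'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
    'SOCIAL_AUTH_GITHUB_KEY',
    'SOCIAL_AUTH_SAML_ENABLED_IDPS',
    'RADIUS_SERVER',
    'TACACSPLUS_HOST',
)


def is_remote_auth_enabled():
    # True as soon as any remote-auth backend has a non-empty configuration.
    return any(getattr(settings, name, None) for name in REMOTE_AUTH_SETTINGS)
```

Centralizing the check means a new backend only has to be registered once, rather than in every validator that cares about remote authentication.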
### awx/api/filters.py

```diff
@@ -155,7 +155,7 @@ class FieldLookupBackend(BaseFilterBackend):
         'search',
     )
 
-    # A list of fields that we know can be filtered on without the possiblity
+    # A list of fields that we know can be filtered on without the possibility
     # of introducing duplicates
     NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)
 
@@ -268,7 +268,7 @@ class FieldLookupBackend(BaseFilterBackend):
                 continue
 
             # HACK: make `created` available via API for the Django User ORM model
-            # so it keep compatiblity with other objects which exposes the `created` attr.
+            # so it keep compatibility with other objects which exposes the `created` attr.
             if queryset.model._meta.object_name == 'User' and key.startswith('created'):
                 key = key.replace('created', 'date_joined')
```
### awx/api/generics.py

```diff
@@ -28,7 +28,7 @@ from rest_framework import generics
 from rest_framework.response import Response
 from rest_framework import status
 from rest_framework import views
-from rest_framework.permissions import AllowAny
+from rest_framework.permissions import IsAuthenticated
 from rest_framework.renderers import StaticHTMLRenderer
 from rest_framework.negotiation import DefaultContentNegotiation
 
@@ -674,7 +674,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
         location = None
         created = True
 
-        # Retrive the sub object (whether created or by ID).
+        # Retrieve the sub object (whether created or by ID).
         sub = get_object_or_400(self.model, pk=sub_id)
 
         # Verify we have permission to attach.
@@ -822,7 +822,7 @@ def trigger_delayed_deep_copy(*args, **kwargs):
 
 class CopyAPIView(GenericAPIView):
     serializer_class = CopySerializer
-    permission_classes = (AllowAny,)
+    permission_classes = (IsAuthenticated,)
     copy_return_serializer_class = None
     new_in_330 = True
     new_in_api_v2 = True
```
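The `CopyAPIView` change closes a gap where the copy endpoint accepted unauthenticated requests before any object-level checks ran. A hedged illustration with the `requests` library (URL, credentials and the template id are placeholders, and the exact status codes depend on the configured authentication classes):

```python
# Illustration only: assumes an AWX instance at this host and a copyable
# resource at id 42; none of these values come from the diff itself.
import requests

url = "https://awx.example.org/api/v2/job_templates/42/copy/"

# Anonymous call: with permission_classes = (IsAuthenticated,), DRF rejects
# the request up front (typically 401, or 403 under session authentication).
print(requests.post(url, json={"name": "copy of jt"}).status_code)

# Authenticated call: object-level copy permissions still apply afterwards.
print(requests.post(url, json={"name": "copy of jt"}, auth=("admin", "password")).status_code)
```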
### awx/api/renderers.py

```diff
@@ -60,7 +60,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
             delattr(renderer_context['view'], '_request')
 
     def get_raw_data_form(self, data, view, method, request):
-        # Set a flag on the view to indiciate to the view/serializer that we're
+        # Set a flag on the view to indicate to the view/serializer that we're
         # creating a raw data form for the browsable API. Store the original
         # request method to determine how to populate the raw data form.
         if request.method in {'OPTIONS', 'DELETE'}:
```
### awx/api/serializers.py

```diff
@@ -8,6 +8,7 @@ import logging
 import re
 from collections import OrderedDict
 from datetime import timedelta
+from uuid import uuid4
 
 # OAuth2
 from oauthlib import oauth2
@@ -108,13 +109,15 @@ from awx.main.utils import (
     extract_ansible_vars,
     encrypt_dict,
     prefetch_page_capabilities,
-    get_external_account,
     truncate_stdout,
+    get_licenser,
 )
 from awx.main.utils.filters import SmartFilter
 from awx.main.utils.named_url_graph import reset_counters
+from awx.main.scheduler.task_manager_models import TaskManagerModels
 from awx.main.redact import UriCleaner, REPLACE_STR
+from awx.main.signals import update_inventory_computed_fields
 
 
 from awx.main.validators import vars_validate_or_raise
@@ -124,6 +127,8 @@ from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, Ver
 # AWX Utils
 from awx.api.validators import HostnameRegexValidator
 
+from awx.sso.common import get_external_account
+
 logger = logging.getLogger('awx.api.serializers')
 
 # Fields that should be summarized regardless of object type.
```
```diff
@@ -155,7 +160,7 @@ SUMMARIZABLE_FK_FIELDS = {
     'default_environment': DEFAULT_SUMMARY_FIELDS + ('image',),
     'execution_environment': DEFAULT_SUMMARY_FIELDS + ('image',),
     'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type', 'allow_override'),
-    'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'),
+    'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type', 'allow_override'),
     'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed'),
     'credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'kubernetes', 'credential_type_id'),
     'signature_validation_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'credential_type_id'),
@@ -536,7 +541,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
         #
         # This logic is to force rendering choice's on an uneditable field.
         # Note: Consider expanding this rendering for more than just choices fields
-        # Note: This logic works in conjuction with
+        # Note: This logic works in conjunction with
         if hasattr(model_field, 'choices') and model_field.choices:
             was_editable = model_field.editable
             model_field.editable = True
```
```diff
@@ -987,23 +992,8 @@ class UserSerializer(BaseSerializer):
     def _update_password(self, obj, new_password):
         # For now we're not raising an error, just not saving password for
         # users managed by LDAP who already have an unusable password set.
-        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
-            try:
-                if obj.pk and obj.profile.ldap_dn and not obj.has_usable_password():
-                    new_password = None
-            except AttributeError:
-                pass
-        if (
-            getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
-        ) and obj.social_auth.all():
-            new_password = None
-        if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and obj.enterprise_auth.all():
-            new_password = None
-        if new_password:
+        # Get external password will return something like ldap or enterprise or None if the user isn't external. We only want to allow a password update for a None option
+        if new_password and not self.get_external_account(obj):
             obj.set_password(new_password)
             obj.save(update_fields=['password'])
```
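The rewrite replaces three backend-specific guards with a single question: is this user's account managed externally at all? The helper (imported from `awx.sso.common` earlier in this diff) returns a label for externally managed users and `None` for local ones, and only the `None` case may change a password. A sketch of that contract, with illustrative return values:

```python
# Hypothetical sketch of the classifier contract relied on above; the real
# implementation lives in awx.sso.common and its checks may differ.
def get_external_account(user):
    if getattr(getattr(user, "profile", None), "ldap_dn", ""):
        return "ldap"        # managed by an LDAP server
    if user.social_auth.exists():
        return "social"      # managed by a social-auth backend
    if user.enterprise_auth.exists():
        return "enterprise"  # managed by RADIUS or TACACS+
    return None              # local account: password updates are allowed
```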
```diff
@@ -1867,7 +1857,7 @@ class HostSerializer(BaseSerializerWithVariables):
             vars_dict = parse_yaml_or_json(variables)
             vars_dict['ansible_ssh_port'] = port
             attrs['variables'] = json.dumps(vars_dict)
-        if Group.objects.filter(name=name, inventory=inventory).exists():
+        if inventory and Group.objects.filter(name=name, inventory=inventory).exists():
             raise serializers.ValidationError(_('A Group with that name already exists.'))
 
         return super(HostSerializer, self).validate(attrs)
```
```diff
@@ -1959,6 +1949,130 @@ class GroupSerializer(BaseSerializerWithVariables):
         return ret
 
 
+class BulkHostSerializer(HostSerializer):
+    class Meta:
+        model = Host
+        fields = (
+            'name',
+            'enabled',
+            'instance_id',
+            'description',
+            'variables',
+        )
+
+
+class BulkHostCreateSerializer(serializers.Serializer):
+    inventory = serializers.PrimaryKeyRelatedField(
+        queryset=Inventory.objects.all(), required=True, write_only=True, help_text=_('Primary Key ID of inventory to add hosts to.')
+    )
+    hosts = serializers.ListField(
+        child=BulkHostSerializer(),
+        allow_empty=False,
+        max_length=100000,
+        write_only=True,
+        help_text=_('List of hosts to be created, JSON. e.g. [{"name": "example.com"}, {"name": "127.0.0.1"}]'),
+    )
+
+    class Meta:
+        model = Inventory
+        fields = ('inventory', 'hosts')
+        read_only_fields = ()
+
+    def raise_if_host_counts_violated(self, attrs):
+        validation_info = get_licenser().validate()
+
+        org = attrs['inventory'].organization
+
+        if org:
+            org_active_count = Host.objects.org_active_count(org.id)
+            new_hosts = [h['name'] for h in attrs['hosts']]
+            org_net_new_host_count = len(new_hosts) - Host.objects.filter(inventory__organization=1, name__in=new_hosts).values('name').distinct().count()
+            if org.max_hosts > 0 and org_active_count + org_net_new_host_count > org.max_hosts:
+                raise PermissionDenied(
+                    _(
+                        "You have already reached the maximum number of %s hosts"
+                        " allowed for your organization. Contact your System Administrator"
+                        " for assistance." % org.max_hosts
+                    )
+                )
+
+        # Don't check license if it is open license
+        if validation_info.get('license_type', 'UNLICENSED') == 'open':
+            return
+
+        sys_free_instances = validation_info.get('free_instances', 0)
+        system_net_new_host_count = Host.objects.exclude(name__in=new_hosts).count()
+
+        if system_net_new_host_count > sys_free_instances:
+            hard_error = validation_info.get('trial', False) is True or validation_info['instance_count'] == 10
+            if hard_error:
+                # Only raise permission error for trial, otherwise just log a warning as we do in other inventory import situations
+                raise PermissionDenied(_("Host count exceeds available instances."))
+            logger.warning(_("Number of hosts allowed by license has been exceeded."))
+
+    def validate(self, attrs):
+        request = self.context.get('request', None)
+        inv = attrs['inventory']
+        if inv.kind != '':
+            raise serializers.ValidationError(_('Hosts can only be created in manual inventories (not smart or constructed types).'))
+        if len(attrs['hosts']) > settings.BULK_HOST_MAX_CREATE:
+            raise serializers.ValidationError(_('Number of hosts exceeds system setting BULK_HOST_MAX_CREATE'))
+        if request and not request.user.is_superuser:
+            if request.user not in inv.admin_role:
+                raise serializers.ValidationError(_(f'Inventory with id {inv.id} not found or lack permissions to add hosts.'))
+        current_hostnames = set(inv.hosts.values_list('name', flat=True))
+        new_names = [host['name'] for host in attrs['hosts']]
+        duplicate_new_names = [n for n in new_names if n in current_hostnames or new_names.count(n) > 1]
+        if duplicate_new_names:
+            raise serializers.ValidationError(_(f'Hostnames must be unique in an inventory. Duplicates found: {duplicate_new_names}'))
+
+        self.raise_if_host_counts_violated(attrs)
+
+        _now = now()
+        for host in attrs['hosts']:
+            host['created'] = _now
+            host['modified'] = _now
+            host['inventory'] = inv
+        return attrs
+
+    def create(self, validated_data):
+        # This assumes total_hosts is up to date, and it can get out of date if the inventory computed fields have not been updated lately.
+        # If we wanted to side step this we could query Hosts.objects.filter(inventory...)
+        old_total_hosts = validated_data['inventory'].total_hosts
+        result = [Host(**attrs) for attrs in validated_data['hosts']]
+        try:
+            Host.objects.bulk_create(result)
+        except Exception as e:
+            raise serializers.ValidationError({"detail": _(f"cannot create host, host creation error {e}")})
+        new_total_hosts = old_total_hosts + len(result)
+        request = self.context.get('request', None)
+        changes = {'total_hosts': [old_total_hosts, new_total_hosts]}
+        activity_entry = ActivityStream.objects.create(
+            operation='update',
+            object1='inventory',
+            changes=json.dumps(changes),
+            actor=request.user,
+        )
+        activity_entry.inventory.add(validated_data['inventory'])
+
+        # This actually updates the cached "total_hosts" field on the inventory
+        update_inventory_computed_fields.delay(validated_data['inventory'].id)
+        return_keys = [k for k in BulkHostSerializer().fields.keys()] + ['id']
+        return_data = {}
+        host_data = []
+        for r in result:
+            item = {k: getattr(r, k) for k in return_keys}
+            if not settings.IS_TESTING_MODE:
+                # sqlite acts different with bulk_create -- it doesn't return the id of the objects
+                # to get it, you have to do an additional query, which is not useful for our tests
+                item['url'] = reverse('api:host_detail', kwargs={'pk': r.id})
+            item['inventory'] = reverse('api:inventory_detail', kwargs={'pk': validated_data['inventory'].id})
+            host_data.append(item)
+        return_data['url'] = reverse('api:inventory_detail', kwargs={'pk': validated_data['inventory'].id})
+        return_data['hosts'] = host_data
+        return return_data
+
+
 class GroupTreeSerializer(GroupSerializer):
     children = serializers.SerializerMethodField()
 
```
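One detail worth noting in the duplicate check above: `new_names.count(n)` rescans the whole list for every host, which is quadratic in the payload size (and the endpoint accepts up to 100,000 hosts). An equivalent linear-time formulation with `collections.Counter`, shown as a sketch rather than what the diff ships:

```python
# Sketch: linear-time duplicate detection for a bulk host payload.
from collections import Counter


def find_duplicates(new_names, current_hostnames):
    counts = Counter(new_names)  # one pass to count every requested name
    return [
        name
        for name in new_names
        if name in current_hostnames or counts[name] > 1  # dict lookups only
    ]


print(find_duplicates(["a", "b", "a"], {"c"}))  # ['a', 'a']
```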
```diff
@@ -2014,6 +2128,7 @@ class InventorySourceOptionsSerializer(BaseSerializer):
             'source',
             'source_path',
             'source_vars',
+            'scm_branch',
             'credential',
             'enabled_var',
             'enabled_value',
@@ -2178,10 +2293,14 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
            if ('source' in attrs or 'source_project' in attrs) and get_field_from_model_or_attrs('source_project') is None:
                 raise serializers.ValidationError({"source_project": _("Project required for scm type sources.")})
         else:
-            redundant_scm_fields = list(filter(lambda x: attrs.get(x, None), ['source_project', 'source_path']))
+            redundant_scm_fields = list(filter(lambda x: attrs.get(x, None), ['source_project', 'source_path', 'scm_branch']))
             if redundant_scm_fields:
                 raise serializers.ValidationError({"detail": _("Cannot set %s if not SCM type." % ' '.join(redundant_scm_fields))})
 
+        project = get_field_from_model_or_attrs('source_project')
+        if get_field_from_model_or_attrs('scm_branch') and not project.allow_override:
+            raise serializers.ValidationError({'scm_branch': _('Project does not allow overriding branch.')})
+
         attrs = super(InventorySourceSerializer, self).validate(attrs)
 
         # Check type consistency of source and cloud credential, if provided
```
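For API clients the net effect is that `scm_branch` becomes settable on SCM inventory sources, but only when the source project has `allow_override` enabled. A sketch of creating such a source (host, token and ids are placeholders, not values from the diff):

```python
# Sketch: create an SCM inventory source pinned to a branch. The referenced
# project must have allow_override=True or the serializer rejects scm_branch.
import requests

resp = requests.post(
    "https://awx.example.org/api/v2/inventory_sources/",
    headers={"Authorization": "Bearer <token>"},
    json={
        "name": "nightly inventory",
        "inventory": 2,
        "source": "scm",
        "source_project": 5,
        "source_path": "inventories/prod.ini",
        "scm_branch": "release_21.14",
    },
)
print(resp.status_code, resp.json())
```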
```diff
@@ -3997,7 +4116,7 @@ class JobEventSerializer(BaseSerializer):
         # Show full stdout for playbook_on_* events.
         if obj and obj.event.startswith('playbook_on'):
             return data
-        # If the view logic says to not trunctate (request was to the detail view or a param was used)
+        # If the view logic says to not truncate (request was to the detail view or a param was used)
         if self.context.get('no_truncate', False):
             return data
         max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
@@ -4028,7 +4147,7 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
         # raw SCM URLs in their stdout (which *could* contain passwords)
         # attempt to detect and filter HTTP basic auth passwords in the stdout
         # of these types of events
-        if obj.event_data.get('task_action') in ('git', 'svn'):
+        if obj.event_data.get('task_action') in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
             try:
                 return json.loads(UriCleaner.remove_sensitive(json.dumps(obj.event_data)))
             except Exception:
@@ -4072,7 +4191,7 @@ class AdHocCommandEventSerializer(BaseSerializer):
 
     def to_representation(self, obj):
         data = super(AdHocCommandEventSerializer, self).to_representation(obj)
-        # If the view logic says to not trunctate (request was to the detail view or a param was used)
+        # If the view logic says to not truncate (request was to the detail view or a param was used)
         if self.context.get('no_truncate', False):
             return data
         max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
```
```diff
@@ -4419,6 +4538,271 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
         return accepted
 
 
+class BulkJobNodeSerializer(WorkflowJobNodeSerializer):
+    # We don't do a PrimaryKeyRelatedField for unified_job_template and others, because that increases the number
+    # of database queries, rather we take them as integer and later convert them to objects in get_objectified_jobs
+    unified_job_template = serializers.IntegerField(
+        required=True, min_value=1, help_text=_('Primary key of the template for this job, can be a job template or inventory source.')
+    )
+    inventory = serializers.IntegerField(required=False, min_value=1)
+    execution_environment = serializers.IntegerField(required=False, min_value=1)
+    # many-to-many fields
+    credentials = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)
+    labels = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)
+    # TODO: Use instance group role added via PR 13584(once merged), for now everything related to instance group is commented
+    # instance_groups = serializers.ListField(child=serializers.IntegerField(min_value=1), required=False)
+
+    class Meta:
+        model = WorkflowJobNode
+        fields = ('*', 'credentials', 'labels')  # m2m fields are not canonical for WJ nodes, TODO: add instance_groups once supported
+
+    def validate(self, attrs):
+        return super(LaunchConfigurationBaseSerializer, self).validate(attrs)
+
+    def get_validation_exclusions(self, obj=None):
+        ret = super().get_validation_exclusions(obj)
+        ret.extend(['unified_job_template', 'inventory', 'execution_environment'])
+        return ret
+
+
+class BulkJobLaunchSerializer(serializers.Serializer):
+    name = serializers.CharField(default='Bulk Job Launch', max_length=512, write_only=True, required=False, allow_blank=True)  # limited by max name of jobs
+    jobs = BulkJobNodeSerializer(
+        many=True,
+        allow_empty=False,
+        write_only=True,
+        max_length=100000,
+        help_text=_('List of jobs to be launched, JSON. e.g. [{"unified_job_template": 7}, {"unified_job_template": 10}]'),
+    )
+    description = serializers.CharField(write_only=True, required=False, allow_blank=False)
+    extra_vars = serializers.JSONField(write_only=True, required=False)
+    organization = serializers.PrimaryKeyRelatedField(
+        queryset=Organization.objects.all(),
+        required=False,
+        default=None,
+        allow_null=True,
+        write_only=True,
+        help_text=_('Inherit permissions from this organization. If not provided, a organization the user is a member of will be selected automatically.'),
+    )
+    inventory = serializers.PrimaryKeyRelatedField(queryset=Inventory.objects.all(), required=False, write_only=True)
+    limit = serializers.CharField(write_only=True, required=False, allow_blank=False)
+    scm_branch = serializers.CharField(write_only=True, required=False, allow_blank=False)
+    skip_tags = serializers.CharField(write_only=True, required=False, allow_blank=False)
+    job_tags = serializers.CharField(write_only=True, required=False, allow_blank=False)
+
+    class Meta:
+        model = WorkflowJob
+        fields = ('name', 'jobs', 'description', 'extra_vars', 'organization', 'inventory', 'limit', 'scm_branch', 'skip_tags', 'job_tags')
+        read_only_fields = ()
+
+    def validate(self, attrs):
+        request = self.context.get('request', None)
+        identifiers = set()
+        if len(attrs['jobs']) > settings.BULK_JOB_MAX_LAUNCH:
+            raise serializers.ValidationError(_('Number of requested jobs exceeds system setting BULK_JOB_MAX_LAUNCH'))
+
+        for node in attrs['jobs']:
+            if 'identifier' in node:
+                if node['identifier'] in identifiers:
+                    raise serializers.ValidationError(_(f"Identifier {node['identifier']} not unique"))
+                identifiers.add(node['identifier'])
+            else:
+                node['identifier'] = str(uuid4())
+
+        requested_ujts = {j['unified_job_template'] for j in attrs['jobs']}
+        requested_use_inventories = {job['inventory'] for job in attrs['jobs'] if 'inventory' in job}
+        requested_use_execution_environments = {job['execution_environment'] for job in attrs['jobs'] if 'execution_environment' in job}
+        requested_use_credentials = set()
+        requested_use_labels = set()
+        # requested_use_instance_groups = set()
+        for job in attrs['jobs']:
+            for cred in job.get('credentials', []):
+                requested_use_credentials.add(cred)
+            for label in job.get('labels', []):
+                requested_use_labels.add(label)
+            # for instance_group in job.get('instance_groups', []):
+            #     requested_use_instance_groups.add(instance_group)
+
+        key_to_obj_map = {
+            "unified_job_template": {obj.id: obj for obj in UnifiedJobTemplate.objects.filter(id__in=requested_ujts)},
+            "inventory": {obj.id: obj for obj in Inventory.objects.filter(id__in=requested_use_inventories)},
+            "credentials": {obj.id: obj for obj in Credential.objects.filter(id__in=requested_use_credentials)},
+            "labels": {obj.id: obj for obj in Label.objects.filter(id__in=requested_use_labels)},
+            # "instance_groups": {obj.id: obj for obj in InstanceGroup.objects.filter(id__in=requested_use_instance_groups)},
+            "execution_environment": {obj.id: obj for obj in ExecutionEnvironment.objects.filter(id__in=requested_use_execution_environments)},
+        }
+
+        ujts = {}
+        for ujt in key_to_obj_map['unified_job_template'].values():
+            ujts.setdefault(type(ujt), [])
+            ujts[type(ujt)].append(ujt)
+
+        unallowed_types = set(ujts.keys()) - set([JobTemplate, Project, InventorySource, WorkflowJobTemplate])
+        if unallowed_types:
+            type_names = ' '.join([cls._meta.verbose_name.title() for cls in unallowed_types])
+            raise serializers.ValidationError(_("Template types {type_names} not allowed in bulk jobs").format(type_names=type_names))
+
+        for model, obj_list in ujts.items():
+            role_field = 'execute_role' if issubclass(model, (JobTemplate, WorkflowJobTemplate)) else 'update_role'
+            self.check_list_permission(model, set([obj.id for obj in obj_list]), role_field)
+
+        self.check_organization_permission(attrs, request)
+
+        if 'inventory' in attrs:
+            requested_use_inventories.add(attrs['inventory'].id)
+
+        self.check_list_permission(Inventory, requested_use_inventories, 'use_role')
+
+        self.check_list_permission(Credential, requested_use_credentials, 'use_role')
+        self.check_list_permission(Label, requested_use_labels)
+        # self.check_list_permission(InstanceGroup, requested_use_instance_groups)  # TODO: change to use_role for conflict
+        self.check_list_permission(ExecutionEnvironment, requested_use_execution_environments)  # TODO: change if roles introduced
+
+        jobs_object = self.get_objectified_jobs(attrs, key_to_obj_map)
+
+        attrs['jobs'] = jobs_object
+        if 'extra_vars' in attrs:
+            extra_vars_dict = parse_yaml_or_json(attrs['extra_vars'])
+            attrs['extra_vars'] = json.dumps(extra_vars_dict)
+        attrs = super().validate(attrs)
+        return attrs
+
+    def check_list_permission(self, model, id_list, role_field=None):
+        if not id_list:
+            return
+        user = self.context['request'].user
+        if role_field is None:  # implies "read" level permission is required
+            access_qs = user.get_queryset(model)
+        else:
+            access_qs = model.accessible_objects(user, role_field)
+
+        not_allowed = set(id_list) - set(access_qs.filter(id__in=id_list).values_list('id', flat=True))
+        if not_allowed:
+            raise serializers.ValidationError(
+                _("{model_name} {not_allowed} not found or you don't have permissions to access it").format(
+                    model_name=model._meta.verbose_name_plural.title(), not_allowed=not_allowed
+                )
+            )
+
+    def create(self, validated_data):
+        request = self.context.get('request', None)
+        launch_user = request.user if request else None
+        job_node_data = validated_data.pop('jobs')
+        wfj_deferred_attr_names = ('skip_tags', 'limit', 'job_tags')
+        wfj_deferred_vals = {}
+        for item in wfj_deferred_attr_names:
+            wfj_deferred_vals[item] = validated_data.pop(item, None)
+
+        wfj = WorkflowJob.objects.create(**validated_data, is_bulk_job=True, launch_type='manual', created_by=launch_user)
+        for key, val in wfj_deferred_vals.items():
+            if val:
+                setattr(wfj, key, val)
+        nodes = []
+        node_m2m_objects = {}
+        node_m2m_object_types_to_through_model = {
+            'credentials': WorkflowJobNode.credentials.through,
+            'labels': WorkflowJobNode.labels.through,
+            # 'instance_groups': WorkflowJobNode.instance_groups.through,
+        }
+        node_deferred_attr_names = (
+            'limit',
+            'scm_branch',
+            'verbosity',
+            'forks',
+            'diff_mode',
+            'job_tags',
+            'job_type',
+            'skip_tags',
+            'job_slice_count',
+            'timeout',
+        )
+        node_deferred_attrs = {}
+        for node_attrs in job_node_data:
+            # we need to add any m2m objects after creation via the through model
+            node_m2m_objects[node_attrs['identifier']] = {}
+            node_deferred_attrs[node_attrs['identifier']] = {}
+            for item in node_m2m_object_types_to_through_model.keys():
+                if item in node_attrs:
+                    node_m2m_objects[node_attrs['identifier']][item] = node_attrs.pop(item)
+
+            # Some attributes are not accepted by WorkflowJobNode __init__, we have to set them after
+            for item in node_deferred_attr_names:
+                if item in node_attrs:
+                    node_deferred_attrs[node_attrs['identifier']][item] = node_attrs.pop(item)
+
+            # Create the node objects
+            node_obj = WorkflowJobNode(workflow_job=wfj, created=wfj.created, modified=wfj.modified, **node_attrs)
+
+            # we can set the deferred attrs now
+            for item, value in node_deferred_attrs[node_attrs['identifier']].items():
+                setattr(node_obj, item, value)
+
+            # the node is now ready to be bulk created
+            nodes.append(node_obj)
+
+            # we'll need this later when we do the m2m through model bulk create
+            node_m2m_objects[node_attrs['identifier']]['node'] = node_obj
+
+        WorkflowJobNode.objects.bulk_create(nodes)
+
+        # Deal with the m2m objects we have to create once the node exists
+        for field_name, through_model in node_m2m_object_types_to_through_model.items():
+            through_model_objects = []
+            for node_identifier in node_m2m_objects.keys():
+                if field_name in node_m2m_objects[node_identifier] and field_name == 'credentials':
+                    for cred in node_m2m_objects[node_identifier][field_name]:
+                        through_model_objects.append(through_model(credential=cred, workflowjobnode=node_m2m_objects[node_identifier]['node']))
+                if field_name in node_m2m_objects[node_identifier] and field_name == 'labels':
+                    for label in node_m2m_objects[node_identifier][field_name]:
+                        through_model_objects.append(through_model(label=label, workflowjobnode=node_m2m_objects[node_identifier]['node']))
+                # if obj_type in node_m2m_objects[node_identifier] and obj_type == 'instance_groups':
+                #     for instance_group in node_m2m_objects[node_identifier][obj_type]:
+                #         through_model_objects.append(through_model(instancegroup=instance_group, workflowjobnode=node_m2m_objects[node_identifier]['node']))
+            if through_model_objects:
+                through_model.objects.bulk_create(through_model_objects)
+
+        wfj.save()
+        wfj.signal_start()
+
+        return WorkflowJobSerializer().to_representation(wfj)
+
+    def check_organization_permission(self, attrs, request):
+        # validate Organization
+        # - If the orgs is not set, set it to the org of the launching user
+        # - If the user is part of multiple orgs, throw a validation error saying user is part of multiple orgs, please provide one
+        if not request.user.is_superuser:
+            read_org_qs = Organization.accessible_objects(request.user, 'member_role')
+            if 'organization' not in attrs or attrs['organization'] == None or attrs['organization'] == '':
+                read_org_ct = read_org_qs.count()
+                if read_org_ct == 1:
+                    attrs['organization'] = read_org_qs.first()
+                elif read_org_ct > 1:
+                    raise serializers.ValidationError("User has permission to multiple Organizations, please set one of them in the request")
+                else:
+                    raise serializers.ValidationError("User not part of any organization, please assign an organization to assign to the bulk job")
+            else:
+                allowed_orgs = set(read_org_qs.values_list('id', flat=True))
+                requested_org = attrs['organization']
+                if requested_org.id not in allowed_orgs:
+                    raise ValidationError(_(f"Organization {requested_org.id} not found or you don't have permissions to access it"))
+
+    def get_objectified_jobs(self, attrs, key_to_obj_map):
+        objectified_jobs = []
+        # This loop is generalized so we should only have to add related items to the key_to_obj_map
+        for job in attrs['jobs']:
+            objectified_job = {}
+            for key, value in job.items():
+                if key in key_to_obj_map:
+                    if isinstance(value, int):
+                        objectified_job[key] = key_to_obj_map[key][value]
+                    elif isinstance(value, list):
+                        objectified_job[key] = [key_to_obj_map[key][item] for item in value]
+                else:
+                    objectified_job[key] = value
+            objectified_jobs.append(objectified_job)
+        return objectified_jobs
+
+
 class NotificationTemplateSerializer(BaseSerializer):
     show_capabilities = ['edit', 'delete', 'copy']
     capabilities_prefetch = [{'copy': 'organization.admin'}]
```
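As the comment at the top of `BulkJobNodeSerializer` explains, the serializer deliberately takes plain integers and resolves them itself: `validate()` gathers every referenced id first, then fetches each model with a single `filter(id__in=...)` query, where a `PrimaryKeyRelatedField` on up to 100,000 nodes would issue per-field lookups. The pattern in isolation (a generic sketch, not AWX code):

```python
# Sketch of the batched id-to-object resolution used above, with a generic
# Django model: one query per model class, however many ids are referenced.
def build_obj_map(model, ids):
    return {obj.id: obj for obj in model.objects.filter(id__in=ids)}


def resolve(requested_ids, obj_map):
    missing = set(requested_ids) - obj_map.keys()
    if missing:
        # Analogous to the serializer's "not found or no permission" error.
        raise ValueError(f"unknown ids: {sorted(missing)}")
    return [obj_map[i] for i in requested_ids]
```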
```diff
@@ -4765,7 +5149,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
         ),
     )
     until = serializers.SerializerMethodField(
-        help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an emptry string will be returned'),
+        help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an empty string will be returned'),
     )
 
     class Meta:
@@ -5087,6 +5471,8 @@ class InstanceGroupSerializer(BaseSerializer):
         res = super(InstanceGroupSerializer, self).get_related(obj)
         res['jobs'] = self.reverse('api:instance_group_unified_jobs_list', kwargs={'pk': obj.pk})
         res['instances'] = self.reverse('api:instance_group_instance_list', kwargs={'pk': obj.pk})
+        res['access_list'] = self.reverse('api:instance_group_access_list', kwargs={'pk': obj.pk})
+        res['object_roles'] = self.reverse('api:instance_group_object_role_list', kwargs={'pk': obj.pk})
         if obj.credential:
             res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential_id})
```
### API v1 root view template (deleted file)

```diff
@@ -1,4 +0,0 @@
-Version 1 of the Ansible Tower REST API.
-
-Make a GET request to this resource to obtain a list of all child resources
-available via the API.
```
### Config view template

```diff
@@ -7,10 +7,12 @@ the following fields (some fields may not be visible to all users):
 * `project_base_dir`: Path on the server where projects and playbooks are \
   stored.
 * `project_local_paths`: List of directories beneath `project_base_dir` to
-  use when creating/editing a project.
+  use when creating/editing a manual project.
 * `time_zone`: The configured time zone for the server.
 * `license_info`: Information about the current license.
 * `version`: Version of Ansible Tower package installed.
+* `custom_virtualenvs`: Deprecated venv locations from before migration to
+  execution environments. Export tooling is in `awx-manage` commands.
 * `eula`: The current End-User License Agreement
 {% endifmeth %}
```
### awx/api/templates/api/bulk_host_create_view.md (new file, 41 lines)

```diff
@@ -0,0 +1,41 @@
+# Bulk Host Create
+
+This endpoint allows the client to create multiple hosts and associate them with an inventory. They may do this by providing the inventory ID and a list of json that would normally be provided to create hosts.
+
+Example:
+
+    {
+        "inventory": 1,
+        "hosts": [
+            {"name": "example1.com", "variables": "ansible_connection: local"},
+            {"name": "example2.com"}
+        ]
+    }
+
+Return data:
+
+    {
+        "url": "/api/v2/inventories/3/hosts/",
+        "hosts": [
+            {
+                "name": "example1.com",
+                "enabled": true,
+                "instance_id": "",
+                "description": "",
+                "variables": "ansible_connection: local",
+                "id": 1255,
+                "url": "/api/v2/hosts/1255/",
+                "inventory": "/api/v2/inventories/3/"
+            },
+            {
+                "name": "example2.com",
+                "enabled": true,
+                "instance_id": "",
+                "description": "",
+                "variables": "",
+                "id": 1256,
+                "url": "/api/v2/hosts/1256/",
+                "inventory": "/api/v2/inventories/3/"
+            }
+        ]
+    }
```
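A quick way to exercise the endpoint from Python (a sketch: host, token and the inventory id are placeholders, and token auth is only one of the supported schemes):

```python
# Sketch: POST to the bulk host create endpoint documented above.
import requests

resp = requests.post(
    "https://awx.example.org/api/v2/bulk/host_create/",
    headers={"Authorization": "Bearer <token>"},
    json={
        "inventory": 1,
        "hosts": [
            {"name": "example1.com", "variables": "ansible_connection: local"},
            {"name": "example2.com"},
        ],
    },
)
resp.raise_for_status()
print([h["id"] for h in resp.json()["hosts"]])  # ids of the created hosts
```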
### awx/api/templates/api/bulk_job_launch_view.md (new file, 13 lines)

```diff
@@ -0,0 +1,13 @@
+# Bulk Job Launch
+
+This endpoint allows the client to launch multiple UnifiedJobTemplates at a time, along side any launch time parameters that they would normally set at launch time.
+
+Example:
+
+    {
+        "name": "my bulk job",
+        "jobs": [
+            {"unified_job_template": 7, "inventory": 2},
+            {"unified_job_template": 7, "credentials": [3]}
+        ]
+    }
```
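And the companion call for bulk job launch (again a sketch with placeholder host, token and template ids). Per the serializer added earlier in this diff, the response describes the workflow job that wraps the requested jobs:

```python
# Sketch: launch several unified job templates with a single request.
import requests

resp = requests.post(
    "https://awx.example.org/api/v2/bulk/job_launch/",
    headers={"Authorization": "Bearer <token>"},
    json={
        "name": "my bulk job",
        "jobs": [
            {"unified_job_template": 7, "inventory": 2},
            {"unified_job_template": 7, "credentials": [3]},
        ],
    },
)
resp.raise_for_status()
print(resp.json()["id"], resp.json()["status"])  # the wrapping workflow job
```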
### awx/api/templates/api/bulk_view.md (new file, 3 lines)

```diff
@@ -0,0 +1,3 @@
+# Bulk Actions
+
+This endpoint lists available bulk action APIs.
```
### Inventory graph view template

```diff
@@ -3,7 +3,7 @@ Make a GET request to this resource to retrieve aggregate statistics about inventories
 Including fetching the number of total hosts tracked by Tower over an amount of time and the current success or
 failed status of hosts which have run jobs within an Inventory.
 
-## Parmeters and Filtering
+## Parameters and Filtering
 
 The `period` of the data can be adjusted with:
 
@@ -24,7 +24,7 @@ Data about the number of hosts will be returned in the following format:
 Each element contains an epoch timestamp represented in seconds and a numerical value indicating
 the number of hosts that exist at a given moment
 
-Data about failed and successfull hosts by inventory will be given as:
+Data about failed and successful hosts by inventory will be given as:
 
     {
        "sources": [
```
### Job graph view template

```diff
@@ -2,7 +2,7 @@
 
 Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.
 
-## Parmeters and Filtering
+## Parameters and Filtering
 
 The `period` of the data can be adjusted with:
 
```
### Host fact scan view template (deleted file)

```diff
@@ -1,11 +0,0 @@
-# List Fact Scans for a Host Specific Host Scan
-
-Make a GET request to this resource to retrieve system tracking data for a particular scan
-
-You may filter by datetime:
-
-`?datetime=2015-06-01`
-
-and module
-
-`?datetime=2015-06-01&module=ansible`
```
### Host fact scans by module and date template (deleted file)

```diff
@@ -1,11 +0,0 @@
-# List Fact Scans for a Host by Module and Date
-
-Make a GET request to this resource to retrieve system tracking scans by module and date/time
-
-You may filter scan runs using the `from` and `to` properties:
-
-`?from=2015-06-01%2012:00:00&to=2015-06-03`
-
-You may also filter by module
-
-`?module=packages`
```
### Host Insights view template (deleted file)

```diff
@@ -1 +0,0 @@
-# List Red Hat Insights for a Host
```
### Inventory sources update view template

```diff
@@ -18,7 +18,7 @@ inventory sources:
 * `inventory_update`: ID of the inventory update job that was started.
   (integer, read-only)
 * `project_update`: ID of the project update job that was started if this inventory source is an SCM source.
-  (interger, read-only, optional)
+  (integer, read-only, optional)
 
 Note: All manual inventory sources (source="") will be ignored by the update_inventory_sources endpoint. This endpoint will not update inventory sources for Smart Inventories.
```
### Job start view template (deleted file)

```diff
@@ -1,21 +0,0 @@
-{% ifmeth GET %}
-# Determine if a Job can be started
-
-Make a GET request to this resource to determine if the job can be started and
-whether any passwords are required to start the job. The response will include
-the following fields:
-
-* `can_start`: Flag indicating if this job can be started (boolean, read-only)
-* `passwords_needed_to_start`: Password names required to start the job (array,
-  read-only)
-{% endifmeth %}
-
-{% ifmeth POST %}
-# Start a Job
-Make a POST request to this resource to start the job. If any passwords are
-required, they must be passed via POST data.
-
-If successful, the response status code will be 202. If any required passwords
-are not provided, a 400 status code will be returned. If the job cannot be
-started, a 405 status code will be returned.
-{% endifmeth %}
```
### awx/api/urls/instance_group.py

```diff
@@ -3,7 +3,14 @@
 
 from django.urls import re_path
 
-from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList
+from awx.api.views import (
+    InstanceGroupList,
+    InstanceGroupDetail,
+    InstanceGroupUnifiedJobsList,
+    InstanceGroupInstanceList,
+    InstanceGroupAccessList,
+    InstanceGroupObjectRolesList,
+)
 
 
 urls = [
@@ -11,6 +18,8 @@ urls = [
     re_path(r'^(?P<pk>[0-9]+)/$', InstanceGroupDetail.as_view(), name='instance_group_detail'),
     re_path(r'^(?P<pk>[0-9]+)/jobs/$', InstanceGroupUnifiedJobsList.as_view(), name='instance_group_unified_jobs_list'),
     re_path(r'^(?P<pk>[0-9]+)/instances/$', InstanceGroupInstanceList.as_view(), name='instance_group_instance_list'),
+    re_path(r'^(?P<pk>[0-9]+)/access_list/$', InstanceGroupAccessList.as_view(), name='instance_group_access_list'),
+    re_path(r'^(?P<pk>[0-9]+)/object_roles/$', InstanceGroupObjectRolesList.as_view(), name='instance_group_object_role_list'),
 ]
 
 __all__ = ['urls']
```
### awx/api/urls/urls.py

```diff
@@ -31,6 +31,13 @@ from awx.api.views import (
     ApplicationOAuth2TokenList,
     OAuth2ApplicationDetail,
 )
 
+from awx.api.views.bulk import (
+    BulkView,
+    BulkHostCreateView,
+    BulkJobLaunchView,
+)
+
 from awx.api.views.mesh_visualizer import MeshVisualizer
 
 from awx.api.views.metrics import MetricsView
@@ -136,6 +143,9 @@ v2_urls = [
     re_path(r'^activity_stream/', include(activity_stream_urls)),
     re_path(r'^workflow_approval_templates/', include(workflow_approval_template_urls)),
     re_path(r'^workflow_approvals/', include(workflow_approval_urls)),
+    re_path(r'^bulk/$', BulkView.as_view(), name='bulk'),
+    re_path(r'^bulk/host_create/$', BulkHostCreateView.as_view(), name='bulk_host_create'),
+    re_path(r'^bulk/job_launch/$', BulkJobLaunchView.as_view(), name='bulk_job_launch'),
 ]
```
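Because the routes are registered with names, code running inside AWX can resolve them instead of hard-coding paths. A sketch, assuming a configured AWX Django environment (for example an `awx-manage shell` session):

```python
# Sketch: resolve the new bulk routes by name from within AWX's Django app.
from django.urls import reverse

print(reverse('api:bulk'))              # e.g. /api/v2/bulk/
print(reverse('api:bulk_host_create'))  # e.g. /api/v2/bulk/host_create/
print(reverse('api:bulk_job_launch'))   # e.g. /api/v2/bulk/job_launch/
```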
@@ -152,7 +152,7 @@ def api_exception_handler(exc, context):
|
||||
if 'awx.named_url_rewritten' in req.environ and not str(getattr(exc, 'status_code', 0)).startswith('2'):
|
||||
# if the URL was rewritten, and it's not a 2xx level status code,
|
||||
# revert the request.path to its original value to avoid leaking
|
||||
# any context about the existance of resources
|
||||
# any context about the existence of resources
|
||||
req.path = req.environ['awx.named_url_rewritten']
|
||||
if exc.status_code == 403:
|
||||
exc = NotFound(detail=_('Not found.'))
|
||||
@@ -172,7 +172,7 @@ class DashboardView(APIView):
|
||||
user_inventory = get_user_queryset(request.user, models.Inventory)
|
||||
inventory_with_failed_hosts = user_inventory.filter(hosts_with_active_failures__gt=0)
|
||||
user_inventory_external = user_inventory.filter(has_inventory_sources=True)
|
||||
# if there are *zero* inventories, this aggregrate query will be None, fall back to 0
|
||||
# if there are *zero* inventories, this aggregate query will be None, fall back to 0
|
||||
failed_inventory = user_inventory.aggregate(Sum('inventory_sources_with_failures'))['inventory_sources_with_failures__sum'] or 0
|
||||
data['inventories'] = {
|
||||
'url': reverse('api:inventory_list', request=request),
|
||||
@@ -466,6 +466,23 @@ class InstanceGroupUnifiedJobsList(SubListAPIView):
     relationship = "unifiedjob_set"


+class InstanceGroupAccessList(ResourceAccessList):
+    model = models.User  # needs to be User for AccessLists
+    parent_model = models.InstanceGroup
+
+
+class InstanceGroupObjectRolesList(SubListAPIView):
+    model = models.Role
+    serializer_class = serializers.RoleSerializer
+    parent_model = models.InstanceGroup
+    search_fields = ('role_field', 'content_type__model')
+
+    def get_queryset(self):
+        po = self.get_parent_object()
+        content_type = ContentType.objects.get_for_model(self.parent_model)
+        return models.Role.objects.filter(content_type=content_type, object_id=po.pk)
+
+
 class InstanceGroupInstanceList(InstanceGroupMembershipMixin, SubListAttachDetachAPIView):
     name = _("Instance Group's Instances")
     model = models.Instance
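The two new subresources can then be browsed like any other AWX access list or object-roles list; a hedged sketch (host, token, and the instance group id are illustrative):

```python
import requests

AWX = "https://awx.example.com"          # illustrative host
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical token
IG_ID = 1                                 # hypothetical instance group id

for suffix in ("access_list", "object_roles"):
    r = requests.get(f"{AWX}/api/v2/instance_groups/{IG_ID}/{suffix}/", headers=HEADERS)
    print(suffix, r.status_code, r.json().get("count"))
```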
@@ -1667,7 +1684,7 @@ class GroupList(ListCreateAPIView):

 class EnforceParentRelationshipMixin(object):
     """
-    Useful when you have a self-refering ManyToManyRelationship.
+    Useful when you have a self-referring ManyToManyRelationship.
     * Tower uses a shallow (2-deep only) url pattern. For example:

     When an object hangs off of a parent object you would have the url of the
@@ -2415,7 +2432,7 @@ class JobTemplateSurveySpec(GenericAPIView):
                     status=status.HTTP_400_BAD_REQUEST,
                 )
             # if it's a multiselect or multiple choice, it must have coices listed
-            # choices and defualts must come in as strings seperated by /n characters.
+            # choices and defaults must come in as strings separated by /n characters.
             if qtype == 'multiselect' or qtype == 'multiplechoice':
                 if 'choices' in survey_item:
                     if isinstance(survey_item['choices'], str):
@@ -3078,7 +3095,9 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
     search_fields = ('unified_job_template__name', 'unified_job_template__description')

     def get_queryset(self):
-        return super(WorkflowJobTemplateWorkflowNodesList, self).get_queryset().order_by('id')
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        return getattr(parent, self.relationship).order_by('id')


 class WorkflowJobTemplateJobsList(SubListAPIView):
@@ -3172,7 +3191,9 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
     search_fields = ('unified_job_template__name', 'unified_job_template__description')

     def get_queryset(self):
-        return super(WorkflowJobWorkflowNodesList, self).get_queryset().order_by('id')
+        parent = self.get_parent_object()
+        self.check_parent_access(parent)
+        return getattr(parent, self.relationship).order_by('id')


 class WorkflowJobCancel(GenericCancelView):
@@ -3430,7 +3451,7 @@ class JobCreateSchedule(RetrieveAPIView):

         config = obj.launch_config

-        # Make up a name for the schedule, guarentee that it is unique
+        # Make up a name for the schedule, guarantee that it is unique
         name = 'Auto-generated schedule from job {}'.format(obj.id)
         existing_names = models.Schedule.objects.filter(name__startswith=name).values_list('name', flat=True)
         if name in existing_names:
@@ -3621,7 +3642,7 @@ class JobJobEventsChildrenSummary(APIView):
         # key is counter of meta events (i.e. verbose), value is uuid of the assigned parent
         map_meta_counter_nested_uuid = {}

-        # collapsable tree view in the UI only makes sense for tree-like
+        # collapsible tree view in the UI only makes sense for tree-like
         # hierarchy. If ansible is ran with a strategy like free or host_pinned, then
         # events can be out of sequential order, and no longer follow a tree structure
         # E1
@@ -4288,7 +4309,7 @@ class WorkflowApprovalTemplateJobsList(SubListAPIView):
     parent_key = 'workflow_approval_template'


-class WorkflowApprovalList(ListCreateAPIView):
+class WorkflowApprovalList(ListAPIView):
     model = models.WorkflowApproval
     serializer_class = serializers.WorkflowApprovalListSerializer
69
awx/api/views/bulk.py
Normal file
@@ -0,0 +1,69 @@
+from collections import OrderedDict
+
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.renderers import JSONRenderer
+from rest_framework.reverse import reverse
+from rest_framework import status
+from rest_framework.response import Response
+
+from awx.main.models import UnifiedJob, Host
+from awx.api.generics import (
+    GenericAPIView,
+    APIView,
+)
+from awx.api import (
+    serializers,
+    renderers,
+)
+
+
+class BulkView(APIView):
+    permission_classes = [IsAuthenticated]
+    renderer_classes = [
+        renderers.BrowsableAPIRenderer,
+        JSONRenderer,
+    ]
+    allowed_methods = ['GET', 'OPTIONS']
+
+    def get(self, request, format=None):
+        '''List top level resources'''
+        data = OrderedDict()
+        data['host_create'] = reverse('api:bulk_host_create', request=request)
+        data['job_launch'] = reverse('api:bulk_job_launch', request=request)
+        return Response(data)
+
+
+class BulkJobLaunchView(GenericAPIView):
+    permission_classes = [IsAuthenticated]
+    model = UnifiedJob
+    serializer_class = serializers.BulkJobLaunchSerializer
+    allowed_methods = ['GET', 'POST', 'OPTIONS']
+
+    def get(self, request):
+        data = OrderedDict()
+        data['detail'] = "Specify a list of unified job templates to launch alongside their launchtime parameters"
+        return Response(data, status=status.HTTP_200_OK)
+
+    def post(self, request):
+        bulkjob_serializer = serializers.BulkJobLaunchSerializer(data=request.data, context={'request': request})
+        if bulkjob_serializer.is_valid():
+            result = bulkjob_serializer.create(bulkjob_serializer.validated_data)
+            return Response(result, status=status.HTTP_201_CREATED)
+        return Response(bulkjob_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+
+class BulkHostCreateView(GenericAPIView):
+    permission_classes = [IsAuthenticated]
+    model = Host
+    serializer_class = serializers.BulkHostCreateSerializer
+    allowed_methods = ['GET', 'POST', 'OPTIONS']
+
+    def get(self, request):
+        return Response({"detail": "Bulk create hosts with this endpoint"}, status=status.HTTP_200_OK)
+
+    def post(self, request):
+        serializer = serializers.BulkHostCreateSerializer(data=request.data, context={'request': request})
+        if serializer.is_valid():
+            result = serializer.create(serializer.validated_data)
+            return Response(result, status=status.HTTP_201_CREATED)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
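With the URL registrations above, these views hang off /api/v2/bulk/. A hedged usage sketch (host, token, and ids are illustrative; the payload shapes mirror the functional tests later in this changeset):

```python
import requests

AWX = "https://awx.example.com"          # illustrative host
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical token

# Create several hosts in one request (inventory id is hypothetical).
hosts = requests.post(
    f"{AWX}/api/v2/bulk/host_create/",
    headers=HEADERS,
    json={"inventory": 5, "hosts": [{"name": "web1"}, {"name": "web2"}]},
)
print(hosts.status_code)  # 201 on success, 400 with serializer errors otherwise

# Launch several unified job templates as one workflow-backed bulk job.
jobs = requests.post(
    f"{AWX}/api/v2/bulk/job_launch/",
    headers=HEADERS,
    json={"name": "Bulk Job Launch", "jobs": [{"unified_job_template": 7}, {"unified_job_template": 8}]},
)
print(jobs.status_code)
```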
@@ -121,6 +121,7 @@ class ApiVersionRootView(APIView):
         data['workflow_job_template_nodes'] = reverse('api:workflow_job_template_node_list', request=request)
         data['workflow_job_nodes'] = reverse('api:workflow_job_node_list', request=request)
         data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
+        data['bulk'] = reverse('api:bulk', request=request)
         return Response(data)
@@ -271,6 +272,9 @@ class ApiV2ConfigView(APIView):

         pendo_state = settings.PENDO_TRACKING_STATE if settings.PENDO_TRACKING_STATE in ('off', 'anonymous', 'detailed') else 'off'

+        # Guarding against settings.UI_NEXT being set to a non-boolean value
+        ui_next_state = settings.UI_NEXT if settings.UI_NEXT in (True, False) else False
+
         data = dict(
             time_zone=settings.TIME_ZONE,
             license_info=license_data,
@@ -279,6 +283,7 @@ class ApiV2ConfigView(APIView):
             analytics_status=pendo_state,
             analytics_collectors=all_collectors(),
             become_methods=PRIVILEGE_ESCALATION_METHODS,
+            ui_next=ui_next_state,
         )

         # If LDAP is enabled, user_ldap_fields will return a list of field
@@ -21,7 +21,7 @@ logger = logging.getLogger('awx.conf.fields')
 # Use DRF fields to convert/validate settings:
 # - to_representation(obj) should convert a native Python object to a primitive
 #   serializable type. This primitive type will be what is presented in the API
-#   and stored in the JSON field in the datbase.
+#   and stored in the JSON field in the database.
 # - to_internal_value(data) should convert the primitive type back into the
 #   appropriate Python type to be used in settings.
@@ -1,7 +1,11 @@
 import inspect

 from django.conf import settings
 from django.utils.timezone import now

+import logging
+
+logger = logging.getLogger('awx.conf.migrations')
+

 def fill_ldap_group_type_params(apps, schema_editor):
@@ -15,7 +19,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
         entry = qs[0]
         group_type_params = entry.value
     else:
-        entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())
+        return  # for new installs we prefer to use the default value

     init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:])
     for k in list(group_type_params.keys()):
@@ -23,4 +27,5 @@ def fill_ldap_group_type_params(apps, schema_editor):
             del group_type_params[k]

     entry.value = group_type_params
+    logger.warning(f'Migration updating AUTH_LDAP_GROUP_TYPE_PARAMS with value {entry.value}')
     entry.save()
25
awx/conf/tests/functional/test_migrations.py
Normal file
@@ -0,0 +1,25 @@
+import pytest
+
+from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
+from awx.conf.models import Setting
+
+from django.apps import apps
+
+
+@pytest.mark.django_db
+def test_fill_group_type_params_no_op():
+    fill_ldap_group_type_params(apps, 'dont-use-me')
+    assert Setting.objects.count() == 0
+
+
+@pytest.mark.django_db
+def test_keep_old_setting_with_default_value():
+    Setting.objects.create(key='AUTH_LDAP_GROUP_TYPE', value={'name_attr': 'cn', 'member_attr': 'member'})
+    fill_ldap_group_type_params(apps, 'dont-use-me')
+    assert Setting.objects.count() == 1
+    s = Setting.objects.first()
+    assert s.value == {'name_attr': 'cn', 'member_attr': 'member'}
+
+
+# NOTE: would be good to test the removal of attributes by migration
+# but this requires fighting with the validator and is not done here
@@ -180,7 +180,7 @@ class SettingLoggingTest(GenericAPIView):
             if not port:
                 return Response({'error': 'Port required for ' + protocol}, status=status.HTTP_400_BAD_REQUEST)
         else:
-            # if http/https by this point, domain is reacheable
+            # if http/https by this point, domain is reachable
             return Response(status=status.HTTP_202_ACCEPTED)

         if protocol == 'udp':
@@ -1972,7 +1972,7 @@ msgid ""
 "HTTP headers and meta keys to search to determine remote host name or IP. "
 "Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if "
 "behind a reverse proxy. See the \"Proxy Support\" section of the "
-"Adminstrator guide for more details."
+"Administrator guide for more details."
 msgstr ""

 #: awx/main/conf.py:85
@@ -2457,7 +2457,7 @@ msgid ""
 msgstr ""

 #: awx/main/conf.py:631
-msgid "Maximum disk persistance for external log aggregation (in GB)"
+msgid "Maximum disk persistence for external log aggregation (in GB)"
 msgstr ""

 #: awx/main/conf.py:633
@@ -2548,7 +2548,7 @@ msgid "Enable"
 msgstr ""

 #: awx/main/constants.py:27
-msgid "Doas"
+msgid "Does"
 msgstr ""

 #: awx/main/constants.py:28
@@ -4801,7 +4801,7 @@ msgstr ""

 #: awx/main/models/workflow.py:251
 msgid ""
-"An identifier coresponding to the workflow job template node that this node "
+"An identifier corresponding to the workflow job template node that this node "
 "was created from."
 msgstr ""

@@ -5521,7 +5521,7 @@ msgstr ""
 #: awx/sso/conf.py:606
 msgid ""
 "Extra arguments for Google OAuth2 login. You can restrict it to only allow a "
-"single domain to authenticate, even if the user is logged in with multple "
+"single domain to authenticate, even if the user is logged in with multiple "
 "Google accounts. Refer to the documentation for more detail."
 msgstr ""

@@ -5905,7 +5905,7 @@ msgstr ""

 #: awx/sso/conf.py:1290
 msgid ""
-"Create a keypair to use as a service provider (SP) and include the "
+"Create a key pair to use as a service provider (SP) and include the "
 "certificate content here."
 msgstr ""

@@ -5915,7 +5915,7 @@ msgstr ""

 #: awx/sso/conf.py:1302
 msgid ""
-"Create a keypair to use as a service provider (SP) and include the private "
+"Create a key pair to use as a service provider (SP) and include the private "
 "key content here."
 msgstr ""
@@ -1971,7 +1971,7 @@ msgid ""
 "HTTP headers and meta keys to search to determine remote host name or IP. "
 "Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if "
 "behind a reverse proxy. See the \"Proxy Support\" section of the "
-"Adminstrator guide for more details."
+"Administrator guide for more details."
 msgstr "Los encabezados HTTP y las llaves de activación para buscar y determinar el nombre de host remoto o IP. Añada elementos adicionales a esta lista, como \"HTTP_X_FORWARDED_FOR\", si está detrás de un proxy inverso. Consulte la sección \"Soporte de proxy\" de la guía del adminstrador para obtener más información."

 #: awx/main/conf.py:85
@@ -4804,7 +4804,7 @@ msgstr "Indica que un trabajo no se creará cuando es sea True. La semántica de

 #: awx/main/models/workflow.py:251
 msgid ""
-"An identifier coresponding to the workflow job template node that this node "
+"An identifier corresponding to the workflow job template node that this node "
 "was created from."
 msgstr "Un identificador que corresponde al nodo de plantilla de tarea del flujo de trabajo a partir del cual se creó este nodo."

@@ -5526,7 +5526,7 @@ msgstr "Argumentos adicionales para Google OAuth2"
 #: awx/sso/conf.py:606
 msgid ""
 "Extra arguments for Google OAuth2 login. You can restrict it to only allow a "
-"single domain to authenticate, even if the user is logged in with multple "
+"single domain to authenticate, even if the user is logged in with multiple "
 "Google accounts. Refer to the documentation for more detail."
 msgstr "Argumentos adicionales para el inicio de sesión en Google OAuth2. Puede limitarlo para permitir la autenticación de un solo dominio, incluso si el usuario ha iniciado sesión con varias cuentas de Google. Consulte la documentación para obtener información detallada."

@@ -5910,7 +5910,7 @@ msgstr "Certificado público del proveedor de servicio SAML"

 #: awx/sso/conf.py:1290
 msgid ""
-"Create a keypair to use as a service provider (SP) and include the "
+"Create a key pair to use as a service provider (SP) and include the "
 "certificate content here."
 msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido del certificado aquí."

@@ -5920,7 +5920,7 @@ msgstr "Clave privada del proveedor de servicio SAML"

 #: awx/sso/conf.py:1302
 msgid ""
-"Create a keypair to use as a service provider (SP) and include the private "
+"Create a key pair to use as a service provider (SP) and include the private "
 "key content here."
 msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido de la clave privada aquí."
@@ -588,17 +588,39 @@ class InstanceAccess(BaseAccess):


 class InstanceGroupAccess(BaseAccess):
     """
     I can see Instance Groups when I am:
     - a superuser(system administrator)
+    - at least read_role on the instance group
+    I can edit Instance Groups when I am:
+    - a superuser
+    - admin role on the Instance group
     I can add/delete Instance Groups:
     - a superuser(system administrator)
+    I can use Instance Groups when I have:
+    - use_role on the instance group
     """

     model = InstanceGroup
     prefetch_related = ('instances',)

     def filtered_queryset(self):
-        return InstanceGroup.objects.filter(organization__in=Organization.accessible_pk_qs(self.user, 'admin_role')).distinct()
+        return self.model.accessible_objects(self.user, 'read_role')

+    @check_superuser
+    def can_use(self, obj):
+        return self.user in obj.use_role
+
     def can_add(self, data):
         return self.user.is_superuser

+    @check_superuser
     def can_change(self, obj, data):
-        return self.user.is_superuser
+        return self.can_admin(obj)

+    @check_superuser
+    def can_admin(self, obj):
+        return self.user in obj.admin_role
+
     def can_delete(self, obj):
         if obj.name in [settings.DEFAULT_EXECUTION_QUEUE_NAME, settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME]:
@@ -845,7 +867,7 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
             return RoleAccess(self.user).can_attach(rel_role, sub_obj, 'members', *args, **kwargs)

         if relationship == "instance_groups":
-            if self.user.is_superuser:
+            if self.user in obj.admin_role and self.user in sub_obj.use_role:
                 return True
             return False
         return super(OrganizationAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
@@ -934,7 +956,7 @@ class InventoryAccess(BaseAccess):

     def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
         if relationship == "instance_groups":
-            if self.user.can_access(type(sub_obj), "read", sub_obj) and self.user in obj.organization.admin_role:
+            if self.user in sub_obj.use_role and self.user in obj.admin_role:
                 return True
             return False
         return super(InventoryAccess, self).can_attach(obj, sub_obj, relationship, *args, **kwargs)
@@ -1671,11 +1693,12 @@ class JobTemplateAccess(NotificationAttachMixin, UnifiedCredentialsMixin, BaseAc
         return self.user.is_superuser or self.user in obj.admin_role

     @check_superuser
+    # object here is the job template. sub_object here is what is being attached
     def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
         if relationship == "instance_groups":
             if not obj.organization:
                 return False
-            return self.user.can_access(type(sub_obj), "read", sub_obj) and self.user in obj.organization.admin_role
+            return self.user in sub_obj.use_role and self.user in obj.admin_role
         return super(JobTemplateAccess, self).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)

     @check_superuser
@@ -1852,8 +1875,6 @@ class JobLaunchConfigAccess(UnifiedCredentialsMixin, BaseAccess):
     def _related_filtered_queryset(self, cls):
         if cls is Label:
             return LabelAccess(self.user).filtered_queryset()
-        elif cls is InstanceGroup:
-            return InstanceGroupAccess(self.user).filtered_queryset()
         else:
             return cls._accessible_pk_qs(cls, self.user, 'use_role')

@@ -1865,6 +1886,7 @@ class JobLaunchConfigAccess(UnifiedCredentialsMixin, BaseAccess):

     @check_superuser
     def can_add(self, data, template=None):
+        # WARNING: duplicated with BulkJobLaunchSerializer, check when changing permission levels
         # This is a special case, we don't check related many-to-many elsewhere
         # launch RBAC checks use this
         if 'reference_obj' in data:
@@ -1997,7 +2019,16 @@ class WorkflowJobNodeAccess(BaseAccess):
     )

     def filtered_queryset(self):
-        return self.model.objects.filter(workflow_job__unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
+        return self.model.objects.filter(
+            Q(workflow_job__unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
+            | Q(workflow_job__organization__in=Organization.objects.filter(Q(admin_role__members=self.user)))
+        )
+
+    def can_read(self, obj):
+        """Overriding this opens up detail view access for bulk jobs, where the workflow job has no associated workflow job template."""
+        if obj.workflow_job.is_bulk_job and obj.workflow_job.created_by_id == self.user.id:
+            return True
+        return super().can_read(obj)

     @check_superuser
     def can_add(self, data):
@@ -2123,7 +2154,16 @@ class WorkflowJobAccess(BaseAccess):
     )

     def filtered_queryset(self):
-        return WorkflowJob.objects.filter(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
+        return WorkflowJob.objects.filter(
+            Q(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
+            | Q(organization__in=Organization.objects.filter(Q(admin_role__members=self.user)), is_bulk_job=True)
+        )
+
+    def can_read(self, obj):
+        """Overriding this opens up detail view access for bulk jobs, where the workflow job has no associated workflow job template."""
+        if obj.is_bulk_job and obj.created_by_id == self.user.id:
+            return True
+        return super().can_read(obj)

     def can_add(self, data):
         # Old add-start system for launching jobs is being depreciated, and
@@ -233,11 +233,13 @@ def projects_by_scm_type(since, **kwargs):
     return counts


-@register('instance_info', '1.2', description=_('Cluster topology and capacity'))
+@register('instance_info', '1.3', description=_('Cluster topology and capacity'))
 def instance_info(since, include_hostnames=False, **kwargs):
     info = {}
     # Use same method that the TaskManager does to compute consumed capacity without querying all running jobs for each Instance
-    tm_models = TaskManagerModels.init_with_consumed_capacity(instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'])
+    tm_models = TaskManagerModels.init_with_consumed_capacity(
+        instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled', 'node_type']
+    )
     for tm_instance in tm_models.instances.instances_by_hostname.values():
         instance = tm_instance.obj
         instance_info = {
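For context, a sketch of one collected per-instance record after this change; the field list comes from the diff, the values are purely illustrative:

```python
# Hypothetical example of one instance record emitted by the collector.
instance_record = {
    'uuid': '00000000-0000-0000-0000-000000000000',
    'version': '21.14.0',
    'capacity': 57,
    'cpu': 8,
    'memory': 16384,
    'managed_by_policy': True,
    'enabled': True,
    'node_type': 'hybrid',  # newly included via analytics schema version 1.3
}
```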
@@ -282,6 +282,16 @@ register(
     placeholder={'HTTP_PROXY': 'myproxy.local:8080'},
 )

+register(
+    'AWX_RUNNER_KEEPALIVE_SECONDS',
+    field_class=fields.IntegerField,
+    label=_('K8S Ansible Runner Keep-Alive Message Interval'),
+    help_text=_('Only applies to jobs running in a Container Group. If not 0, send a message every so-many seconds to keep connection open.'),
+    category=_('Jobs'),
+    category_slug='jobs',
+    placeholder=240,  # intended to be under common 5 minute idle timeout
+)
+
 register(
     'GALAXY_TASK_ENV',
     field_class=fields.KeyValueField,
@@ -765,6 +775,36 @@ register(
     help_text=_('Indicates whether the instance is part of a kubernetes-based deployment.'),
 )

+register(
+    'BULK_JOB_MAX_LAUNCH',
+    field_class=fields.IntegerField,
+    default=100,
+    label=_('Max jobs to allow bulk jobs to launch'),
+    help_text=_('Max jobs to allow bulk jobs to launch'),
+    category=_('Bulk Actions'),
+    category_slug='bulk',
+)
+
+register(
+    'BULK_HOST_MAX_CREATE',
+    field_class=fields.IntegerField,
+    default=100,
+    label=_('Max number of hosts to allow to be created in a single bulk action'),
+    help_text=_('Max number of hosts to allow to be created in a single bulk action'),
+    category=_('Bulk Actions'),
+    category_slug='bulk',
+)
+
+register(
+    'UI_NEXT',
+    field_class=fields.BooleanField,
+    default=False,
+    label=_('Enable Preview of New User Interface'),
+    help_text=_('Enable preview of new user interface.'),
+    category=_('System'),
+    category_slug='system',
+)
+

 def logging_validate(serializer, attrs):
     if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
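Because these are ordinary registered settings, the bulk limits should be adjustable at runtime through the settings API; a hedged sketch, with the /settings/bulk/ slug inferred from category_slug='bulk' and the host and token illustrative:

```python
import requests

AWX = "https://awx.example.com"          # illustrative host
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical token

# Raise both bulk limits from their default of 100.
r = requests.patch(
    f"{AWX}/api/v2/settings/bulk/",
    headers=HEADERS,
    json={"BULK_JOB_MAX_LAUNCH": 250, "BULK_HOST_MAX_CREATE": 500},
)
print(r.status_code, r.json())
```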
@@ -70,7 +70,7 @@ def aim_backend(**kwargs):
     client_cert = kwargs.get('client_cert', None)
     client_key = kwargs.get('client_key', None)
     verify = kwargs['verify']
-    webservice_id = kwargs['webservice_id']
+    webservice_id = kwargs.get('webservice_id', '')
     app_id = kwargs['app_id']
     object_query = kwargs['object_query']
     object_query_format = kwargs['object_query_format']
@@ -49,7 +49,10 @@ def tss_backend(**kwargs):
     secret_dict = secret_server.get_secret(kwargs['secret_id'])
     secret = ServerSecret(**secret_dict)

-    return secret.fields[kwargs['secret_field']].value
+    if isinstance(secret.fields[kwargs['secret_field']].value, str) == False:
+        return secret.fields[kwargs['secret_field']].value.text
+    else:
+        return secret.fields[kwargs['secret_field']].value


 tss_plugin = CredentialPlugin(
@@ -70,7 +70,7 @@ def reap_waiting(instance=None, status='failed', job_explanation=None, grace_per
         reap_job(j, status, job_explanation=job_explanation)


-def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=None):
+def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=None, ref_time=None):
     """
     Reap all jobs in running for this instance.
     """
@@ -79,9 +79,11 @@ def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=No
     else:
         hostname = instance.hostname
     workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
-    jobs = UnifiedJob.objects.filter(
-        Q(status='running') & (Q(execution_node=hostname) | Q(controller_node=hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
-    )
+    base_Q = Q(status='running') & (Q(execution_node=hostname) | Q(controller_node=hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
+    if ref_time:
+        jobs = UnifiedJob.objects.filter(base_Q & Q(started__lte=ref_time))
+    else:
+        jobs = UnifiedJob.objects.filter(base_Q)
     if excluded_uuids:
         jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
     for j in jobs:
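The new ref_time parameter lets callers reap only jobs that started at or before a reference timestamp, so jobs dispatched after a heartbeat snapshot are not reaped by mistake. A hedged call sketch; the module path is an assumption based on how the reaper is invoked later in this changeset:

```python
from datetime import datetime, timezone

from awx.main.dispatch import reaper  # assumed module path

# Only running jobs with started <= ref are candidates for reaping;
# anything launched after the snapshot is left untouched.
ref = datetime.now(timezone.utc)
reaper.reap(excluded_uuids=["celery-task-uuid"], ref_time=ref)
```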
143
awx/main/management/commands/disable_instance.py
Normal file
@@ -0,0 +1,143 @@
+import time
+from urllib.parse import urljoin
+
+from argparse import ArgumentTypeError
+
+from django.conf import settings
+from django.core.management.base import BaseCommand, CommandError
+from django.db.models import Q
+from django.utils.timezone import now
+
+from awx.main.models import Instance, UnifiedJob
+
+
+class AWXInstance:
+    def __init__(self, **filter):
+        self.filter = filter
+        self.get_instance()
+
+    def get_instance(self):
+        filter = self.filter if self.filter is not None else dict(hostname=settings.CLUSTER_HOST_ID)
+        qs = Instance.objects.filter(**filter)
+        if not qs.exists():
+            raise ValueError(f"No AWX instance found with {filter} parameters")
+        self.instance = qs.first()
+
+    def disable(self):
+        if self.instance.enabled:
+            self.instance.enabled = False
+            self.instance.save()
+            return True
+
+    def enable(self):
+        if not self.instance.enabled:
+            self.instance.enabled = True
+            self.instance.save()
+            return True
+
+    def jobs(self):
+        return UnifiedJob.objects.filter(
+            Q(controller_node=self.instance.hostname) | Q(execution_node=self.instance.hostname), status__in=("running", "waiting")
+        )
+
+    def jobs_pretty(self):
+        jobs = []
+        for j in self.jobs():
+            job_started = j.started if j.started else now()
+            # similar calculation of `elapsed` as the corresponding serializer
+            # does
+            td = now() - job_started
+            elapsed = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0)
+            elapsed = float(elapsed)
+            details = dict(
+                name=j.name,
+                url=j.get_ui_url(),
+                elapsed=elapsed,
+            )
+            jobs.append(details)
+
+        jobs = sorted(jobs, reverse=True, key=lambda j: j["elapsed"])
+
+        return ", ".join([f"[\"{j['name']}\"]({j['url']})" for j in jobs])
+
+    def instance_pretty(self):
+        instance = (
+            self.instance.hostname,
+            urljoin(settings.TOWER_URL_BASE, f"/#/instances/{self.instance.pk}/details"),
+        )
+        return f"[\"{instance[0]}\"]({instance[1]})"
+
+
+class Command(BaseCommand):
+    help = "Disable instance, optionally waiting for all its managed jobs to finish."
+
+    @staticmethod
+    def ge_1(arg):
+        if arg == "inf":
+            return float("inf")
+
+        int_arg = int(arg)
+        if int_arg < 1:
+            raise ArgumentTypeError(f"The value must be a positive number >= 1. Provided: \"{arg}\"")
+        return int_arg
+
+    def add_arguments(self, parser):
+        filter_group = parser.add_mutually_exclusive_group()
+
+        filter_group.add_argument(
+            "--hostname",
+            type=str,
+            default=settings.CLUSTER_HOST_ID,
+            help=f"{Instance.hostname.field.help_text} Defaults to the hostname of the machine where the Python interpreter is currently executing".strip(),
+        )
+        filter_group.add_argument("--id", type=self.ge_1, help=Instance.id.field.help_text)
+
+        parser.add_argument(
+            "--wait",
+            action="store_true",
+            help="Wait for jobs managed by the instance to finish. With default retry arguments waits ~1h",
+        )
+
+        parser.add_argument(
+            "--retry",
+            type=self.ge_1,
+            default=120,
+            help="Number of retries when waiting for jobs to finish. Default: 120. Also accepts \"inf\" to wait indefinitely",
+        )
+
+        parser.add_argument(
+            "--retry_sleep",
+            type=self.ge_1,
+            default=30,
+            help="Number of seconds to sleep before consequtive retries when waiting. Default: 30",
+        )
+
+    def handle(self, *args, **options):
+        try:
+            filter = dict(id=options["id"]) if options["id"] is not None else dict(hostname=options["hostname"])
+            instance = AWXInstance(**filter)
+        except ValueError as e:
+            raise CommandError(e)
+
+        if instance.disable():
+            self.stdout.write(self.style.SUCCESS(f"Instance {instance.instance_pretty()} has been disabled"))
+        else:
+            self.stdout.write(f"Instance {instance.instance_pretty()} has already been disabled")
+
+        if not options["wait"]:
+            return
+
+        rc = 1
+        while instance.jobs().count() > 0:
+            if rc < options["retry"]:
+                self.stdout.write(
+                    f"{rc}/{options['retry']}: Waiting {options['retry_sleep']}s before the next attempt to see if the following instance' managed jobs have finished: {instance.jobs_pretty()}"
+                )
+                rc += 1
+                time.sleep(options["retry_sleep"])
+            else:
+                raise CommandError(
+                    f"{rc}/{options['retry']}: No more retry attempts left, but the instance still has associated managed jobs: {instance.jobs_pretty()}"
+                )
+        else:
+            self.stdout.write(self.style.SUCCESS("Done waiting for instance' managed jobs to finish!"))
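A hedged sketch of driving the command from Python; call_command mirrors the CLI flags defined above, and the hostname is illustrative:

```python
from django.core.management import call_command

# Disable the instance and wait up to retry * retry_sleep seconds
# for its running/waiting jobs to drain.
call_command("disable_instance", "--hostname", "awx-node-1", "--wait", "--retry", "10", "--retry_sleep", "15")
```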
@@ -851,6 +851,7 @@ class Command(BaseCommand):
         logger.info('Updating inventory %d: %s' % (inventory.pk, inventory.name))

         # Create ad-hoc inventory source and inventory update objects
+        ee = get_default_execution_environment()
         with ignore_inventory_computed_fields():
             source = Command.get_source_absolute_path(raw_source)

@@ -860,14 +861,22 @@ class Command(BaseCommand):
                 source_path=os.path.abspath(source),
                 overwrite=bool(options.get('overwrite', False)),
                 overwrite_vars=bool(options.get('overwrite_vars', False)),
+                execution_environment=ee,
             )
             inventory_update = inventory_source.create_inventory_update(
-                _eager_fields=dict(status='running', job_args=json.dumps(sys.argv), job_env=dict(os.environ.items()), job_cwd=os.getcwd())
+                _eager_fields=dict(
+                    status='running', job_args=json.dumps(sys.argv), job_env=dict(os.environ.items()), job_cwd=os.getcwd(), execution_environment=ee
+                )
             )

-        data = AnsibleInventoryLoader(source=source, verbosity=verbosity).load()
+        try:
+            data = AnsibleInventoryLoader(source=source, verbosity=verbosity).load()
+            logger.debug('Finished loading from source: %s', source)

-        logger.debug('Finished loading from source: %s', source)
+        except SystemExit:
+            logger.debug("Error occurred while running ansible-inventory")
+            inventory_update.cancel()
+            sys.exit(1)

         status, tb, exc = 'error', '', None
         try:
17
awx/main/migrations/0175_workflowjob_is_bulk_job.py
Normal file
@@ -0,0 +1,17 @@
+# Generated by Django 3.2.16 on 2023-01-05 15:39
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('main', '0174_ensure_org_ee_admin_roles'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='workflowjob',
+            name='is_bulk_job',
+            field=models.BooleanField(default=False),
+        ),
+    ]
32
awx/main/migrations/0176_inventorysource_scm_branch.py
Normal file
@@ -0,0 +1,32 @@
+# Generated by Django 3.2.16 on 2023-03-03 20:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('main', '0175_workflowjob_is_bulk_job'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='inventorysource',
+            name='scm_branch',
+            field=models.CharField(
+                blank=True,
+                default='',
+                help_text='Inventory source SCM branch. Project default used if blank. Only allowed if project allow_override field is set to true.',
+                max_length=1024,
+            ),
+        ),
+        migrations.AddField(
+            model_name='inventoryupdate',
+            name='scm_branch',
+            field=models.CharField(
+                blank=True,
+                default='',
+                help_text='Inventory source SCM branch. Project default used if blank. Only allowed if project allow_override field is set to true.',
+                max_length=1024,
+            ),
+        ),
+    ]
48
awx/main/migrations/0177_instance_group_role_addition.py
Normal file
@@ -0,0 +1,48 @@
+# Generated by Django 3.2.16 on 2023-02-17 02:45
+
+import awx.main.fields
+from django.db import migrations
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('main', '0176_inventorysource_scm_branch'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='instancegroup',
+            name='admin_role',
+            field=awx.main.fields.ImplicitRoleField(
+                editable=False,
+                null='True',
+                on_delete=django.db.models.deletion.CASCADE,
+                parent_role=['singleton:system_administrator'],
+                related_name='+',
+                to='main.role',
+            ),
+            preserve_default='True',
+        ),
+        migrations.AddField(
+            model_name='instancegroup',
+            name='read_role',
+            field=awx.main.fields.ImplicitRoleField(
+                editable=False,
+                null='True',
+                on_delete=django.db.models.deletion.CASCADE,
+                parent_role=['singleton:system_auditor', 'use_role', 'admin_role'],
+                related_name='+',
+                to='main.role',
+            ),
+            preserve_default='True',
+        ),
+        migrations.AddField(
+            model_name='instancegroup',
+            name='use_role',
+            field=awx.main.fields.ImplicitRoleField(
+                editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role=['admin_role'], related_name='+', to='main.role'
+            ),
+            preserve_default='True',
+        ),
+    ]
18
awx/main/migrations/0178_instance_group_admin_migration.py
Normal file
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.16 on 2023-02-17 02:45
+
+from django.db import migrations
+from awx.main.migrations import _rbac as rbac
+from awx.main.migrations import _migration_utils as migration_utils
+from awx.main.migrations import _OrgAdmin_to_use_ig as oamigrate
+from awx.main.migrations import ActivityStreamDisabledMigration
+
+
+class Migration(ActivityStreamDisabledMigration):
+    dependencies = [
+        ('main', '0177_instance_group_role_addition'),
+    ]
+    operations = [
+        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
+        migrations.RunPython(rbac.create_roles),
+        migrations.RunPython(oamigrate.migrate_org_admin_to_use),
+    ]
18
awx/main/migrations/0179_change_cyberark_plugin_names.py
Normal file
@@ -0,0 +1,18 @@
+# Generated by Django 3.2.16 on 2023-03-16 15:16
+from django.db import migrations
+
+from awx.main.migrations._credentialtypes import migrate_credential_type
+from awx.main.models import CredentialType
+
+
+class Migration(migrations.Migration):
+    def update_cyberark_plugin_names(apps, schema_editor):
+        CredentialType.setup_tower_managed_defaults(apps)
+        migrate_credential_type(apps, 'aim')
+        migrate_credential_type(apps, 'conjur')
+
+    dependencies = [
+        ('main', '0178_instance_group_admin_migration'),
+    ]
+
+    operations = [migrations.RunPython(update_cyberark_plugin_names)]
20
awx/main/migrations/_OrgAdmin_to_use_ig.py
Normal file
@@ -0,0 +1,20 @@
+import logging
+
+from awx.main.models import Organization
+
+logger = logging.getLogger('awx.main.migrations')
+
+
+def migrate_org_admin_to_use(apps, schema_editor):
+    logger.info('Initiated migration from Org admin to use role')
+    roles_added = 0
+    for org in Organization.objects.prefetch_related('admin_role__members').iterator():
+        igs = list(org.instance_groups.all())
+        if not igs:
+            continue
+        for admin in org.admin_role.members.filter(is_superuser=False):
+            for ig in igs:
+                ig.use_role.members.add(admin)
+                roles_added += 1
+    if roles_added:
+        logger.info(f'Migration converted {roles_added} from organization admin to use role')
@@ -1,6 +1,9 @@
+import logging
+
 from awx.main.models import CredentialType
 from django.db.models import Q

+logger = logging.getLogger('awx.main.migrations')

 DEPRECATED_CRED_KIND = {
     'rax': {
@@ -76,3 +79,14 @@ def add_tower_verify_field(apps, schema_editor):
 def remove_become_methods(apps, schema_editor):
     # this is no longer necessary; schemas are defined in code
     pass
+
+
+def migrate_credential_type(apps, namespace):
+    ns_types = apps.get_model('main', 'CredentialType').objects.filter(namespace=namespace).order_by('created')
+    if ns_types.count() == 2:
+        original, renamed = ns_types.all()
+        logger.info(f'There are credential types to migrate in the "{namespace}" namespace: {original.name}')
+        apps.get_model('main', 'Credential').objects.filter(credential_type_id=original.id).update(credential_type_id=renamed.id)
+
+        logger.info(f'Removing old credential type: {renamed.name}')
+        original.delete()
@@ -29,6 +29,7 @@ def create_roles(apps, schema_editor):
         'Project',
         'Credential',
         'JobTemplate',
+        'InstanceGroup',
     ]
 ]
@@ -17,15 +17,20 @@ from django.db.models import Sum
 import redis
 from solo.models import SingletonModel

 # AWX
 from awx import __version__ as awx_application_version
 from awx.api.versioning import reverse
-from awx.main.fields import JSONBlob
+from awx.main.fields import JSONBlob, ImplicitRoleField
 from awx.main.managers import InstanceManager, UUID_DEFAULT
 from awx.main.constants import JOB_FOLDER_PREFIX
 from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search
+from awx.main.models.rbac import (
+    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
+    ROLE_SINGLETON_SYSTEM_AUDITOR,
+)
 from awx.main.models.unified_jobs import UnifiedJob
 from awx.main.utils.common import get_corrected_cpu, get_cpu_effective_capacity, get_corrected_memory, get_mem_effective_capacity
-from awx.main.models.mixins import RelatedJobsMixin
+from awx.main.models.mixins import RelatedJobsMixin, ResourceMixin

 # ansible-runner
 from ansible_runner.utils.capacity import get_cpu_count, get_mem_in_bytes
@@ -352,7 +357,7 @@ class Instance(HasPolicyEditsMixin, BaseModel):
         self.save_health_data(awx_application_version, get_cpu_count(), get_mem_in_bytes(), update_last_seen=True, errors=errors)


-class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
+class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin, ResourceMixin):
     """A model representing a Queue/Group of AWX Instances."""

     name = models.CharField(max_length=250, unique=True)
@@ -379,6 +384,24 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin):
             default='',
         )
     )
+    admin_role = ImplicitRoleField(
+        parent_role=[
+            'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
+        ]
+    )
+    use_role = ImplicitRoleField(
+        parent_role=[
+            'admin_role',
+        ]
+    )
+    read_role = ImplicitRoleField(
+        parent_role=[
+            'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
+            'use_role',
+            'admin_role',
+        ]
+    )
+
     max_concurrent_jobs = models.IntegerField(default=0, help_text=_("Maximum number of concurrent jobs to run on this group. Zero means no limit."))
     max_forks = models.IntegerField(default=0, help_text=_("Max forks to execute on this group. Zero means no limit."))
     policy_instance_percentage = models.IntegerField(default=0, help_text=_("Percentage of Instances to automatically assign to this group"))
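With these fields in place, instance group permissions follow the same role-membership pattern used elsewhere in AWX; a hedged sketch (the group name and user are illustrative, and the containment check relies on use_role being a parent of read_role as declared above):

```python
from awx.main.models import InstanceGroup, User

ig = InstanceGroup.objects.get(name='default')  # illustrative lookup
alice = User.objects.get(username='alice')       # hypothetical user

ig.use_role.members.add(alice)   # alice may now assign this group to resources she administers
assert alice in ig.read_role     # use_role membership implies read access via role ancestry
```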
@@ -872,6 +872,12 @@ class InventorySourceOptions(BaseModel):
         default='',
         help_text=_('Inventory source variables in YAML or JSON format.'),
     )
+    scm_branch = models.CharField(
+        max_length=1024,
+        default='',
+        blank=True,
+        help_text=_('Inventory source SCM branch. Project default used if blank. Only allowed if project allow_override field is set to true.'),
+    )
     enabled_var = models.TextField(
         blank=True,
         default='',
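A hedged sketch of creating an SCM inventory source that pins a branch via the new field (host, token, and ids are illustrative; the branch is only honored when the source project sets allow_override=True):

```python
import requests

AWX = "https://awx.example.com"          # illustrative host
HEADERS = {"Authorization": "Bearer <token>"}  # hypothetical token

payload = {
    "name": "nightly inventory",
    "source": "scm",
    "source_project": 7,                  # hypothetical project id
    "source_path": "inventories/prod.ini",
    "scm_branch": "release-1.2",          # new field from this changeset
}
r = requests.post(f"{AWX}/api/v2/inventories/5/inventory_sources/", headers=HEADERS, json=payload)
print(r.status_code)
```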
@@ -831,6 +831,9 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
             for name in JOB_VARIABLE_PREFIXES:
                 r['{}_job_template_id'.format(name)] = self.job_template.pk
                 r['{}_job_template_name'.format(name)] = self.job_template.name
+        if self.execution_node:
+            for name in JOB_VARIABLE_PREFIXES:
+                r['{}_execution_node'.format(name)] = self.execution_node
         return r

     '''
@@ -14,7 +14,7 @@ from oauth2_provider.models import AbstractApplication, AbstractAccessToken
 from oauth2_provider.generators import generate_client_secret
 from oauthlib import oauth2

-from awx.main.utils import get_external_account
+from awx.sso.common import get_external_account
 from awx.main.fields import OAuth2ClientSecretField
@@ -650,6 +650,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
         help_text=_("If automatically created for a sliced job run, the job template " "the workflow job was created from."),
     )
     is_sliced_job = models.BooleanField(default=False)
+    is_bulk_job = models.BooleanField(default=False)

     def _set_default_dependencies_processed(self):
         self.dependencies_processed = True
@@ -85,6 +85,8 @@ class RunnerCallback:
         # which generate job events from two 'streams':
         # ansible-inventory and the awx.main.commands.inventory_import
         # logger
+        if event_data.get('event') == 'keepalive':
+            return

         if event_data.get(self.event_data_key, None):
             if self.event_data_key != 'job_id':
@@ -116,7 +118,7 @@ class RunnerCallback:
         # so it *should* have a negligible performance impact
         task = event_data.get('event_data', {}).get('task_action')
         try:
-            if task in ('git', 'svn'):
+            if task in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
                 event_data_json = json.dumps(event_data)
                 event_data_json = UriCleaner.remove_sensitive(event_data_json)
                 event_data = json.loads(event_data_json)
@@ -219,7 +221,7 @@ class RunnerCallbackForProjectUpdate(RunnerCallback):
     def event_handler(self, event_data):
         super_return_value = super(RunnerCallbackForProjectUpdate, self).event_handler(event_data)
         returned_data = event_data.get('event_data', {})
-        if returned_data.get('task_action', '') == 'set_fact':
+        if returned_data.get('task_action', '') in ('set_fact', 'ansible.builtin.set_fact'):
             returned_facts = returned_data.get('res', {}).get('ansible_facts', {})
             if 'scm_version' in returned_facts:
                 self.playbook_new_revision = returned_facts['scm_version']
@@ -311,7 +311,7 @@ class BaseTask(object):
         env['AWX_PRIVATE_DATA_DIR'] = private_data_dir

         if self.instance.execution_environment is None:
-            raise RuntimeError('The project could not sync because there is no Execution Environment.')
+            raise RuntimeError(f'The {self.model.__name__} could not run because there is no Execution Environment.')

         return env
@@ -759,7 +759,7 @@ class SourceControlMixin(BaseTask):

     def sync_and_copy(self, project, private_data_dir, scm_branch=None):
         self.acquire_lock(project, self.instance.id)
-
+        is_commit = False
         try:
             original_branch = None
             failed_reason = project.get_reason_if_failed()
@@ -771,6 +771,7 @@ class SourceControlMixin(BaseTask):
             if os.path.exists(project_path):
                 git_repo = git.Repo(project_path)
                 if git_repo.head.is_detached:
+                    is_commit = True
                     original_branch = git_repo.head.commit
                 else:
                     original_branch = git_repo.active_branch
@@ -782,7 +783,11 @@ class SourceControlMixin(BaseTask):
                 # for git project syncs, non-default branches can be problems
                 # restore to branch the repo was on before this run
                 try:
-                    original_branch.checkout()
+                    if is_commit:
+                        git_repo.head.set_commit(original_branch)
+                        git_repo.head.reset(index=True, working_tree=True)
+                    else:
+                        original_branch.checkout()
                 except Exception:
                     # this could have failed due to dirty tree, but difficult to predict all cases
                     logger.exception(f'Failed to restore project repo to prior state after {self.instance.id}')
@@ -1581,7 +1586,7 @@ class RunInventoryUpdate(SourceControlMixin, BaseTask):
         if inventory_update.source == 'scm':
             if not source_project:
                 raise RuntimeError('Could not find project to run SCM inventory update from.')
-            self.sync_and_copy(source_project, private_data_dir)
+            self.sync_and_copy(source_project, private_data_dir, scm_branch=inventory_update.inventory_source.scm_branch)
         else:
             # If source is not SCM make an empty project directory, content is built inside inventory folder
             super(RunInventoryUpdate, self).build_project_dir(inventory_update, private_data_dir)
@@ -526,6 +526,10 @@ class AWXReceptorJob:
             pod_spec['spec']['containers'][0]['image'] = ee.image
             pod_spec['spec']['containers'][0]['args'] = ['ansible-runner', 'worker', '--private-data-dir=/runner']

+            if settings.AWX_RUNNER_KEEPALIVE_SECONDS:
+                pod_spec['spec']['containers'][0].setdefault('env', [])
+                pod_spec['spec']['containers'][0]['env'].append({'name': 'ANSIBLE_RUNNER_KEEPALIVE_SECONDS', 'value': str(settings.AWX_RUNNER_KEEPALIVE_SECONDS)})
+
             # Enforce EE Pull Policy
             pull_options = {"always": "Always", "missing": "IfNotPresent", "never": "Never"}
             if self.task and self.task.instance.execution_environment:
@@ -581,7 +581,7 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
         active_task_ids = []
         for task_list in worker_tasks.values():
             active_task_ids.extend(task_list)
-        reaper.reap(instance=this_inst, excluded_uuids=active_task_ids)
+        reaper.reap(instance=this_inst, excluded_uuids=active_task_ids, ref_time=datetime.fromisoformat(dispatch_time))
         if max(len(task_list) for task_list in worker_tasks.values()) <= 1:
            reaper.reap_waiting(instance=this_inst, excluded_uuids=active_task_ids, ref_time=datetime.fromisoformat(dispatch_time))
@@ -14,7 +14,7 @@ from awx.main.constants import JOB_VARIABLE_PREFIXES


 @pytest.mark.django_db
-def test_subclass_types(rando):
+def test_subclass_types():
     assert set(UnifiedJobTemplate._submodels_with_roles()) == set(
         [
             ContentType.objects.get_for_model(JobTemplate).id,
311
awx/main/tests/functional/test_bulk.py
Normal file
@@ -0,0 +1,311 @@
+import pytest
+
+from uuid import uuid4
+
+from awx.api.versioning import reverse
+
+from awx.main.models.jobs import JobTemplate
+from awx.main.models import Organization, Inventory, WorkflowJob, ExecutionEnvironment, Host
+from awx.main.scheduler import TaskManager
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize('num_hosts, num_queries', [(1, 15), (10, 15)])
+def test_bulk_host_create_num_queries(organization, inventory, post, get, user, num_hosts, num_queries, django_assert_max_num_queries):
+    '''
+    If I am a...
+        org admin
+        inventory admin at org level
+        admin of a particular inventory
+        superuser
+
+    Bulk Host create should take under a certain number of queries
+    '''
+    inventory.organization = organization
+    inventory_admin = user('inventory_admin', False)
+    org_admin = user('org_admin', False)
+    org_inv_admin = user('org_admin', False)
+    superuser = user('admin', True)
+    for u in [org_admin, org_inv_admin, inventory_admin]:
+        organization.member_role.members.add(u)
+    organization.admin_role.members.add(org_admin)
+    organization.inventory_admin_role.members.add(org_inv_admin)
+    inventory.admin_role.members.add(inventory_admin)
+
+    for u in [org_admin, inventory_admin, org_inv_admin, superuser]:
+        hosts = [{'name': uuid4()} for i in range(num_hosts)]
+        with django_assert_max_num_queries(num_queries):
+            bulk_host_create_response = post(reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': hosts}, u, expect=201).data
+            assert len(bulk_host_create_response['hosts']) == len(hosts), f"unexpected number of hosts created for user {u}"
+
+
+@pytest.mark.django_db
+def test_bulk_host_create_rbac(organization, inventory, post, get, user):
+    '''
+    If I am a...
+        org admin
+        inventory admin at org level
+        admin of a particular invenotry
+    ... I can bulk add hosts
+
+    Everyone else cannot
+    '''
+    inventory.organization = organization
+    inventory_admin = user('inventory_admin', False)
+    org_admin = user('org_admin', False)
+    org_inv_admin = user('org_admin', False)
+    auditor = user('auditor', False)
+    member = user('member', False)
+    use_inv_member = user('member', False)
+    for u in [org_admin, org_inv_admin, auditor, member, inventory_admin, use_inv_member]:
+        organization.member_role.members.add(u)
+    organization.admin_role.members.add(org_admin)
+    organization.inventory_admin_role.members.add(org_inv_admin)
+    inventory.admin_role.members.add(inventory_admin)
+    inventory.use_role.members.add(use_inv_member)
+    organization.auditor_role.members.add(auditor)
+
+    for indx, u in enumerate([org_admin, inventory_admin, org_inv_admin]):
+        bulk_host_create_response = post(
+            reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': [{'name': f'foobar-{indx}'}]}, u, expect=201
+        ).data
+        assert len(bulk_host_create_response['hosts']) == 1, f"unexpected number of hosts created for user {u}"
+        assert Host.objects.filter(inventory__id=inventory.id)[0].name == 'foobar-0'
+
+    for indx, u in enumerate([member, auditor, use_inv_member]):
+        bulk_host_create_response = post(
+            reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': [{'name': f'foobar2-{indx}'}]}, u, expect=400
+        ).data
+        assert bulk_host_create_response['__all__'][0] == f'Inventory with id {inventory.id} not found or lack permissions to add hosts.'
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize('num_jobs, num_queries', [(1, 25), (10, 25)])
+def test_bulk_job_launch_queries(job_template, organization, inventory, project, post, get, user, num_jobs, num_queries, django_assert_max_num_queries):
+    '''
+    if I have access to the unified job template
+    ... I can launch the bulk job
+    ... and the number of queries should NOT scale with the number of jobs
+    '''
+    normal_user = user('normal_user', False)
+    org_admin = user('org_admin', False)
+    jt = JobTemplate.objects.create(name='my-jt', ask_inventory_on_launch=True, project=project, playbook='helloworld.yml')
+    organization.member_role.members.add(normal_user)
+    organization.admin_role.members.add(org_admin)
+    jt.execute_role.members.add(normal_user)
+    inventory.use_role.members.add(normal_user)
+    jt.save()
+    inventory.save()
+    jobs = [{'unified_job_template': jt.id, 'inventory': inventory.id} for _ in range(num_jobs)]
+
+    # This is not working, we need to figure that out if we want to include tests for more jobs
+    # with mock.patch('awx.api.serializers.settings.BULK_JOB_MAX_LAUNCH', num_jobs + 1):
+    with django_assert_max_num_queries(num_queries):
+        bulk_job_launch_response = post(reverse('api:bulk_job_launch'), {'name': 'Bulk Job Launch', 'jobs': jobs}, normal_user, expect=201).data
+
+    # Run task manager so the workflow job nodes actually spawn
+    TaskManager().schedule()
+
+    for u in (org_admin, normal_user):
+        bulk_job = get(bulk_job_launch_response['url'], u, expect=200).data
+        assert organization.id == bulk_job['summary_fields']['organization']['id']
+        resp = get(bulk_job_launch_response['related']['workflow_nodes'], u)
+        assert resp.data['count'] == num_jobs
+        for item in resp.data['results']:
+            assert item["unified_job_template"] == jt.id
+            assert item["inventory"] == inventory.id
+
+
+@pytest.mark.django_db
+def test_bulk_job_launch_no_access_to_job_template(job_template, organization, inventory, project, credential, post, get, user):
+    '''
+    if I don't have access to the unified job templare
+    ... I can't launch the bulk job
+    '''
+    normal_user = user('normal_user', False)
+    jt = JobTemplate.objects.create(name='my-jt', inventory=inventory, project=project, playbook='helloworld.yml')
+    jt.save()
+    organization.member_role.members.add(normal_user)
+    bulk_job_launch_response = post(
+        reverse('api:bulk_job_launch'), {'name': 'Bulk Job Launch', 'jobs': [{'unified_job_template': jt.id}]}, normal_user, expect=400
+    ).data
+    assert bulk_job_launch_response['__all__'][0] == f'Job Templates {{{jt.id}}} not found or you don\'t have permissions to access it'
+
+
+@pytest.mark.django_db
+def test_bulk_job_launch_no_org_assigned(job_template, organization, inventory, project, credential, post, get, user):
+    '''
+    if I am not part of any organization...
+    ... I can't launch the bulk job
+    '''
+    normal_user = user('normal_user', False)
+    jt = JobTemplate.objects.create(name='my-jt', inventory=inventory, project=project, playbook='helloworld.yml')
+    jt.save()
+    jt.execute_role.members.add(normal_user)
+    bulk_job_launch_response = post(
+        reverse('api:bulk_job_launch'), {'name': 'Bulk Job Launch', 'jobs': [{'unified_job_template': jt.id}]}, normal_user, expect=400
+    ).data
+    assert bulk_job_launch_response['__all__'][0] == 'User not part of any organization, please assign an organization to assign to the bulk job'
+
+
+@pytest.mark.django_db
+def test_bulk_job_launch_multiple_org_assigned(job_template, organization, inventory, project, credential, post, get, user):
+    '''
+    if I am part of multiple organization...
+    and if I do not provide org at the launch time
+    ... I can't launch the bulk job
+    '''
+    normal_user = user('normal_user', False)
+    org1 = Organization.objects.create(name='foo1')
+    org2 = Organization.objects.create(name='foo2')
+    org1.member_role.members.add(normal_user)
+    org2.member_role.members.add(normal_user)
+    jt = JobTemplate.objects.create(name='my-jt', inventory=inventory, project=project, playbook='helloworld.yml')
+    jt.save()
+    jt.execute_role.members.add(normal_user)
+    bulk_job_launch_response = post(
+        reverse('api:bulk_job_launch'), {'name': 'Bulk Job Launch', 'jobs': [{'unified_job_template': jt.id}]}, normal_user, expect=400
+    ).data
+    assert bulk_job_launch_response['__all__'][0] == 'User has permission to multiple Organizations, please set one of them in the request'
+
+
+@pytest.mark.django_db
+def test_bulk_job_launch_specific_org(job_template, organization, inventory, project, credential, post, get, user):
+    '''
+    if I am part of multiple organization...
+    and if I provide org at the launch time
+    ... I can launch the bulk job
+    '''
+    normal_user = user('normal_user', False)
+    org1 = Organization.objects.create(name='foo1')
+    org2 = Organization.objects.create(name='foo2')
+    org1.member_role.members.add(normal_user)
+    org2.member_role.members.add(normal_user)
|
||||
jt = JobTemplate.objects.create(name='my-jt', inventory=inventory, project=project, playbook='helloworld.yml')
|
||||
jt.save()
|
||||
jt.execute_role.members.add(normal_user)
|
||||
bulk_job_launch_response = post(
|
||||
reverse('api:bulk_job_launch'), {'name': 'Bulk Job Launch', 'jobs': [{'unified_job_template': jt.id}], 'organization': org1.id}, normal_user, expect=201
|
||||
).data
|
||||
bulk_job_id = bulk_job_launch_response['id']
|
||||
bulk_job_obj = WorkflowJob.objects.filter(id=bulk_job_id, is_bulk_job=True).first()
|
||||
assert org1.id == bulk_job_obj.organization.id
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bulk_job_launch_inventory_no_access(job_template, organization, inventory, project, credential, post, get, user):
|
||||
'''
|
||||
if I don't have access to the inventory...
|
||||
and if I try to use it at the launch time
|
||||
... I can't launch the bulk job
|
||||
'''
|
||||
normal_user = user('normal_user', False)
|
||||
org1 = Organization.objects.create(name='foo1')
|
||||
org2 = Organization.objects.create(name='foo2')
|
||||
jt = JobTemplate.objects.create(name='my-jt', inventory=inventory, project=project, playbook='helloworld.yml')
|
||||
jt.save()
|
||||
org1.member_role.members.add(normal_user)
|
||||
inv = Inventory.objects.create(name='inv1', organization=org2)
|
||||
jt.execute_role.members.add(normal_user)
|
||||
bulk_job_launch_response = post(
|
||||
reverse('api:bulk_job_launch'), {'name': 'Bulk Job Launch', 'jobs': [{'unified_job_template': jt.id, 'inventory': inv.id}]}, normal_user, expect=400
|
||||
).data
|
||||
assert bulk_job_launch_response['__all__'][0] == f'Inventories {{{inv.id}}} not found or you don\'t have permissions to access it'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bulk_job_inventory_prompt(job_template, organization, inventory, project, credential, post, get, user):
|
||||
'''
|
||||
Job template has an inventory set as prompt_on_launch
|
||||
and if I provide the inventory as a parameter in bulk job
|
||||
... job uses that inventory
|
||||
'''
|
||||
normal_user = user('normal_user', False)
|
||||
org1 = Organization.objects.create(name='foo1')
|
||||
jt = JobTemplate.objects.create(name='my-jt', ask_inventory_on_launch=True, project=project, playbook='helloworld.yml')
|
||||
jt.save()
|
||||
org1.member_role.members.add(normal_user)
|
||||
inv = Inventory.objects.create(name='inv1', organization=org1)
|
||||
jt.execute_role.members.add(normal_user)
|
||||
inv.use_role.members.add(normal_user)
|
||||
bulk_job_launch_response = post(
|
||||
reverse('api:bulk_job_launch'), {'name': 'Bulk Job Launch', 'jobs': [{'unified_job_template': jt.id, 'inventory': inv.id}]}, normal_user, expect=201
|
||||
).data
|
||||
bulk_job_id = bulk_job_launch_response['id']
|
||||
node = WorkflowJob.objects.get(id=bulk_job_id).workflow_job_nodes.all().order_by('created')
|
||||
assert inv.id == node[0].inventory.id
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bulk_job_set_all_prompt(job_template, organization, inventory, project, credentialtype_ssh, post, get, user):
|
||||
'''
|
||||
Job template has many fields set as prompt_on_launch
|
||||
and if I provide all those fields as a parameter in bulk job
|
||||
... job uses them
|
||||
'''
|
||||
normal_user = user('normal_user', False)
|
||||
jt = JobTemplate.objects.create(
|
||||
name='my-jt',
|
||||
ask_inventory_on_launch=True,
|
||||
ask_diff_mode_on_launch=True,
|
||||
ask_job_type_on_launch=True,
|
||||
ask_verbosity_on_launch=True,
|
||||
ask_execution_environment_on_launch=True,
|
||||
ask_forks_on_launch=True,
|
||||
ask_job_slice_count_on_launch=True,
|
||||
ask_timeout_on_launch=True,
|
||||
ask_variables_on_launch=True,
|
||||
ask_scm_branch_on_launch=True,
|
||||
ask_limit_on_launch=True,
|
||||
ask_skip_tags_on_launch=True,
|
||||
ask_tags_on_launch=True,
|
||||
project=project,
|
||||
playbook='helloworld.yml',
|
||||
)
|
||||
jt.save()
|
||||
organization.member_role.members.add(normal_user)
|
||||
inv = Inventory.objects.create(name='inv1', organization=organization)
|
||||
ee = ExecutionEnvironment.objects.create(name='test-ee', image='quay.io/foo/bar')
|
||||
jt.execute_role.members.add(normal_user)
|
||||
inv.use_role.members.add(normal_user)
|
||||
bulk_job_launch_response = post(
|
||||
reverse('api:bulk_job_launch'),
|
||||
{
|
||||
'name': 'Bulk Job Launch',
|
||||
'jobs': [
|
||||
{
|
||||
'unified_job_template': jt.id,
|
||||
'inventory': inv.id,
|
||||
'diff_mode': True,
|
||||
'job_type': 'check',
|
||||
'verbosity': 3,
|
||||
'execution_environment': ee.id,
|
||||
'forks': 1,
|
||||
'job_slice_count': 1,
|
||||
'timeout': 200,
|
||||
'extra_data': {'prompted_key': 'prompted_val'},
|
||||
'scm_branch': 'non_dev',
|
||||
'limit': 'kansas',
|
||||
'skip_tags': 'foobar',
|
||||
'job_tags': 'untagged',
|
||||
}
|
||||
],
|
||||
},
|
||||
normal_user,
|
||||
expect=201,
|
||||
).data
|
||||
bulk_job_id = bulk_job_launch_response['id']
|
||||
node = WorkflowJob.objects.get(id=bulk_job_id).workflow_job_nodes.all().order_by('created')
|
||||
assert node[0].inventory.id == inv.id
|
||||
assert node[0].diff_mode == True
|
||||
assert node[0].job_type == 'check'
|
||||
assert node[0].verbosity == 3
|
||||
assert node[0].execution_environment.id == ee.id
|
||||
assert node[0].forks == 1
|
||||
assert node[0].job_slice_count == 1
|
||||
assert node[0].timeout == 200
|
||||
assert node[0].extra_data == {'prompted_key': 'prompted_val'}
|
||||
assert node[0].scm_branch == 'non_dev'
|
||||
assert node[0].limit == 'kansas'
|
||||
assert node[0].skip_tags == 'foobar'
|
||||
assert node[0].job_tags == 'untagged'
|
||||
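For orientation, a minimal sketch (not part of any commit above) of what these tests exercise over HTTP, using the requests library. The /api/v2/bulk/... paths, controller URL, and credentials are assumptions inferred from the URL names reverse('api:bulk_host_create') and reverse('api:bulk_job_launch'):

import requests

AWX = "https://awx.example.org"  # hypothetical controller URL
AUTH = ("admin", "password")  # hypothetical credentials

# One request creates many hosts in one inventory (bounded by BULK_HOST_MAX_CREATE).
resp = requests.post(
    f"{AWX}/api/v2/bulk/host_create/",
    auth=AUTH,
    json={"inventory": 1, "hosts": [{"name": f"host-{i}"} for i in range(10)]},
)
resp.raise_for_status()

# One request launches many jobs; AWX wraps them in a single workflow job
# (is_bulk_job=True) with one workflow node per requested job.
resp = requests.post(
    f"{AWX}/api/v2/bulk/job_launch/",
    auth=AUTH,
    json={"name": "Bulk Job Launch", "jobs": [{"unified_job_template": 7}] * 5},
)
print(resp.json()["url"])  # URL of the spawned workflow job, as asserted in the tests above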
@@ -235,6 +235,7 @@ class TestAutoScaling:
        assert len(self.pool) == 10
        assert self.pool.workers[0].messages_sent == 2

    @pytest.mark.timeout(20)
    def test_lost_worker_autoscale(self):
        # if a worker exits, it should be replaced automatically up to min_workers
        self.pool.init_workers(ResultWriter().work_loop, multiprocessing.Queue())
@@ -243,8 +244,8 @@ class TestAutoScaling:
        assert len(self.pool) == 2
        assert not self.pool.should_grow
        alive_pid = self.pool.workers[1].pid
        self.pool.workers[0].process.terminate()
        time.sleep(2)  # wait a moment for sigterm
        self.pool.workers[0].process.kill()
        self.pool.workers[0].process.join()  # waits for the process to fully terminate

        # clean up the dead worker
        self.pool.cleanup()
@@ -336,6 +337,8 @@ class TestTaskPublisher:


yesterday = tz_now() - datetime.timedelta(days=1)
minute = tz_now() - datetime.timedelta(seconds=120)
now = tz_now()


@pytest.mark.django_db
@@ -378,13 +381,15 @@ class TestJobReaper(object):
        assert job.status == status

    @pytest.mark.parametrize(
        'excluded_uuids, fail',
        'excluded_uuids, fail, started',
        [
            (['abc123'], False),
            ([], True),
            (['abc123'], False, None),
            ([], False, None),
            ([], True, minute),
        ],
    )
    def test_do_not_reap_excluded_uuids(self, excluded_uuids, fail):
    def test_do_not_reap_excluded_uuids(self, excluded_uuids, fail, started):
        """Modified Test to account for ref_time in reap()"""
        i = Instance(hostname='awx')
        i.save()
        j = Job(
@@ -395,10 +400,13 @@ class TestJobReaper(object):
            celery_task_id='abc123',
        )
        j.save()
        if started:
            Job.objects.filter(id=j.id).update(started=started)

        # if the UUID is excluded, don't reap it
        reaper.reap(i, excluded_uuids=excluded_uuids)
        reaper.reap(i, excluded_uuids=excluded_uuids, ref_time=now)
        job = Job.objects.first()

        if fail:
            assert job.status == 'failed'
            assert 'marked as failed' in job.job_explanation
@@ -414,3 +422,20 @@ class TestJobReaper(object):
        reaper.reap(i)

        assert WorkflowJob.objects.first().status == 'running'

    def test_should_not_reap_new(self):
        """
        This test is designed specifically to ensure that jobs launched after the dispatcher has provided a list of UUIDs aren't reaped.
        It is very racy, and the test is designed with that in mind.
        """
        i = Instance(hostname='awx')
        # ref_time is set to 10 seconds in the past to mimic someone launching a job in the heartbeat window.
        ref_time = tz_now() - datetime.timedelta(seconds=10)
        # creating the job at the current time
        job = Job.objects.create(status='running', controller_node=i.hostname)
        reaper.reap(i, ref_time=ref_time)
        # explicitly refreshing from the db to ensure an up-to-date cache
        job.refresh_from_db()
        assert job.started > ref_time
        assert job.status == 'running'
        assert job.job_explanation == ''
@@ -99,12 +99,12 @@ def test_instance_dup(org_admin, organization, project, instance_factory, instan
    list_response = get(reverse('api:instance_list'), user=system_auditor)
    api_num_instances_auditor = list(list_response.data.items())[0][1]

    ig_all.read_role.members.add(org_admin)
    list_response2 = get(reverse('api:instance_list'), user=org_admin)
    api_num_instances_oa = list(list_response2.data.items())[0][1]

    assert api_num_instances_auditor == actual_num_instances
    # Note: The org_admin will not see the default 'tower' node
    # (instance fixture) because it is not in its group, as expected
    # Note: The org_admin will not see instances unless at least read_role to the IG has been assigned
    assert api_num_instances_oa == (actual_num_instances - 1)

16
awx/main/tests/functional/test_org_admin_migration.py
Normal file
@@ -0,0 +1,16 @@
import pytest

from django.apps import apps

from awx.main.models import InstanceGroup
from awx.main.migrations import _OrgAdmin_to_use_ig as orgadmin


@pytest.mark.django_db
def test_migrate_admin_role(org_admin, organization):
    instance_group = InstanceGroup.objects.create(name='test')
    organization.admin_role.members.add(org_admin)
    organization.instance_groups.add(instance_group)
    orgadmin.migrate_org_admin_to_use(apps, None)
    assert org_admin in instance_group.use_role.members.all()
    assert instance_group.use_role.members.count() == 1
@@ -6,7 +6,47 @@ from awx.main.access import (
    InventoryAccess,
    JobTemplateAccess,
)
from awx.main.models import Organization


@pytest.mark.django_db
@pytest.mark.parametrize(
    "obj_perm,allowed,readonly,partial", [("admin_role", True, True, True), ("use_role", False, True, True), ("read_role", False, True, False)]
)
def test_ig_role_base_visibility(default_instance_group, rando, obj_perm, allowed, partial, readonly):
    if obj_perm:
        getattr(default_instance_group, obj_perm).members.add(rando)

    assert readonly == InstanceGroupAccess(rando).can_read(default_instance_group)
    assert partial == InstanceGroupAccess(rando).can_use(default_instance_group)
    assert not InstanceGroupAccess(rando).can_add(default_instance_group)
    assert allowed == InstanceGroupAccess(rando).can_admin(default_instance_group)
    assert allowed == InstanceGroupAccess(rando).can_change(default_instance_group, {'name': 'New Name'})


@pytest.mark.django_db
@pytest.mark.parametrize(
    "obj_perm,subobj_perm,allowed", [('admin_role', 'use_role', True), ('admin_role', 'read_role', False), ('admin_role', 'admin_role', True)]
)
def test_ig_role_based_associability(default_instance_group, rando, organization, job_template_factory, obj_perm, subobj_perm, allowed):
    objects = job_template_factory('jt', organization=organization, project='p', inventory='i', credential='c')
    if obj_perm:
        getattr(objects.job_template, obj_perm).members.add(rando)
        getattr(objects.inventory, obj_perm).members.add(rando)
        getattr(objects.organization, obj_perm).members.add(rando)
    if subobj_perm:
        getattr(default_instance_group, subobj_perm).members.add(rando)

    assert allowed == JobTemplateAccess(rando).can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
    assert allowed == InventoryAccess(rando).can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
    assert allowed == OrganizationAccess(rando).can_attach(objects.organization, default_instance_group, 'instance_groups', None)


@pytest.mark.django_db
def test_ig_use_with_org_admin(default_instance_group, rando, org_admin):
    default_instance_group.use_role.members.add(rando)

    assert list(InstanceGroupAccess(org_admin).get_queryset()) != [default_instance_group]
    assert list(InstanceGroupAccess(rando).get_queryset()) == [default_instance_group]


@pytest.mark.django_db
@@ -24,7 +64,7 @@ def test_ig_admin_user_visibility(organization, default_instance_group, admin, s
    assert len(InstanceGroupAccess(system_auditor).get_queryset()) == 1
    assert len(InstanceGroupAccess(org_admin).get_queryset()) == 0
    organization.instance_groups.add(default_instance_group)
    assert len(InstanceGroupAccess(org_admin).get_queryset()) == 1
    assert len(InstanceGroupAccess(org_admin).get_queryset()) == 0


@pytest.mark.django_db
@@ -37,16 +77,6 @@ def test_ig_normal_user_associability(organization, default_instance_group, user
    assert not access.can_attach(organization, default_instance_group, 'instance_groups', None)


@pytest.mark.django_db
def test_access_via_two_organizations(rando, default_instance_group):
    for org_name in ['org1', 'org2']:
        org = Organization.objects.create(name=org_name)
        org.instance_groups.add(default_instance_group)
        org.admin_role.members.add(rando)
    access = InstanceGroupAccess(rando)
    assert list(access.get_queryset()) == [default_instance_group]


@pytest.mark.django_db
def test_ig_associability(organization, default_instance_group, admin, system_auditor, org_admin, org_member, job_template_factory):
    admin_access = OrganizationAccess(admin)
@@ -72,7 +102,7 @@ def test_ig_associability(organization, default_instance_group, admin, system_au
    omember_access = InventoryAccess(org_member)

    assert admin_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
    assert oadmin_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
    assert not oadmin_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
    assert not auditor_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
    assert not omember_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)

@@ -82,6 +112,6 @@ def test_ig_associability(organization, default_instance_group, admin, system_au
    omember_access = JobTemplateAccess(org_member)

    assert admin_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
    assert oadmin_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
    assert not oadmin_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
    assert not auditor_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
    assert not omember_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)

@@ -148,7 +148,7 @@ class TestWorkflowJobTemplateNodeAccess:
        elif permission_type == 'instance_groups':
            sub_obj = InstanceGroup.objects.create()
            org = Organization.objects.create()
            org.admin_role.members.add(rando)  # only admins can see IGs
            sub_obj.use_role.members.add(rando)  # only admins can see IGs
            org.instance_groups.add(sub_obj)

        access = WorkflowJobTemplateNodeAccess(rando)

@@ -18,7 +18,7 @@ class DistinctParametrize(object):


@pytest.mark.survey
class SurveyVariableValidation:
class TestSurveyVariableValidation:
    def test_survey_answers_as_string(self, job_template_factory):
        objects = job_template_factory('job-template-with-survey', survey=[{'variable': 'var1', 'type': 'text'}], persisted=False)
        jt = objects.job_template
@@ -57,7 +57,7 @@ class SurveyVariableValidation:
        accepted, rejected, errors = obj.accept_or_ignore_variables({"a": 5})
        assert rejected == {"a": 5}
        assert accepted == {}
        assert str(errors[0]) == "Value 5 for 'a' expected to be a string."
        assert str(errors['variables_needed_to_start'][0]) == "Value 5 for 'a' expected to be a string."

    def test_job_template_survey_default_variable_validation(self, job_template_factory):
        objects = job_template_factory(
@@ -88,7 +88,7 @@ class SurveyVariableValidation:

        obj.survey_enabled = True
        accepted, _, errors = obj.accept_or_ignore_variables({"a": 2})
        assert accepted == {{"a": 2.0}}
        assert accepted == {"a": 2.0}
        assert not errors


@@ -107,7 +107,11 @@ class TestMetaVars:
        result_hash['{}_user_id'.format(name)] = 47
        result_hash['{}_inventory_id'.format(name)] = 45
        result_hash['{}_inventory_name'.format(name)] = 'example-inv'
        assert Job(name='fake-job', pk=42, id=42, launch_type='manual', created_by=maker, inventory=inv).awx_meta_vars() == result_hash
        result_hash['{}_execution_node'.format(name)] = 'example-exec-node'
        assert (
            Job(name='fake-job', pk=42, id=42, launch_type='manual', created_by=maker, inventory=inv, execution_node='example-exec-node').awx_meta_vars()
            == result_hash
        )

    def test_project_update_metavars(self):
        data = Job(

@@ -2008,7 +2008,7 @@ def test_project_update_no_ee(mock_me):
    with pytest.raises(RuntimeError) as e:
        task.build_env(job, {})

    assert 'The project could not sync because there is no Execution Environment' in str(e.value)
    assert 'The ProjectUpdate could not run because there is no Execution Environment' in str(e.value)


@pytest.mark.parametrize(

@@ -80,7 +80,6 @@ __all__ = [
    'set_environ',
    'IllegalArgumentError',
    'get_custom_venv_choices',
    'get_external_account',
    'ScheduleTaskManager',
    'ScheduleDependencyManager',
    'ScheduleWorkflowManager',
@@ -1089,29 +1088,6 @@ def has_model_field_prefetched(model_obj, field_name):
    return getattr(getattr(model_obj, field_name, None), 'prefetch_cache_name', '') in getattr(model_obj, '_prefetched_objects_cache', {})


def get_external_account(user):
    from django.conf import settings

    account_type = None
    if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
        try:
            if user.pk and user.profile.ldap_dn and not user.has_usable_password():
                account_type = "ldap"
        except AttributeError:
            pass
    if (
        getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
        or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
        or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
        or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
        or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
    ) and user.social_auth.all():
        account_type = "social"
    if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and user.enterprise_auth.all():
        account_type = "enterprise"
    return account_type


class classproperty:
    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self.fget = fget

@@ -1,4 +1,5 @@
import os
import logging
from pathlib import Path

from django.conf import settings
@@ -6,8 +7,15 @@ from django.conf import settings
from awx.main.models.execution_environments import ExecutionEnvironment


logger = logging.getLogger(__name__)


def get_control_plane_execution_environment():
    return ExecutionEnvironment.objects.filter(organization=None, managed=True).first()
    ee = ExecutionEnvironment.objects.filter(organization=None, managed=True).first()
    if ee == None:
        logger.error('Failed to find control plane ee, there are no managed EEs without organizations')
        raise RuntimeError("Failed to find default control plane EE")
    return ee


def get_default_execution_environment():

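A small usage sketch (not part of the diff): with this change, callers get a hard RuntimeError instead of a silent None when no managed, organization-less EE exists, so misconfiguration surfaces immediately. The import path below is assumed from the surrounding file:

from awx.main.utils.execution_environments import get_control_plane_execution_environment

try:
    ee = get_control_plane_execution_environment()
except RuntimeError:
    ee = None  # e.g. report a configuration error to the operator here instead of proceeding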
@@ -25,42 +25,47 @@
  connection: local
  name: Update source tree if necessary
  tasks:

    - name: delete project directory before update
      command: "find -delete"  # volume mounted, cannot delete folder itself
    - name: Delete project directory before update
      ansible.builtin.shell: set -o pipefail && find . -delete -print | tail -2  # volume mounted, cannot delete folder itself
      register: reg
      changed_when: reg.stdout_lines | length > 1
      args:
        chdir: "{{ project_path }}"
      tags:
        - delete

    - block:
        - name: update project using git
          git:
            dest: "{{project_path|quote}}"
            repo: "{{scm_url}}"
            version: "{{scm_branch|quote}}"
            refspec: "{{scm_refspec|default(omit)}}"
            force: "{{scm_clean}}"
            track_submodules: "{{scm_track_submodules|default(omit)}}"
            accept_hostkey: "{{scm_accept_hostkey|default(omit)}}"
    - name: Update project using git
      tags:
        - update_git
      block:
        - name: Update project using git
          ansible.builtin.git:
            dest: "{{ project_path | quote }}"
            repo: "{{ scm_url }}"
            version: "{{ scm_branch | quote }}"
            refspec: "{{ scm_refspec | default(omit) }}"
            force: "{{ scm_clean }}"
            track_submodules: "{{ scm_track_submodules | default(omit) }}"
            accept_hostkey: "{{ scm_accept_hostkey | default(omit) }}"
          register: git_result

        - name: Set the git repository version
          set_fact:
          ansible.builtin.set_fact:
            scm_version: "{{ git_result['after'] }}"
          when: "'after' in git_result"
      tags:
        - update_git

    - block:
        - name: update project using svn
          subversion:
            dest: "{{project_path|quote}}"
            repo: "{{scm_url|quote}}"
            revision: "{{scm_branch|quote}}"
            force: "{{scm_clean}}"
            username: "{{scm_username|default(omit)}}"
            password: "{{scm_password|default(omit)}}"
    - name: Update project using svn
      tags:
        - update_svn
      block:
        - name: Update project using svn
          ansible.builtin.subversion:
            dest: "{{ project_path | quote }}"
            repo: "{{ scm_url | quote }}"
            revision: "{{ scm_branch | quote }}"
            force: "{{ scm_clean }}"
            username: "{{ scm_username | default(omit) }}"
            password: "{{ scm_password | default(omit) }}"
            # must be in_place because folder pre-existing, because it is mounted
            in_place: true
          environment:
@@ -68,85 +73,90 @@
          register: svn_result

        - name: Set the svn repository version
          set_fact:
          ansible.builtin.set_fact:
            scm_version: "{{ svn_result['after'] }}"
          when: "'after' in svn_result"

        - name: parse subversion version string properly
          set_fact:
            scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}"
      tags:
        - update_svn
        - name: Parse subversion version string properly
          ansible.builtin.set_fact:
            scm_version: "{{ scm_version | regex_replace('^.*Revision: ([0-9]+).*$', '\\1') }}"

    - block:

    - name: Project update for Insights
      tags:
        - update_insights
      block:
        - name: Ensure the project directory is present
          file:
            dest: "{{project_path|quote}}"
          ansible.builtin.file:
            dest: "{{ project_path | quote }}"
            state: directory
            mode: '0755'

        - name: Fetch Insights Playbook(s)
          insights:
            insights_url: "{{insights_url}}"
            username: "{{scm_username}}"
            password: "{{scm_password}}"
            project_path: "{{project_path}}"
            awx_license_type: "{{awx_license_type}}"
            awx_version: "{{awx_version}}"
            insights_url: "{{ insights_url }}"
            username: "{{ scm_username }}"
            password: "{{ scm_password }}"
            project_path: "{{ project_path }}"
            awx_license_type: "{{ awx_license_type }}"
            awx_version: "{{ awx_version }}"
          register: results

        - name: Save Insights Version
          set_fact:
            scm_version: "{{results.version}}"
          ansible.builtin.set_fact:
            scm_version: "{{ results.version }}"
          when: results is defined
      tags:
        - update_insights

    - block:

    - name: Update project using archive
      tags:
        - update_archive
      block:
        - name: Ensure the project archive directory is present
          file:
            dest: "{{ project_path|quote }}/.archive"
          ansible.builtin.file:
            dest: "{{ project_path | quote }}/.archive"
            state: directory
            mode: '0755'

        - name: Get archive from url
          get_url:
            url: "{{ scm_url|quote }}"
            dest: "{{ project_path|quote }}/.archive/"
            url_username: "{{ scm_username|default(omit) }}"
            url_password: "{{ scm_password|default(omit) }}"
          ansible.builtin.get_url:
            url: "{{ scm_url | quote }}"
            dest: "{{ project_path | quote }}/.archive/"
            url_username: "{{ scm_username | default(omit) }}"
            url_password: "{{ scm_password | default(omit) }}"
            force_basic_auth: true
            mode: '0755'
          register: get_archive

        - name: Unpack archive
          project_archive:
            src: "{{ get_archive.dest }}"
            project_path: "{{ project_path|quote }}"
            project_path: "{{ project_path | quote }}"
            force: "{{ scm_clean }}"
          when: get_archive.changed or scm_clean
          register: unarchived

        - name: Find previous archives
          find:
            paths: "{{ project_path|quote }}/.archive/"
          ansible.builtin.find:
            paths: "{{ project_path | quote }}/.archive/"
            excludes:
              - "{{ get_archive.dest|basename }}"
              - "{{ get_archive.dest | basename }}"
          when: unarchived.changed
          register: previous_archive

        - name: Remove previous archives
          file:
          ansible.builtin.file:
            path: "{{ item.path }}"
            state: absent
          loop: "{{ previous_archive.files }}"
          when: previous_archive.files|default([])
          when: previous_archive.files | default([])

        - name: Set scm_version to archive sha1 checksum
          set_fact:
          ansible.builtin.set_fact:
            scm_version: "{{ get_archive.checksum_src }}"
      tags:
        - update_archive

    - name: Repository Version
      debug:
      ansible.builtin.debug:
        msg: "Repository Version {{ scm_version }}"
      tags:
        - update_git
@@ -183,60 +193,59 @@
    additional_collections_env:
      # These environment variables are used for installing collections, in addition to galaxy_task_env
      # setting the collections paths silences warnings
      ANSIBLE_COLLECTIONS_PATHS: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections"
      ANSIBLE_COLLECTIONS_PATHS: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections"
      # Put the local tmp directory in same volume as collection destination
      # otherwise, files cannot be moved across volumes and will cause an error
      ANSIBLE_LOCAL_TEMP: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/tmp"
      ANSIBLE_LOCAL_TEMP: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/tmp"
  tasks:

    - name: Check content sync settings
      block:
        - debug:
            msg: >
              Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and
              AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization.

        - meta: end_play

      when: not roles_enabled|bool and not collections_enabled|bool
      when: not roles_enabled | bool and not collections_enabled | bool
      tags:
        - install_roles
        - install_collections
      block:
        - name: Warn about disabled content sync
          ansible.builtin.debug:
            msg: >
              Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and
              AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization.
        - name: End play due to disabled content sync
          ansible.builtin.meta: end_play

    - name: fetch galaxy roles from requirements.(yml/yaml)
      command: >
    - name: Fetch galaxy roles from requirements.(yml/yaml)
      ansible.builtin.command: >
        ansible-galaxy role install -r {{ item }}
        --roles-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_roles
        --roles-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles
        {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
      args:
        chdir: "{{project_path|quote}}"
        chdir: "{{ project_path | quote }}"
      register: galaxy_result
      with_fileglob:
        - "{{project_path|quote}}/roles/requirements.yaml"
        - "{{project_path|quote}}/roles/requirements.yml"
        - "{{ project_path | quote }}/roles/requirements.yaml"
        - "{{ project_path | quote }}/roles/requirements.yml"
      changed_when: "'was installed successfully' in galaxy_result.stdout"
      environment: "{{ galaxy_task_env }}"
      when: roles_enabled|bool
      when: roles_enabled | bool
      tags:
        - install_roles

    - name: fetch galaxy collections from collections/requirements.(yml/yaml)
      command: >
    - name: Fetch galaxy collections from collections/requirements.(yml/yaml)
      ansible.builtin.command: >
        ansible-galaxy collection install -r {{ item }}
        --collections-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections
        --collections-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections
        {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
      args:
        chdir: "{{project_path|quote}}"
        chdir: "{{ project_path | quote }}"
      register: galaxy_collection_result
      with_fileglob:
        - "{{project_path|quote}}/collections/requirements.yaml"
        - "{{project_path|quote}}/collections/requirements.yml"
        - "{{project_path|quote}}/requirements.yaml"
        - "{{project_path|quote}}/requirements.yml"
        - "{{ project_path | quote }}/collections/requirements.yaml"
        - "{{ project_path | quote }}/collections/requirements.yml"
        - "{{ project_path | quote }}/requirements.yaml"
        - "{{ project_path | quote }}/requirements.yml"
      changed_when: "'Installing ' in galaxy_collection_result.stdout"
      environment: "{{ additional_collections_env | combine(galaxy_task_env) }}"
      when:
        - "ansible_version.full is version_compare('2.9', '>=')"
        - collections_enabled|bool
        - collections_enabled | bool
      tags:
        - install_collections

@@ -11,11 +11,13 @@ from datetime import timedelta


if "pytest" in sys.modules:
    IS_TESTING_MODE = True
    from unittest import mock

    with mock.patch('__main__.__builtins__.dir', return_value=[]):
        import ldap
else:
    IS_TESTING_MODE = False
    import ldap


@@ -83,7 +85,11 @@ USE_L10N = True

USE_TZ = True

STATICFILES_DIRS = [os.path.join(BASE_DIR, 'ui', 'build', 'static'), os.path.join(BASE_DIR, 'static')]
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'ui', 'build', 'static'),
    os.path.join(BASE_DIR, 'ui_next', 'build'),
    os.path.join(BASE_DIR, 'static'),
]

# Absolute filesystem path to the directory where static files are collected via
# the collectstatic command.
@@ -129,6 +135,13 @@ NAMED_URL_GRAPH = {}
# Note: This setting may be overridden by database settings.
SCHEDULE_MAX_JOBS = 10

# Bulk API related settings
# Maximum number of jobs that can be launched in 1 bulk job
BULK_JOB_MAX_LAUNCH = 100

# Maximum number of hosts that can be created in 1 bulk host create
BULK_HOST_MAX_CREATE = 100

SITE_ID = 1

# Make this unique, and don't share it with anybody.
@@ -290,7 +303,12 @@ TEMPLATES = [
        ],
        'builtins': ['awx.main.templatetags.swagger'],
    },
    'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'ui', 'build'), os.path.join(BASE_DIR, 'ui', 'public')],
    'DIRS': [
        os.path.join(BASE_DIR, 'templates'),
        os.path.join(BASE_DIR, 'ui', 'build'),
        os.path.join(BASE_DIR, 'ui', 'public'),
        os.path.join(BASE_DIR, 'ui_next', 'build', 'awx'),
    ],
},
]

@@ -929,6 +947,11 @@ AWX_RUNNER_OMIT_ENV_FILES = True
# Allow ansible-runner to save ansible output (may cause performance issues)
AWX_RUNNER_SUPPRESS_OUTPUT_FILE = True

# https://github.com/ansible/ansible-runner/pull/1191/files
# Interval in seconds between the last message and keep-alive messages that
# ansible-runner will send
AWX_RUNNER_KEEPALIVE_SECONDS = 0

# Delete completed work units in receptor
RECEPTOR_RELEASE_WORK = True

@@ -1003,3 +1026,5 @@ AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False

# This is overridden downstream via /etc/tower/conf.d/cluster_host_id.py
CLUSTER_HOST_ID = socket.gethostname()

UI_NEXT = True

@@ -385,10 +385,10 @@ def on_populate_user(sender, **kwargs):
            logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))

    org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
    team_map = getattr(backend.settings, 'TEAM_MAP', {})
    team_map_settings = getattr(backend.settings, 'TEAM_MAP', {})
    orgs_list = list(org_map.keys())
    team_map = {}
    for team_name, team_opts in team_map.items():
    for team_name, team_opts in team_map_settings.items():
        if not team_opts.get('organization', None):
            # You can't save the LDAP config in the UI w/o an org (or '' or null as the org), so if we somehow got this condition it's an error
            logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))
@@ -416,7 +416,7 @@ def on_populate_user(sender, **kwargs):

    # Compute in memory what the state is of the different LDAP teams
    desired_team_states = {}
    for team_name, team_opts in team_map.items():
    for team_name, team_opts in team_map_settings.items():
        if 'organization' not in team_opts:
            continue
        users_opts = team_opts.get('users', None)

@@ -169,3 +169,45 @@ def get_or_create_org_with_default_galaxy_cred(**kwargs):
    else:
        logger.debug("Could not find default Ansible Galaxy credential to add to org")
    return org


def get_external_account(user):
    account_type = None

    # Previously this method also checked for active configuration, which meant that if a user logged in from LDAP
    # and then LDAP was no longer configured, it would "convert" the user from an LDAP account_type to none.
    # This did have one benefit: if a login type was removed intentionally, the user could be given a username and password.
    # But it had a limitation that the user would have to have an active session (or an admin would have to go set a temp password).
    # It also led to the side effect that if LDAP was ever reconfigured, the user would convert back to LDAP but still have a local password.
    # That local password could then be used to bypass LDAP authentication.
    try:
        if user.pk and user.profile.ldap_dn and not user.has_usable_password():
            account_type = "ldap"
    except AttributeError:
        pass

    if user.social_auth.all():
        account_type = "social"

    if user.enterprise_auth.all():
        account_type = "enterprise"

    return account_type


def is_remote_auth_enabled():
    from django.conf import settings

    # Append LDAP, Radius, TACACS+ and SAML options
    settings_that_turn_on_remote_auth = [
        'AUTH_LDAP_SERVER_URI',
        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
        'RADIUS_SERVER',
        'TACACSPLUS_HOST',
    ]
    # Also include any SOCIAL_AUTH_*_KEY setting (except SAML)
    for social_auth_key in dir(settings):
        if social_auth_key.startswith('SOCIAL_AUTH_') and social_auth_key.endswith('_KEY') and 'SAML' not in social_auth_key:
            settings_that_turn_on_remote_auth.append(social_auth_key)

    return any(getattr(settings, s, None) for s in settings_that_turn_on_remote_auth)

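Illustrative sketch (not part of the diff): in get_external_account() above, the later checks overwrite account_type, so enterprise auth outranks social auth, which outranks LDAP — exactly the precedence the parametrized tests further down expect:

def classify(has_ldap_dn, has_social, has_enterprise):
    # mirrors the overwrite order of the real function
    account_type = None
    if has_ldap_dn:
        account_type = "ldap"
    if has_social:
        account_type = "social"
    if has_enterprise:
        account_type = "enterprise"
    return account_type

assert classify(True, True, False) == "social"
assert classify(False, True, True) == "enterprise"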
@@ -148,6 +148,16 @@ register(
    placeholder=['username', 'email'],
)

register(
    'SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL',
    field_class=fields.BooleanField,
    default=False,
    label=_('Use Email address for usernames'),
    help_text=_('Enabling this setting will tell social auth to use the full Email as username instead of the full name'),
    category=_('Authentication'),
    category_slug='authentication',
)

###############################################################################
# LDAP AUTHENTICATION SETTINGS
###############################################################################

@@ -2,9 +2,22 @@ import pytest
from collections import Counter
from django.core.exceptions import FieldError
from django.utils.timezone import now
from django.test.utils import override_settings

from awx.main.models import Credential, CredentialType, Organization, Team, User
from awx.sso.common import get_orgs_by_ids, reconcile_users_org_team_mappings, create_org_and_teams, get_or_create_org_with_default_galaxy_cred
from awx.sso.common import (
    get_orgs_by_ids,
    reconcile_users_org_team_mappings,
    create_org_and_teams,
    get_or_create_org_with_default_galaxy_cred,
    is_remote_auth_enabled,
    get_external_account,
)


class MicroMockObject(object):
    def all(self):
        return True


@pytest.mark.django_db
@@ -278,3 +291,87 @@ class TestCommonFunctions:

        for o in Organization.objects.all():
            assert o.galaxy_credentials.count() == 0

    @pytest.mark.parametrize(
        "enable_ldap, enable_social, enable_enterprise, expected_results",
        [
            (False, False, False, None),
            (True, False, False, 'ldap'),
            (True, True, False, 'social'),
            (True, True, True, 'enterprise'),
            (False, True, True, 'enterprise'),
            (False, False, True, 'enterprise'),
            (False, True, False, 'social'),
        ],
    )
    def test_get_external_account(self, enable_ldap, enable_social, enable_enterprise, expected_results):
        try:
            user = User.objects.get(username="external_tester")
        except User.DoesNotExist:
            user = User(username="external_tester")
            user.set_unusable_password()
            user.save()

        if enable_ldap:
            user.profile.ldap_dn = 'test.dn'
        if enable_social:
            from social_django.models import UserSocialAuth

            social_auth, _ = UserSocialAuth.objects.get_or_create(
                uid='667ec049-cdf3-45d0-a4dc-0465f7505954',
                provider='oidc',
                extra_data={},
                user_id=user.id,
            )
            user.social_auth.set([social_auth])
        if enable_enterprise:
            from awx.sso.models import UserEnterpriseAuth

            enterprise_auth = UserEnterpriseAuth(user=user, provider='tacacs+')
            enterprise_auth.save()

        assert get_external_account(user) == expected_results

    @pytest.mark.parametrize(
        "setting, expected",
        [
            # Set none of the social auth settings
            ('JUNK_SETTING', False),
            # Set the hard-coded settings
            ('AUTH_LDAP_SERVER_URI', True),
            ('SOCIAL_AUTH_SAML_ENABLED_IDPS', True),
            ('RADIUS_SERVER', True),
            ('TACACSPLUS_HOST', True),
            # Set some SOCIAL_AUTH_*_KEY settings
            ('SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True),
            ('SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY', True),
            ('SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY', True),
            ('SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY', True),
            ('SOCIAL_AUTH_GITHUB_KEY', True),
            ('SOCIAL_AUTH_GITHUB_ORG_KEY', True),
            ('SOCIAL_AUTH_GITHUB_TEAM_KEY', True),
            ('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', True),
            ('SOCIAL_AUTH_OIDC_KEY', True),
            # Try a hypothetical future one
            ('SOCIAL_AUTH_GIBBERISH_KEY', True),
            # Do a SAML one
            ('SOCIAL_AUTH_SAML_SP_PRIVATE_KEY', False),
        ],
    )
    def test_is_remote_auth_enabled(self, setting, expected):
        with override_settings(**{setting: True}):
            assert is_remote_auth_enabled() == expected

    @pytest.mark.parametrize(
        "key_one, key_one_value, key_two, key_two_value, expected",
        [
            ('JUNK_SETTING', True, 'JUNK2_SETTING', True, False),
            ('AUTH_LDAP_SERVER_URI', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True),
            ('JUNK_SETTING', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True),
            ('AUTH_LDAP_SERVER_URI', False, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', False, False),
        ],
    )
    def test_is_remote_auth_enabled_multiple_keys(self, key_one, key_one_value, key_two, key_two_value, expected):
        with override_settings(**{key_one: key_one_value}):
            with override_settings(**{key_two: key_two_value}):
                assert is_remote_auth_enabled() == expected

BIN
awx/ui/public/static/media/192.png
Normal file
After Width: | Height: | Size: 19 KiB
BIN
awx/ui/public/static/media/256.png
Normal file
After Width: | Height: | Size: 30 KiB
BIN
awx/ui/public/static/media/384.png
Normal file
After Width: | Height: | Size: 56 KiB
BIN
awx/ui/public/static/media/512.png
Normal file
After Width: | Height: | Size: 87 KiB
BIN
awx/ui/public/static/media/brand-logo.png
Normal file
After Width: | Height: | Size: 7.3 KiB
232
awx/ui/public/static/media/brand-logo.svg
Normal file
@@ -0,0 +1,232 @@
[New 232-line SVG asset: an Adobe Illustrator export of the AWX brand logo — XML declaration, inline CSS classes (.st0–.st8), and vector path/ellipse data only; no prose content.]
|
||||
</g>
|
||||
</g>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st8" d="M164.1,216.5c-11.4-2.2-18.8-11.4-22.7-21.9c-3.6-9.6-7.7-25.3,1.2-33.1c3.9-3.4-1.8-9-5.7-5.7
|
||||
c-11.3,9.9-7.9,28.5-3.3,40.9c4.8,13,14.1,24.7,28.3,27.5C167,225.2,169.1,217.5,164.1,216.5L164.1,216.5z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st8" d="M152,231.7c-27.3-13.3-38.1-46.5-23.3-73.2c3.1-5.6-5.5-10.7-8.6-5c-17.3,31.2-5.3,71.1,26.9,86.9
|
||||
C152.7,243.1,157.8,234.5,152,231.7L152,231.7z"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 12 KiB |
BIN  awx/ui/public/static/media/brand-logo192.png  Normal file
After Width: | Height: | Size: 7.7 KiB

BIN  awx/ui/public/static/media/favicon.png  Normal file
After Width: | Height: | Size: 3.6 KiB
232  awx/ui/public/static/media/favicon.svg  Normal file
@@ -0,0 +1,232 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 21.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
	 viewBox="0 0 500 500" style="enable-background:new 0 0 500 500;" xml:space="preserve">
<style type="text/css">
	.st0{display:none;}
	.st1{display:inline;fill:#ED1C24;}
	.st2{fill:#42210B;}
	.st3{fill:#FFFFFF;}
	.st4{fill:#C69C6D;stroke:#8C6239;stroke-width:5;stroke-miterlimit:10;}
	.st5{fill:#FFFFFF;stroke:#42210B;stroke-width:3;stroke-miterlimit:10;}
	.st6{fill:#ED1C24;stroke:#8C6239;stroke-width:5;stroke-miterlimit:10;}
	.st7{fill:#A67C52;}
	.st8{fill:#ED1C24;}
</style>
(vector path data for the favicon illustration omitted; same artwork as the logo SVG above)
</svg>

After Width: | Height: | Size: 12 KiB
@@ -28,7 +28,7 @@ import { getLanguageWithoutRegionCode } from 'util/language';
 import Metrics from 'screens/Metrics';
 import SubscriptionEdit from 'screens/Setting/Subscription/SubscriptionEdit';
 import useTitle from 'hooks/useTitle';
-import { dynamicActivate, locales } from './i18nLoader';
+import { dynamicActivate } from './i18nLoader';
 import getRouteConfig from './routeConfig';
 import { SESSION_REDIRECT_URL } from './constants';
@@ -139,16 +139,15 @@ export function ProtectedRoute({ children, ...rest }) {
 function App() {
   const history = useHistory();
   const { hash, search, pathname } = useLocation();
-  let language = getLanguageWithoutRegionCode(navigator);
-  if (!Object.keys(locales).includes(language)) {
-    // If there isn't a string catalog available for the browser's
-    // preferred language, default to one that has strings.
-    language = 'en';
-  }
+  const searchParams = Object.fromEntries(new URLSearchParams(search));
+  const pseudolocalization =
+    searchParams.pseudolocalization === 'true' || false;
+  const language =
+    searchParams.lang || getLanguageWithoutRegionCode(navigator) || 'en';

   useEffect(() => {
-    dynamicActivate(language);
-  }, [language]);
+    dynamicActivate(language, pseudolocalization);
+  }, [language, pseudolocalization]);

   useTitle();
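The rewritten App() above replaces the old locale-whitelist check with a simple precedence chain: an explicit ?lang= query parameter wins, then the browser language, then 'en'. A minimal standalone sketch of that chain, with a hard-coded query string standing in for window.location.search (illustrative only):

// Sketch of the language-selection precedence used by App() above.
// The query string here is made up for demonstration.
const searchParams = Object.fromEntries(
  new URLSearchParams('?lang=fr&pseudolocalization=true')
);
const pseudolocalization = searchParams.pseudolocalization === 'true';
const language = searchParams.lang || 'en'; // browser detection elided here
console.log(language, pseudolocalization); // -> fr true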
@@ -8,6 +8,7 @@ class InstanceGroups extends Base {
     this.associateInstance = this.associateInstance.bind(this);
     this.disassociateInstance = this.disassociateInstance.bind(this);
     this.readInstanceOptions = this.readInstanceOptions.bind(this);
+    this.readInstanceGroupOptions = this.readInstanceGroupOptions.bind(this);
     this.readInstances = this.readInstances.bind(this);
     this.readJobs = this.readJobs.bind(this);
   }
@@ -33,6 +34,10 @@ class InstanceGroups extends Base {
     return this.http.options(`${this.baseUrl}${id}/instances/`);
   }

+  readInstanceGroupOptions(id) {
+    return this.http.options(`${this.baseUrl}${id}/`);
+  }
+
   readJobs(id) {
     return this.http.get(`${this.baseUrl}${id}/jobs/`);
   }
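The new readInstanceGroupOptions(id) helper issues an OPTIONS request against a single instance group. A hypothetical caller might use it to gate edit controls; the { actions: { PUT } } response shape below is an assumption based on DRF-style OPTIONS responses, not something this diff confirms:

// Hypothetical usage sketch; the response shape is an assumption.
import { InstanceGroupsAPI } from 'api';

async function canEditInstanceGroup(id) {
  const { data } = await InstanceGroupsAPI.readInstanceGroupOptions(id);
  // DRF-style OPTIONS responses list the allowed write actions.
  return Boolean(data.actions && data.actions.PUT);
}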
@@ -6,7 +6,12 @@ import { useField } from 'formik';
 import styled from 'styled-components';
 import { Split, SplitItem, Button, Modal } from '@patternfly/react-core';
 import { ExpandArrowsAltIcon } from '@patternfly/react-icons';
-import { yamlToJson, jsonToYaml, isJsonString } from 'util/yaml';
+import {
+  yamlToJson,
+  jsonToYaml,
+  isJsonString,
+  parseVariableField,
+} from 'util/yaml';
 import { CheckboxField } from '../FormField';
 import MultiButtonToggle from '../MultiButtonToggle';
 import CodeEditor from './CodeEditor';
@@ -37,36 +42,24 @@ function VariablesField({
   // track focus manually, because the Code Editor library doesn't wire
   // into Formik completely
   const [shouldValidate, setShouldValidate] = useState(false);
-  const [mode, setMode] = useState(initialMode || YAML_MODE);
   const validate = useCallback(
     (value) => {
       if (!shouldValidate) {
         return undefined;
       }
       try {
-        if (mode === YAML_MODE) {
-          yamlToJson(value);
-        } else {
-          JSON.parse(value);
-        }
+        parseVariableField(value);
       } catch (error) {
         return error.message;
       }
       return undefined;
     },
-    [shouldValidate, mode]
+    [shouldValidate]
   );
   const [field, meta, helpers] = useField({ name, validate });

-  useEffect(() => {
-    if (isJsonString(field.value)) {
-      // mode's useState above couldn't be initialized to JSON_MODE because
-      // the field value had to be defined below it
-      setMode(JSON_MODE);
-      onModeChange(JSON_MODE);
-      helpers.setValue(JSON.stringify(JSON.parse(field.value), null, 2));
-    }
-  }, []); // eslint-disable-line react-hooks/exhaustive-deps
+  const [mode, setMode] = useState(() =>
+    isJsonString(field.value) ? JSON_MODE : initialMode || YAML_MODE
+  );

   useEffect(
     () => {
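With validate delegating to parseVariableField, the callback no longer cares which editor mode is active, so mode drops out of the dependency list. util/yaml itself is not shown in this diff; a helper like this would roughly have to accept either JSON or YAML text and throw on invalid input, sketched here under the assumption of js-yaml-style parsing (not the actual implementation):

// Rough sketch of a parseVariableField-like helper; not the real util/yaml code.
import yaml from 'js-yaml';

function parseVariableFieldSketch(value) {
  if (value.trim().startsWith('{')) {
    return JSON.parse(value); // throws SyntaxError on invalid JSON
  }
  return yaml.load(value); // throws YAMLException on invalid YAML
}

Because validate returns error.message from its catch block, throwing is all such a helper needs to do to surface an inline editor error.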
@@ -6,6 +6,7 @@ import {
   InventoriesAPI,
   ProjectsAPI,
   OrganizationsAPI,
+  InstanceGroupsAPI,
 } from 'api';

 export default function getResourceAccessConfig() {
@@ -210,5 +211,32 @@ export default function getResourceAccessConfig() {
       fetchItems: (queryParams) => OrganizationsAPI.read(queryParams),
       fetchOptions: () => OrganizationsAPI.readOptions(),
     },
+    {
+      selectedResource: 'Instance Groups',
+      label: t`Instance Groups`,
+      searchColumns: [
+        {
+          name: t`Name`,
+          key: 'name__icontains',
+          isDefault: true,
+        },
+        {
+          name: t`Created By (Username)`,
+          key: 'created_by__username__icontains',
+        },
+        {
+          name: t`Modified By (Username)`,
+          key: 'modified_by__username__icontains',
+        },
+      ],
+      sortColumns: [
+        {
+          name: t`Name`,
+          key: 'name',
+        },
+      ],
+      fetchItems: (queryParams) => InstanceGroupsAPI.read(queryParams),
+      fetchOptions: () => InstanceGroupsAPI.readOptions(),
+    },
   ];
 }
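Each entry in this config array is consumed generically by the access-list screens: searchColumns and sortColumns drive the toolbar, while fetchItems and fetchOptions supply the data. A small sketch of that call pattern for the new Instance Groups entry (the filter value is made up):

// Illustrative consumer of the new config entry.
async function loadInstanceGroupAccessItems(nameFilter) {
  const config = getResourceAccessConfig().find(
    (resource) => resource.selectedResource === 'Instance Groups'
  );
  // The default search column key doubles as the API query parameter.
  return config.fetchItems({ name__icontains: nameFilter });
}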
@@ -1,4 +1,3 @@
-/* eslint-disable-next-line import/prefer-default-export */
 export const JOB_TYPE_URL_SEGMENTS = {
   job: 'playbook',
   project_update: 'project',
@@ -27,8 +27,21 @@ i18n.loadLocaleData({
  * We do a dynamic import of just the catalog that we need
  * @param locale any locale string
  */
-export async function dynamicActivate(locale) {
+export async function dynamicActivate(locale, pseudolocalization = false) {
   const { messages } = await import(`./locales/${locale}/messages`);
+
+  if (pseudolocalization) {
+    Object.keys(messages).forEach((key) => {
+      if (Array.isArray(messages[key])) {
+        // t`Foo ${param}` -> ["Foo ", ['param']] => [">>", "Foo ", ['param'], "<<"]
+        messages[key] = ['»', ...messages[key], '«'];
+      } else {
+        // simple string
+        messages[key] = `»${messages[key]}«`;
+      }
+    });
+  }
+
   i18n.load(locale, messages);
   i18n.activate(locale);
 }
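Wrapping every catalog entry in guillemets makes untranslated or hard-coded strings easy to spot during review, while leaving interpolation slots (the nested arrays that Lingui compiles out of t`...` templates) untouched. The same transform, runnable in isolation with a made-up catalog:

// Standalone demo of the pseudolocalization wrapping above.
const messages = {
  greeting: 'Hello',
  jobs: ['You have ', ['count'], ' jobs'], // compiled form of t`You have ${count} jobs`
};

Object.keys(messages).forEach((key) => {
  messages[key] = Array.isArray(messages[key])
    ? ['»', ...messages[key], '«']
    : `»${messages[key]}«`;
});

console.log(messages.greeting); // »Hello«
console.log(messages.jobs); // ['»', 'You have ', ['count'], ' jobs', '«']

Any string rendered without passing through the catalog shows up unwrapped, which is exactly what the ?pseudolocalization=true flag in App.js is meant to expose.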