Compare commits

62 commits  daoneill-i...dmzoneill-

| Author | SHA1 | Date |
|---|---|---|
|  | 5ca017d359 |  |
|  | 4b6f7e0ebe |  |
|  | 370c567be1 |  |
|  | 9be64f3de5 |  |
|  | 30500e5a95 |  |
|  | bb323c5710 |  |
|  | 7571df49d5 |  |
|  | 1559c21033 |  |
|  | d9b81731e9 |  |
|  | 2034cca3a9 |  |
|  | 0b5e59d9cb |  |
|  | f48b2d1ae5 |  |
|  | b44bb98c7e |  |
|  | 8cafdf0400 |  |
|  | 3f566c8737 |  |
|  | c8021a25bf |  |
|  | 934646a0f6 |  |
|  | 9bb97dd658 |  |
|  | 7150f5edc6 |  |
|  | 93da15c0ee |  |
|  | ab593bda45 |  |
|  | 065bd3ae2a |  |
|  | 8ff7260bc6 |  |
|  | a635445082 |  |
|  | 949e7efab1 |  |
|  | 615f09226f |  |
|  | d903c524f5 |  |
|  | 393d9c39c6 |  |
|  | dfab342bb4 |  |
|  | 12843eccf7 |  |
|  | dd9160135d |  |
|  | ad96a92fa7 |  |
|  | ca8085fe7e |  |
|  | b076cb00a9 |  |
|  | ee9eac15dc |  |
|  | 3f2f7b75a6 |  |
|  | b71645f3b1 |  |
|  | eb300252b8 |  |
|  | 2e2cd7f2de |  |
|  | 727278aaa3 |  |
|  | 81825ab755 |  |
|  | 7f2a1b6b03 |  |
|  | 1b56d94d30 |  |
|  | e1e32c971c |  |
|  | a4a2fabc01 |  |
|  | b7b7bfa520 |  |
|  | 887604317e |  |
|  | d35d8b6ed7 |  |
|  | ec28eff7f7 |  |
|  | a5d17539c6 |  |
|  | a49d894cf1 |  |
|  | b3466d4449 |  |
|  | 237adc6150 |  |
|  | 09b028ee3c |  |
|  | fb83bfbc31 |  |
|  | 88e406e121 |  |
|  | 59d0bcc63f |  |
|  | 3fb3125bc3 |  |
|  | d70c6b9474 |  |
|  | 5549516a37 |  |
|  | 14ac91a8a2 |  |
|  | d5753818a0 |  |

.github/PULL_REQUEST_TEMPLATE.md (vendored, 8 changed lines)

@@ -7,6 +7,14 @@ commit message and your description; but you should still explain what
the change does.
-->

##### Depends on
<!---
Please provide links to any other PR dependanices.
Indicating these should be merged first prior to this PR.
-->
- #12345
- https://github.com/xxx/yyy/pulls/1234

##### ISSUE TYPE
<!--- Pick one below and delete the rest: -->
- Breaking Change

.github/actions/issue_metrics/issue_metrics.yml (vendored, 40 changed lines)

@@ -1,40 +0,0 @@
name: Monthly issue metrics
on:
  workflow_dispatch:
  schedule:
    - cron: '3 2 1 * *'

permissions:
  issues: write
  pull-requests: read

jobs:
  build:
    name: issue metrics
    runs-on: ubuntu-latest
    steps:
      - name: Get dates for last month
        shell: bash
        run: |
          # Calculate the first day of the previous month
          first_day=$(date -d "last month" +%Y-%m-01)

          # Calculate the last day of the previous month
          last_day=$(date -d "$first_day +1 month -1 day" +%Y-%m-%d)

          #Set an environment variable with the date range
          echo "$first_day..$last_day"
          echo "last_month=$first_day..$last_day" >> "$GITHUB_ENV"

      - name: Run issue-metrics tool
        uses: github/issue-metrics@v2
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SEARCH_QUERY: 'repo:ansible/awx is:issue created:${{ env.last_month }} -reason:"not planned"'

      - name: Create issue
        uses: peter-evans/create-issue-from-file@v4
        with:
          title: Monthly issue metrics report
          token: ${{ secrets.GITHUB_TOKEN }}
          content-filepath: ./issue_metrics.md

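For context, a sketch of the date arithmetic this (now-removed) workflow performed, written in Python instead of GNU `date`; the printed range matches what the shell steps stored in `$GITHUB_ENV` as `last_month`:

```python
# Sketch of the workflow's date math: first and last day of the previous month.
from datetime import date, timedelta

last_of_prev = date.today().replace(day=1) - timedelta(days=1)  # last day of previous month
first_of_prev = last_of_prev.replace(day=1)                     # first day of previous month
print(f"{first_of_prev:%Y-%m-%d}..{last_of_prev:%Y-%m-%d}")     # e.g. 2024-01-01..2024-01-31
```
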
.github/workflows/ci.yml (vendored, 6 changed lines)

@@ -107,7 +107,7 @@ jobs:
ansible-galaxy collection install -r molecule/requirements.yml
sudo rm -f $(which kustomize)
make kustomize
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
env:
AWX_TEST_IMAGE: awx
AWX_TEST_VERSION: ci

@@ -127,10 +127,6 @@ jobs:

- name: Run sanity tests
run: make test_collection_sanity
env:
# needed due to cgroupsv2. This is fixed, but a stable release
# with the fix has not been made yet.
ANSIBLE_TEST_PREFER_PODMAN: 1

collection-integration:
name: awx_collection integration

.github/workflows/devel_images.yml (vendored, 1 changed line)

@@ -3,6 +3,7 @@ name: Build/Push Development Images
env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
on:
workflow_dispatch:
push:
branches:
- devel

.github/workflows/promote.yml (vendored, 20 changed lines)

@@ -83,11 +83,15 @@ jobs:

- name: Re-tag and promote awx image
run: |
docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
docker push quay.io/${{ github.repository }}:latest
docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
docker buildx imagetools create \
  ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \
  --tag quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
docker buildx imagetools create \
  ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \
  --tag quay.io/${{ github.repository }}:latest

- name: Re-tag and promote awx-ee image
run: |
docker buildx imagetools create \
  ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} \
  --tag quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}

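The switch from pull/tag/push to `docker buildx imagetools create` matters for multi-arch releases: pull/tag/push re-publishes only the single-architecture image the runner happened to pull, while `imagetools create` copies the full manifest list registry-to-registry without pulling anything. A hedged sketch of the same promotion step driven from Python (tag names illustrative, not from the workflow):

```python
# Hypothetical helper mirroring the workflow step: copy a (possibly
# multi-arch) manifest from one registry tag to another.
import subprocess

def promote(src: str, dest: str) -> None:
    subprocess.run(["docker", "buildx", "imagetools", "create", src, "--tag", dest], check=True)

promote("ghcr.io/ansible/awx:24.0.0", "quay.io/ansible/awx:latest")  # illustrative tags
```
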
.github/workflows/stage.yml (vendored, 6 changed lines)

@@ -102,9 +102,9 @@ jobs:

- name: tag awx-ee:latest with version input
run: |
docker pull quay.io/ansible/awx-ee:latest
docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
docker buildx imagetools create \
  quay.io/ansible/awx-ee:latest \
  --tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}

- name: Stage awx-operator image
working-directory: awx-operator

.gitignore (vendored, 5 changed lines)

@@ -46,6 +46,11 @@ tools/docker-compose/overrides/
tools/docker-compose-minikube/_sources
tools/docker-compose/keycloak.awx.realm.json

!tools/docker-compose/editable_dependencies
tools/docker-compose/editable_dependencies/*
!tools/docker-compose/editable_dependencies/README.md
!tools/docker-compose/editable_dependencies/install.sh

# Tower setup playbook testing
setup/test/roles/postgresql
**/provision_docker

.vscode/launch.json (vendored, new file, 113 lines)

@@ -0,0 +1,113 @@
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "run_ws_heartbeat",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_ws_heartbeat"],
            "django": true,
            "preLaunchTask": "stop awx-ws-heartbeat",
            "postDebugTask": "start awx-ws-heartbeat"
        },
        {
            "name": "run_cache_clear",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_cache_clear"],
            "django": true,
            "preLaunchTask": "stop awx-cache-clear",
            "postDebugTask": "start awx-cache-clear"
        },
        {
            "name": "run_callback_receiver",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_callback_receiver"],
            "django": true,
            "preLaunchTask": "stop awx-receiver",
            "postDebugTask": "start awx-receiver"
        },
        {
            "name": "run_dispatcher",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_dispatcher"],
            "django": true,
            "preLaunchTask": "stop awx-dispatcher",
            "postDebugTask": "start awx-dispatcher"
        },
        {
            "name": "run_rsyslog_configurer",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_rsyslog_configurer"],
            "django": true,
            "preLaunchTask": "stop awx-rsyslog-configurer",
            "postDebugTask": "start awx-rsyslog-configurer"
        },
        {
            "name": "run_cache_clear",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_cache_clear"],
            "django": true,
            "preLaunchTask": "stop awx-cache-clear",
            "postDebugTask": "start awx-cache-clear"
        },
        {
            "name": "run_wsrelay",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_wsrelay"],
            "django": true,
            "preLaunchTask": "stop awx-wsrelay",
            "postDebugTask": "start awx-wsrelay"
        },
        {
            "name": "daphne",
            "type": "debugpy",
            "request": "launch",
            "program": "/var/lib/awx/venv/awx/bin/daphne",
            "args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
            "django": true,
            "preLaunchTask": "stop awx-daphne",
            "postDebugTask": "start awx-daphne"
        },
        {
            "name": "runserver(uwsgi alternative)",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["runserver", "127.0.0.1:8052"],
            "django": true,
            "preLaunchTask": "stop awx-uwsgi",
            "postDebugTask": "start awx-uwsgi"
        },
        {
            "name": "runserver_plus(uwsgi alternative)",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["runserver_plus", "127.0.0.1:8052"],
            "django": true,
            "preLaunchTask": "stop awx-uwsgi and install Werkzeug",
            "postDebugTask": "start awx-uwsgi"
        },
        {
            "name": "shell_plus",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["shell_plus"],
            "django": true,
        },
    ]
}

.vscode/tasks.json (vendored, new file, 100 lines)

@@ -0,0 +1,100 @@
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "start awx-cache-clear",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-cache-clear"
        },
        {
            "label": "stop awx-cache-clear",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-cache-clear"
        },
        {
            "label": "start awx-daphne",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-daphne"
        },
        {
            "label": "stop awx-daphne",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-daphne"
        },
        {
            "label": "start awx-dispatcher",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-dispatcher"
        },
        {
            "label": "stop awx-dispatcher",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-dispatcher"
        },
        {
            "label": "start awx-receiver",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-receiver"
        },
        {
            "label": "stop awx-receiver",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-receiver"
        },
        {
            "label": "start awx-rsyslog-configurer",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
        },
        {
            "label": "stop awx-rsyslog-configurer",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
        },
        {
            "label": "start awx-rsyslogd",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-rsyslogd"
        },
        {
            "label": "stop awx-rsyslogd",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-rsyslogd"
        },
        {
            "label": "start awx-uwsgi",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-uwsgi"
        },
        {
            "label": "stop awx-uwsgi",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-uwsgi"
        },
        {
            "label": "stop awx-uwsgi and install Werkzeug",
            "type": "shell",
            "command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
        },
        {
            "label": "start awx-ws-heartbeat",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-ws-heartbeat"
        },
        {
            "label": "stop awx-ws-heartbeat",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
        },
        {
            "label": "start awx-wsrelay",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-wsrelay"
        },
        {
            "label": "stop awx-wsrelay",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-wsrelay"
        }
    ]
}

Makefile (26 changed lines)

@@ -1,6 +1,6 @@
-include awx/ui_next/Makefile

PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
SHELL := bash
DOCKER_COMPOSE ?= docker-compose
OFFICIAL ?= no

@@ -47,6 +47,8 @@ VAULT ?= false
VAULT_TLS ?= false
# If set to true docker-compose will also start a tacacs+ instance
TACACS ?= false
# If set to true docker-compose will install editable dependencies
EDITABLE_DEPENDENCIES ?= false

VENV_BASE ?= /var/lib/awx/venv

@@ -63,7 +65,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
# These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==8.0.4 wheel==0.38.4
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0

NAME ?= awx

@@ -216,8 +218,6 @@ collectstatic:
	fi; \
	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1

DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*

uwsgi: collectstatic
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \

@@ -225,7 +225,7 @@ uwsgi: collectstatic
	uwsgi /etc/tower/uwsgi.ini

awx-autoreload:
	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx

daphne:
	@if [ "$(VENV_BASE)" ]; then \

@@ -305,7 +305,7 @@ swagger: reports
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi; \
	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)

check: black

@@ -535,6 +535,7 @@ docker-compose-sources: .git/hooks/pre-commit
		-e enable_vault=$(VAULT) \
		-e vault_tls=$(VAULT_TLS) \
		-e enable_tacacs=$(TACACS) \
		-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
		$(EXTRA_SOURCES_ANSIBLE_OPTS)

docker-compose: awx/projects docker-compose-sources

@@ -542,9 +543,15 @@ docker-compose: awx/projects docker-compose-sources
	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
		-e enable_vault=$(VAULT) \
		-e vault_tls=$(VAULT_TLS) \
		-e enable_ldap=$(LDAP);
		-e enable_ldap=$(LDAP); \
	$(MAKE) docker-compose-up

docker-compose-up:
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans

docker-compose-down:
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans

docker-compose-credential-plugins: awx/projects docker-compose-sources
	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans

@@ -609,7 +616,7 @@ docker-clean:
	-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)

docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
	docker volume rm -f tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
	docker volume rm -f tools_var_lib_awx tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)

docker-refresh: docker-clean docker-compose

@@ -631,9 +638,6 @@ clean-elk:
	docker rm tools_elasticsearch_1
	docker rm tools_kibana_1

psql-container:
	docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'

VERSION:
	@echo "awx: $(VERSION)"

@@ -154,10 +154,12 @@ def manage():
    from django.conf import settings
    from django.core.management import execute_from_command_line

    # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
    # enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1
    # In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
    # The return of connection.pg_version is something like 12013
    if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
        if (connection.pg_version // 10000) < 12:
            sys.stderr.write("Postgres version 12 is required\n")
            sys.stderr.write("At a minimum, postgres version 12 is required\n")
            sys.exit(1)

    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover

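For context on the check above: Django exposes the server version as `connection.pg_version`, an integer of the form major * 10000 + minor (Postgres 12.13 is 120013; the in-diff comment's "12013" appears to drop a digit), which is why integer division by 10000 recovers the major version. A minimal sketch:

```python
# Sketch: recovering the Postgres major version from Django's pg_version
# integer encoding (major * 10000 + minor).
def pg_major(pg_version: int) -> int:
    return pg_version // 10000

assert pg_major(120013) == 12   # Postgres 12.13
assert pg_major(150004) == 15   # Postgres 15.4
assert pg_major(110010) < 12    # 11.x would fail the minimum-version check
```
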
@@ -93,6 +93,7 @@ register(
    default='',
    label=_('Login redirect override URL'),
    help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
    warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
    category=_('Authentication'),
    category_slug='authentication',
)

@@ -36,11 +36,13 @@ class Metadata(metadata.SimpleMetadata):
        field_info = OrderedDict()
        field_info['type'] = self.label_lookup[field]
        field_info['required'] = getattr(field, 'required', False)
        field_info['hidden'] = getattr(field, 'hidden', False)

        text_attrs = [
            'read_only',
            'label',
            'help_text',
            'warning_text',
            'min_length',
            'max_length',
            'min_value',

@@ -191,6 +191,7 @@ SUMMARIZABLE_FK_FIELDS = {
    'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
    'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
    'credential_type': DEFAULT_SUMMARY_FIELDS,
    'resource': ('ansible_id', 'resource_type'),
}

@@ -24,6 +24,10 @@ def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **e
    else:
        url = _reverse(viewname, args, kwargs, request, format, **extra)

    if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
        if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
            url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")

    return url

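A small illustration of the URL rewriting added above, assuming `OPTIONAL_API_URLPATTERN_PREFIX` is set to a value like `"awx"`: only requests already under the prefixed path get their generated URLs rewritten to match.

```python
# Illustrative only; the real code consults Django settings and the request object.
prefix = "awx"  # assumed value of settings.OPTIONAL_API_URLPATTERN_PREFIX
request_path = f"/api/{prefix}/v2/jobs/"
url = "/api/v2/jobs/1/"

if request_path.startswith(f"/api/{prefix}"):
    url = url.replace('/api', f"/api/{prefix}")
print(url)  # /api/awx/v2/jobs/1/
```
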
@@ -13,6 +13,7 @@ from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie
from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _
from django.urls import reverse as django_reverse

from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response

@@ -130,6 +131,7 @@ class ApiVersionRootView(APIView):
        data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
        data['bulk'] = reverse('api:bulk', request=request)
        data['analytics'] = reverse('api:analytics_root_view', request=request)
        data['service_index'] = django_reverse('service-index-root')
        return Response(data)

@@ -55,6 +55,7 @@ register(
    # Optional; category_slug will be slugified version of category if not
    # explicitly provided.
    category_slug='cows',
    hidden=True,
)

@@ -127,6 +127,8 @@ class SettingsRegistry(object):
        encrypted = bool(field_kwargs.pop('encrypted', False))
        defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
        unit = field_kwargs.pop('unit', None)
        hidden = field_kwargs.pop('hidden', False)
        warning_text = field_kwargs.pop('warning_text', None)
        if getattr(field_kwargs.get('child', None), 'source', None) is not None:
            field_kwargs['child'].source = None
        field_instance = field_class(**field_kwargs)

@@ -134,12 +136,14 @@ class SettingsRegistry(object):
        field_instance.category = category
        field_instance.depends_on = depends_on
        field_instance.unit = unit
        field_instance.hidden = hidden
        if placeholder is not empty:
            field_instance.placeholder = placeholder
        field_instance.defined_in_file = defined_in_file
        if field_instance.defined_in_file:
            field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
        field_instance.encrypted = encrypted
        field_instance.warning_text = warning_text
        original_field_instance = field_instance
        if field_class != original_field_class:
            original_field_instance = original_field_class(**field_kwargs)

@@ -1,6 +1,7 @@
# Python
import contextlib
import logging
import psycopg
import threading
import time
import os

@@ -13,7 +14,7 @@ from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
from django.db import transaction, connection
from django.db.utils import Error as DBError, ProgrammingError
from django.db.utils import DatabaseError, ProgrammingError
from django.utils.functional import cached_property

# Django REST Framework

@@ -80,18 +81,26 @@ def _ctit_db_wrapper(trans_safe=False):
            logger.debug('Obtaining database settings in spite of broken transaction.')
            transaction.set_rollback(False)
        yield
    except DBError as exc:
    except ProgrammingError as e:
        # Exception raised for programming errors
        # Examples may be table not found or already exists,
        # this generally means we can't fetch Tower configuration
        # because the database hasn't actually finished migrating yet;
        # this is usually a sign that a service in a container (such as ws_broadcast)
        # has come up *before* the database has finished migrating, and
        # especially that the conf.settings table doesn't exist yet
        # syntax error in the SQL statement, wrong number of parameters specified, etc.
        if trans_safe:
            level = logger.warning
            if isinstance(exc, ProgrammingError):
                if 'relation' in str(exc) and 'does not exist' in str(exc):
                    # this generally means we can't fetch Tower configuration
                    # because the database hasn't actually finished migrating yet;
                    # this is usually a sign that a service in a container (such as ws_broadcast)
                    # has come up *before* the database has finished migrating, and
                    # especially that the conf.settings table doesn't exist yet
                    level = logger.debug
            level(f'Database settings are not available, using defaults. error: {str(exc)}')
            logger.debug(f'Database settings are not available, using defaults. error: {str(e)}')
        else:
            logger.exception('Error modifying something related to database settings.')
    except DatabaseError as e:
        if trans_safe:
            cause = e.__cause__
            if cause and hasattr(cause, 'sqlstate'):
                sqlstate = cause.sqlstate
                sqlstate_str = psycopg.errors.lookup(sqlstate)
                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
        else:
            logger.exception('Error modifying something related to database settings.')
    finally:

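The new `DatabaseError` branch relies on psycopg 3 attaching the SQLSTATE code to the driver-level exception, which Django chains as `__cause__`. A minimal sketch of that extraction, assuming a psycopg 3 backend (note that `psycopg.errors.lookup()` returns the matching exception class):

```python
# Minimal sketch of the sqlstate extraction used above.
import psycopg
from django.db.utils import DatabaseError

def describe(e: DatabaseError) -> str:
    cause = e.__cause__  # the underlying psycopg error, when there is one
    if cause is not None and getattr(cause, 'sqlstate', None):
        return '{} - {}'.format(cause.sqlstate, psycopg.errors.lookup(cause.sqlstate))
    return 'no SQLSTATE available'
```
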
@@ -639,7 +639,10 @@ class UserAccess(BaseAccess):
    """

    model = User
    prefetch_related = ('profile',)
    prefetch_related = (
        'profile',
        'resource',
    )

    def filtered_queryset(self):
        if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):

@@ -835,6 +838,7 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
    prefetch_related = (
        'created_by',
        'modified_by',
        'resource',  # dab_resource_registry
    )
    # organization admin_role is not a parent of organization auditor_role
    notification_attach_roles = ['admin_role', 'auditor_role']

@@ -1303,6 +1307,7 @@ class TeamAccess(BaseAccess):
        'created_by',
        'modified_by',
        'organization',
        'resource',  # dab_resource_registry
    )

    def filtered_queryset(self):

@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
    resolved_action,
    resolved_role,
    -- '-' operator listed here:
    -- https://www.postgresql.org/docs/12/functions-json.html
    -- https://www.postgresql.org/docs/15/functions-json.html
    -- note that operator is only supported by jsonb objects
    -- https://www.postgresql.org/docs/current/datatype-json.html
    (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,

@@ -92,6 +92,7 @@ register(
    ),
    category=_('System'),
    category_slug='system',
    required=False,
)

register(

@@ -774,6 +775,7 @@ register(
    allow_null=True,
    category=_('System'),
    category_slug='system',
    required=False,
)
register(
    'AUTOMATION_ANALYTICS_LAST_ENTRIES',

@@ -815,6 +817,7 @@ register(
    help_text=_('Max jobs to allow bulk jobs to launch'),
    category=_('Bulk Actions'),
    category_slug='bulk',
    hidden=True,
)

register(

@@ -825,6 +828,7 @@ register(
    help_text=_('Max number of hosts to allow to be created in a single bulk action'),
    category=_('Bulk Actions'),
    category_slug='bulk',
    hidden=True,
)

register(

@@ -835,6 +839,7 @@ register(
    help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
    category=_('Bulk Actions'),
    category_slug='bulk',
    hidden=True,
)

register(

@@ -845,6 +850,7 @@ register(
    help_text=_('Enable preview of new user interface.'),
    category=_('System'),
    category_slug='system',
    hidden=True,
)

register(

@@ -14,7 +14,7 @@ __all__ = [
    'STANDARD_INVENTORY_UPDATE_ENV',
]

CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
PRIVILEGE_ESCALATION_METHODS = [
    ('sudo', _('Sudo')),
    ('su', _('Su')),

@@ -1,9 +1,10 @@
from azure.keyvault.secrets import SecretClient
from azure.identity import ClientSecretCredential
from msrestazure import azure_cloud

from .plugin import CredentialPlugin

from django.utils.translation import gettext_lazy as _
from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
from azure.common.credentials import ServicePrincipalCredentials
from msrestazure import azure_cloud


# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py

@@ -54,22 +55,9 @@ azure_keyvault_inputs = {


def azure_keyvault_backend(**kwargs):
    url = kwargs['url']
    [cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]

    def auth_callback(server, resource, scope):
        credentials = ServicePrincipalCredentials(
            url=url,
            client_id=kwargs['client'],
            secret=kwargs['secret'],
            tenant=kwargs['tenant'],
            resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
        )
        token = credentials.token
        return token['token_type'], token['access_token']

    kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
    return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
    csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret'])
    kv = SecretClient(credential=csc, vault_url=kwargs['url'])
    return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value


azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)

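For reference, the replacement SDK path in isolation, with placeholder credentials: `azure-identity` plus `azure-keyvault-secrets` supersede the retired `KeyVaultClient`/`ServicePrincipalCredentials` flow, and no auth callback is needed because the credential object handles token acquisition itself.

```python
# Standalone sketch of the new SDK usage (placeholder credentials and vault URL).
from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient

credential = ClientSecretCredential(tenant_id="<tenant>", client_id="<client>", client_secret="<secret>")
client = SecretClient(vault_url="https://example-vault.vault.azure.net", credential=credential)
print(client.get_secret(name="my-secret").value)
```
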
@@ -259,6 +259,12 @@ class AWXConsumerPG(AWXConsumerBase):
                    current_downtime = time.time() - self.pg_down_time
                    if current_downtime > self.pg_max_wait:
                        logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
                        # Sending QUIT to multiprocess queue to signal workers to exit
                        for worker in self.pool.workers:
                            try:
                                worker.quit()
                            except Exception:
                                logger.exception(f"Error sending QUIT to worker {worker}")
                        raise
                # Wait for a second before next attempt, but still listen for any shutdown signals
                for i in range(10):

@@ -270,6 +276,12 @@ class AWXConsumerPG(AWXConsumerBase):
            except Exception:
                # Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
                logger.exception('Encountered unhandled error in dispatcher main loop')
                # Sending QUIT to multiprocess queue to signal workers to exit
                for worker in self.pool.workers:
                    try:
                        worker.quit()
                    except Exception:
                        logger.exception(f"Error sending QUIT to worker {worker}")
                raise

awx/main/management/commands/dump_auth_config.py (new file, 179 lines)

@@ -0,0 +1,179 @@
import json
import os
import sys
import re

from typing import Any
from django.core.management.base import BaseCommand
from django.conf import settings
from awx.conf import settings_registry


class Command(BaseCommand):
    help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'

    DAB_SAML_AUTHENTICATOR_KEYS = {
        "SP_ENTITY_ID": True,
        "SP_PUBLIC_CERT": True,
        "SP_PRIVATE_KEY": True,
        "ORG_INFO": True,
        "TECHNICAL_CONTACT": True,
        "SUPPORT_CONTACT": True,
        "SP_EXTRA": False,
        "SECURITY_CONFIG": False,
        "EXTRA_DATA": False,
        "ENABLED_IDPS": True,
        "CALLBACK_URL": False,
    }

    DAB_LDAP_AUTHENTICATOR_KEYS = {
        "SERVER_URI": True,
        "BIND_DN": False,
        "BIND_PASSWORD": False,
        "CONNECTION_OPTIONS": False,
        "GROUP_TYPE": True,
        "GROUP_TYPE_PARAMS": True,
        "GROUP_SEARCH": False,
        "START_TLS": False,
        "USER_DN_TEMPLATE": True,
        "USER_ATTR_MAP": True,
        "USER_SEARCH": False,
    }

    def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
        awx_ldap_settings = {}

        for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
            key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
            value = getattr(settings, awx_ldap_setting, None)
            awx_ldap_settings[key] = value

        grouped_settings = {}

        for key, value in awx_ldap_settings.items():
            match = re.search(r'(\d+)', key)
            index = int(match.group()) if match else 0
            new_key = re.sub(r'\d+_', '', key)

            if index not in grouped_settings:
                grouped_settings[index] = {}

            grouped_settings[index][new_key] = value
            if new_key == "GROUP_TYPE" and value:
                grouped_settings[index][new_key] = type(value).__name__

            if new_key == "SERVER_URI" and value:
                value = value.split(", ")

        return grouped_settings

    def is_enabled(self, settings, keys):
        for key, required in keys.items():
            if required and not settings.get(key):
                return False
        return True

    def get_awx_saml_settings(self) -> dict[str, Any]:
        awx_saml_settings = {}
        for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
            awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)

        return awx_saml_settings

    def format_config_data(self, enabled, awx_settings, type, keys, name):
        config = {
            "type": f"awx.authentication.authenticator_plugins.{type}",
            "name": name,
            "enabled": enabled,
            "create_objects": True,
            "users_unique": False,
            "remove_users": True,
            "configuration": {},
        }
        for k in keys:
            v = awx_settings.get(k)
            config["configuration"].update({k: v})

        if type == "saml":
            idp_to_key_mapping = {
                "url": "IDP_URL",
                "x509cert": "IDP_X509_CERT",
                "entity_id": "IDP_ENTITY_ID",
                "attr_email": "IDP_ATTR_EMAIL",
                "attr_groups": "IDP_GROUPS",
                "attr_username": "IDP_ATTR_USERNAME",
                "attr_last_name": "IDP_ATTR_LAST_NAME",
                "attr_first_name": "IDP_ATTR_FIRST_NAME",
                "attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
            }
            for idp_name in awx_settings.get("ENABLED_IDPS", {}):
                for key in idp_to_key_mapping:
                    value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
                    if value is not None:
                        config["name"] = idp_name
                        config["configuration"].update({idp_to_key_mapping[key]: value})

        return config

    def add_arguments(self, parser):
        parser.add_argument(
            "output_file",
            nargs="?",
            type=str,
            default=None,
            help="Output JSON file path",
        )

    def handle(self, *args, **options):
        try:
            data = []

            # dump SAML settings
            awx_saml_settings = self.get_awx_saml_settings()
            awx_saml_enabled = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
            if awx_saml_enabled:
                awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
                data.append(
                    self.format_config_data(
                        awx_saml_enabled,
                        awx_saml_settings,
                        "saml",
                        self.DAB_SAML_AUTHENTICATOR_KEYS,
                        awx_saml_name,
                    )
                )

            # dump LDAP settings
            awx_ldap_group_settings = self.get_awx_ldap_settings()
            for awx_ldap_name, awx_ldap_settings in enumerate(awx_ldap_group_settings.values()):
                enabled = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
                if enabled:
                    data.append(
                        self.format_config_data(
                            enabled,
                            awx_ldap_settings,
                            "ldap",
                            self.DAB_LDAP_AUTHENTICATOR_KEYS,
                            str(awx_ldap_name),
                        )
                    )

            # write to file if requested
            if options["output_file"]:
                # Define the path for the output JSON file
                output_file = options["output_file"]

                # Ensure the directory exists
                os.makedirs(os.path.dirname(output_file), exist_ok=True)

                # Write data to the JSON file
                with open(output_file, "w") as f:
                    json.dump(data, f, indent=4)

                self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
            else:
                self.stdout.write(json.dumps(data, indent=4))

        except Exception as e:
            self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
            sys.exit(1)

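The LDAP half of this command leans on AWX's numbered-setting convention (`AUTH_LDAP_SERVER_URI`, `AUTH_LDAP_1_SERVER_URI`, ...): the regex grouping folds each numbered family into its own config dict. A self-contained sketch of just that step, with hypothetical sample values:

```python
# Sketch of the grouping performed by get_awx_ldap_settings() above.
import re

flat = {"SERVER_URI": "ldap://a", "1_SERVER_URI": "ldap://b", "1_BIND_DN": "cn=admin"}
grouped: dict[int, dict[str, str]] = {}
for key, value in flat.items():
    match = re.search(r'(\d+)', key)
    index = int(match.group()) if match else 0
    grouped.setdefault(index, {})[re.sub(r'\d+_', '', key)] = value

print(grouped)  # {0: {'SERVER_URI': 'ldap://a'}, 1: {'SERVER_URI': 'ldap://b', 'BIND_DN': 'cn=admin'}}
```
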
@@ -92,8 +92,6 @@ class Command(BaseCommand):
        return host_stats

    def handle(self, *arg, **options):
        WebsocketsMetricsServer().start()

        # it's necessary to delay this import in case
        # database migrations are still running
        from awx.main.models.ha import Instance

@@ -166,8 +164,15 @@ class Command(BaseCommand):

            return

        try:
            websocket_relay_manager = WebSocketRelayManager()
            asyncio.run(websocket_relay_manager.run())
        except KeyboardInterrupt:
            logger.info('Terminating Websocket Relayer')
        WebsocketsMetricsServer().start()
        websocket_relay_manager = WebSocketRelayManager()

        while True:
            try:
                asyncio.run(websocket_relay_manager.run())
            except KeyboardInterrupt:
                logger.info('Shutting down Websocket Relayer')
                break
            except Exception as e:
                logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
                time.sleep(10)

@@ -0,0 +1,59 @@
# Generated by Django 4.2.6 on 2024-02-15 20:51

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0189_inbound_hop_nodes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                ],
                default=None,
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                ],
                default=None,
                max_length=32,
            ),
        ),
    ]

@@ -6,6 +6,8 @@ from django.conf import settings  # noqa
from django.db import connection
from django.db.models.signals import pre_delete  # noqa

# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField
from ansible_base.lib.utils.models import prevent_search

# AWX

@@ -99,6 +101,7 @@ from awx.main.access import get_user_queryset, check_user_access, check_user_acc
User.add_to_class('get_queryset', get_user_queryset)
User.add_to_class('can_access', check_user_access)
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
User.add_to_class('resource', AnsibleResourceField(primary_key_field="id"))


def convert_jsonfields():

@@ -925,6 +925,7 @@ class InventorySourceOptions(BaseModel):
        ('rhv', _('Red Hat Virtualization')),
        ('controller', _('Red Hat Ansible Automation Platform')),
        ('insights', _('Red Hat Insights')),
        ('terraform', _('Terraform State')),
    ]

    # From the options of the Django management base command

@@ -1630,6 +1631,20 @@ class satellite6(PluginFileInjector):
        return ret


class terraform(PluginFileInjector):
    plugin_name = 'terraform_state'
    base_injector = 'managed'
    namespace = 'cloud'
    collection = 'terraform'
    use_fqcn = True

    def inventory_as_dict(self, inventory_update, private_data_dir):
        env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, None)
        ret = super().inventory_as_dict(inventory_update, private_data_dir)
        ret['backend_config_files'] = env["TF_BACKEND_CONFIG_FILE"]
        return ret


class controller(PluginFileInjector):
    plugin_name = 'tower'  # TODO: relying on routing for now, update after EEs pick up revised collection
    base_injector = 'template'

@@ -498,7 +498,7 @@ class JobNotificationMixin(object):
        # Body should have at least 2 CRLF, some clients will interpret
        # the email incorrectly with blank body. So we will check that

        if len(body.strip().splitlines()) <= 2:
        if len(body.strip().splitlines()) < 1:
            # blank body
            body = '\r\n'.join(
                [

@@ -10,6 +10,8 @@ from django.contrib.sessions.models import Session
from django.utils.timezone import now as tz_now
from django.utils.translation import gettext_lazy as _

# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField

# AWX
from awx.api.versioning import reverse

@@ -103,6 +105,7 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
    approval_role = ImplicitRoleField(
        parent_role='admin_role',
    )
    resource = AnsibleResourceField(primary_key_field="id")

    def get_absolute_url(self, request=None):
        return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)

@@ -151,6 +154,7 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
    read_role = ImplicitRoleField(
        parent_role=['organization.auditor_role', 'member_role'],
    )
    resource = AnsibleResourceField(primary_key_field="id")

    def get_absolute_url(self, request=None):
        return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request)

@@ -49,6 +49,70 @@ class ReceptorConnectionType(Enum):
    STREAMTLS = 2


"""
Translate receptorctl messages that come in over stdout into
structured messages. Currently, these are error messages.
"""


class ReceptorErrorBase:
    _MESSAGE = 'Receptor Error'

    def __init__(self, node: str = 'N/A', state_name: str = 'N/A'):
        self.node = node
        self.state_name = state_name

    def __str__(self):
        return f"{self.__class__.__name__} '{self._MESSAGE}' on node '{self.node}' with state '{self.state_name}'"


class WorkUnitError(ReceptorErrorBase):
    _MESSAGE = 'unknown work unit '

    def __init__(self, work_unit_id: str, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.work_unit_id = work_unit_id

    def __str__(self):
        return f"{super().__str__()} work unit id '{self.work_unit_id}'"


class WorkUnitCancelError(WorkUnitError):
    _MESSAGE = 'error cancelling remote unit: unknown work unit '


class WorkUnitResultsError(WorkUnitError):
    _MESSAGE = 'Failed to get results: unknown work unit '


class UnknownError(ReceptorErrorBase):
    _MESSAGE = 'Unknown receptor ctl error'

    def __init__(self, msg, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._MESSAGE = msg


class FuzzyError:
    def __new__(self, e: RuntimeError, node: str, state_name: str):
        """
        At the time of writing this comment all of the sub-classes detection
        is centralized in this parent class. It's like a Router().
        Someone may find it better to push down the error detection logic into
        each sub-class.
        """
        msg = e.args[0]

        common_startswith = (WorkUnitCancelError, WorkUnitResultsError, WorkUnitError)

        for klass in common_startswith:
            if msg.startswith(klass._MESSAGE):
                work_unit_id = msg[len(klass._MESSAGE) :]
                return klass(work_unit_id, node=node, state_name=state_name)

        return UnknownError(msg, node=node, state_name=state_name)


def read_receptor_config():
    # for K8S deployments, getting a lock is necessary as another process
    # may be re-writing the config at this time

@@ -185,6 +249,7 @@ def run_until_complete(node, timing_data=None, **kwargs):
        timing_data['transmit_timing'] = run_start - transmit_start
    run_timing = 0.0
    stdout = ''
    state_name = 'local var never set'

    try:
        resultfile = receptor_ctl.get_work_results(unit_id)

@@ -205,13 +270,33 @@ def run_until_complete(node, timing_data=None, **kwargs):
        stdout = resultfile.read()
        stdout = str(stdout, encoding='utf-8')

    except RuntimeError as e:
        receptor_e = FuzzyError(e, node, state_name)
        if type(receptor_e) in (
            WorkUnitError,
            WorkUnitResultsError,
        ):
            logger.warning(f'While consuming job results: {receptor_e}')
        else:
            raise
    finally:
        if settings.RECEPTOR_RELEASE_WORK:
            res = receptor_ctl.simple_command(f"work release {unit_id}")
            if res != {'released': unit_id}:
                logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
            try:
                res = receptor_ctl.simple_command(f"work release {unit_id}")

                receptor_ctl.close()
                if res != {'released': unit_id}:
                    logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')

        receptor_ctl.close()
            except RuntimeError as e:
                receptor_e = FuzzyError(e, node, state_name)
                if type(receptor_e) in (
                    WorkUnitError,
                    WorkUnitCancelError,
                ):
                    logger.warning(f"While releasing work: {receptor_e}")
                else:
                    logger.error(f"While releasing work: {receptor_e}")

    if state_name.lower() == 'failed':
        work_detail = status.get('Detail', '')

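How the new error router behaves end to end, using the classes defined in this hunk; the message string is illustrative of what receptorctl raises:

```python
# Usage sketch for FuzzyError: a RuntimeError whose message starts with a
# known receptorctl prefix maps to the matching typed error.
err = FuzzyError(RuntimeError("unknown work unit AbCd1234"), node="exec-1", state_name="Failed")
assert isinstance(err, WorkUnitError)
print(err)  # WorkUnitError 'unknown work unit ' on node 'exec-1' with state 'Failed' work unit id 'AbCd1234'
```
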
@@ -275,7 +360,7 @@ def _convert_args_to_cli(vargs):
    args = ['cleanup']
    for option in ('exclude_strings', 'remove_images'):
        if vargs.get(option):
            args.append('--{}={}'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
            args.append('--{}="{}"'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
    for option in ('file_pattern', 'image_prune', 'process_isolation_executable', 'grace_period'):
        if vargs.get(option) is True:
            args.append('--{}'.format(option.replace('_', '-')))

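The one-character change above wraps multi-word values in double quotes so the generated receptor cleanup command survives shell word-splitting. A before/after sketch with hypothetical values:

```python
# Sketch of the CLI quoting change (values hypothetical).
vargs = {'exclude_strings': ['awx_1', 'awx_2']}
old = '--exclude-strings={}'.format(' '.join(vargs['exclude_strings']))
new = '--exclude-strings="{}"'.format(' '.join(vargs['exclude_strings']))
print(old)  # --exclude-strings=awx_1 awx_2     (splits into two shell words)
print(new)  # --exclude-strings="awx_1 awx_2"   (stays one argument)
```
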
@@ -6,6 +6,7 @@ import itertools
import json
import logging
import os
import psycopg
from io import StringIO
from contextlib import redirect_stdout
import shutil

@@ -416,7 +417,7 @@ def handle_removed_image(remove_images=None):

@task(queue=get_task_queuename)
def cleanup_images_and_files():
    _cleanup_images_and_files()
    _cleanup_images_and_files(image_prune=True)


@task(queue=get_task_queuename)

@@ -630,10 +631,18 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
            logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))

        except DatabaseError as e:
            if 'did not affect any rows' in str(e):
                logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
            cause = e.__cause__
            if cause and hasattr(cause, 'sqlstate'):
                sqlstate = cause.sqlstate
                sqlstate_str = psycopg.errors.lookup(sqlstate)
                logger.debug('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))

                if sqlstate == psycopg.errors.NoData:
                    logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
                else:
                    logger.exception("Error marking {} as lost.".format(other_inst.hostname))
            else:
                logger.exception('Error marking {} as lost'.format(other_inst.hostname))
                logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))

    # Run local reaper
    if worker_tasks is not None:

@@ -788,10 +797,19 @@ def update_inventory_computed_fields(inventory_id):
    try:
        i.update_computed_fields()
    except DatabaseError as e:
        if 'did not affect any rows' in str(e):
            logger.debug('Exiting duplicate update_inventory_computed_fields task.')
            return
        raise
        # https://github.com/django/django/blob/eff21d8e7a1cb297aedf1c702668b590a1b618f3/django/db/models/base.py#L1105
        # django raises DatabaseError("Forced update did not affect any rows.")

        # if sqlstate is set then there was a database error and otherwise will re-raise that error
        cause = e.__cause__
        if cause and hasattr(cause, 'sqlstate'):
            sqlstate = cause.sqlstate
            sqlstate_str = psycopg.errors.lookup(sqlstate)
            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
            raise

        # otherwise
        logger.debug('Exiting duplicate update_inventory_computed_fields task.')


def update_smart_memberships_for_inventory(smart_inventory):

@@ -3,5 +3,5 @@
  hosts: all
  tasks:
    - name: Hello Message
      debug:
      ansible.builtin.debug:
        msg: "Hello World!"

awx/main/tests/data/inventory/plugins/terraform/env.json (new file, 3 lines)

@@ -0,0 +1,3 @@
{
  "TF_BACKEND_CONFIG_FILE": "{{ file_reference }}"
}

@@ -1,13 +1,8 @@
from awx.main.tests.functional.conftest import *  # noqa
import os
import pytest


def pytest_addoption(parser):
    parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")


def pytest_generate_tests(metafunc):
    # This is called for every test. Only get/set command line arguments
    # if the argument is specified in the list of test "fixturenames".
    option_value = metafunc.config.option.release
    if 'release' in metafunc.fixturenames and option_value is not None:
        metafunc.parametrize("release", [option_value])
@pytest.fixture()
def release():
    return os.environ.get('VERSION_TARGET', '')

@@ -3,15 +3,19 @@ import pytest
from unittest import mock
import urllib.parse
from unittest.mock import PropertyMock
import importlib

# Django
from django.urls import resolve
from django.http import Http404
from django.apps import apps
from django.core.handlers.exception import response_for_exception
from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder
from django.db.backends.sqlite3.base import SQLiteCursorWrapper

from django.db.models.signals import post_migrate

# AWX
from awx.main.models.projects import Project
from awx.main.models.ha import Instance

@@ -41,10 +45,19 @@ from awx.main.models.workflow import WorkflowJobTemplate
from awx.main.models.ad_hoc_commands import AdHocCommand
from awx.main.models.oauth import OAuth2Application as Application
from awx.main.models.execution_environments import ExecutionEnvironment
from awx.main.utils import is_testing

__SWAGGER_REQUESTS__ = {}


# HACK: the dab_resource_registry app required ServiceID in migrations which checks do not run
dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial')


if is_testing():
    post_migrate.connect(lambda **kwargs: dab_rr_initial.create_service_id(apps, None))


@pytest.fixture(scope="session")
def swagger_autogen(requests=__SWAGGER_REQUESTS__):
    return requests

@@ -0,0 +1,39 @@
import pytest

from ansible_base.resource_registry.models import Resource

from awx.api.versioning import reverse


def assert_has_resource(list_response, obj=None):
    data = list_response.data
    assert 'resource' in data['results'][0]['summary_fields']
    resource_data = data['results'][0]['summary_fields']['resource']
    assert resource_data['ansible_id']
    resource = Resource.objects.filter(ansible_id=resource_data['ansible_id']).first()
    assert resource
    assert resource.content_object
    if obj:
        objects = [Resource.objects.get(ansible_id=entry['summary_fields']['resource']['ansible_id']).content_object for entry in data['results']]
        assert obj in objects


@pytest.mark.django_db
def test_organization_ansible_id(organization, admin_user, get):
    url = reverse('api:organization_list')
    response = get(url=url, user=admin_user, expect=200)
    assert_has_resource(response, obj=organization)


@pytest.mark.django_db
def test_team_ansible_id(team, admin_user, get):
    url = reverse('api:team_list')
    response = get(url=url, user=admin_user, expect=200)
    assert_has_resource(response, obj=team)


@pytest.mark.django_db
def test_user_ansible_id(rando, admin_user, get):
    url = reverse('api:user_list')
    response = get(url=url, user=admin_user, expect=200)
    assert_has_resource(response, obj=rando)

@@ -193,6 +193,7 @@ class TestInventorySourceInjectors:
            ('satellite6', 'theforeman.foreman.foreman'),
            ('insights', 'redhatinsights.insights.insights'),
            ('controller', 'awx.awx.tower'),
            ('terraform', 'cloud.terraform.terraform_state'),
        ],
    )
    def test_plugin_proper_names(self, source, proper_name):

@@ -107,6 +107,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
        for filename in os.listdir(os.path.join(private_data_dir, subdir)):
            filename_list.append(os.path.join(subdir, filename))
    filename_list = sorted(filename_list, key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0])
    inventory_content = ""
    for filename in filename_list:
        if filename in ('args', 'project'):
            continue  # Ansible runner

@@ -130,6 +131,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
            dir_contents[abs_file_path] = f.read()
        # Declare a reference to inventory plugin file if it exists
        if abs_file_path.endswith('.yml') and 'plugin: ' in dir_contents[abs_file_path]:
            inventory_content = dir_contents[abs_file_path]
            referenced_paths.add(abs_file_path)  # used as inventory file
        elif cache_file_regex.match(abs_file_path):
            file_aliases[abs_file_path] = 'cache_file'

@@ -157,7 +159,11 @@ def read_content(private_data_dir, raw_env, inventory_update):
    content = {}
    for abs_file_path, file_content in dir_contents.items():
        # assert that all files laid down are used
        if abs_file_path not in referenced_paths and abs_file_path not in ignore_files:
        if (
            abs_file_path not in referenced_paths
            and to_container_path(abs_file_path, private_data_dir) not in inventory_content
            and abs_file_path not in ignore_files
        ):
            raise AssertionError(
                "File {} is not referenced. References and files:\n{}\n{}".format(abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4))
            )
@@ -411,14 +411,14 @@ def test_project_delete(delete, organization, admin_user):


@pytest.mark.parametrize(
    'order_by, expected_names, expected_ids',
    'order_by, expected_names',
    [
        ('name', ['alice project', 'bob project', 'shared project'], [1, 2, 3]),
        ('-name', ['shared project', 'bob project', 'alice project'], [3, 2, 1]),
        ('name', ['alice project', 'bob project', 'shared project']),
        ('-name', ['shared project', 'bob project', 'alice project']),
    ],
)
@pytest.mark.django_db
def test_project_list_ordering_by_name(get, order_by, expected_names, expected_ids, organization_factory):
def test_project_list_ordering_by_name(get, order_by, expected_names, organization_factory):
    'ensure sorted order of project list is maintained correctly when the requested order is invalid or not applicable'
    objects = organization_factory(
        'org1',

@@ -426,13 +426,11 @@ def test_project_list_ordering_by_name(get, order_by, expected_names, expected_i
        superusers=['admin'],
    )
    project_names = []
    project_ids = []
    # TODO: ask for an order by here that doesn't apply
    results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
    for x in range(len(results)):
        project_names.append(results[x]['name'])
        project_ids.append(results[x]['id'])
    assert project_names == expected_names and project_ids == expected_ids
    assert project_names == expected_names


@pytest.mark.parametrize('order_by', ('name', '-name'))

@@ -450,7 +448,8 @@ def test_project_list_ordering_with_duplicate_names(get, order_by, organization_
    for x in range(3):
        results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
        project_ids[x] = [proj['id'] for proj in results]
    assert project_ids[0] == project_ids[1] == project_ids[2] == [1, 2, 3, 4, 5]
    assert project_ids[0] == project_ids[1] == project_ids[2]
    assert project_ids[0] == sorted(project_ids[0])


@pytest.mark.django_db

@@ -1,11 +1,6 @@
# Python
from unittest import mock
import uuid

# patch python-ldap
with mock.patch('__main__.__builtins__.dir', return_value=[]):
    import ldap  # NOQA

# Load development settings for base variables.
from awx.settings.development import *  # NOQA
122 awx/main/tests/unit/commands/test_dump_auth_config.py Normal file

@@ -0,0 +1,122 @@
from io import StringIO
import json
from django.core.management import call_command
from django.test import TestCase, override_settings


settings_dict = {
    "SOCIAL_AUTH_SAML_SP_ENTITY_ID": "SP_ENTITY_ID",
    "SOCIAL_AUTH_SAML_SP_PUBLIC_CERT": "SP_PUBLIC_CERT",
    "SOCIAL_AUTH_SAML_SP_PRIVATE_KEY": "SP_PRIVATE_KEY",
    "SOCIAL_AUTH_SAML_ORG_INFO": "ORG_INFO",
    "SOCIAL_AUTH_SAML_TECHNICAL_CONTACT": "TECHNICAL_CONTACT",
    "SOCIAL_AUTH_SAML_SUPPORT_CONTACT": "SUPPORT_CONTACT",
    "SOCIAL_AUTH_SAML_SP_EXTRA": "SP_EXTRA",
    "SOCIAL_AUTH_SAML_SECURITY_CONFIG": "SECURITY_CONFIG",
    "SOCIAL_AUTH_SAML_EXTRA_DATA": "EXTRA_DATA",
    "SOCIAL_AUTH_SAML_ENABLED_IDPS": {
        "Keycloak": {
            "attr_last_name": "last_name",
            "attr_groups": "groups",
            "attr_email": "email",
            "attr_user_permanent_id": "name_id",
            "attr_username": "username",
            "entity_id": "https://example.com/auth/realms/awx",
            "url": "https://example.com/auth/realms/awx/protocol/saml",
            "x509cert": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
            "attr_first_name": "first_name",
        }
    },
    "SOCIAL_AUTH_SAML_CALLBACK_URL": "CALLBACK_URL",
    "AUTH_LDAP_1_SERVER_URI": "SERVER_URI",
    "AUTH_LDAP_1_BIND_DN": "BIND_DN",
    "AUTH_LDAP_1_BIND_PASSWORD": "BIND_PASSWORD",
    "AUTH_LDAP_1_GROUP_SEARCH": ["GROUP_SEARCH"],
    "AUTH_LDAP_1_GROUP_TYPE": "string object",
    "AUTH_LDAP_1_GROUP_TYPE_PARAMS": {"member_attr": "member", "name_attr": "cn"},
    "AUTH_LDAP_1_USER_DN_TEMPLATE": "USER_DN_TEMPLATE",
    "AUTH_LDAP_1_USER_SEARCH": ["USER_SEARCH"],
    "AUTH_LDAP_1_USER_ATTR_MAP": {
        "email": "email",
        "last_name": "last_name",
        "first_name": "first_name",
    },
    "AUTH_LDAP_1_CONNECTION_OPTIONS": {},
    "AUTH_LDAP_1_START_TLS": None,
}


@override_settings(**settings_dict)
class TestDumpAuthConfigCommand(TestCase):
    def setUp(self):
        super().setUp()
        self.expected_config = [
            {
                "type": "awx.authentication.authenticator_plugins.saml",
                "name": "Keycloak",
                "enabled": True,
                "create_objects": True,
                "users_unique": False,
                "remove_users": True,
                "configuration": {
                    "SP_ENTITY_ID": "SP_ENTITY_ID",
                    "SP_PUBLIC_CERT": "SP_PUBLIC_CERT",
                    "SP_PRIVATE_KEY": "SP_PRIVATE_KEY",
                    "ORG_INFO": "ORG_INFO",
                    "TECHNICAL_CONTACT": "TECHNICAL_CONTACT",
                    "SUPPORT_CONTACT": "SUPPORT_CONTACT",
                    "SP_EXTRA": "SP_EXTRA",
                    "SECURITY_CONFIG": "SECURITY_CONFIG",
                    "EXTRA_DATA": "EXTRA_DATA",
                    "ENABLED_IDPS": {
                        "Keycloak": {
                            "attr_last_name": "last_name",
                            "attr_groups": "groups",
                            "attr_email": "email",
                            "attr_user_permanent_id": "name_id",
                            "attr_username": "username",
                            "entity_id": "https://example.com/auth/realms/awx",
                            "url": "https://example.com/auth/realms/awx/protocol/saml",
                            "x509cert": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
                            "attr_first_name": "first_name",
                        }
                    },
                    "CALLBACK_URL": "CALLBACK_URL",
                    "IDP_URL": "https://example.com/auth/realms/awx/protocol/saml",
                    "IDP_X509_CERT": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
                    "IDP_ENTITY_ID": "https://example.com/auth/realms/awx",
                    "IDP_ATTR_EMAIL": "email",
                    "IDP_GROUPS": "groups",
                    "IDP_ATTR_USERNAME": "username",
                    "IDP_ATTR_LAST_NAME": "last_name",
                    "IDP_ATTR_FIRST_NAME": "first_name",
                    "IDP_ATTR_USER_PERMANENT_ID": "name_id",
                },
            },
            {
                "type": "awx.authentication.authenticator_plugins.ldap",
                "name": "1",
                "enabled": True,
                "create_objects": True,
                "users_unique": False,
                "remove_users": True,
                "configuration": {
                    "SERVER_URI": "SERVER_URI",
                    "BIND_DN": "BIND_DN",
                    "BIND_PASSWORD": "BIND_PASSWORD",
                    "CONNECTION_OPTIONS": {},
                    "GROUP_TYPE": "str",
                    "GROUP_TYPE_PARAMS": {"member_attr": "member", "name_attr": "cn"},
                    "GROUP_SEARCH": ["GROUP_SEARCH"],
                    "START_TLS": None,
                    "USER_DN_TEMPLATE": "USER_DN_TEMPLATE",
                    "USER_ATTR_MAP": {"email": "email", "last_name": "last_name", "first_name": "first_name"},
                    "USER_SEARCH": ["USER_SEARCH"],
                },
            },
        ]

    def test_json_returned_from_cmd(self):
        output = StringIO()
        call_command("dump_auth_config", stdout=output)
        assert json.loads(output.getvalue()) == self.expected_config
64 awx/main/tests/unit/tasks/test_system.py Normal file

@@ -0,0 +1,64 @@
import pytest
from unittest.mock import MagicMock, patch
from awx.main.tasks.system import update_inventory_computed_fields
from awx.main.models import Inventory
from django.db import DatabaseError


@pytest.fixture
def mock_logger():
    with patch("awx.main.tasks.system.logger") as logger:
        yield logger


@pytest.fixture
def mock_inventory():
    return MagicMock(spec=Inventory)


def test_update_inventory_computed_fields_existing_inventory(mock_logger, mock_inventory):
    # Mocking the Inventory.objects.filter method to return a non-empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = True
        mock_filter.return_value.__getitem__.return_value = mock_inventory

        # Mocking the update_computed_fields method
        with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
            update_inventory_computed_fields(1)

            # Assertions
            mock_filter.assert_called_once_with(id=1)
            mock_update_computed_fields.assert_called_once()

    # You can add more assertions based on your specific requirements


def test_update_inventory_computed_fields_missing_inventory(mock_logger):
    # Mocking the Inventory.objects.filter method to return an empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = False

        update_inventory_computed_fields(1)

        # Assertions
        mock_filter.assert_called_once_with(id=1)
        mock_logger.error.assert_called_once_with("Update Inventory Computed Fields failed due to missing inventory: 1")


def test_update_inventory_computed_fields_database_error_nosqlstate(mock_logger, mock_inventory):
    # Mocking the Inventory.objects.filter method to return a non-empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = True
        mock_filter.return_value.__getitem__.return_value = mock_inventory

        # Mocking the update_computed_fields method
        with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
            # Simulating the update_computed_fields method to explicitly raise a DatabaseError
            mock_update_computed_fields.side_effect = DatabaseError("Some error")

            update_inventory_computed_fields(1)

            # Assertions
            mock_filter.assert_called_once_with(id=1)
            mock_update_computed_fields.assert_called_once()
            mock_inventory.update_computed_fields.assert_called_once()
@@ -121,6 +121,10 @@ def test_get_model_for_valid_type(model_type, model_class):
    assert common.get_model_for_type(model_type) == model_class


def test_is_testing():
    assert common.is_testing() is True


@pytest.mark.parametrize("model_type,model_class", [(name, cls) for cls, name in TEST_MODELS])
def test_get_capacity_type(model_type, model_class):
    if model_type in ('job', 'ad_hoc_command', 'inventory_update', 'job_template'):

@@ -3,7 +3,7 @@ from awx.main.tasks.receptor import _convert_args_to_cli

def test_file_cleanup_scenario():
    args = _convert_args_to_cli({'exclude_strings': ['awx_423_', 'awx_582_'], 'file_pattern': '/tmp/awx_*_*'})
    assert ' '.join(args) == 'cleanup --exclude-strings=awx_423_ awx_582_ --file-pattern=/tmp/awx_*_*'
    assert ' '.join(args) == 'cleanup --exclude-strings="awx_423_ awx_582_" --file-pattern=/tmp/awx_*_*'


def test_image_cleanup_scenario():

@@ -17,5 +17,5 @@ def test_image_cleanup_scenario():
        }
    )
    assert (
        ' '.join(args) == 'cleanup --remove-images=quay.invalid/foo/bar:latest quay.invalid/foo/bar:devel --image-prune --process-isolation-executable=podman'
        ' '.join(args) == 'cleanup --remove-images="quay.invalid/foo/bar:latest quay.invalid/foo/bar:devel" --image-prune --process-isolation-executable=podman'
    )
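The updated assertions above encode a quoting rule: multi-value flags now get one pair of double quotes around the whole value list, so the receptor work command sees them as a single argument. A simplified illustration of that rule only (this is not the actual `_convert_args_to_cli` implementation, which this diff does not show in full):

```python
def format_flag(name: str, values: list[str]) -> str:
    """Render a cleanup flag the way the updated assertions expect."""
    joined = " ".join(values)
    # Multiple values are wrapped in one pair of double quotes, so
    # downstream parsing sees a single --flag=value token.
    if len(values) > 1:
        return f'--{name}="{joined}"'
    return f'--{name}={joined}'

# format_flag("exclude-strings", ["awx_423_", "awx_582_"])
#   -> '--exclude-strings="awx_423_ awx_582_"'
```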
@@ -7,6 +7,7 @@ import json
import yaml
import logging
import time
import psycopg
import os
import subprocess
import re

@@ -23,7 +24,7 @@ from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
from django.utils.dateparse import parse_datetime
from django.utils.translation import gettext_lazy as _
from django.utils.functional import cached_property
from django.db import connection, transaction, ProgrammingError, IntegrityError
from django.db import connection, DatabaseError, transaction, ProgrammingError, IntegrityError
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor, ManyToManyDescriptor
from django.db.models.query import QuerySet

@@ -136,7 +137,7 @@ def underscore_to_camelcase(s):
@functools.cache
def is_testing(argv=None):
    '''Return True if running django or py.test unit tests.'''
    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
    if os.environ.get('DJANGO_SETTINGS_MODULE') == 'awx.main.tests.settings_for_test':
        return True
    argv = sys.argv if argv is None else argv
    if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
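The switch from `PYTEST_CURRENT_TEST` to `DJANGO_SETTINGS_MODULE` matters for import-time checks: pytest only sets `PYTEST_CURRENT_TEST` once a test is already running, while the settings module is in the environment before anything imports. A minimal sketch of how the API conftest shown earlier can now gate test-only wiring at import time (assuming AWX's test settings module name, as used in the hunk above):

```python
import os

def looks_like_test_run() -> bool:
    # Set by the test harness before Django loads, so this works even in
    # module-level code that executes during import.
    return os.environ.get('DJANGO_SETTINGS_MODULE') == 'awx.main.tests.settings_for_test'

if looks_like_test_run():
    # e.g. connect test-only post_migrate handlers, as conftest.py does above
    pass
```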
@@ -1155,11 +1156,25 @@ def create_partition(tblname, start=None):
                f'ALTER TABLE {tblname} ATTACH PARTITION {tblname}_{partition_label} '
                f'FOR VALUES FROM (\'{start_timestamp}\') TO (\'{end_timestamp}\');'
            )

    except (ProgrammingError, IntegrityError) as e:
        if 'already exists' in str(e):
            logger.info(f'Caught known error due to partition creation race: {e}')
        else:
            raise
        cause = e.__cause__
        if cause and hasattr(cause, 'sqlstate'):
            sqlstate = cause.sqlstate
            sqlstate_cls = psycopg.errors.lookup(sqlstate)

            if psycopg.errors.DuplicateTable == sqlstate_cls or psycopg.errors.UniqueViolation == sqlstate_cls:
                logger.info(f'Caught known error due to partition creation race: {e}')
            else:
                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_cls))
                raise
    except DatabaseError as e:
        cause = e.__cause__
        if cause and hasattr(cause, 'sqlstate'):
            sqlstate = cause.sqlstate
            sqlstate_str = psycopg.errors.lookup(sqlstate)
            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
        raise


def cleanup_new_process(func):
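The rewritten `create_partition` handler leans on psycopg 3's exception chaining: Django wraps the driver error, and the original psycopg exception, which carries the SQLSTATE code, rides along as `__cause__`. A standalone sketch of that classification step, assuming psycopg 3 (the helper name here is illustrative):

```python
import psycopg
from django.db import DatabaseError

def classify_partition_error(exc: DatabaseError) -> str:
    """Map a wrapped Django DB error onto the underlying SQLSTATE class."""
    cause = exc.__cause__  # psycopg error chained by Django, when present
    if cause is not None and hasattr(cause, 'sqlstate'):
        sqlstate_cls = psycopg.errors.lookup(cause.sqlstate)
        if sqlstate_cls in (psycopg.errors.DuplicateTable, psycopg.errors.UniqueViolation):
            # Benign: another process won the partition-creation race.
            return 'partition-race'
        return f'{cause.sqlstate} ({sqlstate_cls.__name__})'
    return 'unknown'
```

Matching on the SQLSTATE class is more robust than the old substring test on `str(e)`, which depended on the server's error message wording.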
@@ -302,20 +302,36 @@ class WebSocketRelayManager(object):
        self.stats_mgr.start()

        # Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
        database_conf = settings.DATABASES['default']
        async_conn = await psycopg.AsyncConnection.connect(
            dbname=database_conf['NAME'],
            host=database_conf['HOST'],
            user=database_conf['USER'],
            password=database_conf['PASSWORD'],
            port=database_conf['PORT'],
            **database_conf.get("OPTIONS", {}),
        )
        await async_conn.set_autocommit(True)
        event_loop.create_task(self.on_ws_heartbeat(async_conn))
        database_conf = settings.DATABASES['default'].copy()
        database_conf['OPTIONS'] = database_conf.get('OPTIONS', {}).copy()

        for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
            database_conf[k] = v
        for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
            database_conf['OPTIONS'][k] = v

        task = None

        # Establishes a websocket connection to /websocket/relay on all API servers
        while True:
            if not task or task.done():
                try:
                    async_conn = await psycopg.AsyncConnection.connect(
                        dbname=database_conf['NAME'],
                        host=database_conf['HOST'],
                        user=database_conf['USER'],
                        password=database_conf['PASSWORD'],
                        port=database_conf['PORT'],
                        **database_conf.get("OPTIONS", {}),
                    )
                    await async_conn.set_autocommit(True)

                    task = event_loop.create_task(self.on_ws_heartbeat(async_conn), name="on_ws_heartbeat")
                    logger.info("Creating `on_ws_heartbeat` task in event loop.")

                except Exception as e:
                    logger.warning(f"Failed to connect to database for pg_notify: {e}")

            future_remote_hosts = self.known_hosts.keys()
            current_remote_hosts = self.relay_connections.keys()
            deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)
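`on_ws_heartbeat` itself is not part of this hunk, but the autocommit connection it receives is what makes LISTEN/NOTIFY delivery immediate. A minimal psycopg 3 consumer of the same shape, kept as a hedged sketch (the channel name is illustrative, not taken from this diff):

```python
import psycopg

async def listen_for_notifications(conninfo: str, channel: str) -> None:
    # Autocommit is required so LISTEN takes effect outside a transaction
    # and notifications are delivered as they arrive.
    conn = await psycopg.AsyncConnection.connect(conninfo, autocommit=True)
    await conn.execute(f"LISTEN {channel};")
    # notifies() is an async generator yielding Notify objects from the server.
    async for note in conn.notifies():
        print(note.channel, note.payload)
```

Recreating the task inside the `while True` loop, as the diff does, also means a dropped database connection gets retried instead of silently killing the heartbeat consumer.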
@@ -221,8 +221,10 @@
  vars:
    req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
    req_candidates:
      - "{{ project_path | quote }}/roles/requirements.yml"
      - "{{ project_path | quote }}/roles/requirements.yaml"
    files:
      - "{{ project_path | quote }}/roles/requirements.yml"
      - "{{ project_path | quote }}/roles/requirements.yaml"
    skip: True
  changed_when: "'was installed successfully' in galaxy_result.stdout"
  when:
    - roles_enabled | bool

@@ -237,10 +239,10 @@
  vars:
    req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
    req_candidates:
      - "{{ project_path | quote }}/collections/requirements.yml"
      - "{{ project_path | quote }}/collections/requirements.yaml"
      - "{{ project_path | quote }}/requirements.yml"
      - "{{ project_path | quote }}/requirements.yaml"
    files:
      - "{{ project_path | quote }}/collections/requirements.yml"
      - "{{ project_path | quote }}/collections/requirements.yaml"
    skip: True
  changed_when: "'Nothing to do.' not in galaxy_collection_result.stdout"
  when:
    - "ansible_version.full is version_compare('2.9', '>=')"

@@ -249,6 +251,7 @@
  tags:
    - install_collections

# requirements.yml in project root can be either "old" (roles only) or "new" (collections+roles) format
- name: Fetch galaxy roles and collections from requirements.(yml/yaml)
  ansible.builtin.command:
    cmd: "ansible-galaxy install -r {{ req_file }} {{ verbosity }}"

@@ -256,8 +259,10 @@
  vars:
    req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
    req_candidates:
      - "{{ project_path | quote }}/requirements.yaml"
      - "{{ project_path | quote }}/requirements.yml"
    files:
      - "{{ project_path | quote }}/requirements.yaml"
      - "{{ project_path | quote }}/requirements.yml"
    skip: True
  changed_when: "'Nothing to do.' not in galaxy_combined_result.stdout"
  when:
    - "ansible_version.full is version_compare('2.10', '>=')"
22 awx/resource_api.py Normal file

@@ -0,0 +1,22 @@
from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

from awx.main import models


class APIConfig(ServiceAPIConfig):
    service_type = "awx"


RESOURCE_LIST = (
    ResourceConfig(
        models.Organization,
        shared_resource=SharedResource(serializer=OrganizationType, is_provider=False),
    ),
    ResourceConfig(models.User, shared_resource=SharedResource(serializer=UserType, is_provider=False), name_field="username"),
    ResourceConfig(
        models.Team,
        shared_resource=SharedResource(serializer=TeamType, is_provider=False),
        parent_resources=[ParentResource(model=models.Organization, field_name="organization")],
    ),
)
@@ -353,8 +353,11 @@ INSTALLED_APPS = [
    'awx.sso',
    'solo',
    'ansible_base.rest_filters',
    'ansible_base.jwt_consumer',
    'ansible_base.resource_registry',
]


INTERNAL_IPS = ('127.0.0.1',)

MAX_PAGE_SIZE = 200

@@ -362,6 +365,7 @@ REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'awx.api.pagination.Pagination',
    'PAGE_SIZE': 25,
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'ansible_base.jwt_consumer.awx.auth.AwxJWTAuthentication',
        'awx.api.authentication.LoggedOAuth2Authentication',
        'awx.api.authentication.SessionAuthentication',
        'awx.api.authentication.LoggedBasicAuthentication',

@@ -755,6 +759,14 @@ SATELLITE6_INSTANCE_ID_VAR = 'foreman_id,foreman.id'
INSIGHTS_INSTANCE_ID_VAR = 'insights_id'
INSIGHTS_EXCLUDE_EMPTY_GROUPS = False

# ----------------
# -- Terraform State --
# ----------------
# TERRAFORM_ENABLED_VAR =
# TERRAFORM_ENABLED_VALUE =
TERRAFORM_INSTANCE_ID_VAR = 'id'
TERRAFORM_EXCLUDE_EMPTY_GROUPS = True

# ---------------------
# ----- Custom -----
# ---------------------

@@ -1108,8 +1120,17 @@ METRICS_SUBSYSTEM_CONFIG = {
# django-ansible-base
ANSIBLE_BASE_TEAM_MODEL = 'main.Team'
ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization'
ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'

from ansible_base.lib import dynamic_config  # noqa: E402

settings_file = os.path.join(os.path.dirname(dynamic_config.__file__), 'dynamic_settings.py')
include(settings_file)

# Add a postfix to the API URL patterns
# example if set to '' API pattern will be /api
# example if set to 'controller' API pattern will be /api AND /api/controller
OPTIONAL_API_URLPATTERN_PREFIX = ''

# Use AWX base view, to give 401 on unauthenticated requests
ANSIBLE_BASE_CUSTOM_VIEW_PARENT = 'awx.api.generics.APIView'
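The comments describe how `OPTIONAL_API_URLPATTERN_PREFIX` is meant to compose with the base `/api` mount. A rough sketch of that wiring under stated assumptions (the helper name is hypothetical; the real hookup lives in AWX's URL configuration, which this diff only touches in passing):

```python
from django.urls import include, path

def build_api_patterns(api_urls, optional_prefix=''):
    # Always serve /api; when a prefix is configured, serve /api/<prefix> too.
    patterns = [path('api/', include(api_urls))]
    if optional_prefix:
        patterns.append(path(f'api/{optional_prefix}/', include(api_urls)))
    return patterns
```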
@@ -72,6 +72,8 @@ AWX_CALLBACK_PROFILE = True
# Allows user to trigger task managers directly for debugging and profiling purposes.
# Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
AWX_DISABLE_TASK_MANAGERS = False

# Needed for launching runserver in debug mode
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================

# Store a snapshot of default settings at this point before loading any

@@ -39,7 +39,7 @@
{% else %}
<li><a href="{% url 'api:login' %}?next={{ request.get_full_path }}" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="Log in"><span class="glyphicon glyphicon-log-in"></span>Log in</a></li>
{% endif %}
<li><a href="//docs.ansible.com/ansible-tower/{{short_tower_version}}/html/towerapi/index.html" target="_blank" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'API Guide' %}"><span class="glyphicon glyphicon-question-sign"></span><span class="visible-xs-inline">{% trans 'API Guide' %}</span></a></li>
<li><a href="//ansible.readthedocs.io/projects/awx/en/latest/rest_api/index.html" target="_blank" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'API Guide' %}"><span class="glyphicon glyphicon-question-sign"></span><span class="visible-xs-inline">{% trans 'API Guide' %}</span></a></li>
<li><a href="/" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'Back to application' %}"><span class="glyphicon glyphicon-circle-arrow-left"></span><span class="visible-xs-inline">{% trans 'Back to application' %}</span></a></li>
<li class="hidden-xs"><a href="#" class="resize" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'Resize' %}"><span class="glyphicon glyphicon-resize-full"></span></a></li>
</ul>

@@ -59,6 +59,7 @@ register(
    help_text=_('Maximum number of job events for the UI to retrieve within a single request.'),
    category=_('UI'),
    category_slug='ui',
    hidden=True,
)

register(

@@ -68,4 +69,5 @@ register(
    help_text=_('If disabled, the page will not refresh when events are received. Reloading the page will be required to get the latest details.'),
    category=_('UI'),
    category_slug='ui',
    hidden=True,
)
46 awx/ui/package-lock.json generated

@@ -13,7 +13,7 @@
        "@patternfly/react-table": "4.113.0",
        "ace-builds": "^1.10.1",
        "ansi-to-html": "0.7.2",
        "axios": "0.27.2",
        "axios": "^1.6.7",
        "d3": "7.6.1",
        "dagre": "^0.8.4",
        "dompurify": "2.4.0",

@@ -5940,12 +5940,13 @@
      }
    },
    "node_modules/axios": {
      "version": "0.27.2",
      "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
      "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
      "version": "1.6.7",
      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
      "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
      "dependencies": {
        "follow-redirects": "^1.14.9",
        "form-data": "^4.0.0"
        "follow-redirects": "^1.15.4",
        "form-data": "^4.0.0",
        "proxy-from-env": "^1.1.0"
      }
    },
    "node_modules/axios/node_modules/form-data": {

@@ -10387,9 +10388,9 @@
      }
    },
    "node_modules/follow-redirects": {
      "version": "1.15.1",
      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
      "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==",
      "version": "1.15.5",
      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
      "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
      "funding": [
        {
          "type": "individual",

@@ -18349,6 +18350,11 @@
        "node": ">= 0.10"
      }
    },
    "node_modules/proxy-from-env": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
    },
    "node_modules/pseudolocale": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",

@@ -26915,12 +26921,13 @@
      "dev": true
    },
    "axios": {
      "version": "0.27.2",
      "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
      "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
      "version": "1.6.7",
      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
      "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
      "requires": {
        "follow-redirects": "^1.14.9",
        "form-data": "^4.0.0"
        "follow-redirects": "^1.15.4",
        "form-data": "^4.0.0",
        "proxy-from-env": "^1.1.0"
      },
      "dependencies": {
        "form-data": {

@@ -30371,9 +30378,9 @@
      }
    },
    "follow-redirects": {
      "version": "1.15.1",
      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
      "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA=="
      "version": "1.15.5",
      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
      "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw=="
    },
    "fork-ts-checker-webpack-plugin": {
      "version": "6.5.2",

@@ -36325,6 +36332,11 @@
        }
      }
    },
    "proxy-from-env": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
    },
    "pseudolocale": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",

@@ -13,7 +13,7 @@
    "@patternfly/react-table": "4.113.0",
    "ace-builds": "^1.10.1",
    "ansi-to-html": "0.7.2",
    "axios": "0.27.2",
    "axios": "^1.6.7",
    "d3": "7.6.1",
    "dagre": "^0.8.4",
    "dompurify": "2.4.0",
@@ -67,27 +67,18 @@ function getInitialValues(launchConfig, surveyConfig, resource) {
  const values = {};
  if (surveyConfig?.spec) {
    surveyConfig.spec.forEach((question) => {
      if (question.type === 'multiselect') {
      if (resource?.extra_data && resource?.extra_data[question.variable]) {
        values[`survey_${question.variable}`] =
          resource.extra_data[question.variable];
      } else if (question.type === 'multiselect') {
        values[`survey_${question.variable}`] = question.default
          ? question.default.split('\n')
          : [];
      } else {
        values[`survey_${question.variable}`] = question.default ?? '';
      }
      if (resource?.extra_data) {
        Object.entries(resource.extra_data).forEach(([key, value]) => {
          if (key === question.variable) {
            if (question.type === 'multiselect') {
              values[`survey_${question.variable}`] = value;
            } else {
              values[`survey_${question.variable}`] = value;
            }
          }
        });
      }
    });
  }

  return values;
}

@@ -13,6 +13,18 @@ import ScheduleForm from '../shared/ScheduleForm';
import buildRuleSet from '../shared/buildRuleSet';
import { CardBody } from '../../Card';

function generateExtraData(extra_vars, surveyValues, surveyConfiguration) {
  const extraVars = parseVariableField(
    yaml.dump(mergeExtraVars(extra_vars, surveyValues))
  );
  surveyConfiguration.spec.forEach((q) => {
    if (!surveyValues[q.variable]) {
      delete extraVars[q.variable];
    }
  });
  return extraVars;
}

function ScheduleEdit({
  hasDaysToKeepField,
  schedule,

@@ -33,10 +45,12 @@ function ScheduleEdit({
    surveyConfiguration,
    originalInstanceGroups,
    originalLabels,
    scheduleCredentials = []
    scheduleCredentials = [],
    isPromptTouched = false
  ) => {
    const {
      execution_environment,
      extra_vars = null,
      instance_groups,
      inventory,
      credentials = [],

@@ -48,45 +62,54 @@ function ScheduleEdit({
      labels,
      ...submitValues
    } = values;
    let extraVars;

    const surveyValues = getSurveyValues(values);

    if (
      !Object.values(surveyValues).length &&
      surveyConfiguration?.spec?.length
      isPromptTouched &&
      surveyConfiguration?.spec &&
      launchConfiguration?.ask_variables_on_launch
    ) {
      surveyConfiguration.spec.forEach((q) => {
        surveyValues[q.variable] = q.default;
      });
      submitValues.extra_data = generateExtraData(
        extra_vars,
        surveyValues,
        surveyConfiguration
      );
    } else if (
      isPromptTouched &&
      surveyConfiguration?.spec &&
      !launchConfiguration?.ask_variables_on_launch
    ) {
      submitValues.extra_data = generateExtraData(
        schedule.extra_data,
        surveyValues,
        surveyConfiguration
      );
    } else if (
      isPromptTouched &&
      launchConfiguration?.ask_variables_on_launch
    ) {
      submitValues.extra_data = parseVariableField(extra_vars);
    }

    const initialExtraVars =
      launchConfiguration?.ask_variables_on_launch &&
      (values.extra_vars || '---');
    if (surveyConfiguration?.spec) {
      extraVars = yaml.dump(mergeExtraVars(initialExtraVars, surveyValues));
    } else {
      extraVars = yaml.dump(mergeExtraVars(initialExtraVars, {}));
    }
    submitValues.extra_data = extraVars && parseVariableField(extraVars);

    if (
      Object.keys(submitValues.extra_data).length === 0 &&
      Object.keys(schedule.extra_data).length > 0
      isPromptTouched &&
      launchConfiguration?.ask_inventory_on_launch &&
      inventory
    ) {
      submitValues.extra_data = schedule.extra_data;
    }
    delete values.extra_vars;
    if (inventory) {
      submitValues.inventory = inventory.id;
    }

    if (execution_environment) {
    if (
      isPromptTouched &&
      launchConfiguration?.ask_execution_environment_on_launch &&
      execution_environment
    ) {
      submitValues.execution_environment = execution_environment.id;
    }

    try {
      if (launchConfiguration?.ask_labels_on_launch) {
      if (isPromptTouched && launchConfiguration?.ask_labels_on_launch) {
        const { labelIds, error } = createNewLabels(
          values.labels,
          resource.organization

@@ -120,9 +143,16 @@ function ScheduleEdit({
      }
    }

    const cleanedRequestData = Object.keys(requestData)
      .filter((key) => !key.startsWith('survey_'))
      .reduce((acc, key) => {
        acc[key] = requestData[key];
        return acc;
      }, {});

    const {
      data: { id: scheduleId },
    } = await SchedulesAPI.update(schedule.id, requestData);
    } = await SchedulesAPI.update(schedule.id, cleanedRequestData);

    const { added: addedCredentials, removed: removedCredentials } =
      getAddedAndRemoved(
@@ -6,6 +6,7 @@ import {
  InventoriesAPI,
  CredentialsAPI,
  CredentialTypesAPI,
  JobTemplatesAPI,
} from 'api';
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
import ScheduleEdit from './ScheduleEdit';

@@ -125,6 +126,7 @@ describe('<ScheduleEdit />', () => {
      id: 27,
    },
  });

  await act(async () => {
    wrapper = mountWithContexts(
      <ScheduleEdit

@@ -206,7 +208,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run once schedule',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
    });

@@ -233,7 +234,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run every 10 minutes 10 times',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200325T103000 RRULE:INTERVAL=10;FREQ=MINUTELY;COUNT=10',
    });

@@ -262,7 +262,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run every hour until date',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=HOURLY;UNTIL=20200326T144500Z',
    });

@@ -288,7 +287,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run daily',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=DAILY',
    });

@@ -316,7 +314,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run weekly on mon/wed/fri',
      extra_data: {},
      rrule: `DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=WEEKLY;BYDAY=${RRule.MO},${RRule.WE},${RRule.FR}`,
    });
  });

@@ -344,7 +341,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run on the first day of the month',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200401T104500 RRULE:INTERVAL=1;FREQ=MONTHLY;BYMONTHDAY=1',
    });

@@ -376,7 +372,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run monthly on the last Tuesday',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200331T110000 RRULE:INTERVAL=1;FREQ=MONTHLY;BYSETPOS=-1;BYDAY=TU',
    });

@@ -406,7 +401,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Yearly on the first day of March',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200301T000000 RRULE:INTERVAL=1;FREQ=YEARLY;BYMONTH=3;BYMONTHDAY=1',
    });

@@ -437,7 +431,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Yearly on the second Friday in April',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=2;BYDAY=FR;BYMONTH=4',
    });

@@ -468,7 +461,6 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Yearly on the first weekday in October',
      extra_data: {},
      rrule:
        'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=1;BYDAY=MO,TU,WE,TH,FR;BYMONTH=10',
    });

@@ -562,7 +554,6 @@ describe('<ScheduleEdit />', () => {
    wrapper.update();

    expect(SchedulesAPI.update).toBeCalledWith(27, {
      extra_data: {},
      name: 'mock schedule',
      rrule:
        'DTSTART;TZID=America/New_York:20210128T141500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',

@@ -633,15 +624,13 @@ describe('<ScheduleEdit />', () => {
      endDateTime: undefined,
      startDateTime: undefined,
      description: '',
      extra_data: {},
      name: 'foo',
      inventory: 702,
      rrule:
        'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
    });
  });

  test('should submit survey with default values properly, without opening prompt wizard', async () => {
  test('should submit update values properly when prompt is not opened', async () => {
    let scheduleSurveyWrapper;
    await act(async () => {
      scheduleSurveyWrapper = mountWithContexts(

@@ -746,9 +735,195 @@ describe('<ScheduleEdit />', () => {
    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: 'test description',
      name: 'Run once schedule',
      extra_data: { mc: 'first', text: 'text variable' },
      rrule:
        'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
    });
  });
  test('should submit update values properly when survey values change', async () => {
    JobTemplatesAPI.readSurvey.mockResolvedValue({
      data: {
        spec: [
          {
            question_name: 'text',
            question_description: '',
            required: true,
            type: 'text',
            variable: 'text',
            min: 0,
            max: 1024,
            default: 'text variable',
            choices: '',
            new_question: true,
          },
        ],
      },
    });

    JobTemplatesAPI.readLaunch.mockResolvedValue({
      data: {
        can_start_without_user_input: false,
        passwords_needed_to_start: [],
        ask_scm_branch_on_launch: false,
        ask_variables_on_launch: false,
        ask_tags_on_launch: false,
        ask_diff_mode_on_launch: false,
        ask_skip_tags_on_launch: false,
        ask_job_type_on_launch: false,
        ask_limit_on_launch: false,
        ask_verbosity_on_launch: false,
        ask_inventory_on_launch: true,
        ask_credential_on_launch: true,
        survey_enabled: true,
        variables_needed_to_start: [],
        credential_needed_to_start: true,
        inventory_needed_to_start: true,
        job_template_data: {
          name: 'Demo Job Template',
          id: 7,
          description: '',
        },
        defaults: {
          extra_vars: '---',
          diff_mode: false,
          limit: '',
          job_tags: '',
          skip_tags: '',
          job_type: 'run',
          verbosity: 0,
          inventory: {
            name: null,
            id: null,
          },
          scm_branch: '',
          credentials: [],
        },
      },
    });

    let scheduleSurveyWrapper;
    await act(async () => {
      scheduleSurveyWrapper = mountWithContexts(
        <ScheduleEdit
          schedule={mockSchedule}
          resource={{
            id: 700,
            type: 'job_template',
            iventory: 1,
            summary_fields: {
              credentials: [
                { name: 'job template credential', id: 75, kind: 'ssh' },
              ],
            },
            name: 'Foo Job Template',
            description: '',
          }}
          resourceDefaultCredentials={[]}
          launchConfig={{
            can_start_without_user_input: false,
            passwords_needed_to_start: [],
            ask_scm_branch_on_launch: false,
            ask_variables_on_launch: false,
            ask_tags_on_launch: false,
            ask_diff_mode_on_launch: false,
            ask_skip_tags_on_launch: false,
            ask_job_type_on_launch: false,
            ask_limit_on_launch: false,
            ask_verbosity_on_launch: false,
            ask_inventory_on_launch: true,
            ask_credential_on_launch: true,
            survey_enabled: true,
            variables_needed_to_start: [],
            credential_needed_to_start: true,
            inventory_needed_to_start: true,
            job_template_data: {
              name: 'Demo Job Template',
              id: 7,
              description: '',
            },
            defaults: {
              extra_vars: '---',
              diff_mode: false,
              limit: '',
              job_tags: '',
              skip_tags: '',
              job_type: 'run',
              verbosity: 0,
              inventory: {
                name: null,
                id: null,
              },
              scm_branch: '',
              credentials: [],
            },
          }}
          surveyConfig={{
            spec: [
              {
                question_name: 'text',
                question_description: '',
                required: true,
                type: 'text',
                variable: 'text',
                min: 0,
                max: 1024,
                default: 'text variable',
                choices: '',
                new_question: true,
              },
            ],
          }}
        />
      );
    });
    scheduleSurveyWrapper.update();

    await act(async () =>
      scheduleSurveyWrapper
        .find('Button[aria-label="Prompt"]')
        .prop('onClick')()
    );
    scheduleSurveyWrapper.update();
    expect(scheduleSurveyWrapper.find('WizardNavItem').length).toBe(4);
    await act(async () =>
      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
    );
    scheduleSurveyWrapper.update();
    await act(async () =>
      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
    );
    scheduleSurveyWrapper.update();
    await act(async () =>
      scheduleSurveyWrapper
        .find('input#survey-question-text')
        .simulate('change', {
          target: { value: 'foo', name: 'survey_text' },
        })
    );
    scheduleSurveyWrapper.update();
    await act(async () =>
      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
    );
    scheduleSurveyWrapper.update();
    await act(async () =>
      scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
    );
    scheduleSurveyWrapper.update();

    expect(scheduleSurveyWrapper.find('Wizard').length).toBe(0);

    await act(async () =>
      scheduleSurveyWrapper.find('Button[aria-label="Save"]').prop('onClick')()
    );

    expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
      description: '',
      name: 'mock schedule',
      inventory: 702,
      extra_data: {
        text: 'foo',
      },
      rrule:
        'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
    });
  });
});
@@ -40,6 +40,7 @@ function ScheduleForm({
  resourceDefaultCredentials,
}) {
  const [isWizardOpen, setIsWizardOpen] = useState(false);
  const [isPromptTouched, setIsPromptTouched] = useState(false);
  const [isSaveDisabled, setIsSaveDisabled] = useState(false);
  const originalLabels = useRef([]);
  const originalInstanceGroups = useRef([]);

@@ -492,7 +493,8 @@ function ScheduleForm({
          surveyConfig,
          originalInstanceGroups.current,
          originalLabels.current,
          credentials
          credentials,
          isPromptTouched
        );
      }}
      validate={validate}

@@ -518,6 +520,7 @@ function ScheduleForm({
        onSave={() => {
          setIsWizardOpen(false);
          setIsSaveDisabled(false);
          setIsPromptTouched(true);
        }}
        resourceDefaultCredentials={resourceDefaultCredentials}
        labels={originalLabels.current}

@@ -80,7 +80,7 @@ function Dashboard() {
        <Trans>
          <p>
            <InfoCircleIcon /> A tech preview of the new {brandName} user
            interface can be found <a href="/ui_next/dashboard">here</a>.
            interface can be found <a href="/ui_next">here</a>.
          </p>
        </Trans>
      </Banner>

@@ -191,7 +191,7 @@ function InstancePeerList({ setBreadcrumb }) {
      fetchPeers();
      addToast({
        id: instancesPeerToAssociate,
        title: t`Peers update on ${instance.hostname}. Please be sure to run the install bundle for ${instance.hostname} again in order to see changes take effect.`,
        title: t`Please be sure to run the install bundle for the selected instance(s) again in order to see changes take effect.`,
        variant: AlertVariant.success,
        hasTimeout: true,
      });

@@ -21,6 +21,8 @@ const ansibleDocUrls = {
    'https://docs.ansible.com/ansible/latest/collections/community/vmware/vmware_vm_inventory_inventory.html',
  constructed:
    'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
  terraform:
    'https://github.com/ansible-collections/cloud.terraform/blob/stable-statefile-inventory/plugins/inventory/terraform_state.py',
};

const getInventoryHelpTextStrings = () => ({

@@ -119,10 +121,10 @@ const getInventoryHelpTextStrings = () => ({
        <br />
        {value && (
          <div>
            {t`If you want the Inventory Source to update on
            launch and on project update, click on Update on launch, and also go to`}
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            and also go to `}
            <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
            {t`and click on Update Revision on Launch`}
            {t`and click on Update Revision on Launch.`}
          </div>
        )}
      </>

@@ -138,8 +140,8 @@ const getInventoryHelpTextStrings = () => ({
        <br />
        {value && (
          <div>
            {t`If you want the Inventory Source to update on
            launch and on project update, click on Update on launch, and also go to`}
            {t`If you want the Inventory Source to update on launch , click on Update on Launch,
            and also go to `}
            <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
            {t`and click on Update Revision on Launch`}
          </div>

@@ -23,6 +23,7 @@ import {
  SCMSubForm,
  SatelliteSubForm,
  ControllerSubForm,
  TerraformSubForm,
  VMwareSubForm,
  VirtualizationSubForm,
} from './InventorySourceSubForms';

@@ -214,6 +215,14 @@ const InventorySourceFormFields = ({
          }
        />
      ),
      terraform: (
        <TerraformSubForm
          autoPopulateCredential={
            !source?.id || source?.source !== 'terraform'
          }
          sourceOptions={sourceOptions}
        />
      ),
      vmware: (
        <VMwareSubForm
          autoPopulateCredential={

@@ -38,6 +38,7 @@ describe('<InventorySourceForm />', () => {
        ['openstack', 'OpenStack'],
        ['rhv', 'Red Hat Virtualization'],
        ['controller', 'Red Hat Ansible Automation Platform'],
        ['terraform', 'Terraform State'],
      ],
    },
  },
@@ -0,0 +1,59 @@
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';
import { t } from '@lingui/macro';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import { useConfig } from 'contexts/Config';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
  OptionsField,
  VerbosityField,
  EnabledVarField,
  EnabledValueField,
  HostFilterField,
  SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';

const TerraformSubForm = ({ autoPopulateCredential }) => {
  const helpText = getHelpText();
  const { setFieldValue, setFieldTouched } = useFormikContext();
  const [credentialField, credentialMeta, credentialHelpers] =
    useField('credential');
  const config = useConfig();
  const handleCredentialUpdate = useCallback(
    (value) => {
      setFieldValue('credential', value);
      setFieldTouched('credential', true, false);
    },
    [setFieldValue, setFieldTouched]
  );
  const docsBaseUrl = getDocsBaseUrl(config);

  return (
    <>
      <CredentialLookup
        credentialTypeNamespace="terraform"
        label={t`Credential`}
        helperTextInvalid={credentialMeta.error}
        isValid={!credentialMeta.touched || !credentialMeta.error}
        onBlur={() => credentialHelpers.setTouched()}
        onChange={handleCredentialUpdate}
        value={credentialField.value}
        required
        autoPopulate={autoPopulateCredential}
        validate={required(t`Select a value for this field`)}
      />
      <VerbosityField />
      <HostFilterField />
      <EnabledVarField />
      <EnabledValueField />
      <OptionsField />
      <SourceVarsField
        popoverContent={helpText.sourceVars(docsBaseUrl, 'terraform')}
      />
    </>
  );
};

export default TerraformSubForm;

@@ -0,0 +1,70 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import TerraformSubForm from './TerraformSubForm';

jest.mock('../../../../api');

const initialValues = {
  credential: null,
  overwrite: false,
  overwrite_vars: false,
  source_path: '',
  source_project: null,
  source_script: null,
  source_vars: '---\n',
  update_cache_timeout: 0,
  update_on_launch: true,
  verbosity: 1,
};

const mockSourceOptions = {
  actions: {
    POST: {},
  },
};

describe('<TerraformSubForm />', () => {
  let wrapper;

  beforeEach(async () => {
    CredentialsAPI.read.mockResolvedValue({
      data: { count: 0, results: [] },
    });
    await act(async () => {
      wrapper = mountWithContexts(
        <Formik initialValues={initialValues}>
          <TerraformSubForm sourceOptions={mockSourceOptions} />
        </Formik>
      );
    });
  });

  afterAll(() => {
    jest.clearAllMocks();
  });

  test('should render subform fields', () => {
    expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
    expect(
      wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
    ).toHaveLength(1);
    expect(
      wrapper.find('VariablesField[label="Source variables"]')
    ).toHaveLength(1);
  });

  test('should make expected api calls', () => {
    expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
    expect(CredentialsAPI.read).toHaveBeenCalledWith({
      credential_type__namespace: 'terraform',
      order_by: 'name',
      page: 1,
      page_size: 5,
    });
  });
});
@@ -6,5 +6,6 @@ export { default as OpenStackSubForm } from './OpenStackSubForm';
export { default as SCMSubForm } from './SCMSubForm';
export { default as SatelliteSubForm } from './SatelliteSubForm';
export { default as ControllerSubForm } from './ControllerSubForm';
export { default as TerraformSubForm } from './TerraformSubForm';
export { default as VMwareSubForm } from './VMwareSubForm';
export { default as VirtualizationSubForm } from './VirtualizationSubForm';

@@ -30,7 +30,7 @@ function SubscriptionUsage() {
        <Trans>
          <p>
            <InfoCircleIcon /> A tech preview of the new {brandName} user
            interface can be found <a href="/ui_next/dashboard">here</a>.
            interface can be found <a href="/ui_next">here</a>.
          </p>
        </Trans>
      </Banner>

@@ -1,7 +1,7 @@
export default function getSurveyValues(values) {
  const surveyValues = {};
  Object.keys(values).forEach((key) => {
    if (key.startsWith('survey_') && values[key] !== []) {
    if (key.startsWith('survey_')) {
      if (Array.isArray(values[key]) && values[key].length === 0) {
        return;
      }

@@ -1,7 +1,12 @@
import yaml from 'js-yaml';

export default function mergeExtraVars(extraVars = '', survey = {}) {
  const vars = yaml.load(extraVars) || {};
  let vars = {};
  if (typeof extraVars === 'string') {
    vars = yaml.load(extraVars);
  } else if (typeof extraVars === 'object') {
    vars = extraVars;
  }
  return {
    ...vars,
    ...survey,

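Taken together, the two utility fixes above change how survey answers reach the launch payload: the old `values[key] !== []` guard in getSurveyValues was always true (an `[]` literal creates a fresh object, so the comparison never matches anything), and mergeExtraVars previously assumed its first argument was always a YAML string. A minimal Python sketch of the corrected semantics, for illustration only (the function names mirror the JavaScript utilities; everything else is hypothetical):

import yaml

def get_survey_values(values):
    """Collect survey_* answers, dropping empty multiple-choice answers."""
    survey = {}
    for key, value in values.items():
        if not key.startswith('survey_'):
            continue
        if isinstance(value, list) and not value:
            continue  # an empty multi-select answer is omitted entirely
        survey[key] = value
    return survey

def merge_extra_vars(extra_vars='', survey=None):
    """Overlay survey answers on extra_vars given as YAML text or a mapping."""
    if isinstance(extra_vars, str):
        base = yaml.safe_load(extra_vars) or {}
    elif isinstance(extra_vars, dict):
        base = extra_vars
    else:
        base = {}
    return {**base, **(survey or {})}

On key collisions the survey answers win, matching the spread order in the JavaScript original.
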
15
awx/urls.py
@@ -2,7 +2,9 @@
# All Rights Reserved.

from django.conf import settings
from django.urls import re_path, include
from django.urls import path, re_path, include

from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls

from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect

@@ -10,7 +12,16 @@ from awx.main.views import handle_400, handle_403, handle_404, handle_500, handl
urlpatterns = [
    re_path(r'', include('awx.ui.urls', namespace='ui')),
    re_path(r'^ui_next/.*', include('awx.ui_next.urls', namespace='ui_next')),
    re_path(r'^api/', include('awx.api.urls', namespace='api')),
    path('api/', include('awx.api.urls', namespace='api')),
]

if settings.OPTIONAL_API_URLPATTERN_PREFIX:
    urlpatterns += [
        path(f'api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/', include('awx.api.urls')),
    ]

urlpatterns += [
    re_path(r'^api/v2/', include(resource_api_urls)),
    re_path(r'^sso/', include('awx.sso.urls', namespace='sso')),
    re_path(r'^sso/', include('social_django.urls', namespace='social')),
    re_path(r'^(?:api/)?400.html$', handle_400),

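The new OPTIONAL_API_URLPATTERN_PREFIX setting lets a deployment mount the same API url tree a second time under a named prefix, alongside the plain api/ mount. A sketch of the settings side, assuming a hypothetical prefix value (the diff itself does not define one):

# settings override; 'awx' is an assumed example value
OPTIONAL_API_URLPATTERN_PREFIX = 'awx'

# With this set, the urls.py above registers awx.api.urls twice, so the
# same views answer at both /api/v2/... and /api/awx/v2/...
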
@@ -147,8 +147,12 @@ def main():
    if redirect_uris is not None:
        application_fields['redirect_uris'] = ' '.join(redirect_uris)

    # If the state was present and we can let the module build or update the existing application, this will return on its own
    module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application')
    response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False)
    if 'client_id' in response:
        module.json_output['client_id'] = response['client_id']
    if 'client_secret' in response:
        module.json_output['client_secret'] = response['client_secret']
    module.exit_json(**module.json_output)


if __name__ == '__main__':

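The key detail here is auto_exit=False: the helper normally calls exit_json() itself, which gave the module no chance to report the OAuth client credentials back to the caller. Holding the exit lets the module copy client_id and client_secret (the latter is typically only returned in full when the application is first created) into its result. The control flow above, condensed as a sketch:

response = module.create_or_update_if_needed(
    application, application_fields,
    endpoint='applications', item_type='application',
    auto_exit=False,  # keep control of the exit so the result can be enriched
)
# Copy whichever credential keys the API returned before exiting.
for key in ('client_id', 'client_secret'):
    if key in response:
        module.json_output[key] = response[key]
module.exit_json(**module.json_output)
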
@@ -155,4 +155,4 @@ def test_build_notification_message_undefined(run_module, admin_user, organizati
    nt = NotificationTemplate.objects.get(id=result['id'])

    body = job.build_notification_message(nt, 'running')
    assert 'The template rendering return a blank body' in body[1]
    assert '{"started_by": "My Placeholder"}' in body[1]

@@ -1,63 +1,63 @@
---
- name: Generate a random string for test
  set_fact:
  ansible.builtin.set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: Generate names
  set_fact:
  ansible.builtin.set_fact:
    inv_name: "AWX-Collection-tests-ad_hoc_command_cancel-inventory-{{ test_id }}"
    ssh_cred_name: "AWX-Collection-tests-ad_hoc_command_cancel-ssh-cred-{{ test_id }}"
    org_name: "AWX-Collection-tests-ad_hoc_command_cancel-org-{{ test_id }}"

- name: Create a New Organization
  organization:
  awx.awx.organization:
    name: "{{ org_name }}"

- name: Create an Inventory
  inventory:
  awx.awx.inventory:
    name: "{{ inv_name }}"
    organization: "{{ org_name }}"
    state: present

- name: Add localhost to the Inventory
  host:
  awx.awx.host:
    name: localhost
    inventory: "{{ inv_name }}"
    variables:
      ansible_connection: local

- name: Create a Credential
  credential:
  awx.awx.credential:
    name: "{{ ssh_cred_name }}"
    organization: "{{ org_name }}"
    credential_type: 'Machine'
    state: present

- name: Launch an Ad Hoc Command
  ad_hoc_command:
  awx.awx.ad_hoc_command:
    inventory: "{{ inv_name }}"
    credential: "{{ ssh_cred_name }}"
    module_name: "command"
    module_args: "sleep 100"
  register: command

- assert:
- ansible.builtin.assert:
    that:
      - "command is changed"

- name: Cancel the command
  ad_hoc_command_cancel:
  awx.awx.ad_hoc_command_cancel:
    command_id: "{{ command.id }}"
    request_timeout: 60
  register: results

- assert:
- ansible.builtin.assert:
    that:
      - results is changed

- name: "Wait for up to a minute until the job enters the can_cancel: False state"
  debug:
  ansible.builtin.debug:
    msg: "The job can_cancel status has transitioned into False, we can proceed with testing"
  until: not job_status
  retries: 6
@@ -66,51 +66,51 @@
    job_status: "{{ lookup('awx.awx.controller_api', 'ad_hoc_commands/'+ command.id | string +'/cancel')['can_cancel'] }}"

- name: Cancel the command with hard error if it's not running
  ad_hoc_command_cancel:
  awx.awx.ad_hoc_command_cancel:
    command_id: "{{ command.id }}"
    fail_if_not_running: true
  register: results
  ignore_errors: true

- assert:
- ansible.builtin.assert:
    that:
      - results is failed

- name: Cancel an already canceled command (assert failure)
  ad_hoc_command_cancel:
  awx.awx.ad_hoc_command_cancel:
    command_id: "{{ command.id }}"
    fail_if_not_running: true
  register: results
  ignore_errors: true

- assert:
- ansible.builtin.assert:
    that:
      - results is failed

- name: Check module fails with correct msg
  ad_hoc_command_cancel:
  awx.awx.ad_hoc_command_cancel:
    command_id: 9999999999
  register: result
  ignore_errors: true

- assert:
- ansible.builtin.assert:
    that:
      - "result.msg == 'Unable to find command with id 9999999999'"

- name: Delete the Credential
  credential:
  awx.awx.credential:
    name: "{{ ssh_cred_name }}"
    organization: "{{ org_name }}"
    credential_type: 'Machine'
    state: absent

- name: Delete the Inventory
  inventory:
  awx.awx.inventory:
    name: "{{ inv_name }}"
    organization: "{{ org_name }}"
    state: absent

- name: Remove the Organization
  organization:
  awx.awx.organization:
    name: "{{ org_name }}"
    state: absent

@@ -103,6 +103,7 @@
- assert:
    that:
      - "result is changed"
      - "'client_secret' in result"

- name: Rename an inventory
  application:

@@ -1,23 +1,23 @@
---
- name: Generate a test ID
  set_fact:
  ansible.builtin.set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: Generate hostnames
  set_fact:
  ansible.builtin.set_fact:
    hostname1: "AWX-Collection-tests-instance1.{{ test_id }}.example.com"
    hostname2: "AWX-Collection-tests-instance2.{{ test_id }}.example.com"
    hostname3: "AWX-Collection-tests-instance3.{{ test_id }}.example.com"
  register: facts

- name: Get the k8s setting
  set_fact:
  ansible.builtin.set_fact:
    IS_K8S: "{{ controller_settings['IS_K8S'] | default(False) }}"
  vars:
    controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/all') }}"

- debug:
- ansible.builtin.debug:
    msg: "Skipping instance test since this instance is not running on a K8s platform"
  when: not IS_K8S

@@ -32,7 +32,7 @@
    - "{{ hostname2 }}"
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is changed

@@ -44,7 +44,7 @@
    capacity_adjustment: 0.4
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is changed

@@ -54,7 +54,7 @@
    capacity_adjustment: 0.7
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is changed

@@ -78,7 +78,7 @@
    node_state: installed
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is changed

@@ -89,7 +89,7 @@
    node_state: installed
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is changed

@@ -103,7 +103,7 @@
    - "{{ hostname2 }}"
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is changed

@@ -115,7 +115,7 @@
    peers: []
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is changed

@@ -26,7 +26,7 @@
    name: "{{ project_name }}"
    organization: "{{ org_name }}"
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    wait: true

- name: Create a git project with same name, different org

@@ -1,11 +1,11 @@
---
- name: Generate a random string for test
  set_fact:
  ansible.builtin.set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: Generate usernames
  set_fact:
  ansible.builtin.set_fact:
    usernames:
      - "AWX-Collection-tests-api_lookup-user1-{{ test_id }}"
      - "AWX-Collection-tests-api_lookup-user2-{{ test_id }}"
@@ -20,7 +20,7 @@
  register: controller_meta

- name: Generate the name of our plugin
  set_fact:
  ansible.builtin.set_fact:
    plugin_name: "{{ controller_meta.prefix }}.controller_api"

- name: Create all of our users
@@ -38,7 +38,7 @@
  register: results
  ignore_errors: true

- assert:
- ansible.builtin.assert:
    that:
      - "'dne' in (results.msg | lower)"

@@ -49,48 +49,48 @@
  loop: "{{ hosts }}"

- name: Test too many params (failure from validation of terms)
  set_fact:
  ansible.builtin.set_fact:
    junk: "{{ query(plugin_name, 'users', 'teams', query_params={}, ) }}"
  ignore_errors: true
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is failed
      - "'You must pass exactly one endpoint to query' in result.msg"

- name: Try to load invalid endpoint
  set_fact:
  ansible.builtin.set_fact:
    junk: "{{ query(plugin_name, 'john', query_params={}, ) }}"
  ignore_errors: true
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is failed
      - "'The requested object could not be found at' in result.msg"

- name: Load user of a specific name without promoting objects
  set_fact:
  ansible.builtin.set_fact:
    users_list: "{{ lookup(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_objects=False) }}"

- assert:
- ansible.builtin.assert:
    that:
      - users_list['results'] | length() == 1
      - users_list['count'] == 1
      - users_list['results'][0]['id'] == user_creation_results['results'][0]['id']

- name: Load user of a specific name with promoting objects
  set_fact:
  ansible.builtin.set_fact:
    user_objects: "{{ query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_objects=True ) }}"

- assert:
- ansible.builtin.assert:
    that:
      - user_objects | length() == 1
      - users_list['results'][0]['id'] == user_objects[0]['id']

- name: Loop over one user with the loop syntax
  assert:
  ansible.builtin.assert:
    that:
      - item['id'] == user_creation_results['results'][0]['id']
  loop: "{{ query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] } ) }}"
@@ -98,91 +98,91 @@
  label: "{{ item.id }}"

- name: Get a page of users as just ids
  set_fact:
  ansible.builtin.set_fact:
    users: "{{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 2 }, return_ids=True ) }}"

- debug:
    msg: "{{ users }}"

- name: Assert that user list has 2 ids only and that they are strings, not ints
  assert:
- name: assert that user list has 2 ids only and that they are strings, not ints
  ansible.builtin.assert:
    that:
      - users | length() == 2
      - user_creation_results['results'][0]['id'] not in users
      - user_creation_results['results'][0]['id'] | string in users

- name: Get all users of a system through next attribute
  set_fact:
  ansible.builtin.set_fact:
    users: "{{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 1 }, return_all=true ) }}"

- assert:
- ansible.builtin.assert:
    that:
      - users | length() >= 3

- name: Get all of the users created with a max_objects of 1
  set_fact:
  ansible.builtin.set_fact:
    users: "{{ lookup(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 1 }, return_all=true, max_objects=1 ) }}"
  ignore_errors: true
  register: max_user_errors

- assert:
- ansible.builtin.assert:
    that:
      - max_user_errors is failed
      - "'List view at users returned 3 objects, which is more than the maximum allowed by max_objects' in max_user_errors.msg"

- name: Get the ID of the first user created and verify that it is correct
  assert:
  ansible.builtin.assert:
    that: "query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_ids=True)[0] == user_creation_results['results'][0]['id'] | string"

- name: Try to get an ID of someone who does not exist
  set_fact:
  ansible.builtin.set_fact:
    failed_user_id: "{{ query(plugin_name, 'users', query_params={ 'username': 'john jacob jingleheimer schmidt' }, expect_one=True) }}"
  register: result
  ignore_errors: true

- assert:
- ansible.builtin.assert:
    that:
      - result is failed
      - "'Expected one object from endpoint users' in result['msg']"

- name: Lookup too many users
  set_fact:
  ansible.builtin.set_fact:
    too_many_user_ids: " {{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id }, expect_one=True) }}"
  register: results
  ignore_errors: true

- assert:
- ansible.builtin.assert:
    that:
      - results is failed
      - "'Expected one object from endpoint users, but obtained 3' in results['msg']"

- name: Get the ping page
  set_fact:
  ansible.builtin.set_fact:
    ping_data: "{{ lookup(plugin_name, 'ping' ) }}"
  register: results

- assert:
- ansible.builtin.assert:
    that:
      - results is succeeded
      - "'active_node' in ping_data"

- name: "Make sure that expect_objects fails on an API page"
  set_fact:
  ansible.builtin.set_fact:
    my_var: "{{ lookup(plugin_name, 'settings/ui', expect_objects=True) }}"
  ignore_errors: true
  register: results

- assert:
- ansible.builtin.assert:
    that:
      - results is failed
      - "'Did not obtain a list or detail view at settings/ui, and expect_objects or expect_one is set to True' in results.msg"

# DOCS Example Tests
- name: Load the UI settings
  set_fact:
  ansible.builtin.set_fact:
    controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/ui') }}"

- assert:
- ansible.builtin.assert:
    that:
      - "'CUSTOM_LOGO' in controller_settings"

@@ -191,7 +191,7 @@
    msg: "Admin users: {{ query('awx.awx.controller_api', 'users', query_params={ 'is_superuser': true }) | map(attribute='username') | join(', ') }}"
  register: results

- assert:
- ansible.builtin.assert:
    that:
      - "'admin' in results.msg"

@@ -211,7 +211,7 @@
  register: role_revoke
  when: "query('awx.awx.controller_api', 'users', query_params={ 'username': 'DNE_TESTING' }) | length == 1"

- assert:
- ansible.builtin.assert:
    that:
      - role_revoke is skipped

@@ -227,7 +227,7 @@
      ) | map(attribute='name') | list }}
  register: group_creation

- assert:
- ansible.builtin.assert:
    that: group_creation is changed

  always:

@@ -31,7 +31,7 @@
    name: "{{ project_name1 }}"
    organization: Default
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    wait: true
  register: result

@@ -44,7 +44,7 @@
    name: "{{ project_name1 }}"
    organization: Default
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    wait: true
    state: exists
  register: result
@@ -58,7 +58,7 @@
    name: "{{ project_name1 }}"
    organization: Default
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    wait: true
    state: exists
    request_timeout: .001
@@ -75,7 +75,7 @@
    name: "{{ project_name1 }}"
    organization: Default
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    wait: true
    state: absent
  register: result
@@ -89,7 +89,7 @@
    name: "{{ project_name1 }}"
    organization: Default
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    wait: true
    state: exists
  register: result
@@ -103,7 +103,7 @@
    name: "{{ project_name1 }}"
    organization: Default
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    wait: false
  register: result
  ignore_errors: true
@@ -137,7 +137,7 @@
    name: "{{ project_name2 }}"
    organization: "{{ org_name }}"
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    scm_credential: "{{ cred_name }}"
  check_mode: true

@@ -162,7 +162,7 @@
    name: "{{ project_name2 }}"
    organization: Non_Existing_Org
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    scm_credential: "{{ cred_name }}"
  register: result
  ignore_errors: true
@@ -179,7 +179,7 @@
    name: "{{ project_name2 }}"
    organization: "{{ org_name }}"
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    scm_credential: Non_Existing_Credential
  register: result
  ignore_errors: true
@@ -191,7 +191,7 @@
      - "'Non_Existing_Credential' in result.msg"
      - "result.total_results == 0"

- name: Create a git project without credentials without waiting
- name: Create a git project using a branch and allowing branch override
  project:
    name: "{{ project_name3 }}"
    organization: Default

@@ -13,7 +13,7 @@
    name: "{{ project_name1 }}"
    organization: Default
    scm_type: git
    scm_url: https://github.com/ansible/test-playbooks
    scm_url: https://github.com/ansible/ansible-tower-samples
    wait: false
  register: project_create_result

@@ -1,50 +1,50 @@
---
- name: Get our collection package
  controller_meta:
  awx.awx.controller_meta:
  register: controller_meta

- name: Generate the name of our plugin
  set_fact:
  ansible.builtin.set_fact:
    plugin_name: "{{ controller_meta.prefix }}.schedule_rrule"

- name: Test too many params (failure from validation of terms)
  debug:
  ansible.builtin.debug:
    msg: "{{ query(plugin_name | string, 'none', 'weekly', start_date='2020-4-16 03:45:07') }}"
  ignore_errors: true
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is failed
      - "'You may only pass one schedule type in at a time' in result.msg"

- name: Test invalid frequency (failure from validation of term)
  debug:
  ansible.builtin.debug:
    msg: "{{ query(plugin_name, 'john', start_date='2020-4-16 03:45:07') }}"
  ignore_errors: true
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is failed
      - "'Frequency of john is invalid' in result.msg"

- name: Test an invalid start date (generic failure case from get_rrule)
  debug:
  ansible.builtin.debug:
    msg: "{{ query(plugin_name, 'none', start_date='invalid') }}"
  ignore_errors: true
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result is failed
      - "'Parameter start_date must be in the format YYYY-MM-DD' in result.msg"

- name: Test end_on as count (generic success case)
  debug:
  ansible.builtin.debug:
    msg: "{{ query(plugin_name, 'minute', start_date='2020-4-16 03:45:07', end_on='2') }}"
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - result.msg == 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;COUNT=2;INTERVAL=1'

@@ -11,7 +11,7 @@
  register: result

- name: Changing setting to true should have changed the value
  assert:
  ansible.builtin.assert:
    that:
      - "result is changed"

@@ -22,7 +22,7 @@
  register: result

- name: Changing setting to true again should not change the value
  assert:
  ansible.builtin.assert:
    that:
      - "result is not changed"

@@ -33,17 +33,17 @@
  register: result

- name: Changing setting back to false should have changed the value
  assert:
  ansible.builtin.assert:
    that:
      - "result is changed"

- name: Set the value of AWX_ISOLATION_SHOW_PATHS to a baseline
  settings:
  awx.awx.settings:
    name: AWX_ISOLATION_SHOW_PATHS
    value: '["/var/lib/awx/projects/"]'

- name: Set the value of AWX_ISOLATION_SHOW_PATHS to get an error back from the controller
  settings:
  awx.awx.settings:
    settings:
      AWX_ISOLATION_SHOW_PATHS:
        'not': 'a valid'
@@ -51,75 +51,75 @@
  register: result
  ignore_errors: true

- assert:
- ansible.builtin.assert:
    that:
      - "result is failed"

- name: Set the value of AWX_ISOLATION_SHOW_PATHS
  settings:
  awx.awx.settings:
    name: AWX_ISOLATION_SHOW_PATHS
    value: '["/var/lib/awx/projects/", "/tmp"]'
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - "result is changed"

- name: Attempt to set the value of AWX_ISOLATION_BASE_PATH to what it already is
  settings:
  awx.awx.settings:
    name: AWX_ISOLATION_BASE_PATH
    value: /tmp
  register: result

- debug:
- ansible.builtin.debug:
    msg: "{{ result }}"

- assert:
- ansible.builtin.assert:
    that:
      - "result is not changed"

- name: Apply a single setting via settings
  settings:
  awx.awx.settings:
    name: AWX_ISOLATION_SHOW_PATHS
    value: '["/var/lib/awx/projects/", "/var/tmp"]'
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - "result is changed"

- name: Apply multiple setting via settings with no change
  settings:
  awx.awx.settings:
    settings:
      AWX_ISOLATION_BASE_PATH: /tmp
      AWX_ISOLATION_SHOW_PATHS: ["/var/lib/awx/projects/", "/var/tmp"]
  register: result

- debug:
- ansible.builtin.debug:
    msg: "{{ result }}"

- assert:
- ansible.builtin.assert:
    that:
      - "result is not changed"

- name: Apply multiple setting via settings with change
  settings:
  awx.awx.settings:
    settings:
      AWX_ISOLATION_BASE_PATH: /tmp
      AWX_ISOLATION_SHOW_PATHS: []
  register: result

- assert:
- ansible.builtin.assert:
    that:
      - "result is changed"

- name: Handle an omit value
  settings:
  awx.awx.settings:
    name: AWX_ISOLATION_BASE_PATH
    value: '{{ junk_var | default(omit) }}'
  register: result
  ignore_errors: true

- assert:
- ansible.builtin.assert:
    that:
      - "'Unable to update settings' in result.msg"

@@ -10,7 +10,7 @@
    test: ad_hoc_command,host,role
  tasks:
    - name: DEBUG - make sure variables are what we expect
      debug:
      ansible.builtin.debug:
        msg: |
          Running tests at location:
          {{ loc_tests }}
@@ -18,7 +18,7 @@
          {{ test | trim | split(',') }}

    - name: "Include test targets"
      include_tasks: "{{ loc_tests }}{{ test_name }}/tasks/main.yml"
      ansible.builtin.include_tasks: "{{ loc_tests }}{{ test_name }}/tasks/main.yml"
      loop: "{{ test | trim | split(',') }}"
      loop_control:
        loop_var: test_name

@@ -175,9 +175,10 @@ class TestOptions(unittest.TestCase):
        assert '--verbosity {0,1,2,3,4,5}' in out.getvalue()

    def test_actions_with_primary_key(self):
        page = OptionsPage.from_json({'actions': {'GET': {}, 'POST': {}}})
        ResourceOptionsParser(None, page, 'jobs', self.parser)

        for method in ('get', 'modify', 'delete'):
            page = OptionsPage.from_json({'actions': {'GET': {}, 'POST': {}}})
            ResourceOptionsParser(None, page, 'jobs', self.parser)
            assert method in self.parser.choices

        out = StringIO()

@@ -8,7 +8,7 @@ skip_missing_interpreters = true
# skipsdist = true

[testenv]
basepython = python3.9
basepython = python3.11
setenv =
    PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:.
deps =

@@ -6,7 +6,7 @@ The *awx-manage* Utility
.. index::
   single: awx-manage

The ``awx-manage`` utility is used to access detailed internal information of AWX. Commands for ``awx-manage`` should run as the ``awx`` or ``root`` user.
The ``awx-manage`` utility is used to access detailed internal information of AWX. Commands for ``awx-manage`` should run as the ``awx`` user only.

.. warning::
   Running awx-manage commands via playbook is not recommended or supported.

@@ -557,7 +557,7 @@ Terminal Access Controller Access-Control System Plus (TACACS+) is a protocol th

Generic OIDC settings
----------------------
Similar to SAML, OpenID Connect (OIDC) is uses the OAuth 2.0 framework. It allows third-party applications to verify the identity and obtain basic end-user information. The main difference between OIDC and SMAL is that SAML has a service provider (SP)-to-IdP trust relationship, whereas OIDC establishes the trust with the channel (HTTPS) that is used to obtain the security token. To obtain the credentials needed to setup OIDC with AWX, refer to the documentation from the identity provider (IdP) of your choice that has OIDC support.
Similar to SAML, OpenID Connect (OIDC) uses the OAuth 2.0 framework. It allows third-party applications to verify the identity and obtain basic end-user information. The main difference between OIDC and SAML is that SAML has a service provider (SP)-to-IdP trust relationship, whereas OIDC establishes the trust with the channel (HTTPS) that is used to obtain the security token. To obtain the credentials needed to set up OIDC with AWX, refer to the documentation from the identity provider (IdP) of your choice that has OIDC support.

To configure OIDC in AWX:

@@ -7,6 +7,7 @@ Setting up LDAP Authentication
   single: LDAP
   pair: authentication; LDAP

This chapter describes how to integrate LDAP authentication with AWX.

.. note::

[Image diffs: seven documentation screenshots updated and one new screenshot added.]

@@ -146,7 +146,7 @@ If you have a VMware instance that uses a self-signed certificate, then you will

.. code-block:: text

    "source_vars": "---\nvalidate_certs: False",
    "source_vars": ---validate_certs: False

You can set this in the inventory source for VMware vCenter as follows:

@@ -10,14 +10,15 @@ Secret Management System

Users and admins upload machine and cloud credentials so that automation can access machines and external services on their behalf. By default, sensitive credential values (such as SSH passwords, SSH private keys, and API tokens for cloud services) are stored in the database after being encrypted. With external credentials backed by credential plugins, you can map credential fields (like a password or an SSH private key) to values stored in a :term:`secret management system` instead of providing them to AWX directly. AWX provides a secret management system that includes integrations for:

- Centrify Vault Credential Provider Lookup
- CyberArk Central Credential Provider Lookup (CCP)
- CyberArk Conjur Secrets Manager Lookup
- HashiCorp Vault Key-Value Store (KV)
- HashiCorp Vault SSH Secrets Engine
- Microsoft Azure Key Management System (KMS)
- Thycotic DevOps Secrets Vault
- Thycotic Secret Server
- :ref:`ug_credentials_aws_lookup`
- :ref:`ug_credentials_centrify`
- :ref:`ug_credentials_cyberarkccp`
- :ref:`ug_credentials_cyberarkconjur`
- :ref:`ug_credentials_hashivault` (KV)
- :ref:`ug_credentials_hashivaultssh`
- :ref:`ug_credentials_azurekeyvault` (KMS)
- :ref:`ug_credentials_thycoticvault`
- :ref:`ug_credentials_thycoticserver`

These external secret values will be fetched prior to running a playbook that needs them. For more information on specifying these credentials in the User Interface, see :ref:`ug_credentials`.

@@ -49,11 +50,92 @@ Use the AWX User Interface to configure and use each of the supported 3-party se
.. image:: ../common/images/credentials-link-credential-prompt.png
   :alt: Credential section of the external secret management system dialog

4. Select the credential you want to link to, and click **Next**. This takes you to the **Metadata** tab of the input source. This example shows the Metadata prompt for HashiVault Secret Lookup. Metadata is specific to the input source you select. See the :ref:`ug_metadata_creds_inputs` table for details.
4. Select the credential you want to link to, and click **Next**. This takes you to the **Metadata** tab of the input source. Metadata is specific to the input source you select:

.. list-table::
   :widths: 10 10 25
   :width: 1400px
   :header-rows: 1

   * - Input Source
     - Metadata
     - Description
   * - *AWS Secrets Manager*
     - AWS Secrets Manager Region (required)
     - The region where the secrets manager is located.
   * -
     - AWS Secret Name (Required)
     - Specify the AWS secret name that was generated by the AWS access key.
   * - *Centrify Vault Credential Provider Lookup*
     - Account Name (Required)
     - Name of the system account or domain associated with Centrify Vault.
   * -
     - System Name
     - Specify the name used by the Centrify portal.
   * - *CyberArk Central Credential Provider Lookup*
     - Object Query (Required)
     - Lookup query for the object.
   * -
     - Object Query Format
     - Select ``Exact`` for a specific secret name, or ``Regexp`` for a secret that has a dynamically generated name.
   * -
     - Object Property
     - Specifies the name of the property to return (e.g., ``UserName``, ``Address``, etc.) other than the default of ``Content``.
   * -
     - Reason
     - If required per the object's policy, supply a reason for checking out the secret, as CyberArk logs those.
   * - *CyberArk Conjur Secrets Lookup*
     - Secret Identifier
     - The identifier for the secret.
   * -
     - Secret Version
     - Specify a version of the secret, if necessary, otherwise, leave it empty to use the latest version.
   * - *HashiVault Secret Lookup*
     - Name of Secret Backend
     - Specify the name of the KV backend to use. Leave it blank to use the first path segment of the **Path to Secret** field instead.
   * -
     - Path to Secret (required)
     - Specify the path to where the secret information is stored; for example, ``/path/username``.
   * -
     - Key Name (required)
     - Specify the name of the key to look up the secret information.
   * -
     - Secret Version (V2 Only)
     - Specify a version if necessary, otherwise, leave it empty to use the latest version.
   * - *HashiCorp Signed SSH*
     - Unsigned Public Key (required)
     - Specify the public key of the cert you want to get signed. It needs to be present in the authorized keys file of the target host(s).
   * -
     - Path to Secret (required)
     - Specify the path to where the secret information is stored; for example, ``/path/username``.
   * -
     - Role Name (required)
     - A role is a collection of SSH settings and parameters that are stored in Hashi vault. Typically, you can specify a couple of them with different privileges, timeouts, etc. So you could have a role that is allowed to get a cert signed for root, and other less privileged ones, for example.
   * -
     - Valid Principals
     - Specify a user (or users) other than the default, that you are requesting vault to authorize the cert for the stored key. Hashi vault has a default user for whom it signs (e.g., ec2-user).
   * - *Azure KMS*
     - Secret Name (required)
     - The actual name of the secret as it is referenced in Azure's Key vault app.
   * -
     - Secret Version
     - Specify a version of the secret, if necessary, otherwise, leave it empty to use the latest version.
   * - *Thycotic DevOps Secrets Vault*
     - Secret Path (required)
     - Specify the path to where the secret information is stored (e.g., /path/username).
   * - *Thycotic Secret Server*
     - Secret ID (required)
     - The identifier for the secret.
   * -
     - Secret Field
     - Specify the field to be used from the secret.

This example shows the Metadata prompt for HashiVault Secret Lookup.

.. image:: ../common/images/credentials-link-metadata-prompt.png
   :alt: Metadata section of the external secret management system dialog


5. Click **Test** to verify connection to the secret management system. If the lookup is unsuccessful, an error message like this one displays:

.. image:: ../common/images/credentials-link-metadata-test-error.png

@@ -65,133 +147,37 @@ Use the AWX User Interface to configure and use each of the supported 3-party se

8. Click **Save** when done.

.. _ug_metadata_creds_inputs:

Metadata for credential input sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

**Centrify Vault Credential Provider Lookup**

.. list-table::
   :widths: 25 50
   :header-rows: 1

   * - Metadata
     - Description
   * - Account Name (Required)
     - Name of the system account or domain associated with Centrify Vault.
   * - System Name
     - Specify the name used by the Centrify portal.

**CyberArk Central Credential Provider Lookup**

.. list-table::
   :widths: 25 50
   :header-rows: 1

   * - Metadata
     - Description
   * - Object Query (Required)
     - Lookup query for the object.
   * - Object Query Format
     - Select ``Exact`` for a specific secret name, or ``Regexp`` for a secret that has a dynamically generated name.
   * - Object Property
     - Specifies the name of the property to return (e.g., ``UserName``, ``Address``, etc.) other than the default of ``Content``.
   * - Reason
     - If required per the object's policy, supply a reason for checking out the secret, as CyberArk logs those.

.. _ug_credentials_aws_lookup:

AWS Secrets Manager Lookup
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   pair: credential types; AWS

This plugin allows AWS to be used as a credential input source to pull secrets from AWS SecretsManager. `AWS Secrets Manager <https://aws.amazon.com/secrets-manager/>`_ provides a similar service to :ref:`ug_credentials_azurekeyvault`, and the AWS collection provides a lookup plugin for it.

When **AWS Secrets Manager lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **AWS Access Key** (required): provide the access key used for communicating with AWS' key management system
- **AWS Secret Key** (required): provide the secret as obtained by the AWS IAM console

Below shows an example of a configured AWS Secret Manager credential.

.. image:: ../common/images/credentials-create-aws-secret-credential.png
   :width: 1400px
   :alt: Example new AWS Secret Manager credential lookup dialog

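For orientation, the lookup essentially performs an ordinary Secrets Manager API call; a minimal boto3 sketch of the equivalent retrieval, where the region, keys, and secret name are assumed example values (in AWX they come from the credential inputs and metadata described above):

.. code-block:: python

    import boto3

    # Assumed example values; AWX supplies these from the credential's
    # AWS Access Key / AWS Secret Key inputs and per-lookup metadata.
    client = boto3.client(
        'secretsmanager',
        region_name='us-east-1',
        aws_access_key_id='AKIA...',
        aws_secret_access_key='...',
    )
    secret = client.get_secret_value(SecretId='awx/machine/ssh-password')
    print(secret['SecretString'])
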
**CyberArk Conjur Secrets Lookup**

.. list-table::
   :widths: 25 50
   :header-rows: 1

   * - Metadata
     - Description
   * - Secret Identifier
     - The identifier for the secret.
   * - Secret Version
     - Specify a version of the secret, if necessary, otherwise, leave it empty to use the latest version.

**HashiVault Secret Lookup**

.. list-table::
   :widths: 25 50
   :header-rows: 1

   * - Metadata
     - Description
   * - Name of Secret Backend
     - Specify the name of the KV backend to use. Leave it blank to use the first path segment of the **Path to Secret** field instead.
   * - Path to Secret (required)
     - Specify the path to where the secret information is stored; for example, ``/path/username``.
   * - Key Name (required)
     - Specify the name of the key to look up the secret information.
   * - Secret Version (V2 Only)
     - Specify a version if necessary, otherwise, leave it empty to use the latest version.

**HashiCorp Signed SSH**

.. list-table::
   :widths: 25 50
   :header-rows: 1

   * - Metadata
     - Description
   * - Unsigned Public Key (required)
     - Specify the public key of the cert you want to get signed. It needs to be present in the authorized keys file of the target host(s).
   * - Path to Secret (required)
     - Specify the path to where the secret information is stored; for example, ``/path/username``.
   * - Role Name (required)
     - A role is a collection of SSH settings and parameters that are stored in Hashi vault. Typically, you can specify a couple of them with different privileges, timeouts, etc. So you could have a role that is allowed to get a cert signed for root, and other less privileged ones, for example.
   * - Valid Principals
     - Specify a user (or users) other than the default, that you are requesting vault to authorize the cert for the stored key. Hashi vault has a default user for whom it signs (e.g., ec2-user).

**Azure KMS**

.. list-table::
   :widths: 25 50
   :header-rows: 1

   * - Metadata
     - Description
   * - Secret Name (required)
     - The actual name of the secret as it is referenced in Azure's Key vault app.
   * - Secret Version
     - Specify a version of the secret, if necessary, otherwise, leave it empty to use the latest version.

**Thycotic DevOps Secrets Vault**

.. list-table::
   :widths: 25 50
   :header-rows: 1

   * - Metadata
     - Description
   * - Secret Path (required)
     - Specify the path to where the secret information is stored (e.g., /path/username).

**Thycotic Secret Server**

.. list-table::
   :widths: 25 50
   :header-rows: 1

   * - Metadata
     - Description
   * - Secret ID (required)
     - The identifier for the secret.
   * - Secret Field
     - Specify the field to be used from the secret.

.. _ug_credentials_centrify:

Centrify Vault Credential Provider Lookup
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   pair: credential types; Centrify

You need the Centrify Vault web service running to store secrets in order for this integration to work. When **Centrify Vault Credential Provider Lookup** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
You need the Centrify Vault web service running to store secrets in order for this integration to work. When **Centrify Vault Credential Provider Lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **Centrify Tenant URL** (required): provide the URL used for communicating with Centrify's secret management system
- **Centrify API User** (required): provide the username

@@ -208,12 +194,12 @@ Below shows an example of a configured CyberArk AIM credential.
.. _ug_credentials_cyberarkccp:

CyberArk Central Credential Provider (CCP) Lookup
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   single: CyberArk CCP
   pair: credential; CyberArk CCP

You need the CyberArk Central Credential Provider web service running to store secrets in order for this integration to work. When **CyberArk Central Credential Provider Lookup** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
You need the CyberArk Central Credential Provider web service running to store secrets in order for this integration to work. When **CyberArk Central Credential Provider Lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **CyberArk CCP URL** (required): provide the URL used for communicating with CyberArk CCP's secret management system; must include URL scheme (http, https, etc.)
- **Web Service ID**: optionally specify the identifier for the web service; leaving it blank defaults to AIMWebService
@@ -230,14 +216,14 @@ Below shows an example of a configured CyberArk CCP credential.
.. _ug_credentials_cyberarkconjur:

CyberArk Conjur Secrets Manager Lookup
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   single: CyberArk Conjur
   pair: credential; CyberArk Conjur

With a Conjur Cloud tenant available to target, configure the CyberArk Conjur Secrets Lookup external management system credential plugin as documented.

When **CyberArk Conjur Secrets Manager Lookup** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
When **CyberArk Conjur Secrets Manager Lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **Conjur URL** (required): provide the URL used for communicating with CyberArk Conjur's secret management system; must include URL scheme (http, https, etc.)
- **API Key** (required): provide the key given by your Conjur admin
@@ -253,12 +239,12 @@ Below shows an example of a configured CyberArk Conjur credential.
.. _ug_credentials_hashivault:

HashiCorp Vault Secret Lookup
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   single: HashiCorp Secret Lookup
   pair: credential; HashiCorp KV

When **HashiCorp Vault Secret Lookup** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
When **HashiCorp Vault Secret Lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **Server URL** (required): provide the URL used for communicating with HashiCorp Vault's secret management system
- **Token**: specify the access token used to authenticate HashiCorp's server
@@ -291,7 +277,7 @@ Below shows an example of a configured HashiCorp Vault Secret Lookup credential
.. image:: ../common/images/credentials-create-hashicorp-kv-credential.png
   :alt: Example new HashiCorp Vault Secret lookup dialog

To test the lookup, create another credential that uses the HashiCorp Vault lookup. The example below shows the metadata for a machine credential configured to look up HashiCorp Vault secret credentials:
To test the lookup, create another credential that uses the HashiCorp Vault lookup. The example below shows the attributes for a machine credential configured to look up HashiCorp Vault secret credentials:

.. image:: ../common/images/credentials-machine-test-hashicorp-metadata.png
   :alt: Example machine credential lookup metadata for HashiCorp Vault.
@@ -300,12 +286,12 @@ To test the lookup, create another credential that uses the HashiCorp Vault look
.. _ug_credentials_hashivaultssh:

HashiCorp Vault Signed SSH
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   single: HashiCorp SSH Secrets Engine
   pair: credential; HashiCorp SSH Secrets Engine

When **HashiCorp Vault Signed SSH** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
When **HashiCorp Vault Signed SSH** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **Server URL** (required): provide the URL used for communicating with HashiCorp Signed SSH's secret management system
- **Token**: specify the access token used to authenticate HashiCorp's server
@@ -335,13 +321,13 @@ Below shows an example of a configured HashiCorp SSH Secrets Engine credential.
.. _ug_credentials_azurekeyvault:

Microsoft Azure Key Vault
^^^^^^^^^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   single: MS Azure KMS
   pair: credential; MS Azure KMS
   triple: credential; Azure; KMS

When **Microsoft Azure Key Vault** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
When **Microsoft Azure Key Vault** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **Vault URL (DNS Name)** (required): provide the URL used for communicating with MS Azure's key management system
- **Client ID** (required): provide the identifier as obtained by the Azure Active Directory
@@ -357,12 +343,12 @@ Below shows an example of a configured Microsoft Azure KMS credential.
.. _ug_credentials_thycoticvault:

Thycotic DevOps Secrets Vault
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   single: Thycotic DevOps Secrets Vault
   pair: credential; Thycotic DevOps Secrets Vault

When **Thycotic DevOps Secrets Vault** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
When **Thycotic DevOps Secrets Vault** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **Tenant** (required): provide the URL used for communicating with Thycotic's secret management system
- **Top-level Domain (TLD)**: provide the top-level domain designation (e.g., com, edu, org) associated with the secret vault you want to integrate
@@ -379,12 +365,12 @@ Below shows an example of a configured Thycotic DevOps Secrets Vault credential.
.. _ug_credentials_thycoticserver:

Thycotic Secret Server
^^^^^^^^^^^^^^^^^^^^^^^^
~~~~~~~~~~~~~~~~~~~~~~~
.. index::
   single: Thycotic Secret Server
   pair: credential; Thycotic Secret Server

When **Thycotic Secrets Server** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
When **Thycotic Secrets Server** is selected for **Credential Type**, provide the following attributes to properly configure your lookup:

- **Secret Server URL** (required): provide the URL used for communicating with the Thycotic Secrets Server management system
- **Username** (required): specify the authenticated user for this service