Compare commits

..

1 Commits

Author SHA1 Message Date
David O Neill
bfb0d15e6f Github action to show monthly issue metrics 2024-02-21 15:02:44 +00:00
153 changed files with 791 additions and 2506 deletions

View File

@@ -7,14 +7,6 @@ commit message and your description; but you should still explain what
the change does. the change does.
--> -->
##### Depends on
<!---
Please provide links to any other PR dependanices.
Indicating these should be merged first prior to this PR.
-->
- #12345
- https://github.com/xxx/yyy/pulls/1234
##### ISSUE TYPE ##### ISSUE TYPE
<!--- Pick one below and delete the rest: --> <!--- Pick one below and delete the rest: -->
- Breaking Change - Breaking Change

View File

@@ -0,0 +1,40 @@
name: Monthly issue metrics
on:
workflow_dispatch:
schedule:
- cron: '3 2 1 * *'
permissions:
issues: write
pull-requests: read
jobs:
build:
name: issue metrics
runs-on: ubuntu-latest
steps:
- name: Get dates for last month
shell: bash
run: |
# Calculate the first day of the previous month
first_day=$(date -d "last month" +%Y-%m-01)
# Calculate the last day of the previous month
last_day=$(date -d "$first_day +1 month -1 day" +%Y-%m-%d)
#Set an environment variable with the date range
echo "$first_day..$last_day"
echo "last_month=$first_day..$last_day" >> "$GITHUB_ENV"
- name: Run issue-metrics tool
uses: github/issue-metrics@v2
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SEARCH_QUERY: 'repo:ansible/awx is:issue created:${{ env.last_month }} -reason:"not planned"'
- name: Create issue
uses: peter-evans/create-issue-from-file@v4
with:
title: Monthly issue metrics report
token: ${{ secrets.GITHUB_TOKEN }}
content-filepath: ./issue_metrics.md

View File

@@ -107,7 +107,7 @@ jobs:
ansible-galaxy collection install -r molecule/requirements.yml ansible-galaxy collection install -r molecule/requirements.yml
sudo rm -f $(which kustomize) sudo rm -f $(which kustomize)
make kustomize make kustomize
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind
env: env:
AWX_TEST_IMAGE: awx AWX_TEST_IMAGE: awx
AWX_TEST_VERSION: ci AWX_TEST_VERSION: ci
@@ -127,6 +127,10 @@ jobs:
- name: Run sanity tests - name: Run sanity tests
run: make test_collection_sanity run: make test_collection_sanity
env:
# needed due to cgroupsv2. This is fixed, but a stable release
# with the fix has not been made yet.
ANSIBLE_TEST_PREFER_PODMAN: 1
collection-integration: collection-integration:
name: awx_collection integration name: awx_collection integration

View File

@@ -3,7 +3,6 @@ name: Build/Push Development Images
env: env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
on: on:
workflow_dispatch:
push: push:
branches: branches:
- devel - devel

View File

@@ -83,15 +83,11 @@ jobs:
- name: Re-tag and promote awx image - name: Re-tag and promote awx image
run: | run: |
docker buildx imagetools create \ docker pull ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }}
ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \ docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
--tag quay.io/${{ github.repository }}:${{ github.event.release.tag_name }} docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
docker buildx imagetools create \ docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} \ docker push quay.io/${{ github.repository }}:latest
--tag quay.io/${{ github.repository }}:latest docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
- name: Re-tag and promote awx-ee image docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
run: |
docker buildx imagetools create \
ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} \
--tag quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}

View File

@@ -102,9 +102,9 @@ jobs:
- name: tag awx-ee:latest with version input - name: tag awx-ee:latest with version input
run: | run: |
docker buildx imagetools create \ docker pull quay.io/ansible/awx-ee:latest
quay.io/ansible/awx-ee:latest \ docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
--tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }} docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
- name: Stage awx-operator image - name: Stage awx-operator image
working-directory: awx-operator working-directory: awx-operator

5
.gitignore vendored
View File

@@ -46,11 +46,6 @@ tools/docker-compose/overrides/
tools/docker-compose-minikube/_sources tools/docker-compose-minikube/_sources
tools/docker-compose/keycloak.awx.realm.json tools/docker-compose/keycloak.awx.realm.json
!tools/docker-compose/editable_dependencies
tools/docker-compose/editable_dependencies/*
!tools/docker-compose/editable_dependencies/README.md
!tools/docker-compose/editable_dependencies/install.sh
# Tower setup playbook testing # Tower setup playbook testing
setup/test/roles/postgresql setup/test/roles/postgresql
**/provision_docker **/provision_docker

113
.vscode/launch.json vendored
View File

@@ -1,113 +0,0 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "run_ws_heartbeat",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_ws_heartbeat"],
"django": true,
"preLaunchTask": "stop awx-ws-heartbeat",
"postDebugTask": "start awx-ws-heartbeat"
},
{
"name": "run_cache_clear",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_cache_clear"],
"django": true,
"preLaunchTask": "stop awx-cache-clear",
"postDebugTask": "start awx-cache-clear"
},
{
"name": "run_callback_receiver",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_callback_receiver"],
"django": true,
"preLaunchTask": "stop awx-receiver",
"postDebugTask": "start awx-receiver"
},
{
"name": "run_dispatcher",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_dispatcher"],
"django": true,
"preLaunchTask": "stop awx-dispatcher",
"postDebugTask": "start awx-dispatcher"
},
{
"name": "run_rsyslog_configurer",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_rsyslog_configurer"],
"django": true,
"preLaunchTask": "stop awx-rsyslog-configurer",
"postDebugTask": "start awx-rsyslog-configurer"
},
{
"name": "run_cache_clear",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_cache_clear"],
"django": true,
"preLaunchTask": "stop awx-cache-clear",
"postDebugTask": "start awx-cache-clear"
},
{
"name": "run_wsrelay",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["run_wsrelay"],
"django": true,
"preLaunchTask": "stop awx-wsrelay",
"postDebugTask": "start awx-wsrelay"
},
{
"name": "daphne",
"type": "debugpy",
"request": "launch",
"program": "/var/lib/awx/venv/awx/bin/daphne",
"args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
"django": true,
"preLaunchTask": "stop awx-daphne",
"postDebugTask": "start awx-daphne"
},
{
"name": "runserver(uwsgi alternative)",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["runserver", "127.0.0.1:8052"],
"django": true,
"preLaunchTask": "stop awx-uwsgi",
"postDebugTask": "start awx-uwsgi"
},
{
"name": "runserver_plus(uwsgi alternative)",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["runserver_plus", "127.0.0.1:8052"],
"django": true,
"preLaunchTask": "stop awx-uwsgi and install Werkzeug",
"postDebugTask": "start awx-uwsgi"
},
{
"name": "shell_plus",
"type": "debugpy",
"request": "launch",
"program": "manage.py",
"args": ["shell_plus"],
"django": true,
},
]
}

100
.vscode/tasks.json vendored
View File

@@ -1,100 +0,0 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "start awx-cache-clear",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-cache-clear"
},
{
"label": "stop awx-cache-clear",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-cache-clear"
},
{
"label": "start awx-daphne",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-daphne"
},
{
"label": "stop awx-daphne",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-daphne"
},
{
"label": "start awx-dispatcher",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-dispatcher"
},
{
"label": "stop awx-dispatcher",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-dispatcher"
},
{
"label": "start awx-receiver",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-receiver"
},
{
"label": "stop awx-receiver",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-receiver"
},
{
"label": "start awx-rsyslog-configurer",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
},
{
"label": "stop awx-rsyslog-configurer",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
},
{
"label": "start awx-rsyslogd",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-rsyslogd"
},
{
"label": "stop awx-rsyslogd",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-rsyslogd"
},
{
"label": "start awx-uwsgi",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-uwsgi"
},
{
"label": "stop awx-uwsgi",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-uwsgi"
},
{
"label": "stop awx-uwsgi and install Werkzeug",
"type": "shell",
"command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
},
{
"label": "start awx-ws-heartbeat",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-ws-heartbeat"
},
{
"label": "stop awx-ws-heartbeat",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
},
{
"label": "start awx-wsrelay",
"type": "shell",
"command": "supervisorctl start tower-processes:awx-wsrelay"
},
{
"label": "stop awx-wsrelay",
"type": "shell",
"command": "supervisorctl stop tower-processes:awx-wsrelay"
}
]
}

View File

@@ -1,6 +1,6 @@
-include awx/ui_next/Makefile -include awx/ui_next/Makefile
PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q)) PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
SHELL := bash SHELL := bash
DOCKER_COMPOSE ?= docker-compose DOCKER_COMPOSE ?= docker-compose
OFFICIAL ?= no OFFICIAL ?= no
@@ -47,8 +47,6 @@ VAULT ?= false
VAULT_TLS ?= false VAULT_TLS ?= false
# If set to true docker-compose will also start a tacacs+ instance # If set to true docker-compose will also start a tacacs+ instance
TACACS ?= false TACACS ?= false
# If set to true docker-compose will install editable dependencies
EDITABLE_DEPENDENCIES ?= false
VENV_BASE ?= /var/lib/awx/venv VENV_BASE ?= /var/lib/awx/venv
@@ -65,7 +63,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
# These should be upgraded in the AWX and Ansible venv before attempting # These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements # to install the actual requirements
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==69.0.2 setuptools_scm[toml]==8.0.4 wheel==0.42.0 VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==8.0.4 wheel==0.38.4
NAME ?= awx NAME ?= awx
@@ -218,6 +216,8 @@ collectstatic:
fi; \ fi; \
$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1 $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
uwsgi: collectstatic uwsgi: collectstatic
@if [ "$(VENV_BASE)" ]; then \ @if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \ . $(VENV_BASE)/awx/bin/activate; \
@@ -225,7 +225,7 @@ uwsgi: collectstatic
uwsgi /etc/tower/uwsgi.ini uwsgi /etc/tower/uwsgi.ini
awx-autoreload: awx-autoreload:
@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx @/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
daphne: daphne:
@if [ "$(VENV_BASE)" ]; then \ @if [ "$(VENV_BASE)" ]; then \
@@ -305,7 +305,7 @@ swagger: reports
@if [ "$(VENV_BASE)" ]; then \ @if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \ . $(VENV_BASE)/awx/bin/activate; \
fi; \ fi; \
(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report) (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
check: black check: black
@@ -535,7 +535,6 @@ docker-compose-sources: .git/hooks/pre-commit
-e enable_vault=$(VAULT) \ -e enable_vault=$(VAULT) \
-e vault_tls=$(VAULT_TLS) \ -e vault_tls=$(VAULT_TLS) \
-e enable_tacacs=$(TACACS) \ -e enable_tacacs=$(TACACS) \
-e install_editable_dependencies=$(EDITABLE_DEPENDENCIES) \
$(EXTRA_SOURCES_ANSIBLE_OPTS) $(EXTRA_SOURCES_ANSIBLE_OPTS)
docker-compose: awx/projects docker-compose-sources docker-compose: awx/projects docker-compose-sources
@@ -543,15 +542,9 @@ docker-compose: awx/projects docker-compose-sources
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \ ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
-e enable_vault=$(VAULT) \ -e enable_vault=$(VAULT) \
-e vault_tls=$(VAULT_TLS) \ -e vault_tls=$(VAULT_TLS) \
-e enable_ldap=$(LDAP); \ -e enable_ldap=$(LDAP);
$(MAKE) docker-compose-up
docker-compose-up:
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
docker-compose-down:
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) down --remove-orphans
docker-compose-credential-plugins: awx/projects docker-compose-sources docker-compose-credential-plugins: awx/projects docker-compose-sources
echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m" echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans $(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
@@ -616,7 +609,7 @@ docker-clean:
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);) -$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
docker volume rm -f tools_var_lib_awx tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q) docker volume rm -f tools_awx_db tools_vault_1 tools_ldap_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
docker-refresh: docker-clean docker-compose docker-refresh: docker-clean docker-compose
@@ -638,6 +631,9 @@ clean-elk:
docker rm tools_elasticsearch_1 docker rm tools_elasticsearch_1
docker rm tools_kibana_1 docker rm tools_kibana_1
psql-container:
docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
VERSION: VERSION:
@echo "awx: $(VERSION)" @echo "awx: $(VERSION)"

View File

@@ -154,12 +154,10 @@ def manage():
from django.conf import settings from django.conf import settings
from django.core.management import execute_from_command_line from django.core.management import execute_from_command_line
# enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1 # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
# In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
# The return of connection.pg_version is something like 12013
if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development': if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
if (connection.pg_version // 10000) < 12: if (connection.pg_version // 10000) < 12:
sys.stderr.write("At a minimum, postgres version 12 is required\n") sys.stderr.write("Postgres version 12 is required\n")
sys.exit(1) sys.exit(1)
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover

View File

@@ -93,7 +93,6 @@ register(
default='', default='',
label=_('Login redirect override URL'), label=_('Login redirect override URL'),
help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'), help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the login page.'),
warning_text=_('Changing the redirect URL could impact the ability to login if local authentication is also disabled.'),
category=_('Authentication'), category=_('Authentication'),
category_slug='authentication', category_slug='authentication',
) )

View File

@@ -36,13 +36,11 @@ class Metadata(metadata.SimpleMetadata):
field_info = OrderedDict() field_info = OrderedDict()
field_info['type'] = self.label_lookup[field] field_info['type'] = self.label_lookup[field]
field_info['required'] = getattr(field, 'required', False) field_info['required'] = getattr(field, 'required', False)
field_info['hidden'] = getattr(field, 'hidden', False)
text_attrs = [ text_attrs = [
'read_only', 'read_only',
'label', 'label',
'help_text', 'help_text',
'warning_text',
'min_length', 'min_length',
'max_length', 'max_length',
'min_value', 'min_value',

View File

@@ -191,7 +191,6 @@ SUMMARIZABLE_FK_FIELDS = {
'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'), 'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'), 'approved_or_denied_by': ('id', 'username', 'first_name', 'last_name'),
'credential_type': DEFAULT_SUMMARY_FIELDS, 'credential_type': DEFAULT_SUMMARY_FIELDS,
'resource': ('ansible_id', 'resource_type'),
} }

View File

@@ -24,10 +24,6 @@ def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **e
else: else:
url = _reverse(viewname, args, kwargs, request, format, **extra) url = _reverse(viewname, args, kwargs, request, format, **extra)
if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
return url return url

View File

@@ -13,7 +13,6 @@ from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie from django.views.decorators.csrf import ensure_csrf_cookie
from django.template.loader import render_to_string from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django.urls import reverse as django_reverse
from rest_framework.permissions import AllowAny, IsAuthenticated from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response from rest_framework.response import Response
@@ -131,7 +130,6 @@ class ApiVersionRootView(APIView):
data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request) data['mesh_visualizer'] = reverse('api:mesh_visualizer_view', request=request)
data['bulk'] = reverse('api:bulk', request=request) data['bulk'] = reverse('api:bulk', request=request)
data['analytics'] = reverse('api:analytics_root_view', request=request) data['analytics'] = reverse('api:analytics_root_view', request=request)
data['service_index'] = django_reverse('service-index-root')
return Response(data) return Response(data)

View File

@@ -55,7 +55,6 @@ register(
# Optional; category_slug will be slugified version of category if not # Optional; category_slug will be slugified version of category if not
# explicitly provided. # explicitly provided.
category_slug='cows', category_slug='cows',
hidden=True,
) )

View File

@@ -127,8 +127,6 @@ class SettingsRegistry(object):
encrypted = bool(field_kwargs.pop('encrypted', False)) encrypted = bool(field_kwargs.pop('encrypted', False))
defined_in_file = bool(field_kwargs.pop('defined_in_file', False)) defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
unit = field_kwargs.pop('unit', None) unit = field_kwargs.pop('unit', None)
hidden = field_kwargs.pop('hidden', False)
warning_text = field_kwargs.pop('warning_text', None)
if getattr(field_kwargs.get('child', None), 'source', None) is not None: if getattr(field_kwargs.get('child', None), 'source', None) is not None:
field_kwargs['child'].source = None field_kwargs['child'].source = None
field_instance = field_class(**field_kwargs) field_instance = field_class(**field_kwargs)
@@ -136,14 +134,12 @@ class SettingsRegistry(object):
field_instance.category = category field_instance.category = category
field_instance.depends_on = depends_on field_instance.depends_on = depends_on
field_instance.unit = unit field_instance.unit = unit
field_instance.hidden = hidden
if placeholder is not empty: if placeholder is not empty:
field_instance.placeholder = placeholder field_instance.placeholder = placeholder
field_instance.defined_in_file = defined_in_file field_instance.defined_in_file = defined_in_file
if field_instance.defined_in_file: if field_instance.defined_in_file:
field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text) field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
field_instance.encrypted = encrypted field_instance.encrypted = encrypted
field_instance.warning_text = warning_text
original_field_instance = field_instance original_field_instance = field_instance
if field_class != original_field_class: if field_class != original_field_class:
original_field_instance = original_field_class(**field_kwargs) original_field_instance = original_field_class(**field_kwargs)

View File

@@ -1,7 +1,6 @@
# Python # Python
import contextlib import contextlib
import logging import logging
import psycopg
import threading import threading
import time import time
import os import os
@@ -14,7 +13,7 @@ from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
from django.db import transaction, connection from django.db import transaction, connection
from django.db.utils import DatabaseError, ProgrammingError from django.db.utils import Error as DBError, ProgrammingError
from django.utils.functional import cached_property from django.utils.functional import cached_property
# Django REST Framework # Django REST Framework
@@ -81,26 +80,18 @@ def _ctit_db_wrapper(trans_safe=False):
logger.debug('Obtaining database settings in spite of broken transaction.') logger.debug('Obtaining database settings in spite of broken transaction.')
transaction.set_rollback(False) transaction.set_rollback(False)
yield yield
except ProgrammingError as e: except DBError as exc:
# Exception raised for programming errors
# Examples may be table not found or already exists,
# this generally means we can't fetch Tower configuration
# because the database hasn't actually finished migrating yet;
# this is usually a sign that a service in a container (such as ws_broadcast)
# has come up *before* the database has finished migrating, and
# especially that the conf.settings table doesn't exist yet
# syntax error in the SQL statement, wrong number of parameters specified, etc.
if trans_safe: if trans_safe:
logger.debug(f'Database settings are not available, using defaults. error: {str(e)}') level = logger.warning
else: if isinstance(exc, ProgrammingError):
logger.exception('Error modifying something related to database settings.') if 'relation' in str(exc) and 'does not exist' in str(exc):
except DatabaseError as e: # this generally means we can't fetch Tower configuration
if trans_safe: # because the database hasn't actually finished migrating yet;
cause = e.__cause__ # this is usually a sign that a service in a container (such as ws_broadcast)
if cause and hasattr(cause, 'sqlstate'): # has come up *before* the database has finished migrating, and
sqlstate = cause.sqlstate # especially that the conf.settings table doesn't exist yet
sqlstate_str = psycopg.errors.lookup(sqlstate) level = logger.debug
logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str)) level(f'Database settings are not available, using defaults. error: {str(exc)}')
else: else:
logger.exception('Error modifying something related to database settings.') logger.exception('Error modifying something related to database settings.')
finally: finally:

View File

@@ -639,10 +639,7 @@ class UserAccess(BaseAccess):
""" """
model = User model = User
prefetch_related = ( prefetch_related = ('profile',)
'profile',
'resource',
)
def filtered_queryset(self): def filtered_queryset(self):
if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()): if settings.ORG_ADMINS_CAN_SEE_ALL_USERS and (self.user.admin_of_organizations.exists() or self.user.auditor_of_organizations.exists()):
@@ -838,7 +835,6 @@ class OrganizationAccess(NotificationAttachMixin, BaseAccess):
prefetch_related = ( prefetch_related = (
'created_by', 'created_by',
'modified_by', 'modified_by',
'resource', # dab_resource_registry
) )
# organization admin_role is not a parent of organization auditor_role # organization admin_role is not a parent of organization auditor_role
notification_attach_roles = ['admin_role', 'auditor_role'] notification_attach_roles = ['admin_role', 'auditor_role']
@@ -1307,7 +1303,6 @@ class TeamAccess(BaseAccess):
'created_by', 'created_by',
'modified_by', 'modified_by',
'organization', 'organization',
'resource', # dab_resource_registry
) )
def filtered_queryset(self): def filtered_queryset(self):

View File

@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
resolved_action, resolved_action,
resolved_role, resolved_role,
-- '-' operator listed here: -- '-' operator listed here:
-- https://www.postgresql.org/docs/15/functions-json.html -- https://www.postgresql.org/docs/12/functions-json.html
-- note that operator is only supported by jsonb objects -- note that operator is only supported by jsonb objects
-- https://www.postgresql.org/docs/current/datatype-json.html -- https://www.postgresql.org/docs/current/datatype-json.html
(CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats, (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,

View File

@@ -92,7 +92,6 @@ register(
), ),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
required=False,
) )
register( register(
@@ -775,7 +774,6 @@ register(
allow_null=True, allow_null=True,
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
required=False,
) )
register( register(
'AUTOMATION_ANALYTICS_LAST_ENTRIES', 'AUTOMATION_ANALYTICS_LAST_ENTRIES',
@@ -817,7 +815,6 @@ register(
help_text=_('Max jobs to allow bulk jobs to launch'), help_text=_('Max jobs to allow bulk jobs to launch'),
category=_('Bulk Actions'), category=_('Bulk Actions'),
category_slug='bulk', category_slug='bulk',
hidden=True,
) )
register( register(
@@ -828,7 +825,6 @@ register(
help_text=_('Max number of hosts to allow to be created in a single bulk action'), help_text=_('Max number of hosts to allow to be created in a single bulk action'),
category=_('Bulk Actions'), category=_('Bulk Actions'),
category_slug='bulk', category_slug='bulk',
hidden=True,
) )
register( register(
@@ -839,7 +835,6 @@ register(
help_text=_('Max number of hosts to allow to be deleted in a single bulk action'), help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
category=_('Bulk Actions'), category=_('Bulk Actions'),
category_slug='bulk', category_slug='bulk',
hidden=True,
) )
register( register(
@@ -850,7 +845,6 @@ register(
help_text=_('Enable preview of new user interface.'), help_text=_('Enable preview of new user interface.'),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
hidden=True,
) )
register( register(

View File

@@ -14,7 +14,7 @@ __all__ = [
'STANDARD_INVENTORY_UPDATE_ENV', 'STANDARD_INVENTORY_UPDATE_ENV',
] ]
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform') CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
PRIVILEGE_ESCALATION_METHODS = [ PRIVILEGE_ESCALATION_METHODS = [
('sudo', _('Sudo')), ('sudo', _('Sudo')),
('su', _('Su')), ('su', _('Su')),

View File

@@ -1,10 +1,9 @@
from azure.keyvault.secrets import SecretClient
from azure.identity import ClientSecretCredential
from msrestazure import azure_cloud
from .plugin import CredentialPlugin from .plugin import CredentialPlugin
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from azure.keyvault import KeyVaultClient, KeyVaultAuthentication
from azure.common.credentials import ServicePrincipalCredentials
from msrestazure import azure_cloud
# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py # https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
@@ -55,9 +54,22 @@ azure_keyvault_inputs = {
def azure_keyvault_backend(**kwargs): def azure_keyvault_backend(**kwargs):
csc = ClientSecretCredential(tenant_id=kwargs['tenant'], client_id=kwargs['client'], client_secret=kwargs['secret']) url = kwargs['url']
kv = SecretClient(credential=csc, vault_url=kwargs['url']) [cloud] = [c for c in clouds if c.name == kwargs.get('cloud_name', default_cloud.name)]
return kv.get_secret(name=kwargs['secret_field'], version=kwargs.get('secret_version', '')).value
def auth_callback(server, resource, scope):
credentials = ServicePrincipalCredentials(
url=url,
client_id=kwargs['client'],
secret=kwargs['secret'],
tenant=kwargs['tenant'],
resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
)
token = credentials.token
return token['token_type'], token['access_token']
kv = KeyVaultClient(KeyVaultAuthentication(auth_callback))
return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend) azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)

View File

@@ -259,12 +259,6 @@ class AWXConsumerPG(AWXConsumerBase):
current_downtime = time.time() - self.pg_down_time current_downtime = time.time() - self.pg_down_time
if current_downtime > self.pg_max_wait: if current_downtime > self.pg_max_wait:
logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting") logger.exception(f"Postgres event consumer has not recovered in {current_downtime} s, exiting")
# Sending QUIT to multiprocess queue to signal workers to exit
for worker in self.pool.workers:
try:
worker.quit()
except Exception:
logger.exception(f"Error sending QUIT to worker {worker}")
raise raise
# Wait for a second before next attempt, but still listen for any shutdown signals # Wait for a second before next attempt, but still listen for any shutdown signals
for i in range(10): for i in range(10):
@@ -276,12 +270,6 @@ class AWXConsumerPG(AWXConsumerBase):
except Exception: except Exception:
# Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata # Log unanticipated exception in addition to writing to stderr to get timestamps and other metadata
logger.exception('Encountered unhandled error in dispatcher main loop') logger.exception('Encountered unhandled error in dispatcher main loop')
# Sending QUIT to multiprocess queue to signal workers to exit
for worker in self.pool.workers:
try:
worker.quit()
except Exception:
logger.exception(f"Error sending QUIT to worker {worker}")
raise raise

View File

@@ -1,179 +0,0 @@
import json
import os
import sys
import re
from typing import Any
from django.core.management.base import BaseCommand
from django.conf import settings
from awx.conf import settings_registry
class Command(BaseCommand):
help = 'Dump the current auth configuration in django_ansible_base.authenticator format, currently supports LDAP and SAML'
DAB_SAML_AUTHENTICATOR_KEYS = {
"SP_ENTITY_ID": True,
"SP_PUBLIC_CERT": True,
"SP_PRIVATE_KEY": True,
"ORG_INFO": True,
"TECHNICAL_CONTACT": True,
"SUPPORT_CONTACT": True,
"SP_EXTRA": False,
"SECURITY_CONFIG": False,
"EXTRA_DATA": False,
"ENABLED_IDPS": True,
"CALLBACK_URL": False,
}
DAB_LDAP_AUTHENTICATOR_KEYS = {
"SERVER_URI": True,
"BIND_DN": False,
"BIND_PASSWORD": False,
"CONNECTION_OPTIONS": False,
"GROUP_TYPE": True,
"GROUP_TYPE_PARAMS": True,
"GROUP_SEARCH": False,
"START_TLS": False,
"USER_DN_TEMPLATE": True,
"USER_ATTR_MAP": True,
"USER_SEARCH": False,
}
def get_awx_ldap_settings(self) -> dict[str, dict[str, Any]]:
awx_ldap_settings = {}
for awx_ldap_setting in settings_registry.get_registered_settings(category_slug='ldap'):
key = awx_ldap_setting.removeprefix("AUTH_LDAP_")
value = getattr(settings, awx_ldap_setting, None)
awx_ldap_settings[key] = value
grouped_settings = {}
for key, value in awx_ldap_settings.items():
match = re.search(r'(\d+)', key)
index = int(match.group()) if match else 0
new_key = re.sub(r'\d+_', '', key)
if index not in grouped_settings:
grouped_settings[index] = {}
grouped_settings[index][new_key] = value
if new_key == "GROUP_TYPE" and value:
grouped_settings[index][new_key] = type(value).__name__
if new_key == "SERVER_URI" and value:
value = value.split(", ")
return grouped_settings
def is_enabled(self, settings, keys):
for key, required in keys.items():
if required and not settings.get(key):
return False
return True
def get_awx_saml_settings(self) -> dict[str, Any]:
awx_saml_settings = {}
for awx_saml_setting in settings_registry.get_registered_settings(category_slug='saml'):
awx_saml_settings[awx_saml_setting.removeprefix("SOCIAL_AUTH_SAML_")] = getattr(settings, awx_saml_setting, None)
return awx_saml_settings
def format_config_data(self, enabled, awx_settings, type, keys, name):
config = {
"type": f"awx.authentication.authenticator_plugins.{type}",
"name": name,
"enabled": enabled,
"create_objects": True,
"users_unique": False,
"remove_users": True,
"configuration": {},
}
for k in keys:
v = awx_settings.get(k)
config["configuration"].update({k: v})
if type == "saml":
idp_to_key_mapping = {
"url": "IDP_URL",
"x509cert": "IDP_X509_CERT",
"entity_id": "IDP_ENTITY_ID",
"attr_email": "IDP_ATTR_EMAIL",
"attr_groups": "IDP_GROUPS",
"attr_username": "IDP_ATTR_USERNAME",
"attr_last_name": "IDP_ATTR_LAST_NAME",
"attr_first_name": "IDP_ATTR_FIRST_NAME",
"attr_user_permanent_id": "IDP_ATTR_USER_PERMANENT_ID",
}
for idp_name in awx_settings.get("ENABLED_IDPS", {}):
for key in idp_to_key_mapping:
value = awx_settings["ENABLED_IDPS"][idp_name].get(key)
if value is not None:
config["name"] = idp_name
config["configuration"].update({idp_to_key_mapping[key]: value})
return config
def add_arguments(self, parser):
parser.add_argument(
"output_file",
nargs="?",
type=str,
default=None,
help="Output JSON file path",
)
def handle(self, *args, **options):
try:
data = []
# dump SAML settings
awx_saml_settings = self.get_awx_saml_settings()
awx_saml_enabled = self.is_enabled(awx_saml_settings, self.DAB_SAML_AUTHENTICATOR_KEYS)
if awx_saml_enabled:
awx_saml_name = awx_saml_settings["ENABLED_IDPS"]
data.append(
self.format_config_data(
awx_saml_enabled,
awx_saml_settings,
"saml",
self.DAB_SAML_AUTHENTICATOR_KEYS,
awx_saml_name,
)
)
# dump LDAP settings
awx_ldap_group_settings = self.get_awx_ldap_settings()
for awx_ldap_name, awx_ldap_settings in enumerate(awx_ldap_group_settings.values()):
enabled = self.is_enabled(awx_ldap_settings, self.DAB_LDAP_AUTHENTICATOR_KEYS)
if enabled:
data.append(
self.format_config_data(
enabled,
awx_ldap_settings,
"ldap",
self.DAB_LDAP_AUTHENTICATOR_KEYS,
str(awx_ldap_name),
)
)
# write to file if requested
if options["output_file"]:
# Define the path for the output JSON file
output_file = options["output_file"]
# Ensure the directory exists
os.makedirs(os.path.dirname(output_file), exist_ok=True)
# Write data to the JSON file
with open(output_file, "w") as f:
json.dump(data, f, indent=4)
self.stdout.write(self.style.SUCCESS(f"Auth config data dumped to {output_file}"))
else:
self.stdout.write(json.dumps(data, indent=4))
except Exception as e:
self.stdout.write(self.style.ERROR(f"An error occurred: {str(e)}"))
sys.exit(1)

View File

@@ -92,6 +92,8 @@ class Command(BaseCommand):
return host_stats return host_stats
def handle(self, *arg, **options): def handle(self, *arg, **options):
WebsocketsMetricsServer().start()
# it's necessary to delay this import in case # it's necessary to delay this import in case
# database migrations are still running # database migrations are still running
from awx.main.models.ha import Instance from awx.main.models.ha import Instance
@@ -164,15 +166,8 @@ class Command(BaseCommand):
return return
WebsocketsMetricsServer().start() try:
websocket_relay_manager = WebSocketRelayManager() websocket_relay_manager = WebSocketRelayManager()
asyncio.run(websocket_relay_manager.run())
while True: except KeyboardInterrupt:
try: logger.info('Terminating Websocket Relayer')
asyncio.run(websocket_relay_manager.run())
except KeyboardInterrupt:
logger.info('Shutting down Websocket Relayer')
break
except Exception as e:
logger.exception('Error in Websocket Relayer, exception: {}. Restarting in 10 seconds'.format(e))
time.sleep(10)

View File

@@ -1,59 +0,0 @@
# Generated by Django 4.2.6 on 2024-02-15 20:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0189_inbound_hop_nodes'),
]
operations = [
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
],
default=None,
max_length=32,
),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
('terraform', 'Terraform State'),
],
default=None,
max_length=32,
),
),
]

View File

@@ -6,8 +6,6 @@ from django.conf import settings # noqa
from django.db import connection from django.db import connection
from django.db.models.signals import pre_delete # noqa from django.db.models.signals import pre_delete # noqa
# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField
from ansible_base.lib.utils.models import prevent_search from ansible_base.lib.utils.models import prevent_search
# AWX # AWX
@@ -101,7 +99,6 @@ from awx.main.access import get_user_queryset, check_user_access, check_user_acc
User.add_to_class('get_queryset', get_user_queryset) User.add_to_class('get_queryset', get_user_queryset)
User.add_to_class('can_access', check_user_access) User.add_to_class('can_access', check_user_access)
User.add_to_class('can_access_with_errors', check_user_access_with_errors) User.add_to_class('can_access_with_errors', check_user_access_with_errors)
User.add_to_class('resource', AnsibleResourceField(primary_key_field="id"))
def convert_jsonfields(): def convert_jsonfields():

View File

@@ -925,7 +925,6 @@ class InventorySourceOptions(BaseModel):
('rhv', _('Red Hat Virtualization')), ('rhv', _('Red Hat Virtualization')),
('controller', _('Red Hat Ansible Automation Platform')), ('controller', _('Red Hat Ansible Automation Platform')),
('insights', _('Red Hat Insights')), ('insights', _('Red Hat Insights')),
('terraform', _('Terraform State')),
] ]
# From the options of the Django management base command # From the options of the Django management base command
@@ -1631,20 +1630,6 @@ class satellite6(PluginFileInjector):
return ret return ret
class terraform(PluginFileInjector):
plugin_name = 'terraform_state'
base_injector = 'managed'
namespace = 'cloud'
collection = 'terraform'
use_fqcn = True
def inventory_as_dict(self, inventory_update, private_data_dir):
env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, None)
ret = super().inventory_as_dict(inventory_update, private_data_dir)
ret['backend_config_files'] = env["TF_BACKEND_CONFIG_FILE"]
return ret
class controller(PluginFileInjector): class controller(PluginFileInjector):
plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection
base_injector = 'template' base_injector = 'template'

View File

@@ -498,7 +498,7 @@ class JobNotificationMixin(object):
# Body should have at least 2 CRLF, some clients will interpret # Body should have at least 2 CRLF, some clients will interpret
# the email incorrectly with blank body. So we will check that # the email incorrectly with blank body. So we will check that
if len(body.strip().splitlines()) < 1: if len(body.strip().splitlines()) <= 2:
# blank body # blank body
body = '\r\n'.join( body = '\r\n'.join(
[ [

View File

@@ -10,8 +10,6 @@ from django.contrib.sessions.models import Session
from django.utils.timezone import now as tz_now from django.utils.timezone import now as tz_now
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
# django-ansible-base
from ansible_base.resource_registry.fields import AnsibleResourceField
# AWX # AWX
from awx.api.versioning import reverse from awx.api.versioning import reverse
@@ -105,7 +103,6 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
approval_role = ImplicitRoleField( approval_role = ImplicitRoleField(
parent_role='admin_role', parent_role='admin_role',
) )
resource = AnsibleResourceField(primary_key_field="id")
def get_absolute_url(self, request=None): def get_absolute_url(self, request=None):
return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request) return reverse('api:organization_detail', kwargs={'pk': self.pk}, request=request)
@@ -154,7 +151,6 @@ class Team(CommonModelNameNotUnique, ResourceMixin):
read_role = ImplicitRoleField( read_role = ImplicitRoleField(
parent_role=['organization.auditor_role', 'member_role'], parent_role=['organization.auditor_role', 'member_role'],
) )
resource = AnsibleResourceField(primary_key_field="id")
def get_absolute_url(self, request=None): def get_absolute_url(self, request=None):
return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request) return reverse('api:team_detail', kwargs={'pk': self.pk}, request=request)

View File

@@ -49,70 +49,6 @@ class ReceptorConnectionType(Enum):
STREAMTLS = 2 STREAMTLS = 2
"""
Translate receptorctl messages that come in over stdout into
structured messages. Currently, these are error messages.
"""
class ReceptorErrorBase:
_MESSAGE = 'Receptor Error'
def __init__(self, node: str = 'N/A', state_name: str = 'N/A'):
self.node = node
self.state_name = state_name
def __str__(self):
return f"{self.__class__.__name__} '{self._MESSAGE}' on node '{self.node}' with state '{self.state_name}'"
class WorkUnitError(ReceptorErrorBase):
_MESSAGE = 'unknown work unit '
def __init__(self, work_unit_id: str, *args, **kwargs):
super().__init__(*args, **kwargs)
self.work_unit_id = work_unit_id
def __str__(self):
return f"{super().__str__()} work unit id '{self.work_unit_id}'"
class WorkUnitCancelError(WorkUnitError):
_MESSAGE = 'error cancelling remote unit: unknown work unit '
class WorkUnitResultsError(WorkUnitError):
_MESSAGE = 'Failed to get results: unknown work unit '
class UnknownError(ReceptorErrorBase):
_MESSAGE = 'Unknown receptor ctl error'
def __init__(self, msg, *args, **kwargs):
super().__init__(*args, **kwargs)
self._MESSAGE = msg
class FuzzyError:
def __new__(self, e: RuntimeError, node: str, state_name: str):
"""
At the time of writing this comment all of the sub-classes detection
is centralized in this parent class. It's like a Router().
Someone may find it better to push down the error detection logic into
each sub-class.
"""
msg = e.args[0]
common_startswith = (WorkUnitCancelError, WorkUnitResultsError, WorkUnitError)
for klass in common_startswith:
if msg.startswith(klass._MESSAGE):
work_unit_id = msg[len(klass._MESSAGE) :]
return klass(work_unit_id, node=node, state_name=state_name)
return UnknownError(msg, node=node, state_name=state_name)
def read_receptor_config(): def read_receptor_config():
# for K8S deployments, getting a lock is necessary as another process # for K8S deployments, getting a lock is necessary as another process
# may be re-writing the config at this time # may be re-writing the config at this time
@@ -249,7 +185,6 @@ def run_until_complete(node, timing_data=None, **kwargs):
timing_data['transmit_timing'] = run_start - transmit_start timing_data['transmit_timing'] = run_start - transmit_start
run_timing = 0.0 run_timing = 0.0
stdout = '' stdout = ''
state_name = 'local var never set'
try: try:
resultfile = receptor_ctl.get_work_results(unit_id) resultfile = receptor_ctl.get_work_results(unit_id)
@@ -270,33 +205,13 @@ def run_until_complete(node, timing_data=None, **kwargs):
stdout = resultfile.read() stdout = resultfile.read()
stdout = str(stdout, encoding='utf-8') stdout = str(stdout, encoding='utf-8')
except RuntimeError as e:
receptor_e = FuzzyError(e, node, state_name)
if type(receptor_e) in (
WorkUnitError,
WorkUnitResultsError,
):
logger.warning(f'While consuming job results: {receptor_e}')
else:
raise
finally: finally:
if settings.RECEPTOR_RELEASE_WORK: if settings.RECEPTOR_RELEASE_WORK:
try: res = receptor_ctl.simple_command(f"work release {unit_id}")
res = receptor_ctl.simple_command(f"work release {unit_id}") if res != {'released': unit_id}:
logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
if res != {'released': unit_id}: receptor_ctl.close()
logger.warning(f'Could not confirm release of receptor work unit id {unit_id} from {node}, data: {res}')
receptor_ctl.close()
except RuntimeError as e:
receptor_e = FuzzyError(e, node, state_name)
if type(receptor_e) in (
WorkUnitError,
WorkUnitCancelError,
):
logger.warning(f"While releasing work: {receptor_e}")
else:
logger.error(f"While releasing work: {receptor_e}")
if state_name.lower() == 'failed': if state_name.lower() == 'failed':
work_detail = status.get('Detail', '') work_detail = status.get('Detail', '')
@@ -360,7 +275,7 @@ def _convert_args_to_cli(vargs):
args = ['cleanup'] args = ['cleanup']
for option in ('exclude_strings', 'remove_images'): for option in ('exclude_strings', 'remove_images'):
if vargs.get(option): if vargs.get(option):
args.append('--{}="{}"'.format(option.replace('_', '-'), ' '.join(vargs.get(option)))) args.append('--{}={}'.format(option.replace('_', '-'), ' '.join(vargs.get(option))))
for option in ('file_pattern', 'image_prune', 'process_isolation_executable', 'grace_period'): for option in ('file_pattern', 'image_prune', 'process_isolation_executable', 'grace_period'):
if vargs.get(option) is True: if vargs.get(option) is True:
args.append('--{}'.format(option.replace('_', '-'))) args.append('--{}'.format(option.replace('_', '-')))

View File

@@ -6,7 +6,6 @@ import itertools
import json import json
import logging import logging
import os import os
import psycopg
from io import StringIO from io import StringIO
from contextlib import redirect_stdout from contextlib import redirect_stdout
import shutil import shutil
@@ -417,7 +416,7 @@ def handle_removed_image(remove_images=None):
@task(queue=get_task_queuename) @task(queue=get_task_queuename)
def cleanup_images_and_files(): def cleanup_images_and_files():
_cleanup_images_and_files(image_prune=True) _cleanup_images_and_files()
@task(queue=get_task_queuename) @task(queue=get_task_queuename)
@@ -631,18 +630,10 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen)) logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))
except DatabaseError as e: except DatabaseError as e:
cause = e.__cause__ if 'did not affect any rows' in str(e):
if cause and hasattr(cause, 'sqlstate'): logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
sqlstate = cause.sqlstate
sqlstate_str = psycopg.errors.lookup(sqlstate)
logger.debug('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
if sqlstate == psycopg.errors.NoData:
logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
else:
logger.exception("Error marking {} as lost.".format(other_inst.hostname))
else: else:
logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname)) logger.exception('Error marking {} as lost'.format(other_inst.hostname))
# Run local reaper # Run local reaper
if worker_tasks is not None: if worker_tasks is not None:
@@ -797,19 +788,10 @@ def update_inventory_computed_fields(inventory_id):
try: try:
i.update_computed_fields() i.update_computed_fields()
except DatabaseError as e: except DatabaseError as e:
# https://github.com/django/django/blob/eff21d8e7a1cb297aedf1c702668b590a1b618f3/django/db/models/base.py#L1105 if 'did not affect any rows' in str(e):
# django raises DatabaseError("Forced update did not affect any rows.") logger.debug('Exiting duplicate update_inventory_computed_fields task.')
return
# if sqlstate is set then there was a database error and otherwise will re-raise that error raise
cause = e.__cause__
if cause and hasattr(cause, 'sqlstate'):
sqlstate = cause.sqlstate
sqlstate_str = psycopg.errors.lookup(sqlstate)
logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
raise
# otherwise
logger.debug('Exiting duplicate update_inventory_computed_fields task.')
def update_smart_memberships_for_inventory(smart_inventory): def update_smart_memberships_for_inventory(smart_inventory):

View File

@@ -3,5 +3,5 @@
hosts: all hosts: all
tasks: tasks:
- name: Hello Message - name: Hello Message
ansible.builtin.debug: debug:
msg: "Hello World!" msg: "Hello World!"

View File

@@ -1,3 +0,0 @@
{
"TF_BACKEND_CONFIG_FILE": "{{ file_reference }}"
}

View File

@@ -1,8 +1,13 @@
from awx.main.tests.functional.conftest import * # noqa from awx.main.tests.functional.conftest import * # noqa
import os
import pytest
@pytest.fixture() def pytest_addoption(parser):
def release(): parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")
return os.environ.get('VERSION_TARGET', '')
def pytest_generate_tests(metafunc):
# This is called for every test. Only get/set command line arguments
# if the argument is specified in the list of test "fixturenames".
option_value = metafunc.config.option.release
if 'release' in metafunc.fixturenames and option_value is not None:
metafunc.parametrize("release", [option_value])

View File

@@ -3,19 +3,15 @@ import pytest
from unittest import mock from unittest import mock
import urllib.parse import urllib.parse
from unittest.mock import PropertyMock from unittest.mock import PropertyMock
import importlib
# Django # Django
from django.urls import resolve from django.urls import resolve
from django.http import Http404 from django.http import Http404
from django.apps import apps
from django.core.handlers.exception import response_for_exception from django.core.handlers.exception import response_for_exception
from django.contrib.auth.models import User from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder from django.core.serializers.json import DjangoJSONEncoder
from django.db.backends.sqlite3.base import SQLiteCursorWrapper from django.db.backends.sqlite3.base import SQLiteCursorWrapper
from django.db.models.signals import post_migrate
# AWX # AWX
from awx.main.models.projects import Project from awx.main.models.projects import Project
from awx.main.models.ha import Instance from awx.main.models.ha import Instance
@@ -45,19 +41,10 @@ from awx.main.models.workflow import WorkflowJobTemplate
from awx.main.models.ad_hoc_commands import AdHocCommand from awx.main.models.ad_hoc_commands import AdHocCommand
from awx.main.models.oauth import OAuth2Application as Application from awx.main.models.oauth import OAuth2Application as Application
from awx.main.models.execution_environments import ExecutionEnvironment from awx.main.models.execution_environments import ExecutionEnvironment
from awx.main.utils import is_testing
__SWAGGER_REQUESTS__ = {} __SWAGGER_REQUESTS__ = {}
# HACK: the dab_resource_registry app required ServiceID in migrations which checks do not run
dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial')
if is_testing():
post_migrate.connect(lambda **kwargs: dab_rr_initial.create_service_id(apps, None))
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def swagger_autogen(requests=__SWAGGER_REQUESTS__): def swagger_autogen(requests=__SWAGGER_REQUESTS__):
return requests return requests

View File

@@ -1,39 +0,0 @@
import pytest
from ansible_base.resource_registry.models import Resource
from awx.api.versioning import reverse
def assert_has_resource(list_response, obj=None):
data = list_response.data
assert 'resource' in data['results'][0]['summary_fields']
resource_data = data['results'][0]['summary_fields']['resource']
assert resource_data['ansible_id']
resource = Resource.objects.filter(ansible_id=resource_data['ansible_id']).first()
assert resource
assert resource.content_object
if obj:
objects = [Resource.objects.get(ansible_id=entry['summary_fields']['resource']['ansible_id']).content_object for entry in data['results']]
assert obj in objects
@pytest.mark.django_db
def test_organization_ansible_id(organization, admin_user, get):
url = reverse('api:organization_list')
response = get(url=url, user=admin_user, expect=200)
assert_has_resource(response, obj=organization)
@pytest.mark.django_db
def test_team_ansible_id(team, admin_user, get):
url = reverse('api:team_list')
response = get(url=url, user=admin_user, expect=200)
assert_has_resource(response, obj=team)
@pytest.mark.django_db
def test_user_ansible_id(rando, admin_user, get):
url = reverse('api:user_list')
response = get(url=url, user=admin_user, expect=200)
assert_has_resource(response, obj=rando)

View File

@@ -193,7 +193,6 @@ class TestInventorySourceInjectors:
('satellite6', 'theforeman.foreman.foreman'), ('satellite6', 'theforeman.foreman.foreman'),
('insights', 'redhatinsights.insights.insights'), ('insights', 'redhatinsights.insights.insights'),
('controller', 'awx.awx.tower'), ('controller', 'awx.awx.tower'),
('terraform', 'cloud.terraform.terraform_state'),
], ],
) )
def test_plugin_proper_names(self, source, proper_name): def test_plugin_proper_names(self, source, proper_name):

View File

@@ -107,7 +107,6 @@ def read_content(private_data_dir, raw_env, inventory_update):
for filename in os.listdir(os.path.join(private_data_dir, subdir)): for filename in os.listdir(os.path.join(private_data_dir, subdir)):
filename_list.append(os.path.join(subdir, filename)) filename_list.append(os.path.join(subdir, filename))
filename_list = sorted(filename_list, key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0]) filename_list = sorted(filename_list, key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0])
inventory_content = ""
for filename in filename_list: for filename in filename_list:
if filename in ('args', 'project'): if filename in ('args', 'project'):
continue # Ansible runner continue # Ansible runner
@@ -131,7 +130,6 @@ def read_content(private_data_dir, raw_env, inventory_update):
dir_contents[abs_file_path] = f.read() dir_contents[abs_file_path] = f.read()
# Declare a reference to inventory plugin file if it exists # Declare a reference to inventory plugin file if it exists
if abs_file_path.endswith('.yml') and 'plugin: ' in dir_contents[abs_file_path]: if abs_file_path.endswith('.yml') and 'plugin: ' in dir_contents[abs_file_path]:
inventory_content = dir_contents[abs_file_path]
referenced_paths.add(abs_file_path) # used as inventory file referenced_paths.add(abs_file_path) # used as inventory file
elif cache_file_regex.match(abs_file_path): elif cache_file_regex.match(abs_file_path):
file_aliases[abs_file_path] = 'cache_file' file_aliases[abs_file_path] = 'cache_file'
@@ -159,11 +157,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
content = {} content = {}
for abs_file_path, file_content in dir_contents.items(): for abs_file_path, file_content in dir_contents.items():
# assert that all files laid down are used # assert that all files laid down are used
if ( if abs_file_path not in referenced_paths and abs_file_path not in ignore_files:
abs_file_path not in referenced_paths
and to_container_path(abs_file_path, private_data_dir) not in inventory_content
and abs_file_path not in ignore_files
):
raise AssertionError( raise AssertionError(
"File {} is not referenced. References and files:\n{}\n{}".format(abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4)) "File {} is not referenced. References and files:\n{}\n{}".format(abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4))
) )

View File

@@ -411,14 +411,14 @@ def test_project_delete(delete, organization, admin_user):
@pytest.mark.parametrize( @pytest.mark.parametrize(
'order_by, expected_names', 'order_by, expected_names, expected_ids',
[ [
('name', ['alice project', 'bob project', 'shared project']), ('name', ['alice project', 'bob project', 'shared project'], [1, 2, 3]),
('-name', ['shared project', 'bob project', 'alice project']), ('-name', ['shared project', 'bob project', 'alice project'], [3, 2, 1]),
], ],
) )
@pytest.mark.django_db @pytest.mark.django_db
def test_project_list_ordering_by_name(get, order_by, expected_names, organization_factory): def test_project_list_ordering_by_name(get, order_by, expected_names, expected_ids, organization_factory):
'ensure sorted order of project list is maintained correctly when the requested order is invalid or not applicable' 'ensure sorted order of project list is maintained correctly when the requested order is invalid or not applicable'
objects = organization_factory( objects = organization_factory(
'org1', 'org1',
@@ -426,11 +426,13 @@ def test_project_list_ordering_by_name(get, order_by, expected_names, organizati
superusers=['admin'], superusers=['admin'],
) )
project_names = [] project_names = []
project_ids = []
# TODO: ask for an order by here that doesn't apply # TODO: ask for an order by here that doesn't apply
results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results'] results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
for x in range(len(results)): for x in range(len(results)):
project_names.append(results[x]['name']) project_names.append(results[x]['name'])
assert project_names == expected_names project_ids.append(results[x]['id'])
assert project_names == expected_names and project_ids == expected_ids
@pytest.mark.parametrize('order_by', ('name', '-name')) @pytest.mark.parametrize('order_by', ('name', '-name'))
@@ -448,8 +450,7 @@ def test_project_list_ordering_with_duplicate_names(get, order_by, organization_
for x in range(3): for x in range(3):
results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results'] results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
project_ids[x] = [proj['id'] for proj in results] project_ids[x] = [proj['id'] for proj in results]
assert project_ids[0] == project_ids[1] == project_ids[2] assert project_ids[0] == project_ids[1] == project_ids[2] == [1, 2, 3, 4, 5]
assert project_ids[0] == sorted(project_ids[0])
@pytest.mark.django_db @pytest.mark.django_db

View File

@@ -1,6 +1,11 @@
# Python # Python
from unittest import mock
import uuid import uuid
# patch python-ldap
with mock.patch('__main__.__builtins__.dir', return_value=[]):
import ldap # NOQA
# Load development settings for base variables. # Load development settings for base variables.
from awx.settings.development import * # NOQA from awx.settings.development import * # NOQA

View File

@@ -1,122 +0,0 @@
from io import StringIO
import json
from django.core.management import call_command
from django.test import TestCase, override_settings
settings_dict = {
"SOCIAL_AUTH_SAML_SP_ENTITY_ID": "SP_ENTITY_ID",
"SOCIAL_AUTH_SAML_SP_PUBLIC_CERT": "SP_PUBLIC_CERT",
"SOCIAL_AUTH_SAML_SP_PRIVATE_KEY": "SP_PRIVATE_KEY",
"SOCIAL_AUTH_SAML_ORG_INFO": "ORG_INFO",
"SOCIAL_AUTH_SAML_TECHNICAL_CONTACT": "TECHNICAL_CONTACT",
"SOCIAL_AUTH_SAML_SUPPORT_CONTACT": "SUPPORT_CONTACT",
"SOCIAL_AUTH_SAML_SP_EXTRA": "SP_EXTRA",
"SOCIAL_AUTH_SAML_SECURITY_CONFIG": "SECURITY_CONFIG",
"SOCIAL_AUTH_SAML_EXTRA_DATA": "EXTRA_DATA",
"SOCIAL_AUTH_SAML_ENABLED_IDPS": {
"Keycloak": {
"attr_last_name": "last_name",
"attr_groups": "groups",
"attr_email": "email",
"attr_user_permanent_id": "name_id",
"attr_username": "username",
"entity_id": "https://example.com/auth/realms/awx",
"url": "https://example.com/auth/realms/awx/protocol/saml",
"x509cert": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
"attr_first_name": "first_name",
}
},
"SOCIAL_AUTH_SAML_CALLBACK_URL": "CALLBACK_URL",
"AUTH_LDAP_1_SERVER_URI": "SERVER_URI",
"AUTH_LDAP_1_BIND_DN": "BIND_DN",
"AUTH_LDAP_1_BIND_PASSWORD": "BIND_PASSWORD",
"AUTH_LDAP_1_GROUP_SEARCH": ["GROUP_SEARCH"],
"AUTH_LDAP_1_GROUP_TYPE": "string object",
"AUTH_LDAP_1_GROUP_TYPE_PARAMS": {"member_attr": "member", "name_attr": "cn"},
"AUTH_LDAP_1_USER_DN_TEMPLATE": "USER_DN_TEMPLATE",
"AUTH_LDAP_1_USER_SEARCH": ["USER_SEARCH"],
"AUTH_LDAP_1_USER_ATTR_MAP": {
"email": "email",
"last_name": "last_name",
"first_name": "first_name",
},
"AUTH_LDAP_1_CONNECTION_OPTIONS": {},
"AUTH_LDAP_1_START_TLS": None,
}
@override_settings(**settings_dict)
class TestDumpAuthConfigCommand(TestCase):
def setUp(self):
super().setUp()
self.expected_config = [
{
"type": "awx.authentication.authenticator_plugins.saml",
"name": "Keycloak",
"enabled": True,
"create_objects": True,
"users_unique": False,
"remove_users": True,
"configuration": {
"SP_ENTITY_ID": "SP_ENTITY_ID",
"SP_PUBLIC_CERT": "SP_PUBLIC_CERT",
"SP_PRIVATE_KEY": "SP_PRIVATE_KEY",
"ORG_INFO": "ORG_INFO",
"TECHNICAL_CONTACT": "TECHNICAL_CONTACT",
"SUPPORT_CONTACT": "SUPPORT_CONTACT",
"SP_EXTRA": "SP_EXTRA",
"SECURITY_CONFIG": "SECURITY_CONFIG",
"EXTRA_DATA": "EXTRA_DATA",
"ENABLED_IDPS": {
"Keycloak": {
"attr_last_name": "last_name",
"attr_groups": "groups",
"attr_email": "email",
"attr_user_permanent_id": "name_id",
"attr_username": "username",
"entity_id": "https://example.com/auth/realms/awx",
"url": "https://example.com/auth/realms/awx/protocol/saml",
"x509cert": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
"attr_first_name": "first_name",
}
},
"CALLBACK_URL": "CALLBACK_URL",
"IDP_URL": "https://example.com/auth/realms/awx/protocol/saml",
"IDP_X509_CERT": "-----BEGIN CERTIFICATE-----\nMIIDDjCCAfYCCQCPBeVvpo8+VzANBgkqhkiG9w0BAQsFADBJMQswCQYDVQQGEwJV\nUzELMAkGA1UECAwCTkMxDzANBgNVBAcMBkR1cmhhbTEMMAoGA1UECgwDYXd4MQ4w\nDAYDVQQDDAVsb2NhbDAeFw0yNDAxMTgxNDA4MzFaFw0yNTAxMTcxNDA4MzFaMEkx\nCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJOQzEPMA0GA1UEBwwGRHVyaGFtMQwwCgYD\nVQQKDANhd3gxDjAMBgNVBAMMBWxvY2FsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\nMIIBCgKCAQEAzouj93oyFXsHEABdPESh3CYpp5QJJBM4TLYIIolk6PFOFIVwBuFY\nfExi5w7Hh4A42lPM6RkrT+u3h7LV39H9MRUfqygOSmaxICTOI0sU9ROHc44fWWzN\n756OP4B5zSiqG82q8X7nYVkcID+2F/3ekPLMOlWn53OrcdfKKDIcqavoTkQJefc2\nggXU3WgVCxGki/qCm+e5cZ1Cpl/ykSLOT8dWMEzDd12kin66zJ3KYz9F2Q5kQTh4\nKRAChnBBoEqzOfENHEAaHALiXOlVSy61VcLbtvskRMMwBtsydlnd9n/HGnktgrid\n3Ca0z5wBTHWjAOBvCKxKJuDa+jmyHEnpcQIDAQABMA0GCSqGSIb3DQEBCwUAA4IB\nAQBXvmyPWgXhC26cHYJBgQqj57dZ+n7p00kM1J+27oDMjGmbmX+XIKXLWazw/rG3\ngDjw9MXI2tVCrQMX0ohjphaULXhb/VBUPDOiW+k7C6AB3nZySFRflcR3cM4f83zF\nMoBd0549h5Red4p72FeOKNJRTN8YO4ooH9YNh5g0FQkgqn7fV9w2CNlomeKIW9zP\nm8tjFw0cJUk2wEYBVl8O7ko5rgNlzhkLoZkMvJhKa99AQJA6MAdyoLl1lv56Kq4X\njk+mMEiz9SaInp+ILQ1uQxZEwuC7DoGRW76rV4Fnie6+DLft4WKZfX1497mx8NV3\noR0abutJaKnCj07dwRu4/EsK\n-----END CERTIFICATE-----",
"IDP_ENTITY_ID": "https://example.com/auth/realms/awx",
"IDP_ATTR_EMAIL": "email",
"IDP_GROUPS": "groups",
"IDP_ATTR_USERNAME": "username",
"IDP_ATTR_LAST_NAME": "last_name",
"IDP_ATTR_FIRST_NAME": "first_name",
"IDP_ATTR_USER_PERMANENT_ID": "name_id",
},
},
{
"type": "awx.authentication.authenticator_plugins.ldap",
"name": "1",
"enabled": True,
"create_objects": True,
"users_unique": False,
"remove_users": True,
"configuration": {
"SERVER_URI": "SERVER_URI",
"BIND_DN": "BIND_DN",
"BIND_PASSWORD": "BIND_PASSWORD",
"CONNECTION_OPTIONS": {},
"GROUP_TYPE": "str",
"GROUP_TYPE_PARAMS": {"member_attr": "member", "name_attr": "cn"},
"GROUP_SEARCH": ["GROUP_SEARCH"],
"START_TLS": None,
"USER_DN_TEMPLATE": "USER_DN_TEMPLATE",
"USER_ATTR_MAP": {"email": "email", "last_name": "last_name", "first_name": "first_name"},
"USER_SEARCH": ["USER_SEARCH"],
},
},
]
def test_json_returned_from_cmd(self):
output = StringIO()
call_command("dump_auth_config", stdout=output)
assert json.loads(output.getvalue()) == self.expected_config

View File

@@ -1,64 +0,0 @@
import pytest
from unittest.mock import MagicMock, patch
from awx.main.tasks.system import update_inventory_computed_fields
from awx.main.models import Inventory
from django.db import DatabaseError
@pytest.fixture
def mock_logger():
with patch("awx.main.tasks.system.logger") as logger:
yield logger
@pytest.fixture
def mock_inventory():
return MagicMock(spec=Inventory)
def test_update_inventory_computed_fields_existing_inventory(mock_logger, mock_inventory):
# Mocking the Inventory.objects.filter method to return a non-empty queryset
with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
mock_filter.return_value.exists.return_value = True
mock_filter.return_value.__getitem__.return_value = mock_inventory
# Mocking the update_computed_fields method
with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
update_inventory_computed_fields(1)
# Assertions
mock_filter.assert_called_once_with(id=1)
mock_update_computed_fields.assert_called_once()
# You can add more assertions based on your specific requirements
def test_update_inventory_computed_fields_missing_inventory(mock_logger):
# Mocking the Inventory.objects.filter method to return an empty queryset
with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
mock_filter.return_value.exists.return_value = False
update_inventory_computed_fields(1)
# Assertions
mock_filter.assert_called_once_with(id=1)
mock_logger.error.assert_called_once_with("Update Inventory Computed Fields failed due to missing inventory: 1")
def test_update_inventory_computed_fields_database_error_nosqlstate(mock_logger, mock_inventory):
# Mocking the Inventory.objects.filter method to return a non-empty queryset
with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
mock_filter.return_value.exists.return_value = True
mock_filter.return_value.__getitem__.return_value = mock_inventory
# Mocking the update_computed_fields method
with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
# Simulating the update_computed_fields method to explicitly raise a DatabaseError
mock_update_computed_fields.side_effect = DatabaseError("Some error")
update_inventory_computed_fields(1)
# Assertions
mock_filter.assert_called_once_with(id=1)
mock_update_computed_fields.assert_called_once()
mock_inventory.update_computed_fields.assert_called_once()

View File

@@ -121,10 +121,6 @@ def test_get_model_for_valid_type(model_type, model_class):
assert common.get_model_for_type(model_type) == model_class assert common.get_model_for_type(model_type) == model_class
def test_is_testing():
assert common.is_testing() is True
@pytest.mark.parametrize("model_type,model_class", [(name, cls) for cls, name in TEST_MODELS]) @pytest.mark.parametrize("model_type,model_class", [(name, cls) for cls, name in TEST_MODELS])
def test_get_capacity_type(model_type, model_class): def test_get_capacity_type(model_type, model_class):
if model_type in ('job', 'ad_hoc_command', 'inventory_update', 'job_template'): if model_type in ('job', 'ad_hoc_command', 'inventory_update', 'job_template'):

View File

@@ -3,7 +3,7 @@ from awx.main.tasks.receptor import _convert_args_to_cli
def test_file_cleanup_scenario(): def test_file_cleanup_scenario():
args = _convert_args_to_cli({'exclude_strings': ['awx_423_', 'awx_582_'], 'file_pattern': '/tmp/awx_*_*'}) args = _convert_args_to_cli({'exclude_strings': ['awx_423_', 'awx_582_'], 'file_pattern': '/tmp/awx_*_*'})
assert ' '.join(args) == 'cleanup --exclude-strings="awx_423_ awx_582_" --file-pattern=/tmp/awx_*_*' assert ' '.join(args) == 'cleanup --exclude-strings=awx_423_ awx_582_ --file-pattern=/tmp/awx_*_*'
def test_image_cleanup_scenario(): def test_image_cleanup_scenario():
@@ -17,5 +17,5 @@ def test_image_cleanup_scenario():
} }
) )
assert ( assert (
' '.join(args) == 'cleanup --remove-images="quay.invalid/foo/bar:latest quay.invalid/foo/bar:devel" --image-prune --process-isolation-executable=podman' ' '.join(args) == 'cleanup --remove-images=quay.invalid/foo/bar:latest quay.invalid/foo/bar:devel --image-prune --process-isolation-executable=podman'
) )

View File

@@ -7,7 +7,6 @@ import json
import yaml import yaml
import logging import logging
import time import time
import psycopg
import os import os
import subprocess import subprocess
import re import re
@@ -24,7 +23,7 @@ from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
from django.utils.dateparse import parse_datetime from django.utils.dateparse import parse_datetime
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from django.utils.functional import cached_property from django.utils.functional import cached_property
from django.db import connection, DatabaseError, transaction, ProgrammingError, IntegrityError from django.db import connection, transaction, ProgrammingError, IntegrityError
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor, ManyToManyDescriptor from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor, ManyToManyDescriptor
from django.db.models.query import QuerySet from django.db.models.query import QuerySet
@@ -137,7 +136,7 @@ def underscore_to_camelcase(s):
@functools.cache @functools.cache
def is_testing(argv=None): def is_testing(argv=None):
'''Return True if running django or py.test unit tests.''' '''Return True if running django or py.test unit tests.'''
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'awx.main.tests.settings_for_test': if 'PYTEST_CURRENT_TEST' in os.environ.keys():
return True return True
argv = sys.argv if argv is None else argv argv = sys.argv if argv is None else argv
if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]): if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
@@ -1156,25 +1155,11 @@ def create_partition(tblname, start=None):
f'ALTER TABLE {tblname} ATTACH PARTITION {tblname}_{partition_label} ' f'ALTER TABLE {tblname} ATTACH PARTITION {tblname}_{partition_label} '
f'FOR VALUES FROM (\'{start_timestamp}\') TO (\'{end_timestamp}\');' f'FOR VALUES FROM (\'{start_timestamp}\') TO (\'{end_timestamp}\');'
) )
except (ProgrammingError, IntegrityError) as e: except (ProgrammingError, IntegrityError) as e:
cause = e.__cause__ if 'already exists' in str(e):
if cause and hasattr(cause, 'sqlstate'): logger.info(f'Caught known error due to partition creation race: {e}')
sqlstate = cause.sqlstate else:
sqlstate_cls = psycopg.errors.lookup(sqlstate) raise
if psycopg.errors.DuplicateTable == sqlstate_cls or psycopg.errors.UniqueViolation == sqlstate_cls:
logger.info(f'Caught known error due to partition creation race: {e}')
else:
logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_cls))
raise
except DatabaseError as e:
cause = e.__cause__
if cause and hasattr(cause, 'sqlstate'):
sqlstate = cause.sqlstate
sqlstate_str = psycopg.errors.lookup(sqlstate)
logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
raise
def cleanup_new_process(func): def cleanup_new_process(func):

View File

@@ -302,36 +302,20 @@ class WebSocketRelayManager(object):
self.stats_mgr.start() self.stats_mgr.start()
# Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully. # Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
database_conf = settings.DATABASES['default'].copy() database_conf = settings.DATABASES['default']
database_conf['OPTIONS'] = database_conf.get('OPTIONS', {}).copy() async_conn = await psycopg.AsyncConnection.connect(
dbname=database_conf['NAME'],
for k, v in settings.LISTENER_DATABASES.get('default', {}).items(): host=database_conf['HOST'],
database_conf[k] = v user=database_conf['USER'],
for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items(): password=database_conf['PASSWORD'],
database_conf['OPTIONS'][k] = v port=database_conf['PORT'],
**database_conf.get("OPTIONS", {}),
task = None )
await async_conn.set_autocommit(True)
event_loop.create_task(self.on_ws_heartbeat(async_conn))
# Establishes a websocket connection to /websocket/relay on all API servers # Establishes a websocket connection to /websocket/relay on all API servers
while True: while True:
if not task or task.done():
try:
async_conn = await psycopg.AsyncConnection.connect(
dbname=database_conf['NAME'],
host=database_conf['HOST'],
user=database_conf['USER'],
password=database_conf['PASSWORD'],
port=database_conf['PORT'],
**database_conf.get("OPTIONS", {}),
)
await async_conn.set_autocommit(True)
task = event_loop.create_task(self.on_ws_heartbeat(async_conn), name="on_ws_heartbeat")
logger.info("Creating `on_ws_heartbeat` task in event loop.")
except Exception as e:
logger.warning(f"Failed to connect to database for pg_notify: {e}")
future_remote_hosts = self.known_hosts.keys() future_remote_hosts = self.known_hosts.keys()
current_remote_hosts = self.relay_connections.keys() current_remote_hosts = self.relay_connections.keys()
deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts) deleted_remote_hosts = set(current_remote_hosts) - set(future_remote_hosts)

View File

@@ -221,10 +221,8 @@
vars: vars:
req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}" req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
req_candidates: req_candidates:
files: - "{{ project_path | quote }}/roles/requirements.yml"
- "{{ project_path | quote }}/roles/requirements.yml" - "{{ project_path | quote }}/roles/requirements.yaml"
- "{{ project_path | quote }}/roles/requirements.yaml"
skip: True
changed_when: "'was installed successfully' in galaxy_result.stdout" changed_when: "'was installed successfully' in galaxy_result.stdout"
when: when:
- roles_enabled | bool - roles_enabled | bool
@@ -239,10 +237,10 @@
vars: vars:
req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}" req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
req_candidates: req_candidates:
files: - "{{ project_path | quote }}/collections/requirements.yml"
- "{{ project_path | quote }}/collections/requirements.yml" - "{{ project_path | quote }}/collections/requirements.yaml"
- "{{ project_path | quote }}/collections/requirements.yaml" - "{{ project_path | quote }}/requirements.yml"
skip: True - "{{ project_path | quote }}/requirements.yaml"
changed_when: "'Nothing to do.' not in galaxy_collection_result.stdout" changed_when: "'Nothing to do.' not in galaxy_collection_result.stdout"
when: when:
- "ansible_version.full is version_compare('2.9', '>=')" - "ansible_version.full is version_compare('2.9', '>=')"
@@ -251,7 +249,6 @@
tags: tags:
- install_collections - install_collections
# requirements.yml in project root can be either "old" (roles only) or "new" (collections+roles) format
- name: Fetch galaxy roles and collections from requirements.(yml/yaml) - name: Fetch galaxy roles and collections from requirements.(yml/yaml)
ansible.builtin.command: ansible.builtin.command:
cmd: "ansible-galaxy install -r {{ req_file }} {{ verbosity }}" cmd: "ansible-galaxy install -r {{ req_file }} {{ verbosity }}"
@@ -259,10 +256,8 @@
vars: vars:
req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}" req_file: "{{ lookup('ansible.builtin.first_found', req_candidates, skip=True) }}"
req_candidates: req_candidates:
files: - "{{ project_path | quote }}/requirements.yaml"
- "{{ project_path | quote }}/requirements.yaml" - "{{ project_path | quote }}/requirements.yml"
- "{{ project_path | quote }}/requirements.yml"
skip: True
changed_when: "'Nothing to do.' not in galaxy_combined_result.stdout" changed_when: "'Nothing to do.' not in galaxy_combined_result.stdout"
when: when:
- "ansible_version.full is version_compare('2.10', '>=')" - "ansible_version.full is version_compare('2.10', '>=')"

View File

@@ -1,22 +0,0 @@
from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType
from awx.main import models
class APIConfig(ServiceAPIConfig):
service_type = "awx"
RESOURCE_LIST = (
ResourceConfig(
models.Organization,
shared_resource=SharedResource(serializer=OrganizationType, is_provider=False),
),
ResourceConfig(models.User, shared_resource=SharedResource(serializer=UserType, is_provider=False), name_field="username"),
ResourceConfig(
models.Team,
shared_resource=SharedResource(serializer=TeamType, is_provider=False),
parent_resources=[ParentResource(model=models.Organization, field_name="organization")],
),
)

View File

@@ -353,11 +353,8 @@ INSTALLED_APPS = [
'awx.sso', 'awx.sso',
'solo', 'solo',
'ansible_base.rest_filters', 'ansible_base.rest_filters',
'ansible_base.jwt_consumer',
'ansible_base.resource_registry',
] ]
INTERNAL_IPS = ('127.0.0.1',) INTERNAL_IPS = ('127.0.0.1',)
MAX_PAGE_SIZE = 200 MAX_PAGE_SIZE = 200
@@ -365,7 +362,6 @@ REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'awx.api.pagination.Pagination', 'DEFAULT_PAGINATION_CLASS': 'awx.api.pagination.Pagination',
'PAGE_SIZE': 25, 'PAGE_SIZE': 25,
'DEFAULT_AUTHENTICATION_CLASSES': ( 'DEFAULT_AUTHENTICATION_CLASSES': (
'ansible_base.jwt_consumer.awx.auth.AwxJWTAuthentication',
'awx.api.authentication.LoggedOAuth2Authentication', 'awx.api.authentication.LoggedOAuth2Authentication',
'awx.api.authentication.SessionAuthentication', 'awx.api.authentication.SessionAuthentication',
'awx.api.authentication.LoggedBasicAuthentication', 'awx.api.authentication.LoggedBasicAuthentication',
@@ -759,14 +755,6 @@ SATELLITE6_INSTANCE_ID_VAR = 'foreman_id,foreman.id'
INSIGHTS_INSTANCE_ID_VAR = 'insights_id' INSIGHTS_INSTANCE_ID_VAR = 'insights_id'
INSIGHTS_EXCLUDE_EMPTY_GROUPS = False INSIGHTS_EXCLUDE_EMPTY_GROUPS = False
# ----------------
# -- Terraform State --
# ----------------
# TERRAFORM_ENABLED_VAR =
# TERRAFORM_ENABLED_VALUE =
TERRAFORM_INSTANCE_ID_VAR = 'id'
TERRAFORM_EXCLUDE_EMPTY_GROUPS = True
# --------------------- # ---------------------
# ----- Custom ----- # ----- Custom -----
# --------------------- # ---------------------
@@ -1120,17 +1108,8 @@ METRICS_SUBSYSTEM_CONFIG = {
# django-ansible-base # django-ansible-base
ANSIBLE_BASE_TEAM_MODEL = 'main.Team' ANSIBLE_BASE_TEAM_MODEL = 'main.Team'
ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization' ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization'
ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'
from ansible_base.lib import dynamic_config # noqa: E402 from ansible_base.lib import dynamic_config # noqa: E402
settings_file = os.path.join(os.path.dirname(dynamic_config.__file__), 'dynamic_settings.py') settings_file = os.path.join(os.path.dirname(dynamic_config.__file__), 'dynamic_settings.py')
include(settings_file) include(settings_file)
# Add a postfix to the API URL patterns
# example if set to '' API pattern will be /api
# example if set to 'controller' API pattern will be /api AND /api/controller
OPTIONAL_API_URLPATTERN_PREFIX = ''
# Use AWX base view, to give 401 on unauthenticated requests
ANSIBLE_BASE_CUSTOM_VIEW_PARENT = 'awx.api.generics.APIView'

View File

@@ -72,8 +72,6 @@ AWX_CALLBACK_PROFILE = True
# Allows user to trigger task managers directly for debugging and profiling purposes. # Allows user to trigger task managers directly for debugging and profiling purposes.
# Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development' # Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
AWX_DISABLE_TASK_MANAGERS = False AWX_DISABLE_TASK_MANAGERS = False
# Needed for launching runserver in debug mode
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!================================= # ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
# Store a snapshot of default settings at this point before loading any # Store a snapshot of default settings at this point before loading any

View File

@@ -39,7 +39,7 @@
{% else %} {% else %}
<li><a href="{% url 'api:login' %}?next={{ request.get_full_path }}" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="Log in"><span class="glyphicon glyphicon-log-in"></span>Log in</a></li> <li><a href="{% url 'api:login' %}?next={{ request.get_full_path }}" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="Log in"><span class="glyphicon glyphicon-log-in"></span>Log in</a></li>
{% endif %} {% endif %}
<li><a href="//ansible.readthedocs.io/projects/awx/en/latest/rest_api/index.html" target="_blank" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'API Guide' %}"><span class="glyphicon glyphicon-question-sign"></span><span class="visible-xs-inline">{% trans 'API Guide' %}</span></a></li> <li><a href="//docs.ansible.com/ansible-tower/{{short_tower_version}}/html/towerapi/index.html" target="_blank" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'API Guide' %}"><span class="glyphicon glyphicon-question-sign"></span><span class="visible-xs-inline">{% trans 'API Guide' %}</span></a></li>
<li><a href="/" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'Back to application' %}"><span class="glyphicon glyphicon-circle-arrow-left"></span><span class="visible-xs-inline">{% trans 'Back to application' %}</span></a></li> <li><a href="/" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'Back to application' %}"><span class="glyphicon glyphicon-circle-arrow-left"></span><span class="visible-xs-inline">{% trans 'Back to application' %}</span></a></li>
<li class="hidden-xs"><a href="#" class="resize" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'Resize' %}"><span class="glyphicon glyphicon-resize-full"></span></a></li> <li class="hidden-xs"><a href="#" class="resize" data-toggle="tooltip" data-placement="bottom" data-delay="1000" title="{% trans 'Resize' %}"><span class="glyphicon glyphicon-resize-full"></span></a></li>
</ul> </ul>

View File

@@ -59,7 +59,6 @@ register(
help_text=_('Maximum number of job events for the UI to retrieve within a single request.'), help_text=_('Maximum number of job events for the UI to retrieve within a single request.'),
category=_('UI'), category=_('UI'),
category_slug='ui', category_slug='ui',
hidden=True,
) )
register( register(
@@ -69,5 +68,4 @@ register(
help_text=_('If disabled, the page will not refresh when events are received. Reloading the page will be required to get the latest details.'), help_text=_('If disabled, the page will not refresh when events are received. Reloading the page will be required to get the latest details.'),
category=_('UI'), category=_('UI'),
category_slug='ui', category_slug='ui',
hidden=True,
) )

View File

@@ -13,7 +13,7 @@
"@patternfly/react-table": "4.113.0", "@patternfly/react-table": "4.113.0",
"ace-builds": "^1.10.1", "ace-builds": "^1.10.1",
"ansi-to-html": "0.7.2", "ansi-to-html": "0.7.2",
"axios": "^1.6.7", "axios": "0.27.2",
"d3": "7.6.1", "d3": "7.6.1",
"dagre": "^0.8.4", "dagre": "^0.8.4",
"dompurify": "2.4.0", "dompurify": "2.4.0",
@@ -5940,13 +5940,12 @@
} }
}, },
"node_modules/axios": { "node_modules/axios": {
"version": "1.6.7", "version": "0.27.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz", "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
"integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==", "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
"dependencies": { "dependencies": {
"follow-redirects": "^1.15.4", "follow-redirects": "^1.14.9",
"form-data": "^4.0.0", "form-data": "^4.0.0"
"proxy-from-env": "^1.1.0"
} }
}, },
"node_modules/axios/node_modules/form-data": { "node_modules/axios/node_modules/form-data": {
@@ -10388,9 +10387,9 @@
} }
}, },
"node_modules/follow-redirects": { "node_modules/follow-redirects": {
"version": "1.15.5", "version": "1.15.1",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
"integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==",
"funding": [ "funding": [
{ {
"type": "individual", "type": "individual",
@@ -18350,11 +18349,6 @@
"node": ">= 0.10" "node": ">= 0.10"
} }
}, },
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
},
"node_modules/pseudolocale": { "node_modules/pseudolocale": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz", "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",
@@ -26921,13 +26915,12 @@
"dev": true "dev": true
}, },
"axios": { "axios": {
"version": "1.6.7", "version": "0.27.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz", "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
"integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==", "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
"requires": { "requires": {
"follow-redirects": "^1.15.4", "follow-redirects": "^1.14.9",
"form-data": "^4.0.0", "form-data": "^4.0.0"
"proxy-from-env": "^1.1.0"
}, },
"dependencies": { "dependencies": {
"form-data": { "form-data": {
@@ -30378,9 +30371,9 @@
} }
}, },
"follow-redirects": { "follow-redirects": {
"version": "1.15.5", "version": "1.15.1",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
"integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==" "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA=="
}, },
"fork-ts-checker-webpack-plugin": { "fork-ts-checker-webpack-plugin": {
"version": "6.5.2", "version": "6.5.2",
@@ -36332,11 +36325,6 @@
} }
} }
}, },
"proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
},
"pseudolocale": { "pseudolocale": {
"version": "1.2.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz", "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",

View File

@@ -13,7 +13,7 @@
"@patternfly/react-table": "4.113.0", "@patternfly/react-table": "4.113.0",
"ace-builds": "^1.10.1", "ace-builds": "^1.10.1",
"ansi-to-html": "0.7.2", "ansi-to-html": "0.7.2",
"axios": "^1.6.7", "axios": "0.27.2",
"d3": "7.6.1", "d3": "7.6.1",
"dagre": "^0.8.4", "dagre": "^0.8.4",
"dompurify": "2.4.0", "dompurify": "2.4.0",

View File

@@ -67,18 +67,27 @@ function getInitialValues(launchConfig, surveyConfig, resource) {
const values = {}; const values = {};
if (surveyConfig?.spec) { if (surveyConfig?.spec) {
surveyConfig.spec.forEach((question) => { surveyConfig.spec.forEach((question) => {
if (resource?.extra_data && resource?.extra_data[question.variable]) { if (question.type === 'multiselect') {
values[`survey_${question.variable}`] =
resource.extra_data[question.variable];
} else if (question.type === 'multiselect') {
values[`survey_${question.variable}`] = question.default values[`survey_${question.variable}`] = question.default
? question.default.split('\n') ? question.default.split('\n')
: []; : [];
} else { } else {
values[`survey_${question.variable}`] = question.default ?? ''; values[`survey_${question.variable}`] = question.default ?? '';
} }
if (resource?.extra_data) {
Object.entries(resource.extra_data).forEach(([key, value]) => {
if (key === question.variable) {
if (question.type === 'multiselect') {
values[`survey_${question.variable}`] = value;
} else {
values[`survey_${question.variable}`] = value;
}
}
});
}
}); });
} }
return values; return values;
} }

View File

@@ -13,18 +13,6 @@ import ScheduleForm from '../shared/ScheduleForm';
import buildRuleSet from '../shared/buildRuleSet'; import buildRuleSet from '../shared/buildRuleSet';
import { CardBody } from '../../Card'; import { CardBody } from '../../Card';
function generateExtraData(extra_vars, surveyValues, surveyConfiguration) {
const extraVars = parseVariableField(
yaml.dump(mergeExtraVars(extra_vars, surveyValues))
);
surveyConfiguration.spec.forEach((q) => {
if (!surveyValues[q.variable]) {
delete extraVars[q.variable];
}
});
return extraVars;
}
function ScheduleEdit({ function ScheduleEdit({
hasDaysToKeepField, hasDaysToKeepField,
schedule, schedule,
@@ -45,12 +33,10 @@ function ScheduleEdit({
surveyConfiguration, surveyConfiguration,
originalInstanceGroups, originalInstanceGroups,
originalLabels, originalLabels,
scheduleCredentials = [], scheduleCredentials = []
isPromptTouched = false
) => { ) => {
const { const {
execution_environment, execution_environment,
extra_vars = null,
instance_groups, instance_groups,
inventory, inventory,
credentials = [], credentials = [],
@@ -62,54 +48,45 @@ function ScheduleEdit({
labels, labels,
...submitValues ...submitValues
} = values; } = values;
let extraVars;
const surveyValues = getSurveyValues(values); const surveyValues = getSurveyValues(values);
if ( if (
isPromptTouched && !Object.values(surveyValues).length &&
surveyConfiguration?.spec && surveyConfiguration?.spec?.length
launchConfiguration?.ask_variables_on_launch
) { ) {
submitValues.extra_data = generateExtraData( surveyConfiguration.spec.forEach((q) => {
extra_vars, surveyValues[q.variable] = q.default;
surveyValues, });
surveyConfiguration
);
} else if (
isPromptTouched &&
surveyConfiguration?.spec &&
!launchConfiguration?.ask_variables_on_launch
) {
submitValues.extra_data = generateExtraData(
schedule.extra_data,
surveyValues,
surveyConfiguration
);
} else if (
isPromptTouched &&
launchConfiguration?.ask_variables_on_launch
) {
submitValues.extra_data = parseVariableField(extra_vars);
} }
const initialExtraVars =
launchConfiguration?.ask_variables_on_launch &&
(values.extra_vars || '---');
if (surveyConfiguration?.spec) {
extraVars = yaml.dump(mergeExtraVars(initialExtraVars, surveyValues));
} else {
extraVars = yaml.dump(mergeExtraVars(initialExtraVars, {}));
}
submitValues.extra_data = extraVars && parseVariableField(extraVars);
if ( if (
isPromptTouched && Object.keys(submitValues.extra_data).length === 0 &&
launchConfiguration?.ask_inventory_on_launch && Object.keys(schedule.extra_data).length > 0
inventory
) { ) {
submitValues.extra_data = schedule.extra_data;
}
delete values.extra_vars;
if (inventory) {
submitValues.inventory = inventory.id; submitValues.inventory = inventory.id;
} }
if ( if (execution_environment) {
isPromptTouched &&
launchConfiguration?.ask_execution_environment_on_launch &&
execution_environment
) {
submitValues.execution_environment = execution_environment.id; submitValues.execution_environment = execution_environment.id;
} }
try { try {
if (isPromptTouched && launchConfiguration?.ask_labels_on_launch) { if (launchConfiguration?.ask_labels_on_launch) {
const { labelIds, error } = createNewLabels( const { labelIds, error } = createNewLabels(
values.labels, values.labels,
resource.organization resource.organization
@@ -143,16 +120,9 @@ function ScheduleEdit({
} }
} }
const cleanedRequestData = Object.keys(requestData)
.filter((key) => !key.startsWith('survey_'))
.reduce((acc, key) => {
acc[key] = requestData[key];
return acc;
}, {});
const { const {
data: { id: scheduleId }, data: { id: scheduleId },
} = await SchedulesAPI.update(schedule.id, cleanedRequestData); } = await SchedulesAPI.update(schedule.id, requestData);
const { added: addedCredentials, removed: removedCredentials } = const { added: addedCredentials, removed: removedCredentials } =
getAddedAndRemoved( getAddedAndRemoved(

View File

@@ -6,7 +6,6 @@ import {
InventoriesAPI, InventoriesAPI,
CredentialsAPI, CredentialsAPI,
CredentialTypesAPI, CredentialTypesAPI,
JobTemplatesAPI,
} from 'api'; } from 'api';
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
import ScheduleEdit from './ScheduleEdit'; import ScheduleEdit from './ScheduleEdit';
@@ -126,7 +125,6 @@ describe('<ScheduleEdit />', () => {
id: 27, id: 27,
}, },
}); });
await act(async () => { await act(async () => {
wrapper = mountWithContexts( wrapper = mountWithContexts(
<ScheduleEdit <ScheduleEdit
@@ -208,6 +206,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Run once schedule', name: 'Run once schedule',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY', 'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
}); });
@@ -234,6 +233,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Run every 10 minutes 10 times', name: 'Run every 10 minutes 10 times',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200325T103000 RRULE:INTERVAL=10;FREQ=MINUTELY;COUNT=10', 'DTSTART;TZID=America/New_York:20200325T103000 RRULE:INTERVAL=10;FREQ=MINUTELY;COUNT=10',
}); });
@@ -262,6 +262,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Run every hour until date', name: 'Run every hour until date',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=HOURLY;UNTIL=20200326T144500Z', 'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=HOURLY;UNTIL=20200326T144500Z',
}); });
@@ -287,6 +288,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Run daily', name: 'Run daily',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=DAILY', 'DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=DAILY',
}); });
@@ -314,6 +316,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Run weekly on mon/wed/fri', name: 'Run weekly on mon/wed/fri',
extra_data: {},
rrule: `DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=WEEKLY;BYDAY=${RRule.MO},${RRule.WE},${RRule.FR}`, rrule: `DTSTART;TZID=America/New_York:20200325T104500 RRULE:INTERVAL=1;FREQ=WEEKLY;BYDAY=${RRule.MO},${RRule.WE},${RRule.FR}`,
}); });
}); });
@@ -341,6 +344,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Run on the first day of the month', name: 'Run on the first day of the month',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200401T104500 RRULE:INTERVAL=1;FREQ=MONTHLY;BYMONTHDAY=1', 'DTSTART;TZID=America/New_York:20200401T104500 RRULE:INTERVAL=1;FREQ=MONTHLY;BYMONTHDAY=1',
}); });
@@ -372,6 +376,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Run monthly on the last Tuesday', name: 'Run monthly on the last Tuesday',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200331T110000 RRULE:INTERVAL=1;FREQ=MONTHLY;BYSETPOS=-1;BYDAY=TU', 'DTSTART;TZID=America/New_York:20200331T110000 RRULE:INTERVAL=1;FREQ=MONTHLY;BYSETPOS=-1;BYDAY=TU',
}); });
@@ -401,6 +406,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Yearly on the first day of March', name: 'Yearly on the first day of March',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200301T000000 RRULE:INTERVAL=1;FREQ=YEARLY;BYMONTH=3;BYMONTHDAY=1', 'DTSTART;TZID=America/New_York:20200301T000000 RRULE:INTERVAL=1;FREQ=YEARLY;BYMONTH=3;BYMONTHDAY=1',
}); });
@@ -431,6 +437,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Yearly on the second Friday in April', name: 'Yearly on the second Friday in April',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=2;BYDAY=FR;BYMONTH=4', 'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=2;BYDAY=FR;BYMONTH=4',
}); });
@@ -461,6 +468,7 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Yearly on the first weekday in October', name: 'Yearly on the first weekday in October',
extra_data: {},
rrule: rrule:
'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=1;BYDAY=MO,TU,WE,TH,FR;BYMONTH=10', 'DTSTART;TZID=America/New_York:20200410T111500 RRULE:INTERVAL=1;FREQ=YEARLY;BYSETPOS=1;BYDAY=MO,TU,WE,TH,FR;BYMONTH=10',
}); });
@@ -554,6 +562,7 @@ describe('<ScheduleEdit />', () => {
wrapper.update(); wrapper.update();
expect(SchedulesAPI.update).toBeCalledWith(27, { expect(SchedulesAPI.update).toBeCalledWith(27, {
extra_data: {},
name: 'mock schedule', name: 'mock schedule',
rrule: rrule:
'DTSTART;TZID=America/New_York:20210128T141500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY', 'DTSTART;TZID=America/New_York:20210128T141500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
@@ -624,13 +633,15 @@ describe('<ScheduleEdit />', () => {
endDateTime: undefined, endDateTime: undefined,
startDateTime: undefined, startDateTime: undefined,
description: '', description: '',
extra_data: {},
name: 'foo', name: 'foo',
inventory: 702,
rrule: rrule:
'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY', 'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
}); });
}); });
test('should submit update values properly when prompt is not opened', async () => { test('should submit survey with default values properly, without opening prompt wizard', async () => {
let scheduleSurveyWrapper; let scheduleSurveyWrapper;
await act(async () => { await act(async () => {
scheduleSurveyWrapper = mountWithContexts( scheduleSurveyWrapper = mountWithContexts(
@@ -735,195 +746,9 @@ describe('<ScheduleEdit />', () => {
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, { expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: 'test description', description: 'test description',
name: 'Run once schedule', name: 'Run once schedule',
extra_data: { mc: 'first', text: 'text variable' },
rrule: rrule:
'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY', 'DTSTART;TZID=America/New_York:20200325T100000 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
}); });
}); });
test('should submit update values properly when survey values change', async () => {
JobTemplatesAPI.readSurvey.mockResolvedValue({
data: {
spec: [
{
question_name: 'text',
question_description: '',
required: true,
type: 'text',
variable: 'text',
min: 0,
max: 1024,
default: 'text variable',
choices: '',
new_question: true,
},
],
},
});
JobTemplatesAPI.readLaunch.mockResolvedValue({
data: {
can_start_without_user_input: false,
passwords_needed_to_start: [],
ask_scm_branch_on_launch: false,
ask_variables_on_launch: false,
ask_tags_on_launch: false,
ask_diff_mode_on_launch: false,
ask_skip_tags_on_launch: false,
ask_job_type_on_launch: false,
ask_limit_on_launch: false,
ask_verbosity_on_launch: false,
ask_inventory_on_launch: true,
ask_credential_on_launch: true,
survey_enabled: true,
variables_needed_to_start: [],
credential_needed_to_start: true,
inventory_needed_to_start: true,
job_template_data: {
name: 'Demo Job Template',
id: 7,
description: '',
},
defaults: {
extra_vars: '---',
diff_mode: false,
limit: '',
job_tags: '',
skip_tags: '',
job_type: 'run',
verbosity: 0,
inventory: {
name: null,
id: null,
},
scm_branch: '',
credentials: [],
},
},
});
let scheduleSurveyWrapper;
await act(async () => {
scheduleSurveyWrapper = mountWithContexts(
<ScheduleEdit
schedule={mockSchedule}
resource={{
id: 700,
type: 'job_template',
iventory: 1,
summary_fields: {
credentials: [
{ name: 'job template credential', id: 75, kind: 'ssh' },
],
},
name: 'Foo Job Template',
description: '',
}}
resourceDefaultCredentials={[]}
launchConfig={{
can_start_without_user_input: false,
passwords_needed_to_start: [],
ask_scm_branch_on_launch: false,
ask_variables_on_launch: false,
ask_tags_on_launch: false,
ask_diff_mode_on_launch: false,
ask_skip_tags_on_launch: false,
ask_job_type_on_launch: false,
ask_limit_on_launch: false,
ask_verbosity_on_launch: false,
ask_inventory_on_launch: true,
ask_credential_on_launch: true,
survey_enabled: true,
variables_needed_to_start: [],
credential_needed_to_start: true,
inventory_needed_to_start: true,
job_template_data: {
name: 'Demo Job Template',
id: 7,
description: '',
},
defaults: {
extra_vars: '---',
diff_mode: false,
limit: '',
job_tags: '',
skip_tags: '',
job_type: 'run',
verbosity: 0,
inventory: {
name: null,
id: null,
},
scm_branch: '',
credentials: [],
},
}}
surveyConfig={{
spec: [
{
question_name: 'text',
question_description: '',
required: true,
type: 'text',
variable: 'text',
min: 0,
max: 1024,
default: 'text variable',
choices: '',
new_question: true,
},
],
}}
/>
);
});
scheduleSurveyWrapper.update();
await act(async () =>
scheduleSurveyWrapper
.find('Button[aria-label="Prompt"]')
.prop('onClick')()
);
scheduleSurveyWrapper.update();
expect(scheduleSurveyWrapper.find('WizardNavItem').length).toBe(4);
await act(async () =>
scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
);
scheduleSurveyWrapper.update();
await act(async () =>
scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
);
scheduleSurveyWrapper.update();
await act(async () =>
scheduleSurveyWrapper
.find('input#survey-question-text')
.simulate('change', {
target: { value: 'foo', name: 'survey_text' },
})
);
scheduleSurveyWrapper.update();
await act(async () =>
scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
);
scheduleSurveyWrapper.update();
await act(async () =>
scheduleSurveyWrapper.find('WizardFooterInternal').prop('onNext')()
);
scheduleSurveyWrapper.update();
expect(scheduleSurveyWrapper.find('Wizard').length).toBe(0);
await act(async () =>
scheduleSurveyWrapper.find('Button[aria-label="Save"]').prop('onClick')()
);
expect(SchedulesAPI.update).toHaveBeenCalledWith(27, {
description: '',
name: 'mock schedule',
inventory: 702,
extra_data: {
text: 'foo',
},
rrule:
'DTSTART;TZID=America/New_York:20200402T144500 RRULE:INTERVAL=1;COUNT=1;FREQ=MINUTELY',
});
});
}); });

View File

@@ -40,7 +40,6 @@ function ScheduleForm({
resourceDefaultCredentials, resourceDefaultCredentials,
}) { }) {
const [isWizardOpen, setIsWizardOpen] = useState(false); const [isWizardOpen, setIsWizardOpen] = useState(false);
const [isPromptTouched, setIsPromptTouched] = useState(false);
const [isSaveDisabled, setIsSaveDisabled] = useState(false); const [isSaveDisabled, setIsSaveDisabled] = useState(false);
const originalLabels = useRef([]); const originalLabels = useRef([]);
const originalInstanceGroups = useRef([]); const originalInstanceGroups = useRef([]);
@@ -493,8 +492,7 @@ function ScheduleForm({
surveyConfig, surveyConfig,
originalInstanceGroups.current, originalInstanceGroups.current,
originalLabels.current, originalLabels.current,
credentials, credentials
isPromptTouched
); );
}} }}
validate={validate} validate={validate}
@@ -520,7 +518,6 @@ function ScheduleForm({
onSave={() => { onSave={() => {
setIsWizardOpen(false); setIsWizardOpen(false);
setIsSaveDisabled(false); setIsSaveDisabled(false);
setIsPromptTouched(true);
}} }}
resourceDefaultCredentials={resourceDefaultCredentials} resourceDefaultCredentials={resourceDefaultCredentials}
labels={originalLabels.current} labels={originalLabels.current}

View File

@@ -80,7 +80,7 @@ function Dashboard() {
<Trans> <Trans>
<p> <p>
<InfoCircleIcon /> A tech preview of the new {brandName} user <InfoCircleIcon /> A tech preview of the new {brandName} user
interface can be found <a href="/ui_next">here</a>. interface can be found <a href="/ui_next/dashboard">here</a>.
</p> </p>
</Trans> </Trans>
</Banner> </Banner>

View File

@@ -191,7 +191,7 @@ function InstancePeerList({ setBreadcrumb }) {
fetchPeers(); fetchPeers();
addToast({ addToast({
id: instancesPeerToAssociate, id: instancesPeerToAssociate,
title: t`Please be sure to run the install bundle for the selected instance(s) again in order to see changes take effect.`, title: t`Peers update on ${instance.hostname}. Please be sure to run the install bundle for ${instance.hostname} again in order to see changes take effect.`,
variant: AlertVariant.success, variant: AlertVariant.success,
hasTimeout: true, hasTimeout: true,
}); });

View File

@@ -21,8 +21,6 @@ const ansibleDocUrls = {
'https://docs.ansible.com/ansible/latest/collections/community/vmware/vmware_vm_inventory_inventory.html', 'https://docs.ansible.com/ansible/latest/collections/community/vmware/vmware_vm_inventory_inventory.html',
constructed: constructed:
'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html', 'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
terraform:
'https://github.com/ansible-collections/cloud.terraform/blob/stable-statefile-inventory/plugins/inventory/terraform_state.py',
}; };
const getInventoryHelpTextStrings = () => ({ const getInventoryHelpTextStrings = () => ({
@@ -121,10 +119,10 @@ const getInventoryHelpTextStrings = () => ({
<br /> <br />
{value && ( {value && (
<div> <div>
{t`If you want the Inventory Source to update on launch , click on Update on Launch, {t`If you want the Inventory Source to update on
and also go to `} launch and on project update, click on Update on launch, and also go to`}
<Link to={`/projects/${value.id}/details`}> {value.name} </Link> <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
{t`and click on Update Revision on Launch.`} {t`and click on Update Revision on Launch`}
</div> </div>
)} )}
</> </>
@@ -140,8 +138,8 @@ const getInventoryHelpTextStrings = () => ({
<br /> <br />
{value && ( {value && (
<div> <div>
{t`If you want the Inventory Source to update on launch , click on Update on Launch, {t`If you want the Inventory Source to update on
and also go to `} launch and on project update, click on Update on launch, and also go to`}
<Link to={`/projects/${value.id}/details`}> {value.name} </Link> <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
{t`and click on Update Revision on Launch`} {t`and click on Update Revision on Launch`}
</div> </div>

View File

@@ -23,7 +23,6 @@ import {
SCMSubForm, SCMSubForm,
SatelliteSubForm, SatelliteSubForm,
ControllerSubForm, ControllerSubForm,
TerraformSubForm,
VMwareSubForm, VMwareSubForm,
VirtualizationSubForm, VirtualizationSubForm,
} from './InventorySourceSubForms'; } from './InventorySourceSubForms';
@@ -215,14 +214,6 @@ const InventorySourceFormFields = ({
} }
/> />
), ),
terraform: (
<TerraformSubForm
autoPopulateCredential={
!source?.id || source?.source !== 'terraform'
}
sourceOptions={sourceOptions}
/>
),
vmware: ( vmware: (
<VMwareSubForm <VMwareSubForm
autoPopulateCredential={ autoPopulateCredential={

View File

@@ -38,7 +38,6 @@ describe('<InventorySourceForm />', () => {
['openstack', 'OpenStack'], ['openstack', 'OpenStack'],
['rhv', 'Red Hat Virtualization'], ['rhv', 'Red Hat Virtualization'],
['controller', 'Red Hat Ansible Automation Platform'], ['controller', 'Red Hat Ansible Automation Platform'],
['terraform', 'Terraform State'],
], ],
}, },
}, },

View File

@@ -1,59 +0,0 @@
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';
import { t } from '@lingui/macro';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import { useConfig } from 'contexts/Config';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
OptionsField,
VerbosityField,
EnabledVarField,
EnabledValueField,
HostFilterField,
SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';
const TerraformSubForm = ({ autoPopulateCredential }) => {
const helpText = getHelpText();
const { setFieldValue, setFieldTouched } = useFormikContext();
const [credentialField, credentialMeta, credentialHelpers] =
useField('credential');
const config = useConfig();
const handleCredentialUpdate = useCallback(
(value) => {
setFieldValue('credential', value);
setFieldTouched('credential', true, false);
},
[setFieldValue, setFieldTouched]
);
const docsBaseUrl = getDocsBaseUrl(config);
return (
<>
<CredentialLookup
credentialTypeNamespace="terraform"
label={t`Credential`}
helperTextInvalid={credentialMeta.error}
isValid={!credentialMeta.touched || !credentialMeta.error}
onBlur={() => credentialHelpers.setTouched()}
onChange={handleCredentialUpdate}
value={credentialField.value}
required
autoPopulate={autoPopulateCredential}
validate={required(t`Select a value for this field`)}
/>
<VerbosityField />
<HostFilterField />
<EnabledVarField />
<EnabledValueField />
<OptionsField />
<SourceVarsField
popoverContent={helpText.sourceVars(docsBaseUrl, 'terraform')}
/>
</>
);
};
export default TerraformSubForm;

View File

@@ -1,70 +0,0 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import TerraformSubForm from './TerraformSubForm';
jest.mock('../../../../api');
const initialValues = {
credential: null,
overwrite: false,
overwrite_vars: false,
source_path: '',
source_project: null,
source_script: null,
source_vars: '---\n',
update_cache_timeout: 0,
update_on_launch: true,
verbosity: 1,
};
const mockSourceOptions = {
actions: {
POST: {},
},
};
describe('<TerraformSubForm />', () => {
let wrapper;
beforeEach(async () => {
CredentialsAPI.read.mockResolvedValue({
data: { count: 0, results: [] },
});
await act(async () => {
wrapper = mountWithContexts(
<Formik initialValues={initialValues}>
<TerraformSubForm sourceOptions={mockSourceOptions} />
</Formik>
);
});
});
afterAll(() => {
jest.clearAllMocks();
});
test('should render subform fields', () => {
expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
expect(
wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
).toHaveLength(1);
expect(
wrapper.find('VariablesField[label="Source variables"]')
).toHaveLength(1);
});
test('should make expected api calls', () => {
expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
expect(CredentialsAPI.read).toHaveBeenCalledWith({
credential_type__namespace: 'terraform',
order_by: 'name',
page: 1,
page_size: 5,
});
});
});

View File

@@ -6,6 +6,5 @@ export { default as OpenStackSubForm } from './OpenStackSubForm';
export { default as SCMSubForm } from './SCMSubForm'; export { default as SCMSubForm } from './SCMSubForm';
export { default as SatelliteSubForm } from './SatelliteSubForm'; export { default as SatelliteSubForm } from './SatelliteSubForm';
export { default as ControllerSubForm } from './ControllerSubForm'; export { default as ControllerSubForm } from './ControllerSubForm';
export { default as TerraformSubForm } from './TerraformSubForm';
export { default as VMwareSubForm } from './VMwareSubForm'; export { default as VMwareSubForm } from './VMwareSubForm';
export { default as VirtualizationSubForm } from './VirtualizationSubForm'; export { default as VirtualizationSubForm } from './VirtualizationSubForm';

View File

@@ -30,7 +30,7 @@ function SubscriptionUsage() {
<Trans> <Trans>
<p> <p>
<InfoCircleIcon /> A tech preview of the new {brandName} user <InfoCircleIcon /> A tech preview of the new {brandName} user
interface can be found <a href="/ui_next">here</a>. interface can be found <a href="/ui_next/dashboard">here</a>.
</p> </p>
</Trans> </Trans>
</Banner> </Banner>

View File

@@ -1,7 +1,7 @@
export default function getSurveyValues(values) { export default function getSurveyValues(values) {
const surveyValues = {}; const surveyValues = {};
Object.keys(values).forEach((key) => { Object.keys(values).forEach((key) => {
if (key.startsWith('survey_')) { if (key.startsWith('survey_') && values[key] !== []) {
if (Array.isArray(values[key]) && values[key].length === 0) { if (Array.isArray(values[key]) && values[key].length === 0) {
return; return;
} }

View File

@@ -1,12 +1,7 @@
import yaml from 'js-yaml'; import yaml from 'js-yaml';
export default function mergeExtraVars(extraVars = '', survey = {}) { export default function mergeExtraVars(extraVars = '', survey = {}) {
let vars = {}; const vars = yaml.load(extraVars) || {};
if (typeof extraVars === 'string') {
vars = yaml.load(extraVars);
} else if (typeof extraVars === 'object') {
vars = extraVars;
}
return { return {
...vars, ...vars,
...survey, ...survey,

View File

@@ -2,9 +2,7 @@
# All Rights Reserved. # All Rights Reserved.
from django.conf import settings from django.conf import settings
from django.urls import path, re_path, include from django.urls import re_path, include
from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls
from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect
@@ -12,16 +10,7 @@ from awx.main.views import handle_400, handle_403, handle_404, handle_500, handl
urlpatterns = [ urlpatterns = [
re_path(r'', include('awx.ui.urls', namespace='ui')), re_path(r'', include('awx.ui.urls', namespace='ui')),
re_path(r'^ui_next/.*', include('awx.ui_next.urls', namespace='ui_next')), re_path(r'^ui_next/.*', include('awx.ui_next.urls', namespace='ui_next')),
path('api/', include('awx.api.urls', namespace='api')), re_path(r'^api/', include('awx.api.urls', namespace='api')),
]
if settings.OPTIONAL_API_URLPATTERN_PREFIX:
urlpatterns += [
path(f'api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}/', include('awx.api.urls')),
]
urlpatterns += [
re_path(r'^api/v2/', include(resource_api_urls)),
re_path(r'^sso/', include('awx.sso.urls', namespace='sso')), re_path(r'^sso/', include('awx.sso.urls', namespace='sso')),
re_path(r'^sso/', include('social_django.urls', namespace='social')), re_path(r'^sso/', include('social_django.urls', namespace='social')),
re_path(r'^(?:api/)?400.html$', handle_400), re_path(r'^(?:api/)?400.html$', handle_400),

View File

@@ -147,12 +147,8 @@ def main():
if redirect_uris is not None: if redirect_uris is not None:
application_fields['redirect_uris'] = ' '.join(redirect_uris) application_fields['redirect_uris'] = ' '.join(redirect_uris)
response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False) # If the state was present and we can let the module build or update the existing application, this will return on its own
if 'client_id' in response: module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application')
module.json_output['client_id'] = response['client_id']
if 'client_secret' in response:
module.json_output['client_secret'] = response['client_secret']
module.exit_json(**module.json_output)
if __name__ == '__main__': if __name__ == '__main__':

View File

@@ -155,4 +155,4 @@ def test_build_notification_message_undefined(run_module, admin_user, organizati
nt = NotificationTemplate.objects.get(id=result['id']) nt = NotificationTemplate.objects.get(id=result['id'])
body = job.build_notification_message(nt, 'running') body = job.build_notification_message(nt, 'running')
assert '{"started_by": "My Placeholder"}' in body[1] assert 'The template rendering return a blank body' in body[1]

View File

@@ -1,63 +1,63 @@
--- ---
- name: Generate a random string for test - name: Generate a random string for test
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate names - name: Generate names
ansible.builtin.set_fact: set_fact:
inv_name: "AWX-Collection-tests-ad_hoc_command_cancel-inventory-{{ test_id }}" inv_name: "AWX-Collection-tests-ad_hoc_command_cancel-inventory-{{ test_id }}"
ssh_cred_name: "AWX-Collection-tests-ad_hoc_command_cancel-ssh-cred-{{ test_id }}" ssh_cred_name: "AWX-Collection-tests-ad_hoc_command_cancel-ssh-cred-{{ test_id }}"
org_name: "AWX-Collection-tests-ad_hoc_command_cancel-org-{{ test_id }}" org_name: "AWX-Collection-tests-ad_hoc_command_cancel-org-{{ test_id }}"
- name: Create a New Organization - name: Create a New Organization
awx.awx.organization: organization:
name: "{{ org_name }}" name: "{{ org_name }}"
- name: Create an Inventory - name: Create an Inventory
awx.awx.inventory: inventory:
name: "{{ inv_name }}" name: "{{ inv_name }}"
organization: "{{ org_name }}" organization: "{{ org_name }}"
state: present state: present
- name: Add localhost to the Inventory - name: Add localhost to the Inventory
awx.awx.host: host:
name: localhost name: localhost
inventory: "{{ inv_name }}" inventory: "{{ inv_name }}"
variables: variables:
ansible_connection: local ansible_connection: local
- name: Create a Credential - name: Create a Credential
awx.awx.credential: credential:
name: "{{ ssh_cred_name }}" name: "{{ ssh_cred_name }}"
organization: "{{ org_name }}" organization: "{{ org_name }}"
credential_type: 'Machine' credential_type: 'Machine'
state: present state: present
- name: Launch an Ad Hoc Command - name: Launch an Ad Hoc Command
awx.awx.ad_hoc_command: ad_hoc_command:
inventory: "{{ inv_name }}" inventory: "{{ inv_name }}"
credential: "{{ ssh_cred_name }}" credential: "{{ ssh_cred_name }}"
module_name: "command" module_name: "command"
module_args: "sleep 100" module_args: "sleep 100"
register: command register: command
- ansible.builtin.assert: - assert:
that: that:
- "command is changed" - "command is changed"
- name: Cancel the command - name: Cancel the command
awx.awx.ad_hoc_command_cancel: ad_hoc_command_cancel:
command_id: "{{ command.id }}" command_id: "{{ command.id }}"
request_timeout: 60 request_timeout: 60
register: results register: results
- ansible.builtin.assert: - assert:
that: that:
- results is changed - results is changed
- name: "Wait for up to a minute until the job enters the can_cancel: False state" - name: "Wait for up to a minute until the job enters the can_cancel: False state"
ansible.builtin.debug: debug:
msg: "The job can_cancel status has transitioned into False, we can proceed with testing" msg: "The job can_cancel status has transitioned into False, we can proceed with testing"
until: not job_status until: not job_status
retries: 6 retries: 6
@@ -66,51 +66,51 @@
job_status: "{{ lookup('awx.awx.controller_api', 'ad_hoc_commands/'+ command.id | string +'/cancel')['can_cancel'] }}" job_status: "{{ lookup('awx.awx.controller_api', 'ad_hoc_commands/'+ command.id | string +'/cancel')['can_cancel'] }}"
- name: Cancel the command with hard error if it's not running - name: Cancel the command with hard error if it's not running
awx.awx.ad_hoc_command_cancel: ad_hoc_command_cancel:
command_id: "{{ command.id }}" command_id: "{{ command.id }}"
fail_if_not_running: true fail_if_not_running: true
register: results register: results
ignore_errors: true ignore_errors: true
- ansible.builtin.assert: - assert:
that: that:
- results is failed - results is failed
- name: Cancel an already canceled command (assert failure) - name: Cancel an already canceled command (assert failure)
awx.awx.ad_hoc_command_cancel: ad_hoc_command_cancel:
command_id: "{{ command.id }}" command_id: "{{ command.id }}"
fail_if_not_running: true fail_if_not_running: true
register: results register: results
ignore_errors: true ignore_errors: true
- ansible.builtin.assert: - assert:
that: that:
- results is failed - results is failed
- name: Check module fails with correct msg - name: Check module fails with correct msg
awx.awx.ad_hoc_command_cancel: ad_hoc_command_cancel:
command_id: 9999999999 command_id: 9999999999
register: result register: result
ignore_errors: true ignore_errors: true
- ansible.builtin.assert: - assert:
that: that:
- "result.msg == 'Unable to find command with id 9999999999'" - "result.msg == 'Unable to find command with id 9999999999'"
- name: Delete the Credential - name: Delete the Credential
awx.awx.credential: credential:
name: "{{ ssh_cred_name }}" name: "{{ ssh_cred_name }}"
organization: "{{ org_name }}" organization: "{{ org_name }}"
credential_type: 'Machine' credential_type: 'Machine'
state: absent state: absent
- name: Delete the Inventory - name: Delete the Inventory
awx.awx.inventory: inventory:
name: "{{ inv_name }}" name: "{{ inv_name }}"
organization: "{{ org_name }}" organization: "{{ org_name }}"
state: absent state: absent
- name: Remove the Organization - name: Remove the Organization
awx.awx.organization: organization:
name: "{{ org_name }}" name: "{{ org_name }}"
state: absent state: absent

View File

@@ -103,7 +103,6 @@
- assert: - assert:
that: that:
- "result is changed" - "result is changed"
- "'client_secret' in result"
- name: Rename an inventory - name: Rename an inventory
application: application:

View File

@@ -1,23 +1,23 @@
--- ---
- name: Generate a test ID - name: Generate a test ID
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate hostnames - name: Generate hostnames
ansible.builtin.set_fact: set_fact:
hostname1: "AWX-Collection-tests-instance1.{{ test_id }}.example.com" hostname1: "AWX-Collection-tests-instance1.{{ test_id }}.example.com"
hostname2: "AWX-Collection-tests-instance2.{{ test_id }}.example.com" hostname2: "AWX-Collection-tests-instance2.{{ test_id }}.example.com"
hostname3: "AWX-Collection-tests-instance3.{{ test_id }}.example.com" hostname3: "AWX-Collection-tests-instance3.{{ test_id }}.example.com"
register: facts register: facts
- name: Get the k8s setting - name: Get the k8s setting
ansible.builtin.set_fact: set_fact:
IS_K8S: "{{ controller_settings['IS_K8S'] | default(False) }}" IS_K8S: "{{ controller_settings['IS_K8S'] | default(False) }}"
vars: vars:
controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/all') }}" controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/all') }}"
- ansible.builtin.debug: - debug:
msg: "Skipping instance test since this is instance is not running on a K8s platform" msg: "Skipping instance test since this is instance is not running on a K8s platform"
when: not IS_K8S when: not IS_K8S
@@ -32,7 +32,7 @@
- "{{ hostname2 }}" - "{{ hostname2 }}"
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is changed - result is changed
@@ -44,7 +44,7 @@
capacity_adjustment: 0.4 capacity_adjustment: 0.4
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is changed - result is changed
@@ -54,7 +54,7 @@
capacity_adjustment: 0.7 capacity_adjustment: 0.7
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is changed - result is changed
@@ -78,7 +78,7 @@
node_state: installed node_state: installed
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is changed - result is changed
@@ -89,7 +89,7 @@
node_state: installed node_state: installed
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is changed - result is changed
@@ -103,7 +103,7 @@
- "{{ hostname2 }}" - "{{ hostname2 }}"
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is changed - result is changed
@@ -115,7 +115,7 @@
peers: [] peers: []
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is changed - result is changed

View File

@@ -26,7 +26,7 @@
name: "{{ project_name }}" name: "{{ project_name }}"
organization: "{{ org_name }}" organization: "{{ org_name }}"
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
wait: true wait: true
- name: Create a git project with same name, different org - name: Create a git project with same name, different org

View File

@@ -1,11 +1,11 @@
--- ---
- name: Generate a random string for test - name: Generate a random string for test
ansible.builtin.set_fact: set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}" test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined when: test_id is not defined
- name: Generate usernames - name: Generate usernames
ansible.builtin.set_fact: set_fact:
usernames: usernames:
- "AWX-Collection-tests-api_lookup-user1-{{ test_id }}" - "AWX-Collection-tests-api_lookup-user1-{{ test_id }}"
- "AWX-Collection-tests-api_lookup-user2-{{ test_id }}" - "AWX-Collection-tests-api_lookup-user2-{{ test_id }}"
@@ -20,7 +20,7 @@
register: controller_meta register: controller_meta
- name: Generate the name of our plugin - name: Generate the name of our plugin
ansible.builtin.set_fact: set_fact:
plugin_name: "{{ controller_meta.prefix }}.controller_api" plugin_name: "{{ controller_meta.prefix }}.controller_api"
- name: Create all of our users - name: Create all of our users
@@ -38,7 +38,7 @@
register: results register: results
ignore_errors: true ignore_errors: true
- ansible.builtin.assert: - assert:
that: that:
- "'dne' in (results.msg | lower)" - "'dne' in (results.msg | lower)"
@@ -49,48 +49,48 @@
loop: "{{ hosts }}" loop: "{{ hosts }}"
- name: Test too many params (failure from validation of terms) - name: Test too many params (failure from validation of terms)
ansible.builtin.set_fact: set_fact:
junk: "{{ query(plugin_name, 'users', 'teams', query_params={}, ) }}" junk: "{{ query(plugin_name, 'users', 'teams', query_params={}, ) }}"
ignore_errors: true ignore_errors: true
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is failed - result is failed
- "'You must pass exactly one endpoint to query' in result.msg" - "'You must pass exactly one endpoint to query' in result.msg"
- name: Try to load invalid endpoint - name: Try to load invalid endpoint
ansible.builtin.set_fact: set_fact:
junk: "{{ query(plugin_name, 'john', query_params={}, ) }}" junk: "{{ query(plugin_name, 'john', query_params={}, ) }}"
ignore_errors: true ignore_errors: true
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is failed - result is failed
- "'The requested object could not be found at' in result.msg" - "'The requested object could not be found at' in result.msg"
- name: Load user of a specific name without promoting objects - name: Load user of a specific name without promoting objects
ansible.builtin.set_fact: set_fact:
users_list: "{{ lookup(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_objects=False) }}" users_list: "{{ lookup(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_objects=False) }}"
- ansible.builtin.assert: - assert:
that: that:
- users_list['results'] | length() == 1 - users_list['results'] | length() == 1
- users_list['count'] == 1 - users_list['count'] == 1
- users_list['results'][0]['id'] == user_creation_results['results'][0]['id'] - users_list['results'][0]['id'] == user_creation_results['results'][0]['id']
- name: Load user of a specific name with promoting objects - name: Load user of a specific name with promoting objects
ansible.builtin.set_fact: set_fact:
user_objects: "{{ query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_objects=True ) }}" user_objects: "{{ query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_objects=True ) }}"
- ansible.builtin.assert: - assert:
that: that:
- user_objects | length() == 1 - user_objects | length() == 1
- users_list['results'][0]['id'] == user_objects[0]['id'] - users_list['results'][0]['id'] == user_objects[0]['id']
- name: Loop over one user with the loop syntax - name: Loop over one user with the loop syntax
ansible.builtin.assert: assert:
that: that:
- item['id'] == user_creation_results['results'][0]['id'] - item['id'] == user_creation_results['results'][0]['id']
loop: "{{ query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] } ) }}" loop: "{{ query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] } ) }}"
@@ -98,91 +98,91 @@
label: "{{ item.id }}" label: "{{ item.id }}"
- name: Get a page of users as just ids - name: Get a page of users as just ids
ansible.builtin.set_fact: set_fact:
users: "{{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 2 }, return_ids=True ) }}" users: "{{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 2 }, return_ids=True ) }}"
- debug: - debug:
msg: "{{ users }}" msg: "{{ users }}"
- name: assert that user list has 2 ids only and that they are strings, not ints - name: Assert that user list has 2 ids only and that they are strings, not ints
ansible.builtin.assert: assert:
that: that:
- users | length() == 2 - users | length() == 2
- user_creation_results['results'][0]['id'] not in users - user_creation_results['results'][0]['id'] not in users
- user_creation_results['results'][0]['id'] | string in users - user_creation_results['results'][0]['id'] | string in users
- name: Get all users of a system through next attribute - name: Get all users of a system through next attribute
ansible.builtin.set_fact: set_fact:
users: "{{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 1 }, return_all=true ) }}" users: "{{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 1 }, return_all=true ) }}"
- ansible.builtin.assert: - assert:
that: that:
- users | length() >= 3 - users | length() >= 3
- name: Get all of the users created with a max_objects of 1 - name: Get all of the users created with a max_objects of 1
ansible.builtin.set_fact: set_fact:
users: "{{ lookup(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 1 }, return_all=true, max_objects=1 ) }}" users: "{{ lookup(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 1 }, return_all=true, max_objects=1 ) }}"
ignore_errors: true ignore_errors: true
register: max_user_errors register: max_user_errors
- ansible.builtin.assert: - assert:
that: that:
- max_user_errors is failed - max_user_errors is failed
- "'List view at users returned 3 objects, which is more than the maximum allowed by max_objects' in max_user_errors.msg" - "'List view at users returned 3 objects, which is more than the maximum allowed by max_objects' in max_user_errors.msg"
- name: Get the ID of the first user created and verify that it is correct - name: Get the ID of the first user created and verify that it is correct
ansible.builtin.assert: assert:
that: "query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_ids=True)[0] == user_creation_results['results'][0]['id'] | string" that: "query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_ids=True)[0] == user_creation_results['results'][0]['id'] | string"
- name: Try to get an ID of someone who does not exist - name: Try to get an ID of someone who does not exist
ansible.builtin.set_fact: set_fact:
failed_user_id: "{{ query(plugin_name, 'users', query_params={ 'username': 'john jacob jingleheimer schmidt' }, expect_one=True) }}" failed_user_id: "{{ query(plugin_name, 'users', query_params={ 'username': 'john jacob jingleheimer schmidt' }, expect_one=True) }}"
register: result register: result
ignore_errors: true ignore_errors: true
- ansible.builtin.assert: - assert:
that: that:
- result is failed - result is failed
- "'Expected one object from endpoint users' in result['msg']" - "'Expected one object from endpoint users' in result['msg']"
- name: Lookup too many users - name: Lookup too many users
ansible.builtin.set_fact: set_fact:
too_many_user_ids: " {{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id }, expect_one=True) }}" too_many_user_ids: " {{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id }, expect_one=True) }}"
register: results register: results
ignore_errors: true ignore_errors: true
- ansible.builtin.assert: - assert:
that: that:
- results is failed - results is failed
- "'Expected one object from endpoint users, but obtained 3' in results['msg']" - "'Expected one object from endpoint users, but obtained 3' in results['msg']"
- name: Get the ping page - name: Get the ping page
ansible.builtin.set_fact: set_fact:
ping_data: "{{ lookup(plugin_name, 'ping' ) }}" ping_data: "{{ lookup(plugin_name, 'ping' ) }}"
register: results register: results
- ansible.builtin.assert: - assert:
that: that:
- results is succeeded - results is succeeded
- "'active_node' in ping_data" - "'active_node' in ping_data"
- name: "Make sure that expect_objects fails on an API page" - name: "Make sure that expect_objects fails on an API page"
ansible.builtin.set_fact: set_fact:
my_var: "{{ lookup(plugin_name, 'settings/ui', expect_objects=True) }}" my_var: "{{ lookup(plugin_name, 'settings/ui', expect_objects=True) }}"
ignore_errors: true ignore_errors: true
register: results register: results
- ansible.builtin.assert: - assert:
that: that:
- results is failed - results is failed
- "'Did not obtain a list or detail view at settings/ui, and expect_objects or expect_one is set to True' in results.msg" - "'Did not obtain a list or detail view at settings/ui, and expect_objects or expect_one is set to True' in results.msg"
# DOCS Example Tests # DOCS Example Tests
- name: Load the UI settings - name: Load the UI settings
ansible.builtin.set_fact: set_fact:
controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/ui') }}" controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/ui') }}"
- ansible.builtin.assert: - assert:
that: that:
- "'CUSTOM_LOGO' in controller_settings" - "'CUSTOM_LOGO' in controller_settings"
@@ -191,7 +191,7 @@
msg: "Admin users: {{ query('awx.awx.controller_api', 'users', query_params={ 'is_superuser': true }) | map(attribute='username') | join(', ') }}" msg: "Admin users: {{ query('awx.awx.controller_api', 'users', query_params={ 'is_superuser': true }) | map(attribute='username') | join(', ') }}"
register: results register: results
- ansible.builtin.assert: - assert:
that: that:
- "'admin' in results.msg" - "'admin' in results.msg"
@@ -211,7 +211,7 @@
register: role_revoke register: role_revoke
when: "query('awx.awx.controller_api', 'users', query_params={ 'username': 'DNE_TESTING' }) | length == 1" when: "query('awx.awx.controller_api', 'users', query_params={ 'username': 'DNE_TESTING' }) | length == 1"
- ansible.builtin.assert: - assert:
that: that:
- role_revoke is skipped - role_revoke is skipped
@@ -227,7 +227,7 @@
) | map(attribute='name') | list }} ) | map(attribute='name') | list }}
register: group_creation register: group_creation
- ansible.builtin.assert: - assert:
that: group_creation is changed that: group_creation is changed
always: always:

View File

@@ -31,7 +31,7 @@
name: "{{ project_name1 }}" name: "{{ project_name1 }}"
organization: Default organization: Default
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
wait: true wait: true
register: result register: result
@@ -44,7 +44,7 @@
name: "{{ project_name1 }}" name: "{{ project_name1 }}"
organization: Default organization: Default
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
wait: true wait: true
state: exists state: exists
register: result register: result
@@ -58,7 +58,7 @@
name: "{{ project_name1 }}" name: "{{ project_name1 }}"
organization: Default organization: Default
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
wait: true wait: true
state: exists state: exists
request_timeout: .001 request_timeout: .001
@@ -75,7 +75,7 @@
name: "{{ project_name1 }}" name: "{{ project_name1 }}"
organization: Default organization: Default
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
wait: true wait: true
state: absent state: absent
register: result register: result
@@ -89,7 +89,7 @@
name: "{{ project_name1 }}" name: "{{ project_name1 }}"
organization: Default organization: Default
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
wait: true wait: true
state: exists state: exists
register: result register: result
@@ -103,7 +103,7 @@
name: "{{ project_name1 }}" name: "{{ project_name1 }}"
organization: Default organization: Default
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
wait: false wait: false
register: result register: result
ignore_errors: true ignore_errors: true
@@ -137,7 +137,7 @@
name: "{{ project_name2 }}" name: "{{ project_name2 }}"
organization: "{{ org_name }}" organization: "{{ org_name }}"
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
scm_credential: "{{ cred_name }}" scm_credential: "{{ cred_name }}"
check_mode: true check_mode: true
@@ -162,7 +162,7 @@
name: "{{ project_name2 }}" name: "{{ project_name2 }}"
organization: Non_Existing_Org organization: Non_Existing_Org
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
scm_credential: "{{ cred_name }}" scm_credential: "{{ cred_name }}"
register: result register: result
ignore_errors: true ignore_errors: true
@@ -179,7 +179,7 @@
name: "{{ project_name2 }}" name: "{{ project_name2 }}"
organization: "{{ org_name }}" organization: "{{ org_name }}"
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
scm_credential: Non_Existing_Credential scm_credential: Non_Existing_Credential
register: result register: result
ignore_errors: true ignore_errors: true
@@ -191,7 +191,7 @@
- "'Non_Existing_Credential' in result.msg" - "'Non_Existing_Credential' in result.msg"
- "result.total_results == 0" - "result.total_results == 0"
- name: Create a git project using a branch and allowing branch override - name: Create a git project without credentials without waiting
project: project:
name: "{{ project_name3 }}" name: "{{ project_name3 }}"
organization: Default organization: Default

View File

@@ -13,7 +13,7 @@
name: "{{ project_name1 }}" name: "{{ project_name1 }}"
organization: Default organization: Default
scm_type: git scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples scm_url: https://github.com/ansible/test-playbooks
wait: false wait: false
register: project_create_result register: project_create_result

View File

@@ -1,50 +1,50 @@
--- ---
- name: Get our collection package - name: Get our collection package
awx.awx.controller_meta: controller_meta:
register: controller_meta register: controller_meta
- name: Generate the name of our plugin - name: Generate the name of our plugin
ansible.builtin.set_fact: set_fact:
plugin_name: "{{ controller_meta.prefix }}.schedule_rrule" plugin_name: "{{ controller_meta.prefix }}.schedule_rrule"
- name: Test too many params (failure from validation of terms) - name: Test too many params (failure from validation of terms)
ansible.builtin.debug: debug:
msg: "{{ query(plugin_name | string, 'none', 'weekly', start_date='2020-4-16 03:45:07') }}" msg: "{{ query(plugin_name | string, 'none', 'weekly', start_date='2020-4-16 03:45:07') }}"
ignore_errors: true ignore_errors: true
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is failed - result is failed
- "'You may only pass one schedule type in at a time' in result.msg" - "'You may only pass one schedule type in at a time' in result.msg"
- name: Test invalid frequency (failure from validation of term) - name: Test invalid frequency (failure from validation of term)
ansible.builtin.debug: debug:
msg: "{{ query(plugin_name, 'john', start_date='2020-4-16 03:45:07') }}" msg: "{{ query(plugin_name, 'john', start_date='2020-4-16 03:45:07') }}"
ignore_errors: true ignore_errors: true
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is failed - result is failed
- "'Frequency of john is invalid' in result.msg" - "'Frequency of john is invalid' in result.msg"
- name: Test an invalid start date (generic failure case from get_rrule) - name: Test an invalid start date (generic failure case from get_rrule)
ansible.builtin.debug: debug:
msg: "{{ query(plugin_name, 'none', start_date='invalid') }}" msg: "{{ query(plugin_name, 'none', start_date='invalid') }}"
ignore_errors: true ignore_errors: true
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result is failed - result is failed
- "'Parameter start_date must be in the format YYYY-MM-DD' in result.msg" - "'Parameter start_date must be in the format YYYY-MM-DD' in result.msg"
- name: Test end_on as count (generic success case) - name: Test end_on as count (generic success case)
ansible.builtin.debug: debug:
msg: "{{ query(plugin_name, 'minute', start_date='2020-4-16 03:45:07', end_on='2') }}" msg: "{{ query(plugin_name, 'minute', start_date='2020-4-16 03:45:07', end_on='2') }}"
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- result.msg == 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;COUNT=2;INTERVAL=1' - result.msg == 'DTSTART;TZID=America/New_York:20200416T034507 RRULE:FREQ=MINUTELY;COUNT=2;INTERVAL=1'

View File

@@ -11,7 +11,7 @@
register: result register: result
- name: Changing setting to true should have changed the value - name: Changing setting to true should have changed the value
ansible.builtin.assert: assert:
that: that:
- "result is changed" - "result is changed"
@@ -22,7 +22,7 @@
register: result register: result
- name: Changing setting to true again should not change the value - name: Changing setting to true again should not change the value
ansible.builtin.assert: assert:
that: that:
- "result is not changed" - "result is not changed"
@@ -33,17 +33,17 @@
register: result register: result
- name: Changing setting back to false should have changed the value - name: Changing setting back to false should have changed the value
ansible.builtin.assert: assert:
that: that:
- "result is changed" - "result is changed"
- name: Set the value of AWX_ISOLATION_SHOW_PATHS to a baseline - name: Set the value of AWX_ISOLATION_SHOW_PATHS to a baseline
awx.awx.settings: settings:
name: AWX_ISOLATION_SHOW_PATHS name: AWX_ISOLATION_SHOW_PATHS
value: '["/var/lib/awx/projects/"]' value: '["/var/lib/awx/projects/"]'
- name: Set the value of AWX_ISOLATION_SHOW_PATHS to get an error back from the controller - name: Set the value of AWX_ISOLATION_SHOW_PATHS to get an error back from the controller
awx.awx.settings: settings:
settings: settings:
AWX_ISOLATION_SHOW_PATHS: AWX_ISOLATION_SHOW_PATHS:
'not': 'a valid' 'not': 'a valid'
@@ -51,75 +51,75 @@
register: result register: result
ignore_errors: true ignore_errors: true
- ansible.builtin.assert: - assert:
that: that:
- "result is failed" - "result is failed"
- name: Set the value of AWX_ISOLATION_SHOW_PATHS - name: Set the value of AWX_ISOLATION_SHOW_PATHS
awx.awx.settings: settings:
name: AWX_ISOLATION_SHOW_PATHS name: AWX_ISOLATION_SHOW_PATHS
value: '["/var/lib/awx/projects/", "/tmp"]' value: '["/var/lib/awx/projects/", "/tmp"]'
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- "result is changed" - "result is changed"
- name: Attempt to set the value of AWX_ISOLATION_BASE_PATH to what it already is - name: Attempt to set the value of AWX_ISOLATION_BASE_PATH to what it already is
awx.awx.settings: settings:
name: AWX_ISOLATION_BASE_PATH name: AWX_ISOLATION_BASE_PATH
value: /tmp value: /tmp
register: result register: result
- ansible.builtin.debug: - debug:
msg: "{{ result }}" msg: "{{ result }}"
- ansible.builtin.assert: - assert:
that: that:
- "result is not changed" - "result is not changed"
- name: Apply a single setting via settings - name: Apply a single setting via settings
awx.awx.settings: settings:
name: AWX_ISOLATION_SHOW_PATHS name: AWX_ISOLATION_SHOW_PATHS
value: '["/var/lib/awx/projects/", "/var/tmp"]' value: '["/var/lib/awx/projects/", "/var/tmp"]'
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- "result is changed" - "result is changed"
- name: Apply multiple setting via settings with no change - name: Apply multiple setting via settings with no change
awx.awx.settings: settings:
settings: settings:
AWX_ISOLATION_BASE_PATH: /tmp AWX_ISOLATION_BASE_PATH: /tmp
AWX_ISOLATION_SHOW_PATHS: ["/var/lib/awx/projects/", "/var/tmp"] AWX_ISOLATION_SHOW_PATHS: ["/var/lib/awx/projects/", "/var/tmp"]
register: result register: result
- ansible.builtin.debug: - debug:
msg: "{{ result }}" msg: "{{ result }}"
- ansible.builtin.assert: - assert:
that: that:
- "result is not changed" - "result is not changed"
- name: Apply multiple setting via settings with change - name: Apply multiple setting via settings with change
awx.awx.settings: settings:
settings: settings:
AWX_ISOLATION_BASE_PATH: /tmp AWX_ISOLATION_BASE_PATH: /tmp
AWX_ISOLATION_SHOW_PATHS: [] AWX_ISOLATION_SHOW_PATHS: []
register: result register: result
- ansible.builtin.assert: - assert:
that: that:
- "result is changed" - "result is changed"
- name: Handle an omit value - name: Handle an omit value
awx.awx.settings: settings:
name: AWX_ISOLATION_BASE_PATH name: AWX_ISOLATION_BASE_PATH
value: '{{ junk_var | default(omit) }}' value: '{{ junk_var | default(omit) }}'
register: result register: result
ignore_errors: true ignore_errors: true
- ansible.builtin.assert: - assert:
that: that:
- "'Unable to update settings' in result.msg" - "'Unable to update settings' in result.msg"

View File

@@ -10,7 +10,7 @@
test: ad_hoc_command,host,role test: ad_hoc_command,host,role
tasks: tasks:
- name: DEBUG - make sure variables are what we expect - name: DEBUG - make sure variables are what we expect
ansible.builtin.debug: debug:
msg: | msg: |
Running tests at location: Running tests at location:
{{ loc_tests }} {{ loc_tests }}
@@ -18,7 +18,7 @@
{{ test | trim | split(',') }} {{ test | trim | split(',') }}
- name: "Include test targets" - name: "Include test targets"
ansible.builtin.include_tasks: "{{ loc_tests }}{{ test_name }}/tasks/main.yml" include_tasks: "{{ loc_tests }}{{ test_name }}/tasks/main.yml"
loop: "{{ test | trim | split(',') }}" loop: "{{ test | trim | split(',') }}"
loop_control: loop_control:
loop_var: test_name loop_var: test_name

View File

@@ -175,10 +175,9 @@ class TestOptions(unittest.TestCase):
assert '--verbosity {0,1,2,3,4,5}' in out.getvalue() assert '--verbosity {0,1,2,3,4,5}' in out.getvalue()
def test_actions_with_primary_key(self): def test_actions_with_primary_key(self):
page = OptionsPage.from_json({'actions': {'GET': {}, 'POST': {}}})
ResourceOptionsParser(None, page, 'jobs', self.parser)
for method in ('get', 'modify', 'delete'): for method in ('get', 'modify', 'delete'):
page = OptionsPage.from_json({'actions': {'GET': {}, 'POST': {}}})
ResourceOptionsParser(None, page, 'jobs', self.parser)
assert method in self.parser.choices assert method in self.parser.choices
out = StringIO() out = StringIO()

View File

@@ -8,7 +8,7 @@ skip_missing_interpreters = true
# skipsdist = true # skipsdist = true
[testenv] [testenv]
basepython = python3.11 basepython = python3.9
setenv = setenv =
PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:. PYTHONPATH = {toxinidir}:{env:PYTHONPATH:}:.
deps = deps =

View File

@@ -6,7 +6,7 @@ The *awx-manage* Utility
.. index:: .. index::
single: awx-manage single: awx-manage
The ``awx-manage`` utility is used to access detailed internal information of AWX. Commands for ``awx-manage`` should run as the ``awx`` user only. The ``awx-manage`` utility is used to access detailed internal information of AWX. Commands for ``awx-manage`` should run as the ``awx`` or ``root`` user.
.. warning:: .. warning::
Running awx-manage commands via playbook is not recommended or supported. Running awx-manage commands via playbook is not recommended or supported.

View File

@@ -557,7 +557,7 @@ Terminal Access Controller Access-Control System Plus (TACACS+) is a protocol th
Generic OIDC settings Generic OIDC settings
---------------------- ----------------------
Similar to SAML, OpenID Connect (OIDC) is uses the OAuth 2.0 framework. It allows third-party applications to verify the identity and obtain basic end-user information. The main difference between OIDC and SAML is that SAML has a service provider (SP)-to-IdP trust relationship, whereas OIDC establishes the trust with the channel (HTTPS) that is used to obtain the security token. To obtain the credentials needed to setup OIDC with AWX, refer to the documentation from the identity provider (IdP) of your choice that has OIDC support. Similar to SAML, OpenID Connect (OIDC) is uses the OAuth 2.0 framework. It allows third-party applications to verify the identity and obtain basic end-user information. The main difference between OIDC and SMAL is that SAML has a service provider (SP)-to-IdP trust relationship, whereas OIDC establishes the trust with the channel (HTTPS) that is used to obtain the security token. To obtain the credentials needed to setup OIDC with AWX, refer to the documentation from the identity provider (IdP) of your choice that has OIDC support.
To configure OIDC in AWX: To configure OIDC in AWX:

View File

@@ -7,7 +7,6 @@ Setting up LDAP Authentication
single: LDAP single: LDAP
pair: authentication; LDAP pair: authentication; LDAP
This chapter describes how to integrate LDAP authentication with AWX.
.. note:: .. note::

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 KiB

After

Width:  |  Height:  |  Size: 21 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 123 KiB

After

Width:  |  Height:  |  Size: 70 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 52 KiB

After

Width:  |  Height:  |  Size: 132 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 50 KiB

After

Width:  |  Height:  |  Size: 55 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 45 KiB

After

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 16 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 76 KiB

After

Width:  |  Height:  |  Size: 76 KiB

View File

@@ -146,7 +146,7 @@ If you have a VMware instance that uses a self-signed certificate, then you will
.. code-block:: text .. code-block:: text
"source_vars": ---validate_certs: False "source_vars": "---\nvalidate_certs: False",
You can set this in inventory source for VMware vCenter as follows: You can set this in inventory source for VMware vCenter as follows:

View File

@@ -10,15 +10,14 @@ Secret Management System
Users and admins upload machine and cloud credentials so that automation can access machines and external services on their behalf. By default, sensitive credential values (such as SSH passwords, SSH private keys, API tokens for cloud services) are stored in the database after being encrypted. With external credentials backed by credential plugins, you can map credential fields (like a password or an SSH Private key) to values stored in a :term:`secret management system` instead of providing them to AWX directly. AWX provides a secret management system that includes integrations for: Users and admins upload machine and cloud credentials so that automation can access machines and external services on their behalf. By default, sensitive credential values (such as SSH passwords, SSH private keys, API tokens for cloud services) are stored in the database after being encrypted. With external credentials backed by credential plugins, you can map credential fields (like a password or an SSH Private key) to values stored in a :term:`secret management system` instead of providing them to AWX directly. AWX provides a secret management system that includes integrations for:
- :ref:`ug_credentials_aws_lookup` - Centrify Vault Credential Provider Lookup
- :ref:`ug_credentials_centrify` - CyberArk Central Credential Provider Lookup (CCP)
- :ref:`ug_credentials_cyberarkccp` - CyberArk Conjur Secrets Manager Lookup
- :ref:`ug_credentials_cyberarkconjur` - HashiCorp Vault Key-Value Store (KV)
- :ref:`ug_credentials_hashivault` (KV) - HashiCorp Vault SSH Secrets Engine
- :ref:`ug_credentials_hashivaultssh` - Microsoft Azure Key Management System (KMS)
- :ref:`ug_credentials_azurekeyvault` (KMS) - Thycotic DevOps Secrets Vault
- :ref:`ug_credentials_thycoticvault` - Thycotic Secret Server
- :ref:`ug_credentials_thycoticserver`
These external secret values will be fetched prior to running a playbook that needs them. For more information on specifying these credentials in the User Interface, see :ref:`ug_credentials`. These external secret values will be fetched prior to running a playbook that needs them. For more information on specifying these credentials in the User Interface, see :ref:`ug_credentials`.
@@ -50,92 +49,11 @@ Use the AWX User Interface to configure and use each of the supported 3-party se
.. image:: ../common/images/credentials-link-credential-prompt.png .. image:: ../common/images/credentials-link-credential-prompt.png
:alt: Credential section of the external secret management system dialog :alt: Credential section of the external secret management system dialog
4. Select the credential you want to link to, and click **Next**. This takes you to the **Metadata** tab of the input source. Metadata is specific to the input source you select: 4. Select the credential you want to link to, and click **Next**. This takes you to the **Metadata** tab of the input source. This example shows the Metadata prompt for HashiVault Secret Lookup. Metadata is specific to the input source you select. See the :ref:`ug_metadata_creds_inputs` table for details.
.. list-table::
:widths: 10 10 25
:width: 1400px
:header-rows: 1
* - Input Source
- Metadata
- Description
* - *AWS Secrets Manager*
- AWS Secrets Manager Region (required)
- The region where the secrets manager is located.
* -
- AWS Secret Name (Required)
- Specify the AWS secret name that was generated by the AWS access key.
* - *Centrify Vault Credential Provider Lookup*
- Account Name (Required)
- Name of the system account or domain associated with Centrify Vault.
* -
- System Name
- Specify the name used by the Centrify portal.
* - *CyberArk Central Credential Provider Lookup*
- Object Query (Required)
- Lookup query for the object.
* -
- Object Query Format
- Select ``Exact`` for a specific secret name, or ``Regexp`` for a secret that has a dynamically generated name.
* -
- Object Property
- Specifies the name of the property to return (e.g., ``UserName``, ``Address``, etc.) other than the default of ``Content``.
* -
- Reason
- If required per the object's policy, supply a reason for checking out the secret, as CyberArk logs those.
* - *CyberArk Conjur Secrets Lookup*
- Secret Identifier
- The identifier for the secret.
* -
- Secret Version
- Specify a version of the secret, if necessary, otherwise, leave it empty to use the latest version.
* - *HashiVault Secret Lookup*
- Name of Secret Backend
- Specify the name of the KV backend to use. Leave it blank to use the first path segment of the **Path to Secret** field instead.
* -
- Path to Secret (required)
- Specify the path to where the secret information is stored; for example, ``/path/username``.
* -
- Key Name (required)
- Specify the name of the key to look up the secret information.
* -
- Secret Version (V2 Only)
- Specify a version if necessary, otherwise, leave it empty to use the latest version.
* - *HashiCorp Signed SSH*
- Unsigned Public Key (required)
- Specify the public key of the cert you want to get signed. It needs to be present in the authorized keys file of the target host(s).
* -
- Path to Secret (required)
- Specify the path to where the secret information is stored; for example, ``/path/username``.
* -
- Role Name (required)
- A role is a collection of SSH settings and parameters that are stored in Hashi vault. Typically, you can specify a couple of them with different privileges, timeouts, etc. So you could have a role that is allowed to get a cert signed for root, and other less privileged ones, for example.
* -
- Valid Principals
- Specify a user (or users) other than the default, that you are requesting vault to authorize the cert for the stored key. Hashi vault has a default user for whom it signs (e.g., ec2-user).
* - *Azure KMS*
- Secret Name (required)
- The actual name of the secret as it is referenced in Azure's Key vault app.
* -
- Secret Version
- Specify a version of the secret, if necessary, otherwise, leave it empty to use the latest version.
* - *Thycotic DevOps Secrets Vault*
- Secret Path (required)
- Specify the path to where the secret information is stored (e.g., /path/username).
* - *Thycotic Secret Server*
- Secret ID (required)
- The identifier for the secret.
* -
- Secret Field
- Specify the field to be used from the secret.
This example shows the Metadata prompt for HashiVault Secret Lookup.
.. image:: ../common/images/credentials-link-metadata-prompt.png .. image:: ../common/images/credentials-link-metadata-prompt.png
:alt: Metadata section of the external secret management system dialog :alt: Metadata section of the external secret management system dialog
5. Click **Test** to verify connection to the secret management system. If the lookup is unsuccessful, an error message like this one displays: 5. Click **Test** to verify connection to the secret management system. If the lookup is unsuccessful, an error message like this one displays:
.. image:: ../common/images/credentials-link-metadata-test-error.png .. image:: ../common/images/credentials-link-metadata-test-error.png
@@ -147,37 +65,133 @@ This example shows the Metadata prompt for HashiVault Secret Lookup.
8. Click **Save** when done. 8. Click **Save** when done.
.. _ug_metadata_creds_inputs:
.. _ug_credentials_aws_lookup: Metadata for credential input sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
AWS Secrets Manager Lookup **Centrify Vault Credential Provider Lookup**
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. index::
pair: credential types; AWS
This plugin allows AWS to be used as a credential input source to pull secrets from AWS Secrets Manager. `AWS Secrets Manager <https://aws.amazon.com/secrets-manager/>`_ provides a similar service to :ref:`ug_credentials_azurekeyvault`, and the AWS collection provides a lookup plugin for it. .. list-table::
:widths: 25 50
:header-rows: 1
When **AWS Secrets Manager lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: * - Metadata
- Description
* - Account Name (Required)
- Name of the system account or domain associated with Centrify Vault.
* - System Name
- Specify the name used by the Centrify portal.
- **AWS Access Key** (required): provide the access key used for communicating with AWS' key management system **CyberArk Central Credential Provider Lookup**
- **AWS Secret Key** (required): provide the secret as obtained by the AWS IAM console
.. list-table::
:widths: 25 50
:header-rows: 1
Below shows an example of a configured AWS Secret Manager credential. * - Metadata
- Description
* - Object Query (Required)
- Lookup query for the object.
* - Object Query Format
- Select ``Exact`` for a specific secret name, or ``Regexp`` for a secret that has a dynamically generated name.
* - Object Property
- Specifies the name of the property to return (e.g., ``UserName``, ``Address``, etc.) other than the default of ``Content``.
* - Reason
- If required per the object's policy, supply a reason for checking out the secret, as CyberArk logs those.
.. image:: ../common/images/credentials-create-aws-secret-credential.png **CyberArk Conjur Secrets Lookup**
:width: 1400px
:alt: Example new AWS Secret Manager credential lookup dialog
.. list-table::
:widths: 25 50
:header-rows: 1
* - Metadata
- Description
* - Secret Identifier
- The identifier for the secret.
* - Secret Version
- Specify a version of the secret, if necessary, otherwise, leave it empty to use the latest version.
**HashiVault Secret Lookup**
.. list-table::
:widths: 25 50
:header-rows: 1
* - Metadata
- Description
* - Name of Secret Backend
- Specify the name of the KV backend to use. Leave it blank to use the first path segment of the **Path to Secret** field instead.
* - Path to Secret (required)
- Specify the path to where the secret information is stored; for example, ``/path/username``.
* - Key Name (required)
- Specify the name of the key to look up the secret information.
* - Secret Version (V2 Only)
- Specify a version if necessary, otherwise, leave it empty to use the latest version.
**HashiCorp Signed SSH**
.. list-table::
:widths: 25 50
:header-rows: 1
* - Metadata
- Description
* - Unsigned Public Key (required)
- Specify the public key of the cert you want to get signed. It needs to be present in the authorized keys file of the target host(s).
* - Path to Secret (required)
- Specify the path to where the secret information is stored; for example, ``/path/username``.
* - Role Name (required)
- A role is a collection of SSH settings and parameters that are stored in Hashi vault. Typically, you can specify a couple of them with different privileges, timeouts, etc. So you could have a role that is allowed to get a cert signed for root, and other less privileged ones, for example.
* - Valid Principals
- Specify a user (or users) other than the default, that you are requesting vault to authorize the cert for the stored key. Hashi vault has a default user for whom it signs (e.g., ec2-user).
**Azure KMS**
.. list-table::
:widths: 25 50
:header-rows: 1
* - Metadata
- Description
* - Secret Name (required)
- The actual name of the secret as it is referenced in Azure's Key vault app.
* - Secret Version
- Specify a version of the secret, if necessary, otherwise, leave it empty to use the latest version.
**Thycotic DevOps Secrets Vault**
.. list-table::
:widths: 25 50
:header-rows: 1
* - Metadata
- Description
* - Secret Path (required)
- Specify the path to where the secret information is stored (e.g., /path/username).
**Thycotic Secret Server**
.. list-table::
:widths: 25 50
:header-rows: 1
* - Metadata
- Description
* - Secret ID (required)
- The identifier for the secret.
* - Secret Field
- Specify the field to be used from the secret.
.. _ug_credentials_centrify: .. _ug_credentials_centrify:
Centrify Vault Credential Provider Lookup Centrify Vault Credential Provider Lookup
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. index:: .. index::
pair: credential types; Centrify pair: credential types; Centrify
You need the Centrify Vault web service running to store secrets in order for this integration to work. When **Centrify Vault Credential Provider Lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: You need the Centrify Vault web service running to store secrets in order for this integration to work. When **Centrify Vault Credential Provider Lookup** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
- **Centrify Tenant URL** (required): provide the URL used for communicating with Centrify's secret management system - **Centrify Tenant URL** (required): provide the URL used for communicating with Centrify's secret management system
- **Centrify API User** (required): provide the username - **Centrify API User** (required): provide the username
@@ -194,12 +208,12 @@ Below shows an example of a configured CyberArk AIM credential.
.. _ug_credentials_cyberarkccp: .. _ug_credentials_cyberarkccp:
CyberArk Central Credential Provider (CCP) Lookup CyberArk Central Credential Provider (CCP) Lookup
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. index:: .. index::
single: CyberArk CCP single: CyberArk CCP
pair: credential; CyberArk CCP pair: credential; CyberArk CCP
You need the CyberArk Central Credential Provider web service running to store secrets in order for this integration to work. When **CyberArk Central Credential Provider Lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: You need the CyberArk Central Credential Provider web service running to store secrets in order for this integration to work. When **CyberArk Central Credential Provider Lookup** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
- **CyberArk CCP URL** (required): provide the URL used for communicating with CyberArk CCP's secret management system; must include URL scheme (http, https, etc.) - **CyberArk CCP URL** (required): provide the URL used for communicating with CyberArk CCP's secret management system; must include URL scheme (http, https, etc.)
- **Web Service ID**: optionally specify the identifier for the web service; leaving it blank defaults to AIMWebService - **Web Service ID**: optionally specify the identifier for the web service; leaving it blank defaults to AIMWebService
@@ -216,14 +230,14 @@ Below shows an example of a configured CyberArk CCP credential.
.. _ug_credentials_cyberarkconjur: .. _ug_credentials_cyberarkconjur:
CyberArk Conjur Secrets Manager Lookup CyberArk Conjur Secrets Manager Lookup
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. index:: .. index::
single: CyberArk Conjur single: CyberArk Conjur
pair: credential; CyberArk Conjur pair: credential; CyberArk Conjur
With a Conjur Cloud tenant available to target, configure the CyberArk Conjur Secrets Lookup external management system credential plugin as documented. With a Conjur Cloud tenant available to target, configure the CyberArk Conjur Secrets Lookup external management system credential plugin as documented.
When **CyberArk Conjur Secrets Manager Lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: When **CyberArk Conjur Secrets Manager Lookup** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
- **Conjur URL** (required): provide the URL used for communicating with CyberArk Conjur's secret management system; must include URL scheme (http, https, etc.) - **Conjur URL** (required): provide the URL used for communicating with CyberArk Conjur's secret management system; must include URL scheme (http, https, etc.)
- **API Key** (required): provide the key given by your Conjur admin - **API Key** (required): provide the key given by your Conjur admin
@@ -239,12 +253,12 @@ Below shows an example of a configured CyberArk Conjur credential.
.. _ug_credentials_hashivault: .. _ug_credentials_hashivault:
HashiCorp Vault Secret Lookup HashiCorp Vault Secret Lookup
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. index:: .. index::
single: HashiCorp Secret Lookup single: HashiCorp Secret Lookup
pair: credential; HashiCorp KV pair: credential; HashiCorp KV
When **HashiCorp Vault Secret Lookup** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: When **HashiCorp Vault Secret Lookup** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
- **Server URL** (required): provide the URL used for communicating with HashiCorp Vault's secret management system - **Server URL** (required): provide the URL used for communicating with HashiCorp Vault's secret management system
- **Token**: specify the access token used to authenticate HashiCorp's server - **Token**: specify the access token used to authenticate HashiCorp's server
@@ -277,7 +291,7 @@ Below shows an example of a configured HashiCorp Vault Secret Lookup credential
.. image:: ../common/images/credentials-create-hashicorp-kv-credential.png .. image:: ../common/images/credentials-create-hashicorp-kv-credential.png
:alt: Example new HashiCorp Vault Secret lookup dialog :alt: Example new HashiCorp Vault Secret lookup dialog
To test the lookup, create another credential that uses the HashiCorp Vault lookup. The example below shows the attributes for a machine credential configured to look up HashiCorp Vault secret credentials: To test the lookup, create another credential that uses the HashiCorp Vault lookup. The example below shows the metadata for a machine credential configured to look up HashiCorp Vault secret credentials:
.. image:: ../common/images/credentials-machine-test-hashicorp-metadata.png .. image:: ../common/images/credentials-machine-test-hashicorp-metadata.png
:alt: Example machine credential lookup metadata for HashiCorp Vault. :alt: Example machine credential lookup metadata for HashiCorp Vault.
@@ -286,12 +300,12 @@ To test the lookup, create another credential that uses the HashiCorp Vault look
.. _ug_credentials_hashivaultssh: .. _ug_credentials_hashivaultssh:
HashiCorp Vault Signed SSH HashiCorp Vault Signed SSH
~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. index:: .. index::
single: HashiCorp SSH Secrets Engine single: HashiCorp SSH Secrets Engine
pair: credential; HashiCorp SSH Secrets Engine pair: credential; HashiCorp SSH Secrets Engine
When **HashiCorp Vault Signed SSH** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: When **HashiCorp Vault Signed SSH** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
- **Server URL** (required): provide the URL used for communicating with HashiCorp Signed SSH's secret management system - **Server URL** (required): provide the URL used for communicating with HashiCorp Signed SSH's secret management system
- **Token**: specify the access token used to authenticate HashiCorp's server - **Token**: specify the access token used to authenticate HashiCorp's server
@@ -321,13 +335,13 @@ Below shows an example of a configured HashiCorp SSH Secrets Engine credential.
.. _ug_credentials_azurekeyvault: .. _ug_credentials_azurekeyvault:
Microsoft Azure Key Vault Microsoft Azure Key Vault
~~~~~~~~~~~~~~~~~~~~~~~~~~ ^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. index:: .. index::
single: MS Azure KMS single: MS Azure KMS
pair: credential; MS Azure KMS pair: credential; MS Azure KMS
triple: credential; Azure; KMS triple: credential; Azure; KMS
When **Microsoft Azure Key Vault** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: When **Microsoft Azure Key Vault** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
- **Vault URL (DNS Name)** (required): provide the URL used for communicating with MS Azure's key management system - **Vault URL (DNS Name)** (required): provide the URL used for communicating with MS Azure's key management system
- **Client ID** (required): provide the identifier as obtained by the Azure Active Directory - **Client ID** (required): provide the identifier as obtained by the Azure Active Directory
@@ -343,12 +357,12 @@ Below shows an example of a configured Microsoft Azure KMS credential.
.. _ug_credentials_thycoticvault: .. _ug_credentials_thycoticvault:
Thycotic DevOps Secrets Vault Thycotic DevOps Secrets Vault
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. index:: .. index::
single: Thycotic DevOps Secrets Vault single: Thycotic DevOps Secrets Vault
pair: credential; Thycotic DevOps Secrets Vault pair: credential; Thycotic DevOps Secrets Vault
When **Thycotic DevOps Secrets Vault** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: When **Thycotic DevOps Secrets Vault** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
- **Tenant** (required): provide the URL used for communicating with Thycotic's secret management system - **Tenant** (required): provide the URL used for communicating with Thycotic's secret management system
- **Top-level Domain (TLD)** : provide the top-level domain designation (e.g., com, edu, org) associated with the secret vault you want to integrate - **Top-level Domain (TLD)** : provide the top-level domain designation (e.g., com, edu, org) associated with the secret vault you want to integrate
@@ -365,12 +379,12 @@ Below shows an example of a configured Thycotic DevOps Secrets Vault credential.
.. _ug_credentials_thycoticserver: .. _ug_credentials_thycoticserver:
Thycotic Secret Server Thycotic Secret Server
~~~~~~~~~~~~~~~~~~~~~~~ ^^^^^^^^^^^^^^^^^^^^^^^^
.. index:: .. index::
single: Thycotic Secret Server single: Thycotic Secret Server
pair: credential; Thycotic Secret Server pair: credential; Thycotic Secret Server
When **Thycotic Secrets Server** is selected for **Credential Type**, provide the following attributes to properly configure your lookup: When **Thycotic Secrets Server** is selected for **Credential Type**, provide the following metadata to properly configure your lookup:
- **Secret Server URL** (required): provide the URL used for communicating with the Thycotic Secrets Server management system - **Secret Server URL** (required): provide the URL used for communicating with the Thycotic Secrets Server management system
- **Username** (required): specify the authenticated user for this service - **Username** (required): specify the authenticated user for this service

Some files were not shown because too many files have changed in this diff Show More