mirror of https://github.com/ansible/awx.git (synced 2026-01-11 18:09:57 -03:30)

commit 0faa1c8a24: Merge branch 'devel' into 8898-fix-update-vault-credentials

Makefile (32 lines changed)
@ -1,6 +1,6 @@
-include awx/ui_next/Makefile

PYTHON ?= python3.9
PYTHON := $(notdir $(shell for i in python3.9 python3; do command -v $$i; done|sed 1q))
DOCKER_COMPOSE ?= docker-compose
OFFICIAL ?= no
NODE ?= node
@ -296,13 +296,13 @@ swagger: reports
check: black

api-lint:
BLACK_ARGS="--check" make black
BLACK_ARGS="--check" $(MAKE) black
flake8 awx
yamllint -s .

## Run egg_info_dev to generate awx.egg-info for development.
awx-link:
[ -d "/awx_devel/awx.egg-info" ] || $(PYTHON) /awx_devel/tools/scripts/egg_info_dev
cp -f /tmp/awx.egg-link /var/lib/awx/venv/awx/lib/$(PYTHON)/site-packages/awx.egg-link

TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests
PYTEST_ARGS ?= -n auto
@ -321,7 +321,7 @@ github_ci_setup:
# CI_GITHUB_TOKEN is defined in .github files
echo $(CI_GITHUB_TOKEN) | docker login ghcr.io -u $(GITHUB_ACTOR) --password-stdin
docker pull $(DEVEL_IMAGE_NAME) || : # Pre-pull image to warm build cache
make docker-compose-build
$(MAKE) docker-compose-build

## Runs AWX_DOCKER_CMD inside a new docker container.
docker-runner:
@ -371,7 +371,7 @@ test_collection_sanity:
rm -rf $(COLLECTION_INSTALL)
if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
ansible --version
COLLECTION_VERSION=1.0.0 make install_collection
COLLECTION_VERSION=1.0.0 $(MAKE) install_collection
cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)

test_collection_integration: install_collection
@ -589,7 +589,7 @@ docker-compose-cluster-elk: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

docker-compose-container-group:
MINIKUBE_CONTAINER_GROUP=true make docker-compose
MINIKUBE_CONTAINER_GROUP=true $(MAKE) docker-compose

clean-elk:
docker stop tools_kibana_1
@ -606,7 +606,18 @@ VERSION:
@echo "awx: $(VERSION)"

PYTHON_VERSION:
@echo "$(PYTHON)" | sed 's:python::'
@echo "$(subst python,,$(PYTHON))"

.PHONY: version-for-buildyml
version-for-buildyml:
@echo $(firstword $(subst +, ,$(VERSION)))
# version-for-buildyml prints a special version string for build.yml,
# chopping off the sha after the '+' sign.
# tools/ansible/build.yml was doing this: make print-VERSION | cut -d + -f -1
# This does the same thing in native make without
# the pipe or the extra processes, and now the playbook runs `make version-for-buildyml`
# Example:
# 22.1.1.dev38+g523c0d9781 becomes 22.1.1.dev38
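A quick aside: the same split, sketched in Python purely to illustrate what $(firstword $(subst +, ,$(VERSION))) evaluates to (illustration only, not part of the Makefile):

    version = "22.1.1.dev38+g523c0d9781"
    print(version.split("+", 1)[0])  # prints "22.1.1.dev38", the part before '+'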

.PHONY: Dockerfile
## Generate Dockerfile for awx image
@ -658,6 +669,7 @@ messages:
fi; \
$(PYTHON) manage.py makemessages -l en_us --keep-pot

.PHONY: print-%
print-%:
@echo $($*)

@ -669,12 +681,12 @@ HELP_FILTER=.PHONY
## Display help targets
help:
@printf "Available targets:\n"
@make -s help/generate | grep -vE "\w($(HELP_FILTER))"
@$(MAKE) -s help/generate | grep -vE "\w($(HELP_FILTER))"

## Display help for all targets
help/all:
@printf "Available targets:\n"
@make -s help/generate
@$(MAKE) -s help/generate

## Generate help output from MAKEFILE_LIST
help/generate:
@ -698,4 +710,4 @@ help/generate:

## Display help for ui-next targets
help/ui-next:
@make -s help MAKEFILE_LIST="awx/ui_next/Makefile"
@$(MAKE) -s help MAKEFILE_LIST="awx/ui_next/Makefile"

@ -33,7 +33,7 @@ from rest_framework.negotiation import DefaultContentNegotiation
# AWX
from awx.api.filters import FieldLookupBackend
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
from awx.main.access import access_registry
from awx.main.access import optimize_queryset
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
from awx.main.utils.db import get_all_field_names
from awx.main.utils.licensing import server_product_name
@ -362,12 +362,7 @@ class GenericAPIView(generics.GenericAPIView, APIView):
return self.queryset._clone()
elif self.model is not None:
qs = self.model._default_manager
if self.model in access_registry:
access_class = access_registry[self.model]
if access_class.select_related:
qs = qs.select_related(*access_class.select_related)
if access_class.prefetch_related:
qs = qs.prefetch_related(*access_class.prefetch_related)
qs = optimize_queryset(qs)
return qs
else:
return super(GenericAPIView, self).get_queryset()
@ -510,6 +505,9 @@ class SubListAPIView(ParentMixin, ListAPIView):
# And optionally (user must have given access permission on parent object
# to view sublist):
# parent_access = 'read'
# filter_read_permission sets whether or not to override the default intersection behavior
# implemented here
filter_read_permission = True

def get_description_context(self):
d = super(SubListAPIView, self).get_description_context()
@ -524,8 +522,10 @@ class SubListAPIView(ParentMixin, ListAPIView):
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
qs = self.request.user.get_queryset(self.model).distinct()
sublist_qs = self.get_sublist_queryset(parent)
if not self.filter_read_permission:
return optimize_queryset(sublist_qs)
qs = self.request.user.get_queryset(self.model).distinct()
return qs & sublist_qs

def get_sublist_queryset(self, parent):
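The intersection behavior made optional here can be sketched with plain Python sets (an analogy only; the real code intersects Django querysets):

    # user_readable stands in for request.user.get_queryset(model);
    # related_to_parent stands in for get_sublist_queryset(parent).
    user_readable = {1, 2, 3}
    related_to_parent = {2, 3, 4}

    filter_read_permission = True
    visible = related_to_parent & user_readable if filter_read_permission else related_to_parent
    print(sorted(visible))  # [2, 3] with filtering; [2, 3, 4] when a view opts out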

@ -954,7 +954,7 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):


class UserSerializer(BaseSerializer):
password = serializers.CharField(required=False, default='', write_only=True, help_text=_('Write-only field used to change the password.'))
password = serializers.CharField(required=False, default='', help_text=_('Field used to change the password.'))
ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service'))
is_system_auditor = serializers.BooleanField(default=False)
@ -981,7 +981,12 @@ class UserSerializer(BaseSerializer):

def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)
ret.pop('password', None)
if self.get_external_account(obj):
# If this is an external account, it shouldn't have a password field
ret.pop('password', None)
else:
# If it's an internal account, let's assume there is a password and return $encrypted$ to the user
ret['password'] = '$encrypted$'
if obj and type(self) is UserSerializer:
ret['auth'] = obj.social_auth.values('provider', 'uid')
return ret
@ -1019,7 +1024,7 @@ class UserSerializer(BaseSerializer):
# For now we're not raising an error, just not saving password for
# users managed by LDAP who already have an unusable password set.
# get_external_account will return something like 'ldap' or 'enterprise', or None if the user isn't external. We only want to allow a password update when it returns None.
if new_password and not self.get_external_account(obj):
if new_password and new_password != '$encrypted$' and not self.get_external_account(obj):
obj.set_password(new_password)
obj.save(update_fields=['password'])
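A minimal stand-alone sketch of the $encrypted$ round-trip introduced above (not the real serializer; the function names are illustrative):

    def represent(is_external):
        # GET: external accounts omit the field, internal ones get a placeholder
        return {} if is_external else {'password': '$encrypted$'}

    def should_set_password(new_password, is_external):
        # PUT: echoing the placeholder back must not overwrite the stored password
        return bool(new_password) and new_password != '$encrypted$' and not is_external

    print(represent(is_external=False))               # {'password': '$encrypted$'}
    print(should_set_password('$encrypted$', False))  # False: placeholder is ignored
    print(should_set_password('s3cret', False))       # True: a real change is applied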

@ -2185,7 +2190,7 @@ class BulkHostCreateSerializer(serializers.Serializer):
host_data = []
for r in result:
item = {k: getattr(r, k) for k in return_keys}
if not settings.IS_TESTING_MODE:
if settings.DATABASES and ('sqlite3' not in settings.DATABASES.get('default', {}).get('ENGINE')):
# sqlite acts differently with bulk_create -- it doesn't return the id of the objects
# to get it, you have to do an additional query, which is not useful for our tests
item['url'] = reverse('api:host_detail', kwargs={'pk': r.id})
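A stand-alone illustration of the SQLite caveat in the comment above, using the sqlite3 stdlib rather than Django's bulk_create:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE host (id INTEGER PRIMARY KEY, name TEXT)')
    conn.executemany('INSERT INTO host (name) VALUES (?)', [('a',), ('b',)])
    # The bulk insert does not hand back the new primary keys; an extra
    # query is needed to recover them.
    print([row[0] for row in conn.execute('SELECT id FROM host ORDER BY id')])  # [1, 2]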

@ -62,7 +62,7 @@ from wsgiref.util import FileWrapper

# AWX
from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
from awx.main.access import get_user_queryset, HostAccess
from awx.main.access import get_user_queryset
from awx.api.generics import (
APIView,
BaseUsersList,
@ -794,13 +794,7 @@ class ExecutionEnvironmentActivityStreamList(SubListAPIView):
parent_model = models.ExecutionEnvironment
relationship = 'activitystream_set'
search_fields = ('changes',)

def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)

qs = self.request.user.get_queryset(self.model)
return qs.filter(execution_environment=parent)
filter_read_permission = False


class ProjectList(ListCreateAPIView):
@ -1634,13 +1628,7 @@ class InventoryHostsList(HostRelatedSearchMixin, SubListCreateAttachDetachAPIVie
parent_model = models.Inventory
relationship = 'hosts'
parent_key = 'inventory'

def get_queryset(self):
inventory = self.get_parent_object()
qs = getattrd(inventory, self.relationship).all()
# Apply queryset optimizations
qs = qs.select_related(*HostAccess.select_related).prefetch_related(*HostAccess.prefetch_related)
return qs
filter_read_permission = False


class HostGroupsList(SubListCreateAttachDetachAPIView):
@ -2581,16 +2569,7 @@ class JobTemplateCredentialsList(SubListCreateAttachDetachAPIView):
serializer_class = serializers.CredentialSerializer
parent_model = models.JobTemplate
relationship = 'credentials'

def get_queryset(self):
# Return the full list of credentials
parent = self.get_parent_object()
self.check_parent_access(parent)
sublist_qs = getattrd(parent, self.relationship)
sublist_qs = sublist_qs.prefetch_related(
'created_by', 'modified_by', 'admin_role', 'use_role', 'read_role', 'admin_role__parents', 'admin_role__members'
)
return sublist_qs
filter_read_permission = False

def is_valid_relation(self, parent, sub, created=False):
if sub.unique_hash() in [cred.unique_hash() for cred in parent.credentials.all()]:
@ -2780,6 +2759,7 @@ class JobTemplateInstanceGroupsList(SubListAttachDetachAPIView):
serializer_class = serializers.InstanceGroupSerializer
parent_model = models.JobTemplate
relationship = 'instance_groups'
filter_read_permission = False


class JobTemplateAccessList(ResourceAccessList):
@ -2870,16 +2850,7 @@ class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, Su
relationship = ''
enforce_parent_relationship = 'workflow_job_template'
search_fields = ('unified_job_template__name', 'unified_job_template__description')

'''
Limit the set of WorkflowJobTemplateNodes to the related nodes specified by
'relationship'
'''

def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
return getattr(parent, self.relationship).all()
filter_read_permission = False

def is_valid_relation(self, parent, sub, created=False):
if created:
@ -2954,14 +2925,7 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
parent_model = models.WorkflowJobNode
relationship = ''
search_fields = ('unified_job_template__name', 'unified_job_template__description')

#
# Limit the set of WorkflowJobNodes to the related nodes specified by self.relationship
#
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
return getattr(parent, self.relationship).all()
filter_read_permission = False


class WorkflowJobNodeSuccessNodesList(WorkflowJobNodeChildrenBaseList):
@ -3140,11 +3104,8 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
relationship = 'workflow_job_template_nodes'
parent_key = 'workflow_job_template'
search_fields = ('unified_job_template__name', 'unified_job_template__description')

def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
return getattr(parent, self.relationship).order_by('id')
ordering = ('id',) # assure ordering by id for consistency
filter_read_permission = False


class WorkflowJobTemplateJobsList(SubListAPIView):
@ -3236,11 +3197,8 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
relationship = 'workflow_job_nodes'
parent_key = 'workflow_job'
search_fields = ('unified_job_template__name', 'unified_job_template__description')

def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
return getattr(parent, self.relationship).order_by('id')
ordering = ('id',) # assure ordering by id for consistency
filter_read_permission = False


class WorkflowJobCancel(GenericCancelView):
@ -3554,11 +3512,7 @@ class BaseJobHostSummariesList(SubListAPIView):
relationship = 'job_host_summaries'
name = _('Job Host Summaries List')
search_fields = ('host_name',)

def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
return getattr(parent, self.relationship).select_related('job', 'job__job_template', 'host')
filter_read_permission = False


class HostJobHostSummariesList(BaseJobHostSummariesList):

@ -61,12 +61,6 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
model = Organization
serializer_class = OrganizationSerializer

def get_queryset(self):
qs = Organization.accessible_objects(self.request.user, 'read_role')
qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
qs = qs.prefetch_related('created_by', 'modified_by')
return qs


class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
model = Organization
@ -207,6 +201,7 @@ class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
serializer_class = InstanceGroupSerializer
parent_model = Organization
relationship = 'instance_groups'
filter_read_permission = False


class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
@ -214,6 +209,7 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
serializer_class = CredentialSerializer
parent_model = Organization
relationship = 'galaxy_credentials'
filter_read_permission = False

def is_valid_relation(self, parent, sub, created=False):
if sub.kind != 'galaxy_api_token':

@ -2952,3 +2952,19 @@ class WorkflowApprovalTemplateAccess(BaseAccess):
for cls in BaseAccess.__subclasses__():
access_registry[cls.model] = cls
access_registry[UnpartitionedJobEvent] = UnpartitionedJobEventAccess


def optimize_queryset(queryset):
"""
A utility method in case you already have a queryset and just want to
apply the standard optimizations for that model.
In other words, use if you do not want to start from filtered_queryset for some reason.
"""
if not queryset.model or queryset.model not in access_registry:
return queryset
access_class = access_registry[queryset.model]
if access_class.select_related:
queryset = queryset.select_related(*access_class.select_related)
if access_class.prefetch_related:
queryset = queryset.prefetch_related(*access_class.prefetch_related)
return queryset
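A hypothetical usage sketch for the new helper (assumes an AWX development environment with Django configured; Host is just one example of a registered model):

    from awx.main.access import optimize_queryset
    from awx.main.models import Host

    qs = Host.objects.filter(inventory_id=1)
    # Applies the registered access class's select_related/prefetch_related,
    # or returns the queryset unchanged if the model is not in access_registry.
    qs = optimize_queryset(qs)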

@ -2,6 +2,8 @@ import json
import logging
import os
import time
import signal
import sys

from django.core.management.base import BaseCommand
from django.conf import settings
@ -50,6 +52,11 @@ class Command(BaseCommand):
}
return json.dumps(payload)

def notify_listener_and_exit(self, *args):
with pg_bus_conn(new_connection=False) as conn:
conn.notify('web_heartbeet', self.construct_payload(action='offline'))
sys.exit(0)

def do_hearbeat_loop(self):
with pg_bus_conn(new_connection=True) as conn:
while True:
@ -57,10 +64,10 @@ class Command(BaseCommand):
conn.notify('web_heartbeet', self.construct_payload())
time.sleep(settings.BROADCAST_WEBSOCKET_BEACON_FROM_WEB_RATE_SECONDS)

# TODO: Send a message with action=offline if we notice a SIGTERM or SIGINT
# (wsrelay can use this to remove the node quicker)
def handle(self, *arg, **options):
self.print_banner()
signal.signal(signal.SIGTERM, self.notify_listener_and_exit)
signal.signal(signal.SIGINT, self.notify_listener_and_exit)

# Note: We don't really try any reconnect logic to pg_notify here,
# just let supervisor restart if we fail.
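The shutdown pattern added above, reduced to a runnable stand-alone sketch (print stands in for the pg_notify call):

    import signal
    import sys

    def notify_offline_and_exit(*args):
        print('action=offline')  # stand-in for conn.notify('web_heartbeet', ...)
        sys.exit(0)

    signal.signal(signal.SIGTERM, notify_offline_and_exit)
    signal.signal(signal.SIGINT, notify_offline_and_exit)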

@ -1479,8 +1479,6 @@ class PluginFileInjector(object):
def build_env(self, inventory_update, env, private_data_dir, private_data_files):
injector_env = self.get_plugin_env(inventory_update, private_data_dir, private_data_files)
env.update(injector_env)
# Preserves current behavior for Ansible change in default planned for 2.10
env['ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS'] = 'never'
# All CLOUD_PROVIDERS sources are implemented as inventory plugins from collections
env['ANSIBLE_INVENTORY_ENABLED'] = 'auto'
return env

@ -1,9 +1,8 @@
{
"ANSIBLE_JINJA2_NATIVE": "True",
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"AZURE_CLIENT_ID": "fooo",
"AZURE_CLOUD_ENVIRONMENT": "fooo",
"AZURE_SECRET": "fooo",
"AZURE_SUBSCRIPTION_ID": "fooo",
"AZURE_TENANT": "fooo"
}

@ -1,5 +1,4 @@
{
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"TOWER_HOST": "https://foo.invalid",
"TOWER_PASSWORD": "fooo",
"TOWER_USERNAME": "fooo",
@ -10,4 +9,4 @@
"CONTROLLER_USERNAME": "fooo",
"CONTROLLER_OAUTH_TOKEN": "",
"CONTROLLER_VERIFY_SSL": "False"
}

@ -1,8 +1,7 @@
{
"ANSIBLE_JINJA2_NATIVE": "True",
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"AWS_ACCESS_KEY_ID": "fooo",
"AWS_SECRET_ACCESS_KEY": "fooo",
"AWS_SECURITY_TOKEN": "fooo",
"AWS_SESSION_TOKEN": "fooo"
}

@ -1,6 +1,5 @@
{
"ANSIBLE_JINJA2_NATIVE": "True",
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"GCE_CREDENTIALS_FILE_PATH": "{{ file_reference }}",
"GOOGLE_APPLICATION_CREDENTIALS": "{{ file_reference }}",
"GCP_AUTH_KIND": "serviceaccount",

@ -1,5 +1,4 @@
{
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"INSIGHTS_USER": "fooo",
"INSIGHTS_PASSWORD": "fooo"
}

@ -1,4 +1,3 @@
{
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"OS_CLIENT_CONFIG_FILE": "{{ file_reference }}"
}

@ -1,7 +1,6 @@
{
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"OVIRT_INI_PATH": "{{ file_reference }}",
"OVIRT_PASSWORD": "fooo",
"OVIRT_URL": "https://foo.invalid",
"OVIRT_USERNAME": "fooo"
}

@ -1,6 +1,5 @@
{
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"FOREMAN_PASSWORD": "fooo",
"FOREMAN_SERVER": "https://foo.invalid",
"FOREMAN_USER": "fooo"
}

@ -1,7 +1,6 @@
{
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
"VMWARE_HOST": "https://foo.invalid",
"VMWARE_PASSWORD": "fooo",
"VMWARE_USER": "fooo",
"VMWARE_VALIDATE_CERTS": "False"
}

@ -329,3 +329,21 @@ def test_galaxy_credential_association(alice, admin, organization, post, get):
'Public Galaxy 4',
'Public Galaxy 5',
]


@pytest.mark.django_db
def test_org_admin_credential_count(org_admin, admin, organization, post, get):
galaxy = CredentialType.defaults['galaxy_api_token']()
galaxy.save()

for i in range(3):
cred = Credential.objects.create(credential_type=galaxy, name=f'test_{i}', inputs={'url': 'https://galaxy.ansible.com/'})
url = reverse('api:organization_galaxy_credentials_list', kwargs={'pk': organization.pk})
post(url, {'associate': True, 'id': cred.pk}, user=admin, expect=204)
# org admin should see all associated galaxy credentials
resp = get(url, user=org_admin)
assert resp.data['count'] == 3
# removing one to validate new count
post(url, {'disassociate': True, 'id': Credential.objects.get(name='test_1').pk}, user=admin, expect=204)
resp_new = get(url, user=org_admin)
assert resp_new.data['count'] == 2

awx/main/tests/settings_for_test.py (new file, 28 lines)
@ -0,0 +1,28 @@
# Python
from unittest import mock
import uuid

# patch python-ldap
with mock.patch('__main__.__builtins__.dir', return_value=[]):
import ldap # NOQA

# Load development settings for base variables.
from awx.settings.development import * # NOQA

# Some code makes decisions based on settings.SETTINGS_MODULE, so keep it pointing at the development settings module.
SETTINGS_MODULE = 'awx.settings.development'

# Use SQLite for unit tests instead of PostgreSQL. If the lines below are
# commented out, Django will create the test_awx-dev database in PostgreSQL to
# run unit tests.
CACHES = {'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-{}'.format(str(uuid.uuid4()))}}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'), # noqa
'TEST': {
# Test database cannot be :memory: for inventory tests.
'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3') # noqa
},
}
}
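A hedged sketch of how a test runner might point Django at this new module; the environment-variable mechanism is standard Django, though the exact wiring AWX uses may differ:

    import os

    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.main.tests.settings_for_test')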

@ -1,8 +1,32 @@
from split_settings.tools import include


LOCAL_SETTINGS = (
'ALLOWED_HOSTS',
'BROADCAST_WEBSOCKET_PORT',
'BROADCAST_WEBSOCKET_VERIFY_CERT',
'BROADCAST_WEBSOCKET_PROTOCOL',
'BROADCAST_WEBSOCKET_SECRET',
'DATABASES',
'CACHES',
'DEBUG',
'NAMED_URL_GRAPH',
)


def test_postprocess_auth_basic_enabled():
locals().update({'__file__': __file__})

include('../../../settings/defaults.py', scope=locals())
assert 'awx.api.authentication.LoggedBasicAuthentication' in locals()['REST_FRAMEWORK']['DEFAULT_AUTHENTICATION_CLASSES']


def test_default_settings():
from django.conf import settings

for k in dir(settings):
if k not in settings.DEFAULTS_SNAPSHOT or k in LOCAL_SETTINGS:
continue
default_val = getattr(settings.default_settings, k, None)
snapshot_val = settings.DEFAULTS_SNAPSHOT[k]
assert default_val == snapshot_val, f'Setting for {k} does not match snapshot:\nsnapshot: {snapshot_val}\ndefault: {default_val}'

@ -1,24 +1,16 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
import base64
import os
import re # noqa
import sys
import tempfile
import socket
from datetime import timedelta


if "pytest" in sys.modules:
IS_TESTING_MODE = True
from unittest import mock

with mock.patch('__main__.__builtins__.dir', return_value=[]):
import ldap
else:
IS_TESTING_MODE = False
import ldap
# python-ldap
import ldap


DEBUG = True

@ -9,7 +9,6 @@ import socket
import copy
import sys
import traceback
import uuid

# Centos-7 doesn't include the svg mime type
# /usr/lib64/python/mimetypes.py
@ -62,38 +61,9 @@ DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
INSTALL_UUID = '00000000-0000-0000-0000-000000000000'

# Store a snapshot of default settings at this point before loading any
# customizable config files.
DEFAULTS_SNAPSHOT = {}
this_module = sys.modules[__name__]
for setting in dir(this_module):
if setting == setting.upper():
DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))

# If there is an `/etc/tower/settings.py`, include it.
# If there is a `/etc/tower/conf.d/*.py`, include them.
include(optional('/etc/tower/settings.py'), scope=locals())
include(optional('/etc/tower/conf.d/*.py'), scope=locals())

BASE_VENV_PATH = "/var/lib/awx/venv/"
AWX_VENV_PATH = os.path.join(BASE_VENV_PATH, "awx")

# Use SQLite for unit tests instead of PostgreSQL. If the lines below are
# commented out, Django will create the test_awx-dev database in PostgreSQL to
# run unit tests.
if "pytest" in sys.modules:
CACHES = {'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-{}'.format(str(uuid.uuid4()))}}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'awx.sqlite3'), # noqa
'TEST': {
# Test database cannot be :memory: for inventory tests.
'NAME': os.path.join(BASE_DIR, 'awx_test.sqlite3') # noqa
},
}
}

CLUSTER_HOST_ID = socket.gethostname()

AWX_CALLBACK_PROFILE = True
@ -105,11 +75,28 @@ AWX_CALLBACK_PROFILE = True
AWX_DISABLE_TASK_MANAGERS = False
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================

from .application_name import set_application_name
# Store a snapshot of default settings at this point before loading any
# customizable config files.
this_module = sys.modules[__name__]
local_vars = dir(this_module)
DEFAULTS_SNAPSHOT = {} # define after we save local_vars so we do not snapshot the snapshot
for setting in local_vars:
if setting.isupper():
DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))

set_application_name(DATABASES, CLUSTER_HOST_ID)
del local_vars # avoid temporary variables from showing up in dir(settings)
del this_module
#
###############################################################################################
#
# Any settings defined after this point will be marked as a read_only database setting
#
################################################################################################

del set_application_name
# If there is an `/etc/tower/settings.py`, include it.
# If there is a `/etc/tower/conf.d/*.py`, include them.
include(optional('/etc/tower/settings.py'), scope=locals())
include(optional('/etc/tower/conf.d/*.py'), scope=locals())

# If any local_*.py files are present in awx/settings/, use them to override
# default settings for development. If not present, we can still run using
@ -123,3 +110,11 @@ try:
except ImportError:
traceback.print_exc()
sys.exit(1)

# The below runs AFTER all of the custom settings are imported
# because conf.d files will define DATABASES and this should modify that
from .application_name import set_application_name

set_application_name(DATABASES, CLUSTER_HOST_ID) # NOQA

del set_application_name
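Why the snapshot loop switched from `setting == setting.upper()` to `setting.isupper()`: the two checks disagree for names with no cased characters. A runnable check:

    for name in ('DATABASES', '_PRIVATE', 'X509', '__', '123'):
        print(name, name == name.upper(), name.isupper())
    # '__' and '123' compare equal to their upper() but are not isupper(),
    # so the old check would also have snapshotted such names.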

@ -47,17 +47,21 @@ AWX_ISOLATION_SHOW_PATHS = [

# Store a snapshot of default settings at this point before loading any
# customizable config files.
this_module = sys.modules[__name__]
local_vars = dir(this_module)
DEFAULTS_SNAPSHOT = {} # define after we save local_vars so we do not snapshot the snapshot
for setting in local_vars:
if setting.isupper():
DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))

del local_vars # avoid temporary variables from showing up in dir(settings)
del this_module
#
###############################################################################################
#
# Any settings defined after this point will be marked as a read_only database setting
#
################################################################################################
DEFAULTS_SNAPSHOT = {}
this_module = sys.modules[__name__]
for setting in dir(this_module):
if setting == setting.upper():
DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))

# Load settings from any .py files in the global conf.d directory specified in
# the environment, defaulting to /etc/tower/conf.d/.
@ -98,8 +102,8 @@ except IOError:
else:
raise

# The below runs AFTER all of the custom settings are imported.

# The below runs AFTER all of the custom settings are imported
# because conf.d files will define DATABASES and this should modify that
from .application_name import set_application_name

set_application_name(DATABASES, CLUSTER_HOST_ID) # NOQA

@ -115,16 +115,16 @@ function AdHocCredentialStep({ credentialTypeId }) {
searchColumns={[
{
name: t`Name`,
key: 'name',
key: 'name__icontains',
isDefault: true,
},
{
name: t`Created By (Username)`,
key: 'created_by__username',
key: 'created_by__username__icontains',
},
{
name: t`Modified By (Username)`,
key: 'modified_by__username',
key: 'modified_by__username__icontains',
},
]}
sortColumns={[

@ -195,9 +195,9 @@ function getRouteConfig(userProfile = {}) {
deleteRoute('host_metrics');
deleteRouteGroup('settings');
deleteRoute('management_jobs');
if (userProfile?.isOrgAdmin) return routeConfig;
deleteRoute('topology_view');
deleteRoute('instances');
if (userProfile?.isOrgAdmin) return routeConfig;
if (!userProfile?.isNotificationAdmin) deleteRoute('notification_templates');

return routeConfig;

@ -101,10 +101,8 @@ describe('getRouteConfig', () => {
'/credential_types',
'/notification_templates',
'/instance_groups',
'/instances',
'/applications',
'/execution_environments',
'/topology_view',
]);
});

@ -237,10 +235,8 @@ describe('getRouteConfig', () => {
'/credential_types',
'/notification_templates',
'/instance_groups',
'/instances',
'/applications',
'/execution_environments',
'/topology_view',
]);
});

@ -268,10 +264,8 @@ describe('getRouteConfig', () => {
'/credential_types',
'/notification_templates',
'/instance_groups',
'/instances',
'/applications',
'/execution_environments',
'/topology_view',
]);
});
});

@ -131,12 +131,6 @@ function HostMetrics() {
>
{t`Automation`}
</HeaderCell>
<HeaderCell
sortKey="used_in_inventories"
tooltip={t`How many inventories is the host in, recomputed on a weekly schedule`}
>
{t`Inventories`}
</HeaderCell>
<HeaderCell
sortKey="deleted_counter"
tooltip={t`How many times was the host deleted`}

@ -21,7 +21,6 @@ function HostMetricsListItem({ item, isSelected, onSelect, rowIndex }) {
{formatDateString(item.last_automation)}
</Td>
<Td dataLabel={t`Automation`}>{item.automated_counter}</Td>
<Td dataLabel={t`Inventories`}>{item.used_in_inventories || 0}</Td>
<Td dataLabel={t`Deleted`}>{item.deleted_counter}</Td>
</Tr>
);

@ -17,11 +17,7 @@ import { CardBody, CardActionsRow } from 'components/Card';
import { Detail, DetailList, UserDateDetail } from 'components/DetailList';
import { VariablesDetail } from 'components/CodeEditor';
import { formatDateString, secondsToHHMMSS } from 'util/dates';
import {
WorkflowApprovalsAPI,
WorkflowJobTemplatesAPI,
WorkflowJobsAPI,
} from 'api';
import { WorkflowApprovalsAPI, WorkflowJobsAPI } from 'api';
import useRequest, { useDismissableError } from 'hooks/useRequest';
import { WorkflowApproval } from 'types';
import StatusLabel from 'components/StatusLabel';
@ -67,8 +63,10 @@ function WorkflowApprovalDetail({ workflowApproval, fetchWorkflowApproval }) {
const { error: deleteError, dismissError: dismissDeleteError } =
useDismissableError(deleteApprovalError);

const workflowJobTemplateId =
workflowApproval.summary_fields.workflow_job_template.id;
const sourceWorkflowJob =
workflowApproval?.summary_fields?.source_workflow_job;
const sourceWorkflowJobTemplate =
workflowApproval?.summary_fields?.workflow_job_template;

const {
error: fetchWorkflowJobError,
@ -77,23 +75,10 @@ function WorkflowApprovalDetail({ workflowApproval, fetchWorkflowApproval }) {
result: workflowJob,
} = useRequest(
useCallback(async () => {
if (!workflowJobTemplateId) {
return {};
}
const { data: workflowJobTemplate } =
await WorkflowJobTemplatesAPI.readDetail(workflowJobTemplateId);

let jobId = null;

if (workflowJobTemplate.summary_fields?.current_job) {
jobId = workflowJobTemplate.summary_fields.current_job.id;
} else if (workflowJobTemplate.summary_fields?.last_job) {
jobId = workflowJobTemplate.summary_fields.last_job.id;
}
const { data } = await WorkflowJobsAPI.readDetail(jobId);

if (!sourceWorkflowJob?.id) return {};
const { data } = await WorkflowJobsAPI.readDetail(sourceWorkflowJob?.id);
return data;
}, [workflowJobTemplateId]),
}, [sourceWorkflowJob?.id]),
{
workflowJob: null,
isLoading: true,
@ -116,11 +101,6 @@ function WorkflowApprovalDetail({ workflowApproval, fetchWorkflowApproval }) {
},
[addToast, fetchWorkflowApproval]
);
const sourceWorkflowJob =
workflowApproval?.summary_fields?.source_workflow_job;

const sourceWorkflowJobTemplate =
workflowApproval?.summary_fields?.workflow_job_template;

const isLoading = isDeleteLoading || isLoadingWorkflowJob;

@ -1,10 +1,6 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import {
WorkflowApprovalsAPI,
WorkflowJobTemplatesAPI,
WorkflowJobsAPI,
} from 'api';
import { WorkflowApprovalsAPI, WorkflowJobsAPI } from 'api';
import { formatDateString } from 'util/dates';
import {
mountWithContexts,
@ -23,146 +19,6 @@ jest.mock('react-router-dom', () => ({
}),
}));

const workflowJobTemplate = {
id: 8,
type: 'workflow_job_template',
url: '/api/v2/workflow_job_templates/8/',
related: {
named_url: '/api/v2/workflow_job_templates/00++/',
created_by: '/api/v2/users/1/',
modified_by: '/api/v2/users/1/',
last_job: '/api/v2/workflow_jobs/111/',
workflow_jobs: '/api/v2/workflow_job_templates/8/workflow_jobs/',
schedules: '/api/v2/workflow_job_templates/8/schedules/',
launch: '/api/v2/workflow_job_templates/8/launch/',
webhook_key: '/api/v2/workflow_job_templates/8/webhook_key/',
webhook_receiver: '/api/v2/workflow_job_templates/8/github/',
workflow_nodes: '/api/v2/workflow_job_templates/8/workflow_nodes/',
labels: '/api/v2/workflow_job_templates/8/labels/',
activity_stream: '/api/v2/workflow_job_templates/8/activity_stream/',
notification_templates_started:
'/api/v2/workflow_job_templates/8/notification_templates_started/',
notification_templates_success:
'/api/v2/workflow_job_templates/8/notification_templates_success/',
notification_templates_error:
'/api/v2/workflow_job_templates/8/notification_templates_error/',
notification_templates_approvals:
'/api/v2/workflow_job_templates/8/notification_templates_approvals/',
access_list: '/api/v2/workflow_job_templates/8/access_list/',
object_roles: '/api/v2/workflow_job_templates/8/object_roles/',
survey_spec: '/api/v2/workflow_job_templates/8/survey_spec/',
copy: '/api/v2/workflow_job_templates/8/copy/',
},
summary_fields: {
last_job: {
id: 111,
name: '00',
description: '',
finished: '2022-05-10T17:29:52.978531Z',
status: 'successful',
failed: false,
},
last_update: {
id: 111,
name: '00',
description: '',
status: 'successful',
failed: false,
},
created_by: {
id: 1,
username: 'admin',
first_name: '',
last_name: '',
},
modified_by: {
id: 1,
username: 'admin',
first_name: '',
last_name: '',
},
object_roles: {
admin_role: {
description: 'Can manage all aspects of the workflow job template',
name: 'Admin',
id: 34,
},
execute_role: {
description: 'May run the workflow job template',
name: 'Execute',
id: 35,
},
read_role: {
description: 'May view settings for the workflow job template',
name: 'Read',
id: 36,
},
approval_role: {
description: 'Can approve or deny a workflow approval node',
name: 'Approve',
id: 37,
},
},
user_capabilities: {
edit: true,
delete: true,
start: true,
schedule: true,
copy: true,
},
labels: {
count: 1,
results: [
{
id: 2,
name: 'Test2',
},
],
},
survey: {
title: '',
description: '',
},
recent_jobs: [
{
id: 111,
status: 'successful',
finished: '2022-05-10T17:29:52.978531Z',
canceled_on: null,
type: 'workflow_job',
},
{
id: 104,
status: 'failed',
finished: '2022-05-10T15:26:22.233170Z',
canceled_on: null,
type: 'workflow_job',
},
],
},
created: '2022-05-05T14:13:36.123027Z',
modified: '2022-05-05T17:44:44.071447Z',
name: '00',
description: '',
last_job_run: '2022-05-10T17:29:52.978531Z',
last_job_failed: false,
next_job_run: null,
status: 'successful',
extra_vars: '{\n "foo": "bar",\n "baz": "qux"\n}',
organization: null,
survey_enabled: true,
allow_simultaneous: true,
ask_variables_on_launch: true,
inventory: null,
limit: null,
scm_branch: '',
ask_inventory_on_launch: true,
ask_scm_branch_on_launch: true,
ask_limit_on_launch: true,
webhook_service: 'github',
webhook_credential: null,
};

const workflowJob = {
id: 111,
type: 'workflow_job',
@ -270,9 +126,6 @@ const workflowJob = {

describe('<WorkflowApprovalDetail />', () => {
beforeEach(() => {
WorkflowJobTemplatesAPI.readDetail.mockResolvedValue({
data: workflowJobTemplate,
});
WorkflowJobsAPI.readDetail.mockResolvedValue({ data: workflowJob });
});

@ -482,9 +335,6 @@ describe('<WorkflowApprovalDetail />', () => {
});

test('should not load Labels', async () => {
WorkflowJobTemplatesAPI.readDetail.mockResolvedValue({
data: workflowJobTemplate,
});
WorkflowJobsAPI.readDetail.mockResolvedValue({
data: {
...workflowApproval,
@ -621,4 +471,16 @@ describe('<WorkflowApprovalDetail />', () => {
(el) => el.length === 0
);
});

test('should fetch its workflow job details', async () => {
let wrapper;
await act(async () => {
wrapper = mountWithContexts(
<WorkflowApprovalDetail workflowApproval={workflowApproval} />
);
});
waitForElement(wrapper, 'WorkflowApprovalDetail', (el) => el.length > 0);
expect(WorkflowJobsAPI.readDetail).toHaveBeenCalledTimes(1);
expect(WorkflowJobsAPI.readDetail).toHaveBeenCalledWith(216);
});
});

@ -10,7 +10,7 @@ from awx.main.models import WorkflowJob
@pytest.mark.django_db
def test_bulk_job_launch(run_module, admin_user, job_template):
jobs = [dict(unified_job_template=job_template.id)]
run_module(
result = run_module(
'bulk_job_launch',
{
'name': "foo-bulk-job",
@ -21,6 +21,8 @@ def test_bulk_job_launch(run_module, admin_user, job_template):
},
admin_user,
)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result

bulk_job = WorkflowJob.objects.get(name="foo-bulk-job")
assert bulk_job.extra_vars == '{"animal": "owl"}'
@ -30,7 +32,7 @@ def test_bulk_job_launch(run_module, admin_user, job_template):
@pytest.mark.django_db
def test_bulk_host_create(run_module, admin_user, inventory):
hosts = [dict(name="127.0.0.1"), dict(name="foo.dns.org")]
run_module(
result = run_module(
'bulk_host_create',
{
'inventory': inventory.name,
@ -38,6 +40,8 @@ def test_bulk_host_create(run_module, admin_user, inventory):
},
admin_user,
)
assert not result.get('failed', False), result.get('msg', result)
assert result.get('changed'), result
resp_hosts = inventory.hosts.all().values_list('name', flat=True)
for h in hosts:
assert h['name'] in resp_hosts

@ -46,27 +46,24 @@
that:
- "command is changed"

- name: Timeout waiting for the command to cancel
- name: Cancel the command
ad_hoc_command_cancel:
command_id: "{{ command.id }}"
timeout: -1
register: results
ignore_errors: true

- assert:
that:
- results is failed
- "results['msg'] == 'Monitoring of ad hoc command aborted due to timeout'"
- results is changed

- block:
- name: "Wait for up to a minute until the job enters the can_cancel: False state"
debug:
msg: "The job can_cancel status has transitioned into False, we can proceed with testing"
until: not job_status
retries: 6
delay: 10
vars:
job_status: "{{ lookup('awx.awx.controller_api', 'ad_hoc_commands/'+ command.id | string +'/cancel')['can_cancel'] }}"
- name: "Wait for up to a minute until the job enters the can_cancel: False state"
debug:
msg: "The job can_cancel status has transitioned into False, we can proceed with testing"
until: not job_status
retries: 6
delay: 10
vars:
job_status: "{{ lookup('awx.awx.controller_api', 'ad_hoc_commands/'+ command.id | string +'/cancel')['can_cancel'] }}"

- name: Cancel the command with hard error if it's not running
ad_hoc_command_cancel:

@ -108,9 +108,8 @@

- assert:
that:
- wait_results is failed
- 'wait_results.status == "canceled"'
- "wait_results.msg == 'The ad hoc command - {{ command.id }}, failed'"
- wait_results is successful
- 'wait_results.status == "successful"'

- name: Delete the Credential
credential:

@ -2,6 +2,7 @@
- name: Generate a test id
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:

@ -1,7 +1,12 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:
cred_type_name: "AWX-Collection-tests-credential_type-cred-type-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
cred_type_name: "AWX-Collection-tests-credential_type-cred-type-{{ test_id }}"

- block:
- name: Add Tower credential type

@ -1,13 +1,18 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:
group_name1: "AWX-Collection-tests-group-group-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
group_name2: "AWX-Collection-tests-group-group-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
group_name3: "AWX-Collection-tests-group-group-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
inv_name: "AWX-Collection-tests-group-inv-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
host_name1: "AWX-Collection-tests-group-host-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
host_name2: "AWX-Collection-tests-group-host-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
host_name3: "AWX-Collection-tests-group-host-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
group_name1: "AWX-Collection-tests-group-group1-{{ test_id }}"
group_name2: "AWX-Collection-tests-group-group2-{{ test_id }}"
group_name3: "AWX-Collection-tests-group-group3-{{ test_id }}"
inv_name: "AWX-Collection-tests-group-inv-{{ test_id }}"
host_name1: "AWX-Collection-tests-group-host1-{{ test_id }}"
host_name2: "AWX-Collection-tests-group-host2-{{ test_id }}"
host_name3: "AWX-Collection-tests-group-host3-{{ test_id }}"

- name: Create an Inventory
inventory:
@ -117,9 +122,10 @@
state: absent
register: result

# In this case, group 2 was last a child of group1 so deleting group1 deleted group2
- assert:
that:
- "result is changed"
- "result is not changed"

- name: Delete a Group
group:

@ -1,8 +1,13 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:
host_name: "AWX-Collection-tests-host-host-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
inv_name: "AWX-Collection-tests-host-inv-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
host_name: "AWX-Collection-tests-host-host-{{ test_id }}"
inv_name: "AWX-Collection-tests-host-inv-{{ test_id }}"

- name: Create an Inventory
inventory:

@ -1,14 +1,25 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate hostnames
set_fact:
hostname1: "AWX-Collection-tests-instance1.{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}.example.com"
hostname2: "AWX-Collection-tests-instance2.{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}.example.com"
hostname3: "AWX-Collection-tests-instance3.{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}.example.com"
hostname1: "AWX-Collection-tests-instance1.{{ test_id }}.example.com"
hostname2: "AWX-Collection-tests-instance2.{{ test_id }}.example.com"
hostname3: "AWX-Collection-tests-instance3.{{ test_id }}.example.com"
register: facts

- name: Show hostnames
debug:
var: facts
- name: Get the k8s setting
set_fact:
IS_K8S: "{{ controller_settings['IS_K8S'] | default(False) }}"
vars:
controller_settings: "{{ lookup('awx.awx.controller_api', 'settings/all') }}"

- debug:
msg: "Skipping instance test since this instance is not running on a K8s platform"
when: not IS_K8S

- block:
- name: Create an instance
@ -57,3 +68,5 @@
- "{{ hostname1 }}"
- "{{ hostname2 }}"
- "{{ hostname3 }}"

when: IS_K8S

@ -2,6 +2,7 @@
- name: Generate test id
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:

@ -2,6 +2,7 @@
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:

@ -1,9 +1,14 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:
openstack_cred: "AWX-Collection-tests-inventory_source-cred-openstack-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
openstack_inv: "AWX-Collection-tests-inventory_source-inv-openstack-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
openstack_inv_source: "AWX-Collection-tests-inventory_source-inv-source-openstack-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
openstack_cred: "AWX-Collection-tests-inventory_source-cred-openstack-{{ test_id }}"
openstack_inv: "AWX-Collection-tests-inventory_source-inv-openstack-{{ test_id }}"
openstack_inv_source: "AWX-Collection-tests-inventory_source-inv-source-openstack-{{ test_id }}"

- name: Add a credential
credential:

@ -2,6 +2,7 @@
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:

@ -1,9 +1,14 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:
jt_name1: "AWX-Collection-tests-job_launch-jt1-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
jt_name2: "AWX-Collection-tests-job_launch-jt2-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
proj_name: "AWX-Collection-tests-job_launch-project-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
jt_name1: "AWX-Collection-tests-job_launch-jt1-{{ test_id }}"
jt_name2: "AWX-Collection-tests-job_launch-jt2-{{ test_id }}"
proj_name: "AWX-Collection-tests-job_launch-project-{{ test_id }}"

- name: Launch a Job Template
job_launch:

@ -2,6 +2,7 @@
- name: Generate a random string for test
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: generate random string for project
set_fact:

@ -1,8 +1,13 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate random string for template and project
set_fact:
jt_name: "AWX-Collection-tests-job_wait-long_running-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
proj_name: "AWX-Collection-tests-job_wait-long_running-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
jt_name: "AWX-Collection-tests-job_wait-long_running-{{ test_id }}"
proj_name: "AWX-Collection-tests-job_wait-long_running-{{ test_id }}"

- name: Assure that the demo project exists
project:

@ -1,7 +1,12 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:
label_name: "AWX-Collection-tests-label-label-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
label_name: "AWX-Collection-tests-label-label-{{ test_id }}"

- name: Create a Label
label:

@ -101,6 +101,9 @@
set_fact:
users: "{{ query(plugin_name, 'users', query_params={ 'username__endswith': test_id, 'page_size': 2 }, return_ids=True ) }}"

- debug:
msg: "{{ users }}"

- name: Assert that user list has 2 ids only and that they are strings, not ints
assert:
that:

@ -1,12 +1,17 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:
slack_not: "AWX-Collection-tests-notification_template-slack-not-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
webhook_not: "AWX-Collection-tests-notification_template-wehbook-not-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
email_not: "AWX-Collection-tests-notification_template-email-not-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
twillo_not: "AWX-Collection-tests-notification_template-twillo-not-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
pd_not: "AWX-Collection-tests-notification_template-pd-not-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
irc_not: "AWX-Collection-tests-notification_template-irc-not-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
slack_not: "AWX-Collection-tests-notification_template-slack-not-{{ test_id }}"
webhook_not: "AWX-Collection-tests-notification_template-wehbook-not-{{ test_id }}"
email_not: "AWX-Collection-tests-notification_template-email-not-{{ test_id }}"
twillo_not: "AWX-Collection-tests-notification_template-twillo-not-{{ test_id }}"
pd_not: "AWX-Collection-tests-notification_template-pd-not-{{ test_id }}"
irc_not: "AWX-Collection-tests-notification_template-irc-not-{{ test_id }}"

- name: Create Slack notification with custom messages
notification_template:

@ -2,6 +2,7 @@
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate an org name
set_fact:

@ -1,13 +1,18 @@
---
- name: Generate a test ID
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
when: test_id is not defined

- name: Generate names
set_fact:
project_name1: "AWX-Collection-tests-project-project1-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
project_name2: "AWX-Collection-tests-project-project2-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
project_name3: "AWX-Collection-tests-project-project3-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
jt1: "AWX-Collection-tests-project-jt1-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
scm_cred_name: "AWX-Collection-tests-project-scm-cred-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
org_name: "AWX-Collection-tests-project-org-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
cred_name: "AWX-Collection-tests-project-cred-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
project_name1: "AWX-Collection-tests-project-project1-{{ test_id }}"
project_name2: "AWX-Collection-tests-project-project2-{{ test_id }}"
project_name3: "AWX-Collection-tests-project-project3-{{ test_id }}"
jt1: "AWX-Collection-tests-project-jt1-{{ test_id }}"
scm_cred_name: "AWX-Collection-tests-project-scm-cred-{{ test_id }}"
org_name: "AWX-Collection-tests-project-org-{{ test_id }}"
cred_name: "AWX-Collection-tests-project-cred-{{ test_id }}"

- block:
- name: Create an SCM Credential
|
||||
|
||||
@ -1,56 +0,0 @@
---
- name: Load the UI settings
  set_fact:
    project_base_dir: "{{ controller_settings.project_base_dir }}"
  vars:
    controller_settings: "{{ lookup('awx.awx.controller_api', 'config/') }}"

- inventory:
    name: localhost
    organization: Default

- host:
    name: localhost
    inventory: localhost
    variables:
      ansible_connection: local

- name: Create an unused SSH / Machine credential
  credential:
    name: dummy
    credential_type: Machine
    inputs:
      ssh_key_data: |
        -----BEGIN EC PRIVATE KEY-----
        MHcCAQEEIIUl6R1xgzR6siIUArz4XBPtGZ09aetma2eWf1v3uYymoAoGCCqGSM49
        AwEHoUQDQgAENJNjgeZDAh/+BY860s0yqrLDprXJflY0GvHIr7lX3ieCtrzOMCVU
        QWzw35pc5tvuP34SSi0ZE1E+7cVMDDOF3w==
        -----END EC PRIVATE KEY-----
    organization: Default

- block:
    - name: Add a path to a setting
      settings:
        name: AWX_ISOLATION_SHOW_PATHS
        value: "[{{ project_base_dir }}]"

    - name: Create a directory for manual project
      ad_hoc_command:
        credential: dummy
        inventory: localhost
        job_type: run
        module_args: "mkdir -p {{ project_base_dir }}/{{ project_dir_name }}"
        module_name: command
        wait: true

  always:
    - name: Delete path from setting
      settings:
        name: AWX_ISOLATION_SHOW_PATHS
        value: []

    - name: Delete dummy credential
      credential:
        name: dummy
        credential_type: Machine
        state: absent
@ -1,38 +0,0 @@
---
- name: Generate random string for project
  set_fact:
    rand_string: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"

- name: Generate manual project name
  set_fact:
    project_name: "Manual_Project_{{ rand_string }}"

- name: Generate manual project dir name
  set_fact:
    project_dir_name: "proj_{{ rand_string }}"

- name: Create a project directory for manual project
  import_tasks: create_project_dir.yml

- name: Create a manual project
  project:
    name: "{{ project_name }}"
    organization: Default
    scm_type: manual
    local_path: "{{ project_dir_name }}"
  register: result

- assert:
    that:
      - "result is changed"

- name: Delete a manual project
  project:
    name: "{{ project_name }}"
    organization: Default
    state: absent
  register: result

- assert:
    that:
      - "result is changed"
@ -2,6 +2,7 @@
- name: Generate a test id
  set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: Generate names
  set_fact:
@ -2,6 +2,7 @@
- name: Generate a random string for test
  set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: generate random string for schedule
  set_fact:
@ -1,7 +1,12 @@
---
- name: Generate a test ID
  set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: Generate names
  set_fact:
    team_name: "AWX-Collection-tests-team-team-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    team_name: "AWX-Collection-tests-team-team-{{ test_id }}"

- name: Attempt to add a team to a non-existant Organization
  team:
@ -1,7 +1,12 @@
---
- name: Generate a test ID
  set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: Generate names
  set_fact:
    token_description: "AWX-Collection-tests-token-description-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    token_description: "AWX-Collection-tests-token-description-{{ test_id }}"

- name: Try to use a token as a dict which is missing the token parameter
  job_list:
@ -1,7 +1,12 @@
---
- name: Generate a test ID
  set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: Generate names
  set_fact:
    username: "AWX-Collection-tests-user-user-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    username: "AWX-Collection-tests-user-user-{{ test_id }}"

- name: Create a User
  user:
@ -131,10 +136,6 @@
      'Can not verify ssl with non-https protocol' in result.exception"

- block:
    - name: Generate a test ID
      set_fact:
        test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"

    - name: Generate an org name
      set_fact:
        org_name: "AWX-Collection-tests-organization-org-{{ test_id }}"
@ -2,13 +2,15 @@
- name: Generate a random string for names
  set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    test_prefix: AWX-Collection-tests-workflow_approval
  when: test_id is not defined

- name: Generate random names for test objects
  set_fact:
    org_name: "{{ test_prefix }}-org-{{ test_id }}"
    approval_node_name: "{{ test_prefix }}-node-{{ test_id }}"
    wfjt_name: "{{ test_prefix }}-wfjt-{{ test_id }}"
  vars:
    test_prefix: AWX-Collection-tests-workflow_approval

- block:
    - name: Create a new organization for test isolation
@ -2,6 +2,7 @@
- name: Generate a random string for names
  set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: test_id is not defined

- name: Generate random names for test objects
  set_fact:
@ -17,8 +18,8 @@
    webhook_wfjt_name: "AWX-Collection-tests-workflow_job_template-webhook-wfjt-{{ test_id }}"
    email_not: "AWX-Collection-tests-job_template-email-not-{{ test_id }}"
    webhook_notification: "AWX-Collection-tests-notification_template-wehbook-not-{{ test_id }}"
    project_inv: "AWX-Collection-tests-inventory_source-inv-project-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    project_inv_source: "AWX-Collection-tests-inventory_source-inv-source-project-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
    project_inv: "AWX-Collection-tests-inventory_source-inv-project-{{ test_id }}"
    project_inv_source: "AWX-Collection-tests-inventory_source-inv-source-project-{{ test_id }}"
    github_webhook_credential_name: "AWX-Collection-tests-credential-webhook-{{ test_id }}_github"
    ee1: "AWX-Collection-tests-workflow_job_template-ee1-{{ test_id }}"
    label1: "AWX-Collection-tests-workflow_job_template-l1-{{ test_id }}"
@ -12,8 +12,13 @@ To encrypt secret fields, AWX uses AES in CBC mode with a 256-bit key for encryption
If necessary, credentials and encrypted settings can be extracted using the AWX shell:

```python
# awx-manage shell_plus
$ awx-manage shell_plus
>>> from awx.main.utils import decrypt_field
>>> cred = Credential.objects.get(name="my private key")
>>> print(decrypt_field(cred, "ssh_key_data"))
>>> print(decrypt_field(Credential.objects.get(name="my private key"), "ssh_key_data")) # Example for a credential
>>> print(decrypt_field(Setting.objects.get(key='SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET'), 'value')) # Example for a setting
```

If you are running a kubernetes based deployment, you can execute awx-manage like this:
```bash
$ kubectl exec --stdin --tty [instance name]-task-[...] -c [instance name]-task -- awx-manage shell_plus
```
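For orientation, the hunk's context line names the general construction (AES in CBC mode with a 256-bit key). Below is a minimal illustrative sketch of that construction using the `cryptography` package; it is not AWX's actual implementation (which lives in `awx.main.utils` and layers key derivation on top), and the function name and key handling are invented for the example:

```python
# Illustrative AES-256-CBC sketch only, not AWX's real encrypt path.
import os

from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes


def encrypt_field_value(key: bytes, plaintext: bytes) -> bytes:
    assert len(key) == 32                # 256-bit key
    iv = os.urandom(16)                  # CBC requires a fresh 16-byte IV per message
    padder = padding.PKCS7(128).padder() # pad plaintext to the AES block size
    padded = padder.update(plaintext) + padder.finalize()
    encryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).encryptor()
    return iv + encryptor.update(padded) + encryptor.finalize()
```

For reading values back out of a live system, the supported route is the `decrypt_field` helper shown in the hunk above, not reimplementing the cipher.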
@ -1,5 +1,5 @@
[pytest]
DJANGO_SETTINGS_MODULE = awx.settings.development
DJANGO_SETTINGS_MODULE = awx.main.tests.settings_for_test
python_paths = /var/lib/awx/venv/tower/lib/python3.8/site-packages
site_dirs = /var/lib/awx/venv/tower/lib/python3.8/site-packages
python_files = *.py
@ -4,8 +4,7 @@
  gather_facts: true
  tasks:
    - name: Get version from SCM if not explicitly provided
      shell: |
        make print-VERSION | cut -d + -f -1
      command: make version-for-buildyml
      args:
        chdir: '../../'
      register: scm_version
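One reason this swap works: Ansible's `command` module does not invoke a shell, so pipelines like the old `| cut` are only legal under `shell`; once the version transformation happens inside a single make target, the task can move to the stricter module. A hypothetical minimal pair showing the distinction (task names invented):

```yaml
- name: Pipelines need the shell module
  ansible.builtin.shell: make print-VERSION | cut -d + -f -1

- name: A single executable invocation can use the stricter command module
  ansible.builtin.command: make version-for-buildyml
```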
@ -232,7 +232,7 @@ ADD {{ template_dest }}/supervisor_rsyslog.conf /etc/supervisord_rsyslog.conf
{% endif %}

{% if (build_dev|bool) or (kube_dev|bool) %}
ADD tools/docker-compose/awx.egg-link /tmp/awx.egg-link
RUN echo /awx_devel > /var/lib/awx/venv/awx/lib/python3.9/site-packages/awx.egg-link
ADD tools/docker-compose/awx-manage /usr/local/bin/awx-manage
ADD tools/scripts/awx-python /usr/bin/awx-python
{% endif %}
@ -285,8 +285,7 @@ RUN for dir in \
    /var/lib/shared/overlay-layers/layers.lock \
    /var/lib/shared/vfs-images/images.lock \
    /var/lib/shared/vfs-layers/layers.lock \
    /var/run/nginx.pid \
    /var/lib/awx/venv/awx/lib/python3.9/site-packages/awx.egg-link ; \
    /var/run/nginx.pid; \
  do touch $file ; chmod g+rw $file ; done && \
  echo "\setenv PAGER 'less -SXF'" > /var/lib/awx/.psqlrc
{% endif %}
@ -1,8 +1,7 @@
# Copyright (c) 2015 Ansible, Inc. (formerly AnsibleWorks, Inc.)
# All Rights Reserved.

# Local Django settings for AWX project. Rename to "local_settings.py" and
# edit as needed for your development environment.
# Local Django settings for AWX project.

# All variables defined in awx/settings/development.py will already be loaded
# into the global namespace before this file is loaded, to allow for reading
@ -1 +0,0 @@
/awx_devel
@ -5,8 +5,6 @@ cd /awx_devel
make clean
make awx-link

cp tools/docker-compose/ansible/roles/sources/files/local_settings.py awx/settings/local_settings.py

if [[ ! $@ ]]; then
  make test
else
@ -4,7 +4,7 @@ minfds = 4096
nodaemon=true

[program:awx-dispatcher]
command = make dispatcher
command = awx-manage run_dispatcher
autorestart = true
stopasgroup=true
killasgroup=true
@ -12,7 +12,7 @@ stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-receiver]
command = make receiver
command = awx-manage run_callback_receiver
autorestart = true
stopasgroup=true
killasgroup=true
@ -20,7 +20,7 @@ stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-wsrelay]
command = make run-wsrelay
command = awx-manage run_wsrelay
autorestart = true
stopasgroup=true
@ -29,7 +29,7 @@ stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-heartbeet]
command = make run-heartbeet
command = awx-manage run_heartbeet
autorestart = true
stopasgroup=true
@ -38,7 +38,7 @@ stdout_events_enabled = true
stderr_events_enabled = true

[program:awx-rsyslog-configurer]
command = make run-rsyslog-configurer
command = awx-manage run_rsyslog_configurer
autorestart = true
stopasgroup=true
killasgroup=true
@ -48,7 +48,7 @@ stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0

[program:awx-cache-clear]
command = make run-cache-clear
command = awx-manage run_cache_clear
autorestart = true
stopasgroup=true
killasgroup=true
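Every one of these supervisord hunks makes the same substitution: each `[program:...]` section's `command` stops going through a `make` wrapper target and invokes the corresponding `awx-manage` subcommand directly, presumably so supervisord supervises and signals the actual worker process rather than an intermediate make parent. A minimal sketch of the resulting section shape (keys copied from the hunks above, comments added for explanation):

```ini
[program:awx-dispatcher]
; Direct awx-manage invocation: supervisord owns the worker itself,
; with no make process sitting between it and the signals it sends.
command = awx-manage run_dispatcher
autorestart = true
stopasgroup = true
killasgroup = true
```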