Mirror of https://github.com/ansible/awx.git (synced 2026-01-23 23:41:23 -03:30)
Merge branch 'devel' into inventories-i18n

commit 929ab0d5ce
Makefile (52 changes)
@@ -1,4 +1,4 @@
-PYTHON = python
+PYTHON ?= python
 PYTHON_VERSION = $(shell $(PYTHON) -c "from distutils.sysconfig import get_python_version; print(get_python_version())")
 SITELIB=$(shell $(PYTHON) -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")
 OFFICIAL ?= no
@@ -8,7 +8,6 @@ NODE ?= node
 NPM_BIN ?= npm
 DEPS_SCRIPT ?= packaging/bundle/deps.py
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
-DOCKER_HOST_IP=`python -c "import socket; print(socket.gethostbyname(socket.gethostname()))"`
 
 GCLOUD_AUTH ?= $(shell gcloud auth print-access-token)
 # NOTE: This defaults the container image version to the branch that's active
@@ -73,8 +72,10 @@ else
 SETUP_TAR_NAME=$(NAME)-setup-$(VERSION)-$(RELEASE)
 SDIST_TAR_NAME=$(NAME)-$(VERSION)-$(RELEASE)
 endif
 
+SDIST_COMMAND ?= sdist
-SDIST_TAR_FILE=$(SDIST_TAR_NAME).tar.gz
+SDIST_TAR_FILE ?= $(SDIST_TAR_NAME).tar.gz
+
 SETUP_TAR_FILE=$(SETUP_TAR_NAME).tar.gz
 SETUP_TAR_LINK=$(NAME)-setup-latest.tar.gz
 SETUP_TAR_CHECKSUM=$(NAME)-setup-CHECKSUM
@@ -684,6 +685,9 @@ release_clean:
 dist/$(SDIST_TAR_FILE): ui-release
 	BUILD="$(BUILD)" $(PYTHON) setup.py $(SDIST_COMMAND)
 
+dist/ansible-tower.tar.gz: ui-release
+	OFFICIAL="yes" $(PYTHON) setup.py sdist
+
 sdist: dist/$(SDIST_TAR_FILE)
 	@echo "#############################################"
 	@echo "Artifacts:"
@@ -953,13 +957,13 @@ docker-isolated:
 
 # Docker Compose Development environment
 docker-compose: docker-auth
-	DOCKER_HOST_IP=$(DOCKER_HOST_IP) TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml up --no-recreate tower
+	TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml up --no-recreate tower
 
 docker-compose-cluster: docker-auth
-	DOCKER_HOST_IP=$(DOCKER_HOST_IP) TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose-cluster.yml up
+	TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose-cluster.yml up
 
 docker-compose-test: docker-auth
-	cd tools && DOCKER_HOST_IP=$(DOCKER_HOST_IP) TAG=$(COMPOSE_TAG) docker-compose run --rm --service-ports tower /bin/bash
+	cd tools && TAG=$(COMPOSE_TAG) docker-compose run --rm --service-ports tower /bin/bash
 
 docker-compose-build: tower-devel-build tower-isolated-build
 
@@ -983,10 +987,10 @@ docker-refresh: docker-clean docker-compose
 
 # Docker Development Environment with Elastic Stack Connected
 docker-compose-elk: docker-auth
-	DOCKER_HOST_IP=$(DOCKER_HOST_IP) TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
+	TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
 
 docker-compose-cluster-elk: docker-auth
-	DOCKER_HOST_IP=$(DOCKER_HOST_IP) TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose-cluster.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
+	TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose-cluster.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
 
 clean-elk:
 	docker stop tools_kibana_1
@@ -998,3 +1002,35 @@ clean-elk:
 
 psql-container:
 	docker run -it --net tools_default --rm postgres:9.4.1 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
+
+# Openshift placeholders, these are good for bootstrapping a totally fresh Openshift Node but not for re-running
+# So you may want to pick and choose the functionality in these targets based on what you are doing
+openshift-production-build: dist/ansible-tower.tar.gz
+	docker build -t ansible/tower_web -f installer/openshift/Dockerfile .
+	docker build -t ansible/tower_task -f installer/openshift/Dockerfile.celery .
+
+openshift-production-tag: openshift-production-build
+	docker tag ansible/tower_web:latest 172.30.1.1:5000/tower/tower_web:latest
+	docker tag ansible/tower_task:latest 172.30.1.1:5000/tower/tower_task:latest
+
+openshift-image-push: openshift-production-tag
+	oc login -u developer && \
+	docker login -u developer -p $(shell oc whoami -t) 172.30.1.1:5000 && \
+	docker push 172.30.1.1:5000/tower/tower_web:latest && \
+	docker push 172.30.1.1:5000/tower/tower_task:latest
+
+openshift-preconfig:
+	oc login -u developer || true && \
+	oc new-project tower || true && \
+	oc adm policy add-role-to-user admin developer -n tower
+
+openshift-deploy: openshift-preconfig openshift-image-push
+	oc login -u developer && \
+	oc new-app --template=postgresql-persistent -e MEMORY_LIMIT=512Mi -e NAMESPACE=openshift -e DATABASE_SERVICE_NAME=postgresql -e POSTGRESQL_USER=tower -e POSTGRESQL_PASSWORD=password123 -e POSTGRESQL_DATABASE=tower -e VOLUME_CAPACITY=1Gi -e POSTGRESQL_VERSION=9.5 -n tower && \
+	echo "Waiting for PG to come online" && \
+	sleep 15 && \
+	oc apply -f installer/openshift/config/configmap.yml && \
+	oc apply -f installer/openshift/config/deployment.yml
+
+openshift-delete:
+	oc delete -f installer/openshift/config/deployment.yml
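The recurring switch from `=` to `?=` in the Makefile hunks above is make's conditional assignment: the variable is set only if it is not already defined, so values such as PYTHON, SDIST_COMMAND, and SDIST_TAR_FILE can now be overridden from the environment or the command line (for example, `make sdist PYTHON=python2 SDIST_COMMAND=sdist_awx`; an illustrative invocation, not one documented by this commit).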
@@ -76,7 +76,8 @@ SUMMARIZABLE_FK_FIELDS = {
                                            'total_inventory_sources',
                                            'inventory_sources_with_failures',
                                            'organization_id',
-                                           'kind'),
+                                           'kind',
+                                           'insights_credential_id',),
     'host': DEFAULT_SUMMARY_FIELDS + ('has_active_failures',
                                       'has_inventory_sources'),
     'group': DEFAULT_SUMMARY_FIELDS + ('has_active_failures',
@@ -298,7 +299,8 @@ class BaseSerializer(serializers.ModelSerializer):
     def get_related(self, obj):
         res = OrderedDict()
         view = self.context.get('view', None)
-        if view and hasattr(view, 'retrieve') and type(obj) in settings.NAMED_URL_GRAPH:
+        if view and (hasattr(view, 'retrieve') or view.request.method == 'POST') and \
+                type(obj) in settings.NAMED_URL_GRAPH:
             original_url = self.get_url(obj)
             if not original_url.startswith('/api/v1'):
                 res['named_url'] = self._generate_named_url(
@@ -1145,7 +1147,6 @@ class InventorySerializer(BaseSerializerWithVariables):
             update_inventory_sources = self.reverse('api:inventory_inventory_sources_update', kwargs={'pk': obj.pk}),
             activity_stream = self.reverse('api:inventory_activity_stream_list', kwargs={'pk': obj.pk}),
             job_templates = self.reverse('api:inventory_job_template_list', kwargs={'pk': obj.pk}),
-            scan_job_templates = self.reverse('api:inventory_scan_job_template_list', kwargs={'pk': obj.pk}),
             ad_hoc_commands = self.reverse('api:inventory_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
             access_list = self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}),
             object_roles = self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
@@ -1652,7 +1653,7 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
             raise serializers.ValidationError({"detail": _("Inventory controlled by project-following SCM.")})
         elif source=='scm' and not overwrite_vars:
             raise serializers.ValidationError({"detail": _(
-                "SCM type sources must set `overwrite_vars` to `true` until a future Tower release.")})
+                "SCM type sources must set `overwrite_vars` to `true`.")})
 
         return super(InventorySourceSerializer, self).validate(attrs)
 
@@ -1894,7 +1895,7 @@ class CredentialTypeSerializer(BaseSerializer):
     def validate(self, attrs):
         if self.instance and self.instance.managed_by_tower:
             raise PermissionDenied(
-                detail=_("Modifications not allowed for credential types managed by Tower")
+                detail=_("Modifications not allowed for managed credential types")
             )
         if self.instance and self.instance.credentials.exists():
             if 'inputs' in attrs and attrs['inputs'] != self.instance.inputs:
@@ -1922,6 +1923,17 @@ class CredentialTypeSerializer(BaseSerializer):
             )
         return res
 
+    def to_representation(self, data):
+        value = super(CredentialTypeSerializer, self).to_representation(data)
+
+        # translate labels and help_text for credential fields "managed by Tower"
+        if value.get('managed_by_tower'):
+            for field in value.get('inputs', {}).get('fields', []):
+                field['label'] = _(field['label'])
+                if 'help_text' in field:
+                    field['help_text'] = _(field['help_text'])
+        return value
+
 
 # TODO: remove when API v1 is removed
 @six.add_metaclass(BaseSerializerMetaclass)
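The new to_representation pass works because labels and help text on managed credential types are stored as English msgids; running them through gettext while rendering localizes each API response for the requesting user. A minimal sketch of the idea outside DRF (the 'fr' locale and the sample string are illustrative assumptions):

from django.utils.translation import activate, ugettext as _

field = {'label': 'Username'}   # stored msgid on a managed input field

activate('fr')                      # normally done by Django's locale middleware
field['label'] = _(field['label'])  # catalog lookup happens at render time, per request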
@@ -2334,8 +2346,7 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
         if 'project' in self.fields and 'playbook' in self.fields:
             project = attrs.get('project', self.instance and self.instance.project or None)
             playbook = attrs.get('playbook', self.instance and self.instance.playbook or '')
-            job_type = attrs.get('job_type', self.instance and self.instance.job_type or None)
-            if not project and job_type != PERM_INVENTORY_SCAN:
+            if not project:
                 raise serializers.ValidationError({'project': _('This field is required.')})
             if project and project.scm_type and playbook and force_text(playbook) not in project.playbook_files:
                 raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
@@ -2406,26 +2417,18 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
         def get_field_from_model_or_attrs(fd):
             return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
 
-        survey_enabled = get_field_from_model_or_attrs('survey_enabled')
-        job_type = get_field_from_model_or_attrs('job_type')
         inventory = get_field_from_model_or_attrs('inventory')
         credential = get_field_from_model_or_attrs('credential')
         project = get_field_from_model_or_attrs('project')
 
         prompting_error_message = _("Must either set a default value or ask to prompt on launch.")
-        if job_type == "scan":
-            if inventory is None or attrs.get('ask_inventory_on_launch', False):
-                raise serializers.ValidationError({'inventory': _('Scan jobs must be assigned a fixed inventory.')})
-        elif project is None:
+        if project is None:
             raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")})
         elif credential is None and not get_field_from_model_or_attrs('ask_credential_on_launch'):
             raise serializers.ValidationError({'credential': prompting_error_message})
         elif inventory is None and not get_field_from_model_or_attrs('ask_inventory_on_launch'):
             raise serializers.ValidationError({'inventory': prompting_error_message})
 
-        if survey_enabled and job_type == PERM_INVENTORY_SCAN:
-            raise serializers.ValidationError({'survey_enabled': _('Survey Enabled cannot be used with scan jobs.')})
-
         return super(JobTemplateSerializer, self).validate(attrs)
 
     def validate_extra_vars(self, value):
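One subtlety in get_field_from_model_or_attrs: the `and ... or None` idiom treats any falsy saved value as unset, so a stored False, 0, or empty string comes back as None. A small standalone illustration (not AWX code):

def get_field(attrs, instance, fd):
    return attrs.get(fd, instance and getattr(instance, fd) or None)

class JT(object):
    survey_enabled = False

print(get_field({}, JT(), 'survey_enabled'))  # prints None, not False

This is harmless for the fields validated here, which are foreign-key objects rather than booleans, which is presumably why the idiom is considered safe in context.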
@@ -2568,7 +2571,7 @@ class JobRelaunchSerializer(JobSerializer):
         obj = self.context.get('obj')
         if not obj.credential:
             raise serializers.ValidationError(dict(credential=[_("Credential not found or deleted.")]))
-        if obj.job_type != PERM_INVENTORY_SCAN and obj.project is None:
+        if obj.project is None:
             raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")]))
         if obj.inventory is None:
             raise serializers.ValidationError(dict(errors=[_("Job Template Inventory is missing or undefined.")]))
@@ -97,7 +97,6 @@ inventory_urls = patterns('awx.api.views',
     url(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', 'inventory_inventory_sources_update'),
     url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'inventory_activity_stream_list'),
     url(r'^(?P<pk>[0-9]+)/job_templates/$', 'inventory_job_template_list'),
-    url(r'^(?P<pk>[0-9]+)/scan_job_templates/$', 'inventory_scan_job_template_list'),
     url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'inventory_ad_hoc_commands_list'),
     url(r'^(?P<pk>[0-9]+)/access_list/$', 'inventory_access_list'),
     url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_object_roles_list'),
@@ -213,7 +213,7 @@ class ApiV2RootView(ApiVersionRootView):
 
 
 class ApiV1PingView(APIView):
-    """A simple view that reports very basic information about this Tower
+    """A simple view that reports very basic information about this
     instance, which is acceptable to be public information.
     """
     permission_classes = (AllowAny,)
@@ -222,7 +222,7 @@ class ApiV1PingView(APIView):
     new_in_210 = True
 
     def get(self, request, format=None):
-        """Return some basic information about this Tower instance.
+        """Return some basic information about this instance.
 
         Everything returned here should be considered public / insecure, as
         this requires no auth and is intended for use by the installer process.
@@ -320,7 +320,7 @@ class ApiV1ConfigView(APIView):
         try:
             data_actual = json.dumps(request.data)
         except Exception:
-            logger.info(smart_text(u"Invalid JSON submitted for Tower license."),
+            logger.info(smart_text(u"Invalid JSON submitted for license."),
                         extra=dict(actor=request.user.username))
             return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
         try:
@@ -328,7 +328,7 @@ class ApiV1ConfigView(APIView):
             license_data = json.loads(data_actual)
             license_data_validated = TaskEnhancer(**license_data).validate_enhancements()
         except Exception:
-            logger.warning(smart_text(u"Invalid Tower license submitted."),
+            logger.warning(smart_text(u"Invalid license submitted."),
                            extra=dict(actor=request.user.username))
             return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
 
@@ -338,7 +338,7 @@ class ApiV1ConfigView(APIView):
             settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
             return Response(license_data_validated)
 
-        logger.warning(smart_text(u"Invalid Tower license submitted."),
+        logger.warning(smart_text(u"Invalid license submitted."),
                        extra=dict(actor=request.user.username))
         return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)
 
@@ -813,7 +813,7 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     def create(self, request, *args, **kwargs):
         """Create a new organzation.
 
-        If there is already an organization and the license of the Tower
+        If there is already an organization and the license of this
         instance does not permit multiple organizations, then raise
         LicenseForbids.
         """
@@ -822,7 +822,7 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
         # if no organizations exist in the system.
         if (not feature_enabled('multiple_organizations') and
                 self.model.objects.exists()):
-            raise LicenseForbids(_('Your Tower license only permits a single '
+            raise LicenseForbids(_('Your license only permits a single '
                                    'organization to exist.'))
 
         # Okay, create the organization as usual.
@@ -858,10 +858,8 @@ class OrganizationDetail(RetrieveUpdateDestroyAPIView):
             organization__id=org_id).count()
         org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
             organization__id=org_id).count()
-        org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).exclude(
-            job_type='scan').filter(project__organization__id=org_id).count()
-        org_counts['job_templates'] += JobTemplate.accessible_objects(**access_kwargs).filter(
-            job_type='scan').filter(inventory__organization__id=org_id).count()
+        org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
+            project__organization__id=org_id).count()
 
         full_context['related_field_counts'] = {}
         full_context['related_field_counts'][org_id] = org_counts
@@ -1591,7 +1589,7 @@ class CredentialTypeDetail(RetrieveUpdateDestroyAPIView):
     def destroy(self, request, *args, **kwargs):
         instance = self.get_object()
         if instance.managed_by_tower:
-            raise PermissionDenied(detail=_("Deletion not allowed for credential types managed by Tower"))
+            raise PermissionDenied(detail=_("Deletion not allowed for managed credential types"))
         if instance.credentials.exists():
             raise PermissionDenied(detail=_("Credential types that are in use cannot be deleted"))
         return super(CredentialTypeDetail, self).destroy(request, *args, **kwargs)
@@ -1907,21 +1905,6 @@ class InventoryJobTemplateList(SubListAPIView):
         return qs.filter(inventory=parent)
 
 
-class InventoryScanJobTemplateList(SubListAPIView):
-
-    model = JobTemplate
-    serializer_class = JobTemplateSerializer
-    parent_model = Inventory
-    relationship = 'jobtemplates'
-    new_in_220 = True
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        qs = self.request.user.get_queryset(self.model)
-        return qs.filter(job_type=PERM_INVENTORY_SCAN, inventory=parent)
-
-
 class HostList(ListCreateAPIView):
 
     always_allow_superuser = False
@@ -86,7 +86,7 @@ class SettingsRegistry(object):
             categories[category_slug] = kwargs.get('category', None) or category_slug
         return categories
 
-    def get_registered_settings(self, category_slug=None, read_only=None, features_enabled=None):
+    def get_registered_settings(self, category_slug=None, read_only=None, features_enabled=None, slugs_to_ignore=set()):
         setting_names = []
         if category_slug == 'user-defaults':
             category_slug = 'user'
@@ -95,6 +95,8 @@ class SettingsRegistry(object):
         for setting, kwargs in self._registry.items():
             if category_slug not in {None, 'all', kwargs.get('category_slug', None)}:
                 continue
+            if kwargs.get('category_slug', None) in slugs_to_ignore:
+                continue
             if read_only in {True, False} and kwargs.get('read_only', False) != read_only:
                 # Note: Doesn't catch fields that set read_only via __init__;
                 # read-only field kwargs should always include read_only=True.
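One detail worth flagging in the new signature: slugs_to_ignore=set() is a shared mutable default, a classic Python pitfall. It is harmless here because the method only reads the set, but the usual defensive spelling is a None sentinel. A sketch of the equivalent filtering with that spelling (simplified, not the committed code):

def filter_settings(registry, slugs_to_ignore=None):
    """Fresh set per call instead of one default instance shared across calls."""
    if slugs_to_ignore is None:
        slugs_to_ignore = set()
    return [name for name, kwargs in registry.items()
            if kwargs.get('category_slug') not in slugs_to_ignore]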
@@ -70,6 +70,10 @@ class SettingSingletonSerializer(serializers.Serializer):
             category_slug = self.context['view'].kwargs.get('category_slug', 'all')
         except (KeyError, AttributeError):
             category_slug = ''
+        if self.context['view'].kwargs.get('category_slug', '') == 'all':
+            for validate_func in settings_registry._validate_registry.values():
+                attrs = validate_func(self, attrs)
+            return attrs
         custom_validate = settings_registry.get_registered_validate_func(category_slug)
         return custom_validate(self, attrs) if custom_validate else attrs
 
@@ -19,7 +19,7 @@ from rest_framework import status
 # Tower
 from awx.api.generics import * # noqa
 from awx.api.permissions import IsSuperUser
-from awx.api.versioning import reverse
+from awx.api.versioning import reverse, get_request_version
 from awx.main.utils import * # noqa
 from awx.main.utils.handlers import BaseHTTPSHandler, LoggingConnectivityException
 from awx.main.tasks import handle_setting_changes
@@ -31,6 +31,13 @@ from awx.conf import settings_registry
 
 SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'name'))
 
+VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE = {
+    1: set([
+        'named-url',
+    ]),
+    2: set([]),
+}
+
 
 class SettingCategoryList(ListAPIView):
 
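The table above drives per-version filtering of settings categories: requests served under /api/v1/ never see the named-url category, while v2 sees everything. A condensed illustration of the lookup (in the real code, get_request_version derives the integer version from the request):

VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE = {1: set(['named-url']), 2: set()}

def visible_slugs(all_slugs, api_version):
    excluded = VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[api_version]
    return [slug for slug in all_slugs if slug not in excluded]

print(visible_slugs(['system', 'named-url'], 1))  # ['system']
print(visible_slugs(['system', 'named-url'], 2))  # ['system', 'named-url']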
@@ -50,6 +57,8 @@ class SettingCategoryList(ListAPIView):
         else:
             categories = {}
         for category_slug in sorted(categories.keys()):
+            if category_slug in VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]:
+                continue
             url = reverse('api:setting_singleton_detail', kwargs={'category_slug': category_slug}, request=self.request)
             setting_categories.append(SettingCategory(url, category_slug, categories[category_slug]))
         return setting_categories
@@ -66,6 +75,8 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
     def get_queryset(self):
         self.category_slug = self.kwargs.get('category_slug', 'all')
         all_category_slugs = settings_registry.get_registered_categories(features_enabled=get_licensed_features()).keys()
+        for slug_to_delete in VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]:
+            all_category_slugs.remove(slug_to_delete)
         if self.request.user.is_superuser or getattr(self.request.user, 'is_system_auditor', False):
             category_slugs = all_category_slugs
         else:
@@ -75,7 +86,10 @@
         if self.category_slug not in category_slugs:
             raise PermissionDenied()
 
-        registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug, read_only=False, features_enabled=get_licensed_features())
+        registered_settings = settings_registry.get_registered_settings(
+            category_slug=self.category_slug, read_only=False, features_enabled=get_licensed_features(),
+            slugs_to_ignore=VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]
+        )
         if self.category_slug == 'user':
             return Setting.objects.filter(key__in=registered_settings, user=self.request.user)
         else:
@@ -83,7 +97,10 @@
 
     def get_object(self):
         settings_qs = self.get_queryset()
-        registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug, features_enabled=get_licensed_features())
+        registered_settings = settings_registry.get_registered_settings(
+            category_slug=self.category_slug, features_enabled=get_licensed_features(),
+            slugs_to_ignore=VERSION_SPECIFIC_CATEGORIES_TO_EXCLUDE[get_request_version(self.request)]
+        )
         all_settings = {}
         for setting in settings_qs:
             all_settings[setting.key] = setting.value
@@ -161,6 +178,12 @@ class SettingLoggingTest(GenericAPIView):
         obj = type('Settings', (object,), defaults)()
         serializer = self.get_serializer(obj, data=request.data)
         serializer.is_valid(raise_exception=True)
+
+        if request.data.get('LOG_AGGREGATOR_PASSWORD', '').startswith('$encrypted$'):
+            serializer.validated_data['LOG_AGGREGATOR_PASSWORD'] = getattr(
+                settings, 'LOG_AGGREGATOR_PASSWORD', ''
+            )
+
         try:
             class MockSettings:
                 pass
@@ -31,7 +31,7 @@ __all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_err
 logger = logging.getLogger('awx.main.access')
 
 access_registry = {
-    # <model_class>: [<access_class>, ...],
+    # <model_class>: <access_class>,
     # ...
 }
 
@@ -41,8 +41,7 @@ class StateConflict(ValidationError):
 
 
 def register_access(model_class, access_class):
-    access_classes = access_registry.setdefault(model_class, [])
-    access_classes.append(access_class)
+    access_registry[model_class] = access_class
 
 
 @property
@@ -66,19 +65,9 @@ def get_user_queryset(user, model_class):
     Return a queryset for the given model_class containing only the instances
     that should be visible to the given user.
     '''
-    querysets = []
-    for access_class in access_registry.get(model_class, []):
-        access_instance = access_class(user)
-        querysets.append(access_instance.get_queryset())
-    if not querysets:
-        return model_class.objects.none()
-    elif len(querysets) == 1:
-        return querysets[0]
-    else:
-        queryset = model_class.objects.all()
-        for qs in querysets:
-            queryset = queryset.filter(pk__in=qs.values_list('pk', flat=True))
-        return queryset
+    access_class = access_registry[model_class]
+    access_instance = access_class(user)
+    return access_instance.get_queryset()
 
 
 def check_user_access(user, model_class, action, *args, **kwargs):
@@ -86,33 +75,26 @@ def check_user_access(user, model_class, action, *args, **kwargs):
     Return True if user can perform action against model_class with the
     provided parameters.
     '''
-    for access_class in access_registry.get(model_class, []):
-        access_instance = access_class(user)
-        access_method = getattr(access_instance, 'can_%s' % action, None)
-        if not access_method:
-            logger.debug('%s.%s not found', access_instance.__class__.__name__,
-                         'can_%s' % action)
-            continue
-        result = access_method(*args, **kwargs)
-        logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__,
-                     getattr(access_method, '__name__', 'unknown'), args, result)
-        if result:
-            return result
-    return False
+    access_class = access_registry[model_class]
+    access_instance = access_class(user)
+    access_method = getattr(access_instance, 'can_%s' % action)
+    result = access_method(*args, **kwargs)
+    logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__,
+                 getattr(access_method, '__name__', 'unknown'), args, result)
+    return result
 
 
 def check_user_access_with_errors(user, model_class, action, *args, **kwargs):
     '''
     Return T/F permission and summary of problems with the action.
     '''
-    for access_class in access_registry.get(model_class, []):
-        access_instance = access_class(user, save_messages=True)
-        access_method = getattr(access_instance, 'can_%s' % action, None)
-        result = access_method(*args, **kwargs)
-        logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__,
-                     access_method.__name__, args, result)
-        return (result, access_instance.messages)
-    return (False, '')
+    access_class = access_registry[model_class]
+    access_instance = access_class(user, save_messages=True)
+    access_method = getattr(access_instance, 'can_%s' % action, None)
+    result = access_method(*args, **kwargs)
+    logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__,
+                 access_method.__name__, args, result)
+    return (result, access_instance.messages)
 
 
 def get_user_capabilities(user, instance, **kwargs):
@@ -123,9 +105,8 @@ def get_user_capabilities(user, instance, **kwargs):
     convenient for the user interface to consume and hide or show various
     actions in the interface.
     '''
-    for access_class in access_registry.get(type(instance), []):
-        return access_class(user).get_user_capabilities(instance, **kwargs)
-    return None
+    access_class = access_registry[instance.__class__]
+    return access_class(user).get_user_capabilities(instance, **kwargs)
 
 
 def check_superuser(func):
@@ -392,7 +373,10 @@ class InstanceAccess(BaseAccess):
     model = Instance
 
     def get_queryset(self):
-        return Instance.objects.filter(rampart_groups__in=self.user.get_queryset(InstanceGroup))
+        if self.user.is_superuser or self.user.is_system_auditor:
+            return Instance.objects.all().distinct()
+        else:
+            return Instance.objects.filter(rampart_groups__in=self.user.get_queryset(InstanceGroup)).distinct()
 
     def can_add(self, data):
         return False
@@ -1157,9 +1141,6 @@ class JobTemplateAccess(BaseAccess):
         # if reference_obj is provided, determine if it can be copied
         reference_obj = data.get('reference_obj', None)
 
-        if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:
-            self.check_license(feature='system_tracking')
-
         if 'survey_enabled' in data and data['survey_enabled']:
             self.check_license(feature='surveys')
 
@@ -1191,11 +1172,6 @@ class JobTemplateAccess(BaseAccess):
             return False
 
         project = get_value(Project, 'project')
-        if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:
-            if not inventory:
-                return False
-            elif not project:
-                return True
         # If the user has admin access to the project (as an org admin), should
         # be able to proceed without additional checks.
         if project:
@@ -1210,8 +1186,6 @@ class JobTemplateAccess(BaseAccess):
         # Check license.
         if validate_license:
             self.check_license()
-            if obj.job_type == PERM_INVENTORY_SCAN:
-                self.check_license(feature='system_tracking')
             if obj.survey_enabled:
                 self.check_license(feature='surveys')
             if Instance.objects.active_count() > 1:
@@ -1221,12 +1195,6 @@ class JobTemplateAccess(BaseAccess):
         if self.user.is_superuser:
             return True
 
-        if obj.job_type == PERM_INVENTORY_SCAN:
-            # Scan job with default project, must have JT execute or be org admin
-            if obj.project is None and obj.inventory:
-                return (self.user in obj.execute_role or
-                        self.user in obj.inventory.organization.admin_role)
-
         return self.user in obj.execute_role
 
     def can_change(self, obj, data):
@@ -1237,9 +1205,6 @@ class JobTemplateAccess(BaseAccess):
         data = dict(data)
 
         if self.changes_are_non_sensitive(obj, data):
-            if 'job_type' in data and obj.job_type != data['job_type'] and data['job_type'] == PERM_INVENTORY_SCAN:
-                self.check_license(feature='system_tracking')
-
             if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']:
                 self.check_license(feature='surveys')
             return True
@@ -2008,7 +1973,7 @@ class UnifiedJobTemplateAccess(BaseAccess):
         return qs.all()
 
     def can_start(self, obj, validate_license=True):
-        access_class = access_registry.get(obj.__class__, [])[0]
+        access_class = access_registry[obj.__class__]
         access_instance = access_class(self.user)
         return access_instance.can_start(obj, validate_license=validate_license)
 
@@ -2376,38 +2341,5 @@ class RoleAccess(BaseAccess):
         return False
 
 
-register_access(User, UserAccess)
-register_access(Organization, OrganizationAccess)
-register_access(Inventory, InventoryAccess)
-register_access(Host, HostAccess)
-register_access(Group, GroupAccess)
-register_access(InventorySource, InventorySourceAccess)
-register_access(InventoryUpdate, InventoryUpdateAccess)
-register_access(Credential, CredentialAccess)
-register_access(CredentialType, CredentialTypeAccess)
-register_access(Team, TeamAccess)
-register_access(Project, ProjectAccess)
-register_access(ProjectUpdate, ProjectUpdateAccess)
-register_access(JobTemplate, JobTemplateAccess)
-register_access(Job, JobAccess)
-register_access(JobHostSummary, JobHostSummaryAccess)
-register_access(JobEvent, JobEventAccess)
-register_access(SystemJobTemplate, SystemJobTemplateAccess)
-register_access(SystemJob, SystemJobAccess)
-register_access(AdHocCommand, AdHocCommandAccess)
-register_access(AdHocCommandEvent, AdHocCommandEventAccess)
-register_access(Schedule, ScheduleAccess)
-register_access(UnifiedJobTemplate, UnifiedJobTemplateAccess)
-register_access(UnifiedJob, UnifiedJobAccess)
-register_access(ActivityStream, ActivityStreamAccess)
-register_access(CustomInventoryScript, CustomInventoryScriptAccess)
-register_access(Role, RoleAccess)
-register_access(NotificationTemplate, NotificationTemplateAccess)
-register_access(Notification, NotificationAccess)
-register_access(Label, LabelAccess)
-register_access(WorkflowJobTemplateNode, WorkflowJobTemplateNodeAccess)
-register_access(WorkflowJobNode, WorkflowJobNodeAccess)
-register_access(WorkflowJobTemplate, WorkflowJobTemplateAccess)
-register_access(WorkflowJob, WorkflowJobAccess)
-register_access(Instance, InstanceAccess)
-register_access(InstanceGroup, InstanceGroupAccess)
+for cls in BaseAccess.__subclasses__():
+    access_registry[cls.model] = cls
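The closing hunk replaces thirty-five explicit register_access() calls with discovery: every access class already declares its model, so walking BaseAccess.__subclasses__() rebuilds the same mapping and new access classes register themselves simply by being defined. A self-contained sketch of the pattern with placeholder classes (AWX stores real model classes as `model`, not strings):

class BaseAccess(object):
    model = None

class UserAccess(BaseAccess):
    model = 'User'

class TeamAccess(BaseAccess):
    model = 'Team'

access_registry = {}
for cls in BaseAccess.__subclasses__():  # note: direct subclasses only
    access_registry[cls.model] = cls

assert access_registry == {'User': UserAccess, 'Team': TeamAccess}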
@@ -18,7 +18,7 @@ register(
     'ACTIVITY_STREAM_ENABLED',
     field_class=fields.BooleanField,
     label=_('Enable Activity Stream'),
-    help_text=_('Enable capturing activity for the Tower activity stream.'),
+    help_text=_('Enable capturing activity for the activity stream.'),
     category=_('System'),
     category_slug='system',
     feature_required='activity_streams',
@@ -28,7 +28,7 @@ register(
     'ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC',
     field_class=fields.BooleanField,
     label=_('Enable Activity Stream for Inventory Sync'),
-    help_text=_('Enable capturing activity for the Tower activity stream when running inventory sync.'),
+    help_text=_('Enable capturing activity for the activity stream when running inventory sync.'),
     category=_('System'),
     category_slug='system',
     feature_required='activity_streams',
@@ -46,8 +46,8 @@ register(
 register(
     'TOWER_ADMIN_ALERTS',
     field_class=fields.BooleanField,
-    label=_('Enable Tower Administrator Alerts'),
-    help_text=_('Allow Tower to email Admin users for system events that may require attention.'),
+    label=_('Enable Administrator Alerts'),
+    help_text=_('Email Admin users for system events that may require attention.'),
     category=_('System'),
     category_slug='system',
 )
@@ -99,9 +99,9 @@ register(
     'LICENSE',
     field_class=fields.DictField,
     default=_load_default_license_from_file,
-    label=_('Tower License'),
+    label=_('License'),
     help_text=_('The license controls which features and functionality are '
-                'enabled in Tower. Use /api/v1/config/ to update or change '
+                'enabled. Use /api/v1/config/ to update or change '
                 'the license.'),
     category=_('System'),
     category_slug='system',
@@ -121,7 +121,7 @@ register(
     'AWX_PROOT_ENABLED',
     field_class=fields.BooleanField,
     label=_('Enable job isolation'),
-    help_text=_('Isolates an Ansible job from protected parts of the Tower system to prevent exposing sensitive information.'),
+    help_text=_('Isolates an Ansible job from protected parts of the system to prevent exposing sensitive information.'),
     category=_('Jobs'),
     category_slug='jobs',
 )
@@ -129,8 +129,10 @@ register(
 register(
     'AWX_PROOT_BASE_PATH',
     field_class=fields.CharField,
-    label=_('Job isolation execution path'),
-    help_text=_('Create temporary working directories for isolated jobs in this location.'),
+    label=_('Job execution path'),
+    help_text=_('The directory in which Tower will create new temporary '
+                'directories for job execution and isolation '
+                '(such as credential files and custom inventory scripts).'),
     category=_('Jobs'),
     category_slug='jobs',
 )
@@ -159,7 +161,7 @@ register(
     'AWX_ISOLATED_CHECK_INTERVAL',
     field_class=fields.IntegerField,
     label=_('Isolated status check interval'),
-    help_text=_('The number of seconds to sleep between status checks for jobs running on isolated instances.'), # noqa
+    help_text=_('The number of seconds to sleep between status checks for jobs running on isolated instances.'),
     category=_('Jobs'),
     category_slug='jobs',
 )
@@ -168,7 +170,19 @@ register(
     'AWX_ISOLATED_LAUNCH_TIMEOUT',
     field_class=fields.IntegerField,
     label=_('Isolated launch timeout'),
-    help_text=_('The timeout (in seconds) for launching jobs on isolated instances. This includes the time needed to copy source control files (playbooks) to the isolated instance.'),
+    help_text=_('The timeout (in seconds) for launching jobs on isolated instances. '
+                'This includes the time needed to copy source control files (playbooks) to the isolated instance.'),
     category=_('Jobs'),
     category_slug='jobs',
 )
+
+register(
+    'AWX_ISOLATED_CONNECTION_TIMEOUT',
+    field_class=fields.IntegerField,
+    default=10,
+    label=_('Isolated connection timeout'),
+    help_text=_('Ansible SSH connection timeout (in seconds) to use when communicating with isolated instances. '
+                'Value should be substantially greater than expected network latency.'),
+    category=_('Jobs'),
+    category_slug='jobs',
+)
@@ -278,7 +292,7 @@ register(
     min_value=0,
     default=0,
     label=_('Per-Host Ansible Fact Cache Timeout'),
-    help_text=_('Maximum time, in seconds, that Tower stored Ansible facts are considered valid since '
+    help_text=_('Maximum time, in seconds, that stored Ansible facts are considered valid since '
                 'the last time they were modified. Only valid, non-stale, facts will be accessible by '
                 'a playbook. Note, this does not influence the deletion of ansible_facts from the database.'),
     category=_('Jobs'),
@@ -345,7 +359,7 @@ register(
     label=_('Loggers to send data to the log aggregator from'),
     help_text=_('List of loggers that will send HTTP logs to the collector, these can '
                 'include any or all of: \n'
-                'awx - Tower service logs\n'
+                'awx - service logs\n'
                 'activity_stream - activity stream records\n'
                 'job_events - callback data from Ansible job events\n'
                 'system_tracking - facts gathered from scan jobs.'),
@@ -404,6 +418,31 @@ register(
     category=_('Logging'),
     category_slug='logging',
 )
+register(
+    'LOG_AGGREGATOR_VERIFY_CERT',
+    field_class=fields.BooleanField,
+    default=True,
+    label=_('Enable/disable HTTPS certificate verification'),
+    help_text=_('Flag to control enable/disable of certificate verification'
+                ' when LOG_AGGREGATOR_PROTOCOL is "https". If enabled, Tower\'s'
+                ' log handler will verify certificate sent by external log aggregator'
+                ' before establishing connection.'),
+    category=_('Logging'),
+    category_slug='logging',
+)
+register(
+    'LOG_AGGREGATOR_LEVEL',
+    field_class=fields.ChoiceField,
+    choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
+    default='WARNING',
+    label=_('Logging Aggregator Level Threshold'),
+    help_text=_('Level threshold used by log handler. Severities from lowest to highest'
+                ' are DEBUG, INFO, WARNING, ERROR, CRITICAL. Messages less severe '
+                'than the threshold will be ignored by log handler. (messages under category '
+                'awx.anlytics ignore this setting)'),
+    category=_('Logging'),
+    category_slug='logging',
+)
 
 
 def logging_validate(serializer, attrs):
@@ -542,6 +542,10 @@ class CredentialTypeInputField(JSONSchemaField):
             'type': 'object',
             'additionalProperties': False,
             'properties': {
+                'required': {
+                    'type': 'array',
+                    'items': {'type': 'string'}
+                },
                 'fields': {
                     'type': 'array',
                     'items': {
@@ -19,6 +19,7 @@ from awx.main.utils import OutputEventFilter
 from awx.main.queue import CallbackQueueDispatcher
 
 logger = logging.getLogger('awx.isolated.manager')
+playbook_logger = logging.getLogger('awx.isolated.manager.playbooks')
 
 
 class IsolatedManager(object):
@@ -57,9 +58,8 @@
         """
         self.args = args
         self.cwd = cwd
-        self.env = env.copy()
-        # Do not use callbacks for controller's management jobs
-        self.env.update(self._base_management_env())
+        self.isolated_env = self._redact_isolated_env(env.copy())
+        self.management_env = self._base_management_env()
         self.stdout_handle = stdout_handle
         self.ssh_key_path = ssh_key_path
         self.expect_passwords = {k.pattern: v for k, v in expect_passwords.items()}
@@ -73,14 +73,48 @@
 
     @staticmethod
     def _base_management_env():
-        return {
-            'ANSIBLE_CALLBACK_PLUGINS': '',
-            'CALLBACK_QUEUE': '',
-            'CALLBACK_CONNECTION': '',
-            'ANSIBLE_RETRY_FILES_ENABLED': 'False',
-            'ANSIBLE_HOST_KEY_CHECKING': 'False',
-            'ANSIBLE_LIBRARY': os.path.join(os.path.dirname(awx.__file__), 'plugins', 'isolated')
-        }
+        '''
+        Returns environment variables to use when running a playbook
+        that manages the isolated instance.
+        Use of normal job callback and other such configurations are avoided.
+        '''
+        env = dict(os.environ.items())
+        env['ANSIBLE_RETRY_FILES_ENABLED'] = 'False'
+        env['ANSIBLE_HOST_KEY_CHECKING'] = 'False'
+        env['ANSIBLE_LIBRARY'] = os.path.join(os.path.dirname(awx.__file__), 'plugins', 'isolated')
+        return env
+
+    @staticmethod
+    def _build_args(playbook, hosts, extra_vars=None):
+        '''
+        Returns list of Ansible CLI command arguments for a management task
+
+        :param playbook:   name of the playbook to run
+        :param hosts:      host pattern to operate on, ex. "localhost,"
+        :param extra_vars: optional dictionary of extra_vars to apply
+        '''
+        args = [
+            'ansible-playbook',
+            playbook,
+            '-u', settings.AWX_ISOLATED_USERNAME,
+            '-T', str(settings.AWX_ISOLATED_CONNECTION_TIMEOUT),
+            '-i', hosts
+        ]
+        if extra_vars:
+            args.extend(['-e', json.dumps(extra_vars)])
+        return args
+
+    @staticmethod
+    def _redact_isolated_env(env):
+        '''
+        strips some environment variables that aren't applicable to
+        job execution within the isolated instance
+        '''
+        for var in (
+                'HOME', 'RABBITMQ_HOST', 'RABBITMQ_PASS', 'RABBITMQ_USER', 'CACHE',
+                'DJANGO_PROJECT_DIR', 'DJANGO_SETTINGS_MODULE', 'RABBITMQ_VHOST'):
+            env.pop(var, None)
+        return env
 
     @classmethod
     def awx_playbook_path(cls):
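For reference, what the new _build_args helper produces (the hostname and extra_vars below are made up; the -u and -T values come from the AWX_ISOLATED_USERNAME and AWX_ISOLATED_CONNECTION_TIMEOUT settings):

args = IsolatedManager._build_args('run_isolated.yml', 'isolated-node,',
                                   extra_vars={'src': '/tmp/awx_1234'})
# With AWX_ISOLATED_USERNAME = 'awx' and AWX_ISOLATED_CONNECTION_TIMEOUT = 10:
# ['ansible-playbook', 'run_isolated.yml', '-u', 'awx', '-T', '10',
#  '-i', 'isolated-node,', '-e', '{"src": "/tmp/awx_1234"}']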
@@ -99,7 +133,7 @@
         '''
         self.started_at = time.time()
         secrets = {
-            'env': self.env.copy(),
+            'env': self.isolated_env,
             'passwords': self.expect_passwords,
             'ssh_key_data': None,
             'idle_timeout': self.idle_timeout,
@@ -116,17 +150,11 @@
             secrets['ssh_key_data'] = buff.getvalue()
             os.remove(self.ssh_key_path)
 
-        # strip some environment variables that aren't applicable to isolated
-        # execution
-        for var in (
-                'HOME', 'RABBITMQ_HOST', 'RABBITMQ_PASS', 'RABBITMQ_USER', 'CACHE',
-                'DJANGO_PROJECT_DIR', 'DJANGO_SETTINGS_MODULE', 'RABBITMQ_VHOST'):
-            secrets['env'].pop(var, None)
         self.build_isolated_job_data()
 
         extra_vars = {
             'src': self.private_data_dir,
-            'dest': os.path.split(self.private_data_dir)[0],
+            'dest': settings.AWX_PROOT_BASE_PATH,
         }
         if self.proot_temp_dir:
             extra_vars['proot_temp_dir'] = self.proot_temp_dir
@@ -137,15 +165,13 @@
         # - copies encrypted job data from the controlling host to the isolated host (with rsync)
         # - writes the encryption secret to a named pipe on the isolated host
         # - launches the isolated playbook runner via `tower-expect start <job-id>`
-        args = ['ansible-playbook', '-u', settings.AWX_ISOLATED_USERNAME, '-i',
-                '%s,' % self.host, 'run_isolated.yml', '-e',
-                json.dumps(extra_vars)]
+        args = self._build_args('run_isolated.yml', '%s,' % self.host, extra_vars)
         if self.instance.verbosity:
             args.append('-%s' % ('v' * min(5, self.instance.verbosity)))
         buff = StringIO.StringIO()
         logger.debug('Starting job on isolated host with `run_isolated.yml` playbook.')
         status, rc = IsolatedManager.run_pexpect(
-            args, self.awx_playbook_path(), self.env, buff,
+            args, self.awx_playbook_path(), self.management_env, buff,
             expect_passwords={
                 re.compile(r'Secret:\s*?$', re.M): base64.b64encode(json.dumps(secrets))
             },
@@ -153,8 +179,10 @@
             job_timeout=settings.AWX_ISOLATED_LAUNCH_TIMEOUT,
             pexpect_timeout=5
         )
+        output = buff.getvalue()
+        playbook_logger.info('Job {} management started\n{}'.format(self.instance.id, output))
         if status != 'successful':
-            self.stdout_handle.write(buff.getvalue())
+            self.stdout_handle.write(output)
         return status, rc
 
     @classmethod
@@ -162,7 +190,7 @@
         isolated_ssh_path = None
         try:
             if getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None):
-                isolated_ssh_path = tempfile.mkdtemp(prefix='ansible_tower_isolated')
+                isolated_ssh_path = tempfile.mkdtemp(prefix='ansible_tower_isolated', dir=settings.AWX_PROOT_BASE_PATH)
                 os.chmod(isolated_ssh_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
                 isolated_key = os.path.join(isolated_ssh_path, '.isolated')
                 ssh_sock = os.path.join(isolated_ssh_path, '.isolated_ssh_auth.sock')
@@ -201,13 +229,13 @@
             os.chmod(path, stat.S_IRUSR)
 
         # symlink the scm checkout (if there is one) so that it's rsync'ed over, too
-        if 'AD_HOC_COMMAND_ID' not in self.env:
+        if 'AD_HOC_COMMAND_ID' not in self.isolated_env:
             os.symlink(self.cwd, self.path_to('project'))
 
         # create directories for build artifacts to live in
         os.makedirs(self.path_to('artifacts', 'job_events'), mode=stat.S_IXUSR + stat.S_IWUSR + stat.S_IRUSR)
 
-    def _missing_artifacts(self, path_list, buff):
+    def _missing_artifacts(self, path_list, output):
         missing_artifacts = filter(lambda path: not os.path.exists(path), path_list)
         for path in missing_artifacts:
             self.stdout_handle.write('ansible did not exit cleanly, missing `{}`.\n'.format(path))
@@ -219,7 +247,7 @@
                 self.stdout_handle.write(f.read())
             else:
                 # Provide the management playbook standard out if not available
-                self.stdout_handle.write(buff.getvalue())
+                self.stdout_handle.write(output)
             return True
         return False
 
@@ -239,9 +267,7 @@
         """
         interval = interval if interval is not None else settings.AWX_ISOLATED_CHECK_INTERVAL
         extra_vars = {'src': self.private_data_dir}
-        args = ['ansible-playbook', '-u', settings.AWX_ISOLATED_USERNAME, '-i',
-                '%s,' % self.host, 'check_isolated.yml', '-e',
-                json.dumps(extra_vars)]
+        args = self._build_args('check_isolated.yml', '%s,' % self.host, extra_vars)
         if self.instance.verbosity:
             args.append('-%s' % ('v' * min(5, self.instance.verbosity)))
 
@@ -271,13 +297,15 @@
         buff = cStringIO.StringIO()
         logger.debug('Checking job on isolated host with `check_isolated.yml` playbook.')
         status, rc = IsolatedManager.run_pexpect(
-            args, self.awx_playbook_path(), self.env, buff,
+            args, self.awx_playbook_path(), self.management_env, buff,
             cancelled_callback=self.cancelled_callback,
             idle_timeout=remaining,
             job_timeout=remaining,
             pexpect_timeout=5,
             proot_cmd=self.proot_cmd
         )
+        output = buff.getvalue()
+        playbook_logger.info(output)
 
         path = self.path_to('artifacts', 'stdout')
         if os.path.exists(path):
@@ -292,7 +320,7 @@
         if status == 'successful':
             status_path = self.path_to('artifacts', 'status')
             rc_path = self.path_to('artifacts', 'rc')
-            if self._missing_artifacts([status_path, rc_path], buff):
+            if self._missing_artifacts([status_path, rc_path], output):
                 status = 'failed'
                 rc = 1
             else:
@@ -303,7 +331,7 @@
         elif status == 'failed':
             # if we were unable to retrieve job reults from the isolated host,
             # print stdout of the `check_isolated.yml` playbook for clues
-            self.stdout_handle.write(buff.getvalue())
+            self.stdout_handle.write(output)
 
         return status, rc
 
@@ -316,20 +344,21 @@
                 self.proot_temp_dir,
             ],
         }
-        args = ['ansible-playbook', '-u', settings.AWX_ISOLATED_USERNAME, '-i',
-                '%s,' % self.host, 'clean_isolated.yml', '-e',
-                json.dumps(extra_vars)]
+        args = self._build_args('clean_isolated.yml', '%s,' % self.host, extra_vars)
         logger.debug('Cleaning up job on isolated host with `clean_isolated.yml` playbook.')
         buff = cStringIO.StringIO()
+        timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
         status, rc = IsolatedManager.run_pexpect(
-            args, self.awx_playbook_path(), self.env, buff,
-            idle_timeout=60, job_timeout=60,
+            args, self.awx_playbook_path(), self.management_env, buff,
+            idle_timeout=timeout, job_timeout=timeout,
             pexpect_timeout=5
         )
+        output = buff.getvalue()
+        playbook_logger.info(output)
 
         if status != 'successful':
             # stdout_handle is closed by this point so writing output to logs is our only option
-            logger.warning('Cleanup from isolated job encountered error, output:\n{}'.format(buff.getvalue()))
+            logger.warning('Cleanup from isolated job encountered error, output:\n{}'.format(output))
 
     @classmethod
     def health_check(cls, instance_qs):
@@ -345,15 +374,16 @@
         hostname_string = ''
         for instance in instance_qs:
             hostname_string += '{},'.format(instance.hostname)
-        args = ['ansible-playbook', '-u', settings.AWX_ISOLATED_USERNAME, '-i',
-                hostname_string, 'heartbeat_isolated.yml']
+        args = cls._build_args('heartbeat_isolated.yml', hostname_string)
+        args.extend(['--forks', str(len(instance_qs))])
         env = cls._base_management_env()
         env['ANSIBLE_STDOUT_CALLBACK'] = 'json'
 
         buff = cStringIO.StringIO()
+        timeout = max(60, 2 * settings.AWX_ISOLATED_CONNECTION_TIMEOUT)
         status, rc = IsolatedManager.run_pexpect(
             args, cls.awx_playbook_path(), env, buff,
-            idle_timeout=60, job_timeout=60,
+            idle_timeout=timeout, job_timeout=timeout,
             pexpect_timeout=5
         )
         output = buff.getvalue()
@@ -374,8 +404,9 @@
                 logger.exception('Failed to read status from isolated instance {}.'.format(instance.hostname))
                 continue
             if 'capacity' in task_result:
+                instance.version = task_result['version']
                 instance.capacity = int(task_result['capacity'])
-                instance.save(update_fields=['capacity', 'modified'])
+                instance.save(update_fields=['capacity', 'version', 'modified'])
             else:
                 logger.warning('Could not update capacity of {}, msg={}'.format(
                     instance.hostname, task_result.get('msg', 'unknown failure')))
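Note the pairing in this last hunk: because save() is called with update_fields, Django writes only the listed columns, so the newly assigned instance.version would be silently discarded unless 'version' is also added to that list.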
@@ -259,10 +259,18 @@ def __run__(private_data_dir):
 
 
 if __name__ == '__main__':
+    __version__ = '3.2.0'
+    try:
+        import awx
+        __version__ = awx.__version__
+    except ImportError:
+        pass  # in devel, `awx` isn't an installed package
+
     parser = argparse.ArgumentParser(description='manage a daemonized, isolated ansible playbook')
+    parser.add_argument('--version', action='version', version=__version__ + '-isolated')
     parser.add_argument('command', choices=['start', 'stop', 'is-alive'])
     parser.add_argument('private_data_dir')
     args = parser.parse_args()
 
     private_data_dir = args.private_data_dir
     pidfile = os.path.join(private_data_dir, 'pid')
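With the fallback above in place, invoking the runner with --version reports the installed AWX version suffixed with -isolated, or 3.2.0-isolated when the awx package is not importable (as in development checkouts).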
@@ -130,7 +130,7 @@ class Command(BaseCommand):
     @transaction.atomic
     def handle(self, *args, **options):
         if not feature_enabled('system_tracking'):
-            raise CommandError("The System Tracking feature is not enabled for your Tower instance")
+            raise CommandError("The System Tracking feature is not enabled for your instance")
         cleanup_facts = CleanupFacts()
         if not all([options[GRANULARITY], options[OLDER_THAN]]):
             raise CommandError('Both --granularity and --older_than are required.')
@@ -848,7 +848,7 @@ class Command(NoArgsCommand):
         license_info = TaskEnhancer().validate_enhancements()
         if license_info.get('license_key', 'UNLICENSED') == 'UNLICENSED':
             logger.error(LICENSE_NON_EXISTANT_MESSAGE)
-            raise CommandError('No Tower license found!')
+            raise CommandError('No license found!')
         available_instances = license_info.get('available_instances', 0)
         free_instances = license_info.get('free_instances', 0)
         time_remaining = license_info.get('time_remaining', 0)
@@ -20,6 +20,12 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
+        # Release UJT unique_together constraint
+        migrations.AlterUniqueTogether(
+            name='unifiedjobtemplate',
+            unique_together=set([]),
+        ),
+
         # Inventory Refresh
         migrations.RenameField(
             'InventorySource',
@@ -9,6 +9,7 @@ from django.db import migrations
 from awx.main.migrations import _inventory_source as invsrc
 from awx.main.migrations import _migration_utils as migration_utils
 from awx.main.migrations import _reencrypt
+from awx.main.migrations import _scan_jobs
 
 
 class Migration(migrations.Migration):
@@ -24,4 +25,5 @@ class Migration(migrations.Migration):
         migrations.RunPython(invsrc.remove_inventory_source_with_no_inventory_link),
         migrations.RunPython(invsrc.rename_inventory_sources),
         migrations.RunPython(_reencrypt.replace_aesecb_fernet),
+        migrations.RunPython(_scan_jobs.migrate_scan_job_templates),
     ]
@@ -25,8 +25,8 @@ class Migration(migrations.Migration):
                 ('name', models.CharField(max_length=512)),
                 ('kind', models.CharField(max_length=32, choices=[(b'ssh', 'SSH'), (b'vault', 'Vault'), (b'net', 'Network'), (b'scm', 'Source Control'), (b'cloud', 'Cloud'), (b'insights', 'Insights')])),
                 ('managed_by_tower', models.BooleanField(default=False, editable=False)),
-                ('inputs', awx.main.fields.CredentialTypeInputField(default={}, blank=True)),
-                ('injectors', awx.main.fields.CredentialTypeInjectorField(default={}, blank=True)),
+                ('inputs', awx.main.fields.CredentialTypeInputField(default={}, blank=True, help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')),
+                ('injectors', awx.main.fields.CredentialTypeInjectorField(default={}, blank=True, help_text='Enter injectors using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.')),
                 ('created_by', models.ForeignKey(related_name="{u'class': 'credentialtype', u'app_label': 'main'}(class)s_created+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                 ('modified_by', models.ForeignKey(related_name="{u'class': 'credentialtype', u'app_label': 'main'}(class)s_modified+", on_delete=django.db.models.deletion.SET_NULL, default=None, editable=False, to=settings.AUTH_USER_MODEL, null=True)),
                 ('tags', taggit.managers.TaggableManager(to='taggit.Tag', through='taggit.TaggedItem', blank=True, help_text='A comma-separated list of tags.', verbose_name='Tags')),
@ -99,12 +99,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='credential_type',
|
||||
field=models.ForeignKey(related_name='credentials', to='main.CredentialType', null=False, help_text='Type for this credential. Credential Types define valid fields (e.g,. "username", "password") and their properties (e.g,. "username is required" or "password should be stored with encryption").')
|
||||
field=models.ForeignKey(related_name='credentials', to='main.CredentialType', null=False, help_text='Specify the type of credential you want to create. Refer to the Ansible Tower documentation for details on each type.')
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='credential',
|
||||
name='inputs',
|
||||
field=awx.main.fields.CredentialInputField(default={}, help_text='Data structure used to specify input values (e.g., {"username": "jane-doe", "password": "secret"}). Valid fields and their requirements vary depending on the fields defined on the chosen CredentialType.', blank=True),
|
||||
field=awx.main.fields.CredentialInputField(default={}, help_text='Enter inputs using either JSON or YAML syntax. Use the radio button to toggle between the two. Refer to the Ansible Tower documentation for example syntax.', blank=True),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name='job',
|
||||
|
||||
68
awx/main/migrations/_scan_jobs.py
Normal file
@ -0,0 +1,68 @@
import logging

from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY

logger = logging.getLogger('awx.main.migrations')


def _create_fact_scan_project(Project, org):
    name = "Tower Fact Scan - {}".format(org.name if org else "No Organization")
    proj = Project(name=name,
                   scm_url='https://github.com/ansible/tower-fact-modules',
                   scm_type='git',
                   scm_update_on_launch=True,
                   scm_update_cache_timeout=86400,
                   organization=org)
    proj.save(skip_update=True)
    return proj


def _create_fact_scan_projects(Project, orgs):
    return {org.id: _create_fact_scan_project(Project, org) for org in orgs}


def _get_tower_scan_job_templates(JobTemplate):
    return JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN, project__isnull=True) \
                              .prefetch_related('inventory__organization')


def _get_orgs(Organization, job_template_ids):
    return Organization.objects.filter(inventories__jobtemplates__in=job_template_ids).distinct()


def _migrate_scan_job_templates(apps):
    Organization = apps.get_model('main', 'Organization')
    Project = apps.get_model('main', 'Project')
    JobTemplate = apps.get_model('main', 'JobTemplate')

    project_no_org = None

    # A scan job template with a custom project will retain the custom project.
    JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN, project__isnull=False).update(use_fact_cache=True, job_type=PERM_INVENTORY_DEPLOY)

    # Scan job templates using Tower's default scan playbook will now point at
    # the same playbook but in a github repo.
    jts = _get_tower_scan_job_templates(JobTemplate)
    if jts.count() == 0:
        return

    orgs = _get_orgs(Organization, jts.values_list('id'))
    if orgs.count() == 0:
        return

    org_proj_map = _create_fact_scan_projects(Project, orgs)
    for jt in jts:
        if jt.inventory and jt.inventory.organization:
            jt.project = org_proj_map[jt.inventory.organization.id]
        # Job Templates without an Organization; found through the related Inventory
        else:
            if not project_no_org:
                project_no_org = _create_fact_scan_project(Project, None)
            jt.project = project_no_org
        jt.job_type = PERM_INVENTORY_DEPLOY
        jt.use_fact_cache = True
        jt.save()


def migrate_scan_job_templates(apps, schema_editor):
    _migrate_scan_job_templates(apps)
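
Since this helper runs from a schema migration, it only ever sees historical model classes fetched through apps.get_model, never the live ORM models. A minimal sketch of how such a helper gets wired up, with a hypothetical migration name for illustration (the real hook-up is the migrations.RunPython(_scan_jobs.migrate_scan_job_templates) entry shown earlier):

    # Hypothetical wiring, for illustration only.
    from django.db import migrations

    from awx.main.migrations import _scan_jobs

    class Migration(migrations.Migration):
        dependencies = [('main', '0005_example')]  # hypothetical predecessor
        operations = [
            # RunPython passes (apps, schema_editor) to the callable.
            migrations.RunPython(_scan_jobs.migrate_scan_job_templates),
        ]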
@ -10,7 +10,7 @@ import yaml

# Django
from django.db import models
from django.core.exceptions import ValidationError
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now

@ -287,6 +287,27 @@ class PrimordialModel(CreatedModifiedModel):
        # Description should always be empty string, never null.
        return self.description or ''

    def validate_unique(self, exclude=None):
        super(PrimordialModel, self).validate_unique(exclude=exclude)
        model = type(self)
        if not hasattr(model, 'SOFT_UNIQUE_TOGETHER'):
            return
        errors = []
        for ut in model.SOFT_UNIQUE_TOGETHER:
            kwargs = {}
            for field_name in ut:
                kwargs[field_name] = getattr(self, field_name, None)
            try:
                obj = model.objects.get(**kwargs)
            except ObjectDoesNotExist:
                continue
            if not (self.pk and self.pk == obj.pk):
                errors.append(
                    '%s with this (%s) combination already exists.' % (model.__name__, ', '.join(ut))
                )
        if errors:
            raise ValidationError(errors)
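
SOFT_UNIQUE_TOGETHER is the application-level replacement for the database unique_together constraint this commit releases (see the AlterUniqueTogether migration above). A minimal sketch of how a subclass opts in, using a hypothetical model; the actual opt-ins in this commit are JobTemplate, Project, InventorySource and WorkflowJobTemplate:

    class Widget(PrimordialModel):  # hypothetical subclass, for illustration
        SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]

    w = Widget(name='foo')
    w.validate_unique()  # raises ValidationError if another Widget named 'foo' exists

Because the check now lives in Python rather than in a database constraint, two concurrent saves can still race past it; callers are expected to run validate_unique() before saving.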


class CommonModel(PrimordialModel):
    ''' a base model where the name is unique '''

@ -30,7 +30,7 @@ from awx.main.models.rbac import (
    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.utils import encrypt_field
from awx.main.utils import encrypt_field, to_python_boolean

__all__ = ['Credential', 'CredentialType', 'V1Credential']

@ -219,10 +219,8 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
        'CredentialType',
        related_name='credentials',
        null=False,
        help_text=_('Type for this credential. Credential Types define '
                    'valid fields (e.g,. "username", "password") and their '
                    'properties (e.g,. "username is required" or "password '
                    'should be stored with encryption").')
        help_text=_('Specify the type of credential you want to create. Refer '
                    'to the Ansible Tower documentation for details on each type.')
    )
    organization = models.ForeignKey(
        'Organization',
@ -235,10 +233,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
    inputs = CredentialInputField(
        blank=True,
        default={},
        help_text=_('Data structure used to specify input values (e.g., '
                    '{"username": "jane-doe", "password": "secret"}). Valid '
                    'fields and their requirements vary depending on the '
                    'fields defined on the chosen CredentialType.')
        help_text=_('Enter inputs using either JSON or YAML syntax. Use the '
                    'radio button to toggle between the two. Refer to the '
                    'Ansible Tower documentation for example syntax.')
    )
    admin_role = ImplicitRoleField(
        parent_role=[
@ -421,11 +418,17 @@ class CredentialType(CommonModelNameNotUnique):
    )
    inputs = CredentialTypeInputField(
        blank=True,
        default={}
        default={},
        help_text=_('Enter inputs using either JSON or YAML syntax. Use the '
                    'radio button to toggle between the two. Refer to the '
                    'Ansible Tower documentation for example syntax.')
    )
    injectors = CredentialTypeInjectorField(
        blank=True,
        default={}
        default={},
        help_text=_('Enter injectors using either JSON or YAML syntax. Use the '
                    'radio button to toggle between the two. Refer to the '
                    'Ansible Tower documentation for example syntax.')
    )

    def get_absolute_url(self, request=None):
@ -535,6 +538,12 @@ class CredentialType(CommonModelNameNotUnique):
        # ansible-playbook) and a safe namespace with secret values hidden (for
        # DB storage)
        for field_name, value in credential.inputs.items():

            if type(value) is bool:
                # boolean values can't be secret/encrypted
                safe_namespace[field_name] = namespace[field_name] = value
                continue

            if field_name in self.secret_fields:
                value = decrypt_field(credential, field_name)
                safe_namespace[field_name] = '**********'
@ -567,6 +576,13 @@ class CredentialType(CommonModelNameNotUnique):
            extra_vars[var_name] = Template(tmpl).render(**namespace)
            safe_extra_vars[var_name] = Template(tmpl).render(**safe_namespace)

            # If the template renders to a stringified Boolean, they've _probably_
            # set up an extra_var injection with a boolean field; extra_vars supports JSON,
            # so give them the actual boolean type they want
            for v in (extra_vars, safe_extra_vars):
                if v[var_name] in ('True', 'False'):
                    v[var_name] = to_python_boolean(v[var_name])

        if extra_vars:
            args.extend(['-e', json.dumps(extra_vars)])
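
Jinja templates always render to strings, so a plain {{turbo_button}} injector for a boolean field produces 'True' or 'False'; the loop above re-casts those two exact strings so the -e payload carries a real JSON boolean. A rough sketch of the effect, with a dictionary standing in for to_python_boolean:

    import json

    rendered = 'True'  # what Template('{{turbo_button}}').render(turbo_button=True) yields
    value = {'True': True, 'False': False}[rendered]  # stand-in for to_python_boolean
    print(json.dumps({'turbo_button': value}))  # {"turbo_button": true}

Templates that deliberately render the words 'True' or 'False' as prose would also be converted, which is exactly the uncertainty the '_probably_' in the comment concedes.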

@ -608,7 +624,10 @@ def ssh(cls):
                'id': 'become_method',
                'label': 'Privilege Escalation Method',
                'choices': map(operator.itemgetter(0),
                               V1Credential.FIELDS['become_method'].choices)
                               V1Credential.FIELDS['become_method'].choices),
                'help_text': ('Specify a method for "become" operations. This is '
                              'equivalent to specifying the --become-method '
                              'Ansible parameter.')
            }, {
                'id': 'become_username',
                'label': 'Privilege Escalation Username',
@ -738,6 +757,10 @@ def aws(cls):
                'label': 'STS Token',
                'type': 'string',
                'secret': True,
                'help_text': ('Security Token Service (STS) is a web service '
                              'that enables you to request temporary, '
                              'limited-privilege credentials for AWS Identity '
                              'and Access Management (IAM) users.'),
            }],
            'required': ['username', 'password']
        }
@ -764,6 +787,8 @@ def openstack(cls):
                'id': 'host',
                'label': 'Host (Authentication URL)',
                'type': 'string',
                'help_text': ('The host to authenticate with. For example, '
                              'https://openstack.business.com/v2.0/')
            }, {
                'id': 'project',
                'label': 'Project (Tenant Name)',
@ -771,7 +796,11 @@ def openstack(cls):
            }, {
                'id': 'domain',
                'label': 'Domain Name',
                'type': 'string'
                'type': 'string',
                'help_text': ('OpenStack domains define administrative boundaries. '
                              'It is only needed for Keystone v3 authentication '
                              'URLs. Refer to Ansible Tower documentation for '
                              'common scenarios.')
            }],
            'required': ['username', 'password', 'host', 'project']
        }
@ -789,6 +818,8 @@ def vmware(cls):
                'id': 'host',
                'label': 'VCenter Host',
                'type': 'string',
                'help_text': ('Enter the hostname or IP address which corresponds '
                              'to your VMware vCenter.')
            }, {
                'id': 'username',
                'label': 'Username',
@ -815,6 +846,8 @@ def satellite6(cls):
                'id': 'host',
                'label': 'Satellite 6 URL',
                'type': 'string',
                'help_text': ('Enter the URL which corresponds to your Red Hat '
                              'Satellite 6 server. For example, https://satellite.example.org')
            }, {
                'id': 'username',
                'label': 'Username',
@ -840,6 +873,9 @@ def cloudforms(cls):
                'id': 'host',
                'label': 'CloudForms URL',
                'type': 'string',
                'help_text': ('Enter the URL for the virtual machine which '
                              'corresponds to your CloudForm instance. '
                              'For example, https://cloudforms.example.org')
            }, {
                'id': 'username',
                'label': 'Username',
@ -864,18 +900,25 @@ def gce(cls):
            'fields': [{
                'id': 'username',
                'label': 'Service Account Email Address',
                'type': 'string'
                'type': 'string',
                'help_text': ('The email address assigned to the Google Compute '
                              'Engine service account.')
            }, {
                'id': 'project',
                'label': 'Project',
                'type': 'string'
                'type': 'string',
                'help_text': ('The Project ID is the GCE assigned identification. '
                              'It is constructed as two words followed by a three '
                              'digit number. Example: adjective-noun-000')
            }, {
                'id': 'ssh_key_data',
                'label': 'RSA Private Key',
                'type': 'string',
                'format': 'ssh_private_key',
                'secret': True,
                'multiline': True
                'multiline': True,
                'help_text': ('Paste the contents of the PEM file associated '
                              'with the service account email.')
            }]
        }
    )
@ -891,14 +934,19 @@ def azure(cls):
            'fields': [{
                'id': 'username',
                'label': 'Subscription ID',
                'type': 'string'
                'type': 'string',
                'help_text': ('Subscription ID is an Azure construct, which is '
                              'mapped to a username.')
            }, {
                'id': 'ssh_key_data',
                'label': 'Management Certificate',
                'type': 'string',
                'format': 'ssh_private_key',
                'secret': True,
                'multiline': True
                'multiline': True,
                'help_text': ('Paste the contents of the PEM file that corresponds '
                              'to the certificate you uploaded in the Microsoft '
                              'Azure console.')
            }]
        }
    )
@ -914,7 +962,9 @@ def azure_rm(cls):
            'fields': [{
                'id': 'subscription',
                'label': 'Subscription ID',
                'type': 'string'
                'type': 'string',
                'help_text': ('Subscription ID is an Azure construct, which is '
                              'mapped to a username.')
            }, {
                'id': 'username',
                'label': 'Username',
@ -946,16 +996,16 @@ def azure_rm(cls):
def insights(cls):
    return cls(
        kind='insights',
        name='Insights Basic Auth',
        name='Insights',
        managed_by_tower=True,
        inputs={
            'fields': [{
                'id': 'username',
                'label': 'Basic Auth Username',
                'label': 'Username',
                'type': 'string'
            }, {
                'id': 'password',
                'label': 'Basic Auth Password',
                'label': 'Password',
                'type': 'string',
                'secret': True
            }],

@ -829,7 +829,7 @@ class InventorySourceOptions(BaseModel):
    SOURCE_CHOICES = [
        ('', _('Manual')),
        ('file', _('File, Directory or Script')),
        ('scm', _('Sourced from a project in Tower')),
        ('scm', _('Sourced from a Project')),
        ('ec2', _('Amazon EC2')),
        ('gce', _('Google Compute Engine')),
        ('azure', _('Microsoft Azure Classic (deprecated)')),
@ -1183,6 +1183,8 @@ class InventorySourceOptions(BaseModel):

class InventorySource(UnifiedJobTemplate, InventorySourceOptions):

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'inventory')]

    class Meta:
        app_label = 'main'


@ -8,6 +8,7 @@ import hmac
import logging
import time
import json
import base64
from urlparse import urljoin

# Django
@ -223,6 +224,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
    A job template is a reusable job definition for applying a project (with
    playbook) to an inventory source with a given credential.
    '''
    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]

    class Meta:
        app_label = 'main'
@ -308,10 +310,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
            validation_errors['credential'] = [_("Job Template must provide 'credential' or allow prompting for it."),]

        # Job type dependent checks
        if self.job_type == PERM_INVENTORY_SCAN:
            if self.inventory is None or self.ask_inventory_on_launch:
                validation_errors['inventory'] = [_("Scan jobs must be assigned a fixed inventory."),]
        elif self.project is None:
        if self.project is None:
            resources_needed_to_start.append('project')
            validation_errors['project'] = [_("Job types 'run' and 'check' must have assigned a project."),]

@ -407,12 +406,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
        """
        errors = {}
        if 'job_type' in data and self.ask_job_type_on_launch:
            if ((self.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or
                    (data['job_type'] == PERM_INVENTORY_SCAN and not self.job_type == PERM_INVENTORY_SCAN)):
            if data['job_type'] == PERM_INVENTORY_SCAN and not self.job_type == PERM_INVENTORY_SCAN:
                errors['job_type'] = _('Cannot override job_type to or from a scan job.')
        if (self.job_type == PERM_INVENTORY_SCAN and ('inventory' in data) and self.ask_inventory_on_launch and
                self.inventory != data['inventory']):
            errors['inventory'] = _('Inventory cannot be changed at runtime for scan jobs.')
        return errors

    @property
@ -647,8 +642,6 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
        return data

    def _resources_sufficient_for_launch(self):
        if self.job_type == PERM_INVENTORY_SCAN:
            return self.inventory_id is not None
        return not (self.inventory_id is None or self.project_id is None)

    def display_artifacts(self):
@ -714,10 +707,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
        return '{}'.format(self.inventory.id)

    def memcached_fact_host_key(self, host_name):
        return '{}-{}'.format(self.inventory.id, host_name)
        return '{}-{}'.format(self.inventory.id, base64.b64encode(host_name))

    def memcached_fact_modified_key(self, host_name):
        return '{}-{}-modified'.format(self.inventory.id, host_name)
        return '{}-{}-modified'.format(self.inventory.id, base64.b64encode(host_name))
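
Host names can contain characters (spaces, control bytes) that are invalid in memcached key names, so both keys are now derived from a base64-encoded host name. A quick sketch of the resulting key shapes; the inventory id and host name here are illustrative only:

    import base64

    inventory_id = 5
    host_name = 'web server #1'  # a space would break a raw memcached key
    print('{}-{}'.format(inventory_id, base64.b64encode(host_name)))            # fact key
    print('{}-{}-modified'.format(inventory_id, base64.b64encode(host_name)))   # timestamp key

(Under the Python 2 this codebase targets, base64.b64encode accepts and returns str.)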

    def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'modified',]):
        return self.inventory.hosts.only(*only)
@ -1441,4 +1434,3 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):

    def get_notification_friendly_name(self):
        return "System Job"


@ -223,6 +223,8 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
    A project represents a playbook git repo that can access a set of inventories
    '''

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]

    class Meta:
        app_label = 'main'
        ordering = ('id',)

@ -66,7 +66,7 @@ class Schedule(CommonModel):
    )
    enabled = models.BooleanField(
        default=True,
        help_text=_("Enables processing of this schedule by Tower.")
        help_text=_("Enables processing of this schedule.")
    )
    dtstart = models.DateTimeField(
        null=True,

@ -92,7 +92,9 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio

    class Meta:
        app_label = 'main'
        unique_together = [('polymorphic_ctype', 'name')]
        # unique_together here is intentionally commented out. Please make sure sub-classes of this model
        # contain at least this uniqueness restriction: SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name')]
        #unique_together = [('polymorphic_ctype', 'name')]

    old_pk = models.PositiveIntegerField(
        null=True,
@ -474,7 +476,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
        blank=True,
        default='',
        editable=False,
        help_text=_("The Tower node the job executed on."),
        help_text=_("The node the job executed on."),
    )
    notifications = models.ManyToManyField(
        'Notification',

@ -187,15 +187,16 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
        '''
        create_kwargs = {}
        for field_name in self._get_workflow_job_field_names():
            if hasattr(self, field_name):
                item = getattr(self, field_name)
                if field_name in ['inventory', 'credential']:
                    if not user.can_access(item.__class__, 'use', item):
                        continue
                if field_name in ['unified_job_template']:
                    if not user.can_access(item.__class__, 'start', item, validate_license=False):
                        continue
                create_kwargs[field_name] = item
            item = getattr(self, field_name, None)
            if item is None:
                continue
            if field_name in ['inventory', 'credential']:
                if not user.can_access(item.__class__, 'use', item):
                    continue
            if field_name in ['unified_job_template']:
                if not user.can_access(item.__class__, 'start', item, validate_license=False):
                    continue
            create_kwargs[field_name] = item
        create_kwargs['workflow_job_template'] = workflow_job_template
        return self.__class__.objects.create(**create_kwargs)

@ -327,6 +328,8 @@ class WorkflowJobOptions(BaseModel):

class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin):

    SOFT_UNIQUE_TOGETHER = [('polymorphic_ctype', 'name', 'organization')]

    class Meta:
        app_label = 'main'


@ -9,7 +9,6 @@ from sets import Set
# Django
from django.conf import settings
from django.db import transaction, connection
from django.db.utils import DatabaseError
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now as tz_now

@ -17,6 +16,7 @@ from django.utils.timezone import now as tz_now
from awx.main.models import * # noqa
#from awx.main.scheduler.dag_simple import SimpleDAG
from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.main.utils.pglock import advisory_lock

from awx.main.scheduler.dependency_graph import DependencyGraph
from awx.main.tasks import _send_notification_templates
@ -24,6 +24,7 @@ from awx.main.tasks import _send_notification_templates
# Celery
from celery.task.control import inspect


logger = logging.getLogger('awx.main.scheduler')


@ -375,7 +376,7 @@ class TaskManager():
        if not found_acceptable_queue:
            logger.debug("Task {} couldn't be scheduled on graph, waiting for next cycle".format(task))

    def process_celery_tasks(self, active_tasks, all_running_sorted_tasks):
    def process_celery_tasks(self, celery_task_start_time, active_tasks, all_running_sorted_tasks):
        '''
        Rectify tower db <-> celery inconsistent view of jobs state
        '''
@ -383,13 +384,9 @@ class TaskManager():

            if (task.celery_task_id not in active_tasks and not hasattr(settings, 'IGNORE_CELERY_INSPECTOR')):
                # TODO: try catch the getting of the job. The job COULD have been deleted
                # Ensure job did not finish running between the time we get the
                # list of task id's from celery and now.
                # Note: This is an actual fix, not a reduction in the time
                # window that this can happen.
                if isinstance(task, WorkflowJob):
                    continue
                if task.status != 'running':
                if task_obj.modified > celery_task_start_time:
                    continue
                task.status = 'failed'
                task.job_explanation += ' '.join((
@ -459,13 +456,12 @@ class TaskManager():
        logger.debug("Starting Schedule")
        with transaction.atomic():
            # Lock
            try:
                Instance.objects.select_for_update(nowait=True).all()[0]
            except DatabaseError:
                return
        with advisory_lock('task_manager_lock', wait=False) as acquired:
            if acquired is False:
                return

                finished_wfjs = self._schedule()
            finished_wfjs = self._schedule()

                # Operations whose queries rely on modifications made during the atomic scheduling session
                for wfj in WorkflowJob.objects.filter(id__in=finished_wfjs):
                    _send_notification_templates(wfj, 'succeeded' if wfj.status == 'successful' else 'failed')
            # Operations whose queries rely on modifications made during the atomic scheduling session
            for wfj in WorkflowJob.objects.filter(id__in=finished_wfjs):
                _send_notification_templates(wfj, 'succeeded' if wfj.status == 'successful' else 'failed')
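
Swapping the select_for_update() row lock for a PostgreSQL advisory lock frees mutual exclusion from piggybacking on the Instance table. The pattern, as used above and again in the periodic task below:

    from awx.main.utils.pglock import advisory_lock

    with advisory_lock('task_manager_lock', wait=False) as acquired:
        if acquired is False:
            return  # another node holds the lock; skip this cycle
        # ... work that must run on exactly one node at a time ...

With wait=False the context manager yields immediately instead of blocking, so overlapping scheduler invocations simply no-op rather than pile up behind one another.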

@ -5,16 +5,17 @@ import json

# Django
from django.db import transaction
from django.db.utils import DatabaseError
from django.utils.timezone import now as tz_now
from awx.main.utils.pglock import advisory_lock

# Celery
from celery import task

# AWX
from awx.main.models import Instance
from awx.main.scheduler import TaskManager
from django.core.cache import cache


logger = logging.getLogger('awx.main.scheduler')

# TODO: move logic to UnifiedJob model and use bind=True feature of celery.
@ -43,9 +44,12 @@ def run_fail_inconsistent_running_jobs():
    logger.debug("Running task to fail inconsistent running jobs.")
    with transaction.atomic():
        # Lock
        try:
            Instance.objects.select_for_update(nowait=True).all()[0]
    with advisory_lock('task_manager_lock', wait=False) as acquired:
        if acquired is False:
            return

        scheduler = TaskManager()
        celery_task_start_time = tz_now()
        active_task_queues, active_tasks = scheduler.get_active_tasks()
        cache.set("active_celery_tasks", json.dumps(active_task_queues))
        if active_tasks is None:
@ -53,7 +57,5 @@ def run_fail_inconsistent_running_jobs():
            return None

        all_running_sorted_tasks = scheduler.get_running_tasks()
            scheduler.process_celery_tasks(active_tasks, all_running_sorted_tasks)
        except DatabaseError:
            return
        scheduler.process_celery_tasks(celery_task_start_time, active_tasks, all_running_sorted_tasks)
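
Taking the tz_now() snapshot before asking celery for its active task list closes the window in which a job could finish legitimately and still be declared failed: process_celery_tasks now skips any row modified after the snapshot. A condensed sketch of the guard; get_active_celery_task_ids is a hypothetical stand-in for scheduler.get_active_tasks():

    from django.utils.timezone import now as tz_now

    celery_task_start_time = tz_now()            # snapshot BEFORE listing celery tasks
    active_tasks = get_active_celery_task_ids()  # hypothetical stand-in

    for task in all_running_sorted_tasks:
        if task.celery_task_id not in active_tasks:
            if task.modified > celery_task_start_time:
                continue  # state changed after the snapshot; trust the database
            task.status = 'failed'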


@ -67,7 +67,7 @@ HIDDEN_PASSWORD = '**********'

OPENSSH_KEY_ERROR = u'''\
It looks like you're trying to use a private key in OpenSSH format, which \
isn't supported by the installed version of OpenSSH on this Tower instance. \
isn't supported by the installed version of OpenSSH on this instance. \
Try upgrading OpenSSH or providing your private key in a different format. \
'''

@ -79,7 +79,7 @@ def celery_startup(conf=None, **kwargs):
    # Re-init all schedules
    # NOTE: Rework this during the Rampart work
    startup_logger = logging.getLogger('awx.main.tasks')
    startup_logger.info("Syncing Tower Schedules")
    startup_logger.info("Syncing Schedules")
    for sch in Schedule.objects.all():
        try:
            sch.update_computed_fields()
@ -189,11 +189,11 @@ def cluster_node_heartbeat(self):
    for other_inst in recent_inst:
        if other_inst.version == "":
            continue
        if Version(other_inst.version) > Version(tower_application_version):
            logger.error("Host {} reports Tower version {}, but this node {} is at {}, shutting down".format(other_inst.hostname,
                                                                                                             other_inst.version,
                                                                                                             inst.hostname,
                                                                                                             inst.version))
        if Version(other_inst.version.split('-', 1)[0]) > Version(tower_application_version) and not settings.DEBUG:
            logger.error("Host {} reports version {}, but this node {} is at {}, shutting down".format(other_inst.hostname,
                                                                                                       other_inst.version,
                                                                                                       inst.hostname,
                                                                                                       inst.version))
            stop_local_services(['uwsgi', 'celery', 'beat', 'callback', 'fact'])


@ -444,7 +444,7 @@ class BaseTask(Task):
        '''
        Create a temporary directory for job-related files.
        '''
        path = tempfile.mkdtemp(prefix='ansible_tower_%s_' % instance.pk)
        path = tempfile.mkdtemp(prefix='ansible_tower_%s_' % instance.pk, dir=settings.AWX_PROOT_BASE_PATH)
        os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
        return path
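
Passing dir= here (and to the mkstemp/mkdtemp calls changed below) pins every job scratch directory under settings.AWX_PROOT_BASE_PATH, the tree that proot is allowed to expose to jobs. A tiny sketch of the stdlib behavior being relied on; the base path value is made up for illustration:

    import tempfile

    base = '/tmp/awx_proot'  # stands in for settings.AWX_PROOT_BASE_PATH
    path = tempfile.mkdtemp(prefix='ansible_tower_42_', dir=base)
    # path is guaranteed to live under base, e.g. /tmp/awx_proot/ansible_tower_42_XXXXXX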

@ -481,7 +481,7 @@ class BaseTask(Task):
            # For credentials used with ssh-add, write to a named pipe which
            # will be read then closed, instead of leaving the SSH key on disk.
            if credential.kind in ('ssh', 'scm') and not ssh_too_old:
                path = os.path.join(kwargs.get('private_data_dir', tempfile.gettempdir()), name)
                path = os.path.join(kwargs['private_data_dir'], name)
                run.open_fifo_write(path, data)
                private_data_files['credentials']['ssh'] = path
            # Ansible network modules do not yet support ssh-agent.
@ -682,6 +682,9 @@ class BaseTask(Task):
            instance = self.update_model(pk)
            status = instance.status
            raise RuntimeError('not starting %s task' % instance.status)

        if not os.path.exists(settings.AWX_PROOT_BASE_PATH):
            raise RuntimeError('AWX_PROOT_BASE_PATH=%s does not exist' % settings.AWX_PROOT_BASE_PATH)
        # Fetch ansible version once here to support version-dependent features.
        kwargs['ansible_version'] = get_ansible_version()
        kwargs['private_data_dir'] = self.build_private_data_dir(instance, **kwargs)
@ -1057,18 +1060,13 @@ class RunJob(BaseTask):
            args.extend(['-e', json.dumps(extra_vars)])

        # Add path to playbook (relative to project.local_path).
        if job.project is None and job.job_type == PERM_INVENTORY_SCAN:
            args.append("scan_facts.yml")
        else:
            args.append(job.playbook)
        args.append(job.playbook)
        return args

    def build_safe_args(self, job, **kwargs):
        return self.build_args(job, display=True, **kwargs)

    def build_cwd(self, job, **kwargs):
        if job.project is None and job.job_type == PERM_INVENTORY_SCAN:
            return self.get_path_to('..', 'playbooks')
        cwd = job.project.get_project_path()
        if not cwd:
            root = settings.PROJECTS_ROOT
@ -1195,7 +1193,7 @@ class RunProjectUpdate(BaseTask):
            }
        }
        '''
        handle, self.revision_path = tempfile.mkstemp()
        handle, self.revision_path = tempfile.mkstemp(dir=settings.AWX_PROOT_BASE_PATH)
        private_data = {'credentials': {}}
        if project_update.credential:
            credential = project_update.credential
@ -1815,7 +1813,7 @@ class RunInventoryUpdate(BaseTask):
        elif src == 'scm':
            args.append(inventory_update.get_actual_source_path())
        elif src == 'custom':
            runpath = tempfile.mkdtemp(prefix='ansible_tower_launch_')
            runpath = tempfile.mkdtemp(prefix='ansible_tower_launch_', dir=settings.AWX_PROOT_BASE_PATH)
            handle, path = tempfile.mkstemp(dir=runpath)
            f = os.fdopen(handle, 'w')
            if inventory_update.source_script is None:

@ -24,7 +24,7 @@ def mock_access():
        mock_instance = mock.MagicMock(__name__='foobar')
        MockAccess = mock.MagicMock(return_value=mock_instance)
        the_patch = mock.patch.dict('awx.main.access.access_registry',
                                    {TowerClass: [MockAccess]}, clear=False)
                                    {TowerClass: MockAccess}, clear=False)
        the_patch.__enter__()
        yield mock_instance
    finally:

@ -170,6 +170,30 @@ def test_create_with_valid_inputs(get, post, admin):
    assert fields[0]['type'] == 'string'


@pytest.mark.django_db
def test_create_with_required_inputs(get, post, admin):
    response = post(reverse('api:credential_type_list'), {
        'kind': 'cloud',
        'name': 'MyCloud',
        'inputs': {
            'fields': [{
                'id': 'api_token',
                'label': 'API Token',
                'type': 'string',
                'secret': True
            }],
            'required': ['api_token'],
        },
        'injectors': {}
    }, admin)
    assert response.status_code == 201

    response = get(reverse('api:credential_type_list'), admin)
    assert response.data['count'] == 1
    required = response.data['results'][0]['inputs']['required']
    assert required == ['api_token']


@pytest.mark.django_db
@pytest.mark.parametrize('inputs', [
    True,

@ -1,6 +1,8 @@
import pytest
import mock

from django.core.exceptions import ValidationError

from awx.api.versioning import reverse

from awx.main.models import InventorySource, Project, ProjectUpdate
@ -34,6 +36,19 @@ def test_inventory_source_notification_on_cloud_only(get, post, inventory_source
    assert response.status_code == 400


@pytest.mark.django_db
def test_inventory_source_unique_together_with_inv(inventory_factory):
    inv1 = inventory_factory('foo')
    inv2 = inventory_factory('bar')
    is1 = InventorySource(name='foo', source='file', inventory=inv1)
    is1.save()
    is2 = InventorySource(name='foo', source='file', inventory=inv1)
    with pytest.raises(ValidationError):
        is2.validate_unique()
    is2 = InventorySource(name='foo', source='file', inventory=inv2)
    is2.validate_unique()


@pytest.mark.parametrize("role_field,expected_status_code", [
    (None, 403),
    ('admin_role', 200),
@ -347,4 +362,3 @@ class TestInsightsCredential:
            patch(insights_inventory.get_absolute_url(),
                  {'insights_credential': scm_credential.id}, admin_user,
                  expect=400)


@ -76,14 +76,6 @@ def job_template_prompts_null(project):
    )


@pytest.fixture
def bad_scan_JT(job_template_prompts):
    job_template = job_template_prompts(True)
    job_template.job_type = 'scan'
    job_template.save()
    return job_template


# End of setup, tests start here
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
@ -259,18 +251,6 @@ def test_job_block_scan_job_type_change(job_template_prompts, post, admin_user):
    assert 'job_type' in response.data


@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_block_scan_job_inv_change(mocker, bad_scan_JT, runtime_data, post, admin_user):
    # Assure that giving a new inventory for a scan job blocks the launch
    with mocker.patch('awx.main.access.BaseAccess.check_license'):
        response = post(reverse('api:job_template_launch', kwargs={'pk': bad_scan_JT.pk}),
                        dict(inventory=runtime_data['inventory']), admin_user,
                        expect=400)

    assert 'inventory' in response.data


@pytest.mark.django_db
def test_job_launch_JT_with_validation(machine_credential, deploy_jobtemplate):
    deploy_jobtemplate.extra_vars = '{"job_template_var": 3}'

@ -1,7 +1,7 @@
import pytest

# AWX
from awx.api.serializers import JobTemplateSerializer, JobLaunchSerializer
from awx.api.serializers import JobTemplateSerializer
from awx.api.versioning import reverse
from awx.main.models.jobs import Job
from awx.main.migrations import _save_password_keys as save_password_keys
@ -387,7 +387,6 @@ def test_edit_nonsenstive(patch, job_template_factory, alice):
        'ask_inventory_on_launch': True,
        'ask_credential_on_launch': True,
    }, alice, expect=200)
    print(res.data)
    assert res.data['name'] == 'updated'


@ -430,48 +429,6 @@ def test_jt_admin_copy_edit_functional(jt_copy_edit, rando, get, post):
    assert post_response.status_code == 403


@pytest.mark.django_db
def test_scan_jt_no_inventory(job_template_factory):
    # A user should be able to create a scan job without a project, but an inventory is required
    objects = job_template_factory('jt',
                                   credential='c',
                                   job_type="scan",
                                   project='p',
                                   inventory='i',
                                   organization='o')
    serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
                                             "project": None, "inventory": objects.inventory.pk})
    assert serializer.is_valid()
    serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
                                             "project": None, "inventory": None})
    assert not serializer.is_valid()
    assert "inventory" in serializer.errors
    serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
                                             "project": None, "inventory": None,
                                             "ask_inventory_on_launch": True})
    assert not serializer.is_valid()
    assert "inventory" in serializer.errors

    # A user shouldn't be able to launch a scan job template which is missing an inventory
    obj_jt = objects.job_template
    obj_jt.inventory = None
    serializer = JobLaunchSerializer(instance=obj_jt,
                                     context={'obj': obj_jt,
                                              "data": {}},
                                     data={})
    assert not serializer.is_valid()
    assert 'inventory' in serializer.errors


@pytest.mark.django_db
def test_scan_jt_surveys(inventory):
    serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
                                             "project": None, "inventory": inventory.pk,
                                             "survey_enabled": True})
    assert not serializer.is_valid()
    assert "survey_enabled" in serializer.errors


@pytest.mark.django_db
def test_launch_with_pending_deletion_inventory(get, post, organization_factory,
                                                job_template_factory, machine_credential,
@ -641,9 +598,6 @@ def test_jt_without_project(inventory):
    serializer = JobTemplateSerializer(data=data)
    assert not serializer.is_valid()
    assert "project" in serializer.errors
    data["job_type"] = "scan"
    serializer = JobTemplateSerializer(data=data)
    assert serializer.is_valid()


@pytest.mark.django_db

@ -163,12 +163,7 @@ def test_two_organizations(resourced_organization, organizations, user, get):
@pytest.mark.django_db
def test_scan_JT_counted(resourced_organization, user, get):
    admin_user = user('admin', True)
    # Add a scan job template to the org
    resourced_organization.projects.all()[0].jobtemplates.create(
        job_type='scan', inventory=resourced_organization.inventories.all()[0],
        name='scan-job-template')
    counts_dict = COUNTS_PRIMES
    counts_dict['job_templates'] += 1

    # Test list view
    list_response = get(reverse('api:organization_list'), admin_user)
@ -184,7 +179,7 @@ def test_scan_JT_counted(resourced_organization, user, get):
@pytest.mark.django_db
def test_JT_not_double_counted(resourced_organization, user, get):
    admin_user = user('admin', True)
    # Add a scan job template to the org
    # Add a run job template to the org
    resourced_organization.projects.all()[0].jobtemplates.create(
        job_type='run',
        inventory=resourced_organization.inventories.all()[0],

@ -188,7 +188,7 @@ class TestAccessListCapabilities:
            self, inventory, rando, get, mocker, mock_access_method):
        inventory.admin_role.members.add(rando)

        with mocker.patch.object(access_registry[Role][0], 'can_unattach', mock_access_method):
        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), rando)

        mock_access_method.assert_called_once_with(inventory.admin_role, rando, 'members', **self.extra_kwargs)
@ -198,7 +198,7 @@ class TestAccessListCapabilities:

    def test_access_list_indirect_access_capability(
            self, inventory, organization, org_admin, get, mocker, mock_access_method):
        with mocker.patch.object(access_registry[Role][0], 'can_unattach', mock_access_method):
        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), org_admin)

        mock_access_method.assert_called_once_with(organization.admin_role, org_admin, 'members', **self.extra_kwargs)
@ -210,7 +210,7 @@ class TestAccessListCapabilities:
            self, inventory, team, team_member, get, mocker, mock_access_method):
        team.member_role.children.add(inventory.admin_role)

        with mocker.patch.object(access_registry[Role][0], 'can_unattach', mock_access_method):
        with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
            response = get(reverse('api:inventory_access_list', kwargs={'pk': inventory.id}), team_member)

        mock_access_method.assert_called_once_with(inventory.admin_role, team.member_role, 'parents', **self.extra_kwargs)
@ -229,7 +229,7 @@ class TestAccessListCapabilities:
def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
    team.member_role.children.add(inventory.admin_role)

    with mocker.patch.object(access_registry[Role][0], 'can_unattach', mock_access_method):
    with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
        response = get(reverse('api:team_roles_list', kwargs={'pk': team.id}), team_member)

    # Did we assess whether team_member can remove team's permission to the inventory?
@ -244,7 +244,7 @@ def test_user_roles_unattach(mocker, organization, alice, bob, mock_access_metho
    organization.member_role.members.add(alice)
    organization.member_role.members.add(bob)

    with mocker.patch.object(access_registry[Role][0], 'can_unattach', mock_access_method):
    with mocker.patch.object(access_registry[Role], 'can_unattach', mock_access_method):
        response = get(reverse('api:user_roles_list', kwargs={'pk': alice.id}), bob)

    # Did we assess whether bob can remove alice's permission to the inventory?

@ -222,6 +222,25 @@ def test_logging_aggregrator_connection_test_valid(mocker, get, post, admin):
        assert getattr(create_settings, k) == v


@pytest.mark.django_db
def test_logging_aggregrator_connection_test_with_masked_password(mocker, patch, post, admin):
    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'logging'})
    patch(url, user=admin, data={'LOG_AGGREGATOR_PASSWORD': 'password123'}, expect=200)

    with mock.patch.object(BaseHTTPSHandler, 'perform_test') as perform_test:
        url = reverse('api:setting_logging_test')
        user_data = {
            'LOG_AGGREGATOR_TYPE': 'logstash',
            'LOG_AGGREGATOR_HOST': 'localhost',
            'LOG_AGGREGATOR_PORT': 8080,
            'LOG_AGGREGATOR_USERNAME': 'logger',
            'LOG_AGGREGATOR_PASSWORD': '$encrypted$'
        }
        post(url, user_data, user=admin, expect=200)
        create_settings = perform_test.call_args[0][0]
        assert getattr(create_settings, 'LOG_AGGREGATOR_PASSWORD') == 'password123'


@pytest.mark.django_db
def test_logging_aggregrator_connection_test_invalid(mocker, get, post, admin):
    with mock.patch.object(BaseHTTPSHandler, 'perform_test') as perform_test:

@ -108,7 +108,7 @@ def test_system_tracking_feature_disabled(mocker):
    cmd = Command()
    with pytest.raises(CommandError) as err:
        cmd.handle(None)
    assert 'The System Tracking feature is not enabled for your Tower instance' in err.value
    assert 'The System Tracking feature is not enabled for your instance' in err.value


@mock.patch('awx.main.management.commands.cleanup_facts.feature_enabled', new=mock_feature_enabled)

@ -3,13 +3,14 @@
import pytest

# AWX
from awx.main.models.workflow import WorkflowJob, WorkflowJobNode, WorkflowJobTemplateNode
from awx.main.models.workflow import WorkflowJob, WorkflowJobNode, WorkflowJobTemplateNode, WorkflowJobTemplate
from awx.main.models.jobs import Job
from awx.main.models.projects import ProjectUpdate
from awx.main.scheduler.dag_workflow import WorkflowDAG

# Django
from django.test import TransactionTestCase
from django.core.exceptions import ValidationError


@pytest.mark.django_db
@ -155,6 +156,15 @@ class TestWorkflowJobTemplate:
        assert nodes[1].unified_job_template == job_template
        assert nodes[2].inventory == inventory

    def test_wfjt_unique_together_with_org(self, organization):
        wfjt1 = WorkflowJobTemplate(name='foo', organization=organization)
        wfjt1.save()
        wfjt2 = WorkflowJobTemplate(name='foo', organization=organization)
        with pytest.raises(ValidationError):
            wfjt2.validate_unique()
        wfjt2 = WorkflowJobTemplate(name='foo', organization=None)
        wfjt2.validate_unique()


@pytest.mark.django_db
class TestWorkflowJobFailure:

@ -320,7 +320,7 @@ def test_insights_migration():
        'password': 'some-password',
    })

    assert cred.credential_type.name == 'Insights Basic Auth'
    assert cred.credential_type.name == 'Insights'
    assert cred.inputs['username'] == 'bob'
    assert cred.inputs['password'].startswith('$encrypted$')


@ -1,6 +1,8 @@
import pytest

from awx.main.models import AdHocCommand, InventoryUpdate, Job, JobTemplate, ProjectUpdate
from awx.main.models import Instance
from awx.api.versioning import reverse


@pytest.mark.django_db
@ -8,6 +10,26 @@ def test_default_tower_instance_group(default_instance_group, job_factory):
    assert default_instance_group in job_factory().preferred_instance_groups


@pytest.mark.django_db
def test_instance_dup(org_admin, organization, project, instance_factory, instance_group_factory, get, system_auditor):
    i1 = instance_factory("i1")
    i2 = instance_factory("i2")
    i3 = instance_factory("i3")
    ig_all = instance_group_factory("all", instances=[i1, i2, i3])
    ig_dup = instance_group_factory("duplicates", instances=[i1])
    project.organization.instance_groups.add(ig_all, ig_dup)
    actual_num_instances = Instance.objects.active_count()
    list_response = get(reverse('api:instance_list'), user=system_auditor)
    api_num_instances_auditor = list_response.data.items()[0][1]

    list_response2 = get(reverse('api:instance_list'), user=org_admin)
    api_num_instances_oa = list_response2.data.items()[0][1]

    assert actual_num_instances == api_num_instances_auditor
    # Note: the org_admin will not see the default 'tower' node because it is not in its group, as expected
    assert api_num_instances_oa == (actual_num_instances - 1)


@pytest.mark.django_db
def test_basic_instance_group_membership(instance_group_factory, default_instance_group, job_factory):
    j = job_factory()

@ -6,6 +6,8 @@ import pytest
from awx.api.versioning import reverse
from awx.main.models import Project

from django.core.exceptions import ValidationError


#
# Project listing and visibility tests
@ -238,3 +240,14 @@ def test_cannot_schedule_manual_project(project, admin_user, post):
        }, admin_user, expect=400
    )
    assert 'Manual' in response.data['unified_job_template'][0]


@pytest.mark.django_db
def test_project_unique_together_with_org(organization):
    proj1 = Project(name='foo', organization=organization)
    proj1.save()
    proj2 = Project(name='foo', organization=organization)
    with pytest.raises(ValidationError):
        proj2.validate_unique()
    proj2 = Project(name='foo', organization=None)
    proj2.validate_unique()

100
awx/main/tests/functional/test_scan_jobs_migration.py
Normal file
@ -0,0 +1,100 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
import pytest

from django.apps import apps

from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
from awx.main.models import (
    JobTemplate,
    Project,
    Inventory,
    Organization,
)

from awx.main.migrations._scan_jobs import _migrate_scan_job_templates


@pytest.fixture
def organizations():
    return [Organization.objects.create(name="org-{}".format(x)) for x in range(3)]


@pytest.fixture
def inventories(organizations):
    return [Inventory.objects.create(name="inv-{}".format(x),
                                     organization=organizations[x]) for x in range(3)]


@pytest.fixture
def job_templates_scan(inventories):
    return [JobTemplate.objects.create(name="jt-scan-{}".format(x),
                                       job_type=PERM_INVENTORY_SCAN,
                                       inventory=inventories[x]) for x in range(3)]


@pytest.fixture
def job_templates_deploy(inventories):
    return [JobTemplate.objects.create(name="jt-deploy-{}".format(x),
                                       job_type=PERM_INVENTORY_DEPLOY,
                                       inventory=inventories[x]) for x in range(3)]


@pytest.fixture
def project_custom(organizations):
    return Project.objects.create(name="proj-scan_custom",
                                  scm_url='https://giggity.com',
                                  organization=organizations[0])


@pytest.fixture
def job_templates_custom_scan_project(project_custom):
    return [JobTemplate.objects.create(name="jt-scan-custom-{}".format(x),
                                       project=project_custom,
                                       job_type=PERM_INVENTORY_SCAN) for x in range(3)]


@pytest.fixture
def job_template_scan_no_org():
    return JobTemplate.objects.create(name="jt-scan-no-org",
                                      job_type=PERM_INVENTORY_SCAN)


@pytest.mark.django_db
def test_scan_jobs_migration(job_templates_scan, job_templates_deploy, job_templates_custom_scan_project, project_custom, job_template_scan_no_org):
    _migrate_scan_job_templates(apps)

    # Ensure there are no scan job templates after the migration
    assert 0 == JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN).count()

    # Ensure the special No Organization project was created
    # and is associated with the correct job template
    proj = Project.objects.get(name="Tower Fact Scan - No Organization")
    assert proj.id == JobTemplate.objects.get(id=job_template_scan_no_org.id).project.id

    # Ensure per-org projects were created
    projs = Project.objects.filter(name__startswith="Tower Fact Scan")
    assert projs.count() == 4

    # Ensure scan job templates with the Tower project are migrated
    for i, jt_old in enumerate(job_templates_scan):
        jt = JobTemplate.objects.get(id=jt_old.id)
        assert PERM_INVENTORY_DEPLOY == jt.job_type
        assert jt.use_fact_cache is True
        assert projs[i] == jt.project

    # Ensure scan job templates with custom projects are migrated
    for jt_old in job_templates_custom_scan_project:
        jt = JobTemplate.objects.get(id=jt_old.id)
        assert PERM_INVENTORY_DEPLOY == jt.job_type
        assert jt.use_fact_cache is True
        assert project_custom == jt.project

    # Ensure other job templates aren't touched
    for jt_old in job_templates_deploy:
        jt = JobTemplate.objects.get(id=jt_old.id)
        assert PERM_INVENTORY_DEPLOY == jt.job_type
        assert jt.project is None

@ -264,11 +264,14 @@ def test_check_isolated_job(private_data_dir, rsa_key):

    run_pexpect.assert_called_with(
        [
            'ansible-playbook', '-u', settings.AWX_ISOLATED_USERNAME, '-i', 'isolated-host,',
            'check_isolated.yml', '-e', '{"src": "%s"}' % private_data_dir,
            'ansible-playbook', 'check_isolated.yml',
            '-u', settings.AWX_ISOLATED_USERNAME,
            '-T', str(settings.AWX_ISOLATED_CONNECTION_TIMEOUT),
            '-i', 'isolated-host,',
            '-e', '{"src": "%s"}' % private_data_dir,
            '-vvvvv'
        ],
        '/tower_devel/awx/playbooks', mgr.env, mock.ANY,
        '/tower_devel/awx/playbooks', mgr.management_env, mock.ANY,
        cancelled_callback=None,
        idle_timeout=0,
        job_timeout=0,

@ -8,6 +8,7 @@ from awx.main.models import (

import datetime
import json
import base64
from dateutil.tz import tzutc


@ -89,8 +90,8 @@ def test_start_job_fact_cache(hosts, job, inventory, mocker):

    job._get_memcache_connection().set.assert_any_call('5', [h.name for h in hosts])
    for host in hosts:
        job._get_memcache_connection().set.assert_any_call('{}-{}'.format(5, host.name), json.dumps(host.ansible_facts))
        job._get_memcache_connection().set.assert_any_call('{}-{}-modified'.format(5, host.name), host.ansible_facts_modified.isoformat())
        job._get_memcache_connection().set.assert_any_call('{}-{}'.format(5, base64.b64encode(host.name)), json.dumps(host.ansible_facts))
        job._get_memcache_connection().set.assert_any_call('{}-{}-modified'.format(5, base64.b64encode(host.name)), host.ansible_facts_modified.isoformat())


def test_start_job_fact_cache_existing_host(hosts, hosts2, job, job2, inventory, mocker):
@ -98,15 +99,15 @@ def test_start_job_fact_cache_existing_host(hosts, hosts2, job, job2, inventory,
    job.start_job_fact_cache()

    for host in hosts:
        job._get_memcache_connection().set.assert_any_call('{}-{}'.format(5, host.name), json.dumps(host.ansible_facts))
        job._get_memcache_connection().set.assert_any_call('{}-{}-modified'.format(5, host.name), host.ansible_facts_modified.isoformat())
        job._get_memcache_connection().set.assert_any_call('{}-{}'.format(5, base64.b64encode(host.name)), json.dumps(host.ansible_facts))
        job._get_memcache_connection().set.assert_any_call('{}-{}-modified'.format(5, base64.b64encode(host.name)), host.ansible_facts_modified.isoformat())

    job._get_memcache_connection().set.reset_mock()

    job2.start_job_fact_cache()

    # Ensure hosts2 ansible_facts didn't overwrite hosts ansible_facts
    ansible_facts_cached = job._get_memcache_connection().get('{}-{}'.format(5, hosts2[0].name))
    ansible_facts_cached = job._get_memcache_connection().get('{}-{}'.format(5, base64.b64encode(hosts2[0].name)))
    assert ansible_facts_cached == json.dumps(hosts[1].ansible_facts)


@ -242,11 +242,3 @@ class TestWorkflowWarnings:
        assert 'credential' in job_node_with_prompts.get_prompts_warnings()['ignored']
        assert len(job_node_with_prompts.get_prompts_warnings()['ignored']) == 2

    def test_warn_scan_errors_node_prompts(self, job_node_with_prompts):
        job_node_with_prompts.unified_job_template.job_type = 'scan'
        job_node_with_prompts.char_prompts['job_type'] = 'run'
        job_node_with_prompts.inventory = Inventory(name='different-inventory', pk=23)
        assert 'ignored' in job_node_with_prompts.get_prompts_warnings()
        assert 'job_type' in job_node_with_prompts.get_prompts_warnings()['ignored']
        assert 'inventory' in job_node_with_prompts.get_prompts_warnings()['ignored']
        assert len(job_node_with_prompts.get_prompts_warnings()['ignored']) == 2
|
||||
@ -341,8 +341,9 @@ class TestIsolatedExecution(TestJobExecution):

        playbook_run = self.run_pexpect.call_args_list[0][0]
        assert ' '.join(playbook_run[0]).startswith(' '.join([
            'ansible-playbook', '-u', settings.AWX_ISOLATED_USERNAME, '-i', self.REMOTE_HOST + ',',
            'run_isolated.yml', '-e',
            'ansible-playbook', 'run_isolated.yml', '-u', settings.AWX_ISOLATED_USERNAME,
            '-T', str(settings.AWX_ISOLATED_CONNECTION_TIMEOUT), '-i', self.REMOTE_HOST + ',',
            '-e',
        ]))
        extra_vars = playbook_run[0][playbook_run[0].index('-e') + 1]
        extra_vars = json.loads(extra_vars)
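A minimal sketch of the command shape the updated assertion expects; the values below are stand-ins for the Django settings (AWX_ISOLATED_USERNAME, AWX_ISOLATED_CONNECTION_TIMEOUT), not real configuration:

# Hypothetical values standing in for settings.
AWX_ISOLATED_USERNAME = 'awx'
AWX_ISOLATED_CONNECTION_TIMEOUT = 10
remote_host = 'isolated.example.org'

# The playbook now comes directly after 'ansible-playbook', and -T
# (the SSH connection timeout) is passed before the inventory.
args = [
    'ansible-playbook', 'run_isolated.yml',
    '-u', AWX_ISOLATED_USERNAME,
    '-T', str(AWX_ISOLATED_CONNECTION_TIMEOUT),
    '-i', remote_host + ',',
    '-e',
]
print(' '.join(args))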
@ -724,6 +725,37 @@ class TestJobCredentials(TestJobExecution):

        assert env['MY_CLOUD_API_TOKEN'] == 'ABC123'

    def test_custom_environment_injectors_with_boolean_env_var(self):
        some_cloud = CredentialType(
            kind='cloud',
            name='SomeCloud',
            managed_by_tower=False,
            inputs={
                'fields': [{
                    'id': 'turbo_button',
                    'label': 'Turbo Button',
                    'type': 'boolean'
                }]
            },
            injectors={
                'env': {
                    'TURBO_BUTTON': '{{turbo_button}}'
                }
            }
        )
        credential = Credential(
            pk=1,
            credential_type=some_cloud,
            inputs={'turbo_button': True}
        )
        self.instance.extra_credentials.add(credential)
        self.task.run(self.pk)

        assert self.run_pexpect.call_count == 1
        call_args, _ = self.run_pexpect.call_args_list[0]
        args, cwd, env, stdout = call_args
        assert env['TURBO_BUTTON'] == str(True)

    def test_custom_environment_injectors_with_reserved_env_var(self):
        some_cloud = CredentialType(
            kind='cloud',
@ -823,6 +855,68 @@ class TestJobCredentials(TestJobExecution):

        assert '-e {"api_token": "ABC123"}' in ' '.join(args)

    def test_custom_environment_injectors_with_boolean_extra_vars(self):
        some_cloud = CredentialType(
            kind='cloud',
            name='SomeCloud',
            managed_by_tower=False,
            inputs={
                'fields': [{
                    'id': 'turbo_button',
                    'label': 'Turbo Button',
                    'type': 'boolean'
                }]
            },
            injectors={
                'extra_vars': {
                    'turbo_button': '{{turbo_button}}'
                }
            }
        )
        credential = Credential(
            pk=1,
            credential_type=some_cloud,
            inputs={'turbo_button': True}
        )
        self.instance.extra_credentials.add(credential)
        self.task.run(self.pk)

        assert self.run_pexpect.call_count == 1
        call_args, _ = self.run_pexpect.call_args_list[0]
        args, cwd, env, stdout = call_args
        assert '-e {"turbo_button": true}' in ' '.join(args)

    def test_custom_environment_injectors_with_complicated_boolean_template(self):
        some_cloud = CredentialType(
            kind='cloud',
            name='SomeCloud',
            managed_by_tower=False,
            inputs={
                'fields': [{
                    'id': 'turbo_button',
                    'label': 'Turbo Button',
                    'type': 'boolean'
                }]
            },
            injectors={
                'extra_vars': {
                    'turbo_button': '{% if turbo_button %}FAST!{% else %}SLOW!{% endif %}'
                }
            }
        )
        credential = Credential(
            pk=1,
            credential_type=some_cloud,
            inputs={'turbo_button': True}
        )
        self.instance.extra_credentials.add(credential)
        self.task.run(self.pk)

        assert self.run_pexpect.call_count == 1
        call_args, _ = self.run_pexpect.call_args_list[0]
        args, cwd, env, stdout = call_args
        assert '-e {"turbo_button": "FAST!"}' in ' '.join(args)

    def test_custom_environment_injectors_with_secret_extra_vars(self):
        """
        extra_vars that contain secret field values should be censored in the DB
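A standalone illustration of the behavior these three tests pin down, not AWX's actual injector implementation: '{{ turbo_button }}' with a boolean input renders to the string 'True', so to end up as JSON true in extra_vars the rendered value has to be parsed back to a native type (yaml.safe_load here is an assumption about that step), while the conditional template stays a plain string either way:

import json
import yaml
from jinja2 import Template

value = Template('{{ turbo_button }}').render(turbo_button=True)    # 'True'
parsed = yaml.safe_load(value)                                      # True (bool)
print(json.dumps({'turbo_button': parsed}))                         # {"turbo_button": true}

label = Template('{% if turbo_button %}FAST!{% else %}SLOW!{% endif %}').render(turbo_button=True)
print(json.dumps({'turbo_button': label}))                          # {"turbo_button": "FAST!"}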
@ -124,7 +124,7 @@ def test_base_logging_handler_skip_log(params, logger_name, expected):

def test_base_logging_handler_emit(dummy_log_record):
    handler = BaseHandler(host='127.0.0.1', enabled_flag=True,
                          message_type='logstash',
                          message_type='logstash', lvl='INFO',
                          enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    sent_payloads = handler.emit(dummy_log_record)

@ -137,9 +137,18 @@ def test_base_logging_handler_emit(dummy_log_record):

    assert body['message'] == 'User joe logged in'


def test_base_logging_handler_ignore_low_severity_msg(dummy_log_record):
    handler = BaseHandler(host='127.0.0.1', enabled_flag=True,
                          message_type='logstash', lvl='WARNING',
                          enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    sent_payloads = handler.emit(dummy_log_record)
    assert len(sent_payloads) == 0


def test_base_logging_handler_emit_system_tracking():
    handler = BaseHandler(host='127.0.0.1', enabled_flag=True,
                          message_type='logstash', indv_facts=True,
                          message_type='logstash', indv_facts=True, lvl='INFO',
                          enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    record = logging.LogRecord(

@ -205,7 +214,8 @@ def test_https_logging_handler_connectivity_test(http_adapter, status, reason, e
        'LOG_AGGREGATOR_PASSWORD': 'password',
        'LOG_AGGREGATOR_LOGGERS': ['awx', 'activity_stream', 'job_events', 'system_tracking'],
        'CLUSTER_HOST_ID': '',
        'LOG_AGGREGATOR_TOWER_UUID': str(uuid4())
        'LOG_AGGREGATOR_TOWER_UUID': str(uuid4()),
        'LOG_AGGREGATOR_LEVEL': 'DEBUG',
    })

    class FakeHTTPSHandler(HTTPSHandler):

@ -226,7 +236,7 @@ def test_https_logging_handler_connectivity_test(http_adapter, status, reason, e


def test_https_logging_handler_logstash_auth_info():
    handler = HTTPSHandler(message_type='logstash', username='bob', password='ansible')
    handler = HTTPSHandler(message_type='logstash', username='bob', password='ansible', lvl='INFO')
    handler._add_auth_information()
    assert isinstance(handler.session.auth, requests.auth.HTTPBasicAuth)
    assert handler.session.auth.username == 'bob'

@ -243,7 +253,7 @@ def test_https_logging_handler_splunk_auth_info():

def test_https_logging_handler_connection_error(connection_error_adapter,
                                                dummy_log_record):
    handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True,
                           message_type='logstash',
                           message_type='logstash', lvl='INFO',
                           enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    handler.session.mount('http://', connection_error_adapter)

@ -271,7 +281,7 @@ def test_https_logging_handler_connection_error(connection_error_adapter,

def test_https_logging_handler_emit_without_cred(http_adapter, dummy_log_record,
                                                 message_type):
    handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True,
                           message_type=message_type,
                           message_type=message_type, lvl='INFO',
                           enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    handler.session.mount('http://', http_adapter)

@ -295,7 +305,7 @@ def test_https_logging_handler_emit_logstash_with_creds(http_adapter,
                                                        dummy_log_record):
    handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True,
                           username='user', password='pass',
                           message_type='logstash',
                           message_type='logstash', lvl='INFO',
                           enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    handler.session.mount('http://', http_adapter)

@ -310,7 +320,7 @@ def test_https_logging_handler_emit_logstash_with_creds(http_adapter,

def test_https_logging_handler_emit_splunk_with_creds(http_adapter,
                                                      dummy_log_record):
    handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True,
                           password='pass', message_type='splunk',
                           password='pass', message_type='splunk', lvl='INFO',
                           enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    handler.session.mount('http://', http_adapter)

@ -333,7 +343,7 @@ def test_encode_payload_for_socket(payload, encoded_payload):

def test_udp_handler_create_socket_at_init():
    handler = UDPHandler(host='127.0.0.1', port=4399,
                         enabled_flag=True, message_type='splunk',
                         enabled_flag=True, message_type='splunk', lvl='INFO',
                         enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    assert hasattr(handler, 'socket')
    assert isinstance(handler.socket, socket.socket)

@ -343,7 +353,7 @@ def test_udp_handler_create_socket_at_init():

def test_udp_handler_send(dummy_log_record):
    handler = UDPHandler(host='127.0.0.1', port=4399,
                         enabled_flag=True, message_type='splunk',
                         enabled_flag=True, message_type='splunk', lvl='INFO',
                         enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    with mock.patch('awx.main.utils.handlers._encode_payload_for_socket', return_value="des") as encode_mock,\

@ -355,7 +365,7 @@ def test_udp_handler_send(dummy_log_record):

def test_tcp_handler_send(fake_socket, dummy_log_record):
    handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5,
                         enabled_flag=True, message_type='splunk',
                         enabled_flag=True, message_type='splunk', lvl='INFO',
                         enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\

@ -370,7 +380,7 @@ def test_tcp_handler_send(fake_socket, dummy_log_record):

def test_tcp_handler_return_if_socket_unavailable(fake_socket, dummy_log_record):
    handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5,
                         enabled_flag=True, message_type='splunk',
                         enabled_flag=True, message_type='splunk', lvl='INFO',
                         enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\

@ -385,7 +395,7 @@ def test_tcp_handler_return_if_socket_unavailable(fake_socket, dummy_log_record)

def test_tcp_handler_log_exception(fake_socket, dummy_log_record):
    handler = TCPHandler(host='127.0.0.1', port=4399, tcp_timeout=5,
                         enabled_flag=True, message_type='splunk',
                         enabled_flag=True, message_type='splunk', lvl='INFO',
                         enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
    handler.setFormatter(LogstashFormatter())
    with mock.patch('socket.socket', return_value=fake_socket) as sok_init_mock,\
@ -24,9 +24,11 @@ def common_model_class_mock():

@pytest.fixture
def common_model_name_not_unique_class_mock():
    def class_generator(ut, fk_a_obj, fk_b_obj, plural):
    def class_generator(ut, fk_a_obj, fk_b_obj, plural, soft_ut=[]):
        class ModelClass(CommonModelNameNotUnique):

            SOFT_UNIQUE_TOGETHER = soft_ut

            class Meta:
                unique_together = ut
                verbose_name_plural = plural

@ -92,6 +94,33 @@ def test_invalid_generation(common_model_name_not_unique_class_mock,
    assert not settings_mock.NAMED_URL_FORMATS


def test_soft_unique_together_being_included(common_model_name_not_unique_class_mock,
                                             common_model_class_mock, settings_mock):
    models = []
    model_1 = common_model_class_mock('model_1')
    models.append(model_1)
    model_2 = common_model_name_not_unique_class_mock(
        (),
        model_1,
        model_1,
        'model_2',
        soft_ut=[('name', 'fk_a')]
    )
    models.append(model_2)

    random.shuffle(models)
    with mock.patch('awx.main.utils.named_url_graph.settings', settings_mock):
        generate_graph(models)
    assert settings_mock.NAMED_URL_GRAPH[model_1].model == model_1
    assert settings_mock.NAMED_URL_GRAPH[model_1].fields == ('name',)
    assert settings_mock.NAMED_URL_GRAPH[model_1].adj_list == []

    assert settings_mock.NAMED_URL_GRAPH[model_2].model == model_2
    assert settings_mock.NAMED_URL_GRAPH[model_2].fields == ('name',)
    assert zip(*settings_mock.NAMED_URL_GRAPH[model_2].adj_list)[0] == ('fk_a',)
    assert [x.model for x in zip(*settings_mock.NAMED_URL_GRAPH[model_2].adj_list)[1]] == [model_1]


def test_chain_generation(common_model_class_mock, common_model_name_not_unique_class_mock, settings_mock):
    """
    Graph topology:
@ -256,7 +256,7 @@ def update_scm_url(scm_type, url, username=True, password=True,
        netloc_password = ''

    if netloc_username and parts.scheme != 'file' and scm_type != "insights":
        netloc = u':'.join([urllib.quote(x) for x in (netloc_username, netloc_password) if x])
        netloc = u':'.join([urllib.quote(x, safe='') for x in (netloc_username, netloc_password) if x])
    else:
        netloc = u''
    netloc = u'@'.join(filter(None, [netloc, parts.hostname]))

@ -612,7 +612,7 @@ def build_proot_temp_dir():
def wrap_args_with_proot(args, cwd, **kwargs):
    '''
    Wrap existing command line with proot to restrict access to:
    - /tmp (except for own tmp files)
    - AWX_PROOT_BASE_PATH (generally, /tmp) (except for own /tmp files)
    For non-isolated nodes:
    - /etc/tower (to prevent obtaining db info or secret key)
    - /var/lib/awx (except for current project)

@ -621,7 +621,7 @@ def wrap_args_with_proot(args, cwd, **kwargs):
    '''
    from django.conf import settings
    new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/']
    hide_paths = [tempfile.gettempdir()]
    hide_paths = [settings.AWX_PROOT_BASE_PATH]
    if not kwargs.get('isolated'):
        hide_paths.extend(['/etc/tower', '/var/lib/awx', '/var/log',
                           settings.PROJECTS_ROOT, settings.JOBOUTPUT_ROOT])
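The quote() change is a one-character fix worth seeing in isolation: by default urllib leaves '/' unescaped (safe='/'), so a credential containing '/' would corrupt the netloc; safe='' percent-encodes it too. A quick sketch:

try:
    from urllib.parse import quote   # Python 3
except ImportError:
    from urllib import quote         # Python 2, as in the source

print(quote('p@ss/word'))            # p%40ss/word  -- '/' slips through
print(quote('p@ss/word', safe=''))   # p%40ss%2Fword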
@ -44,7 +44,8 @@ PARAM_NAMES = {
    'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS',
    'enabled_flag': 'LOG_AGGREGATOR_ENABLED',
    'tcp_timeout': 'LOG_AGGREGATOR_TCP_TIMEOUT',
    'verify_cert': 'LOG_AGGREGATOR_VERIFY_CERT'
    'verify_cert': 'LOG_AGGREGATOR_VERIFY_CERT',
    'lvl': 'LOG_AGGREGATOR_LEVEL',
}


@ -89,6 +90,15 @@ class VerboseThreadPoolExecutor(ThreadPoolExecutor):
                         **kwargs)


LEVEL_MAPPING = {
    'DEBUG': logging.DEBUG,
    'INFO': logging.INFO,
    'WARNING': logging.WARNING,
    'ERROR': logging.ERROR,
    'CRITICAL': logging.CRITICAL,
}


class BaseHandler(logging.Handler):
    def __init__(self, **kwargs):
        super(BaseHandler, self).__init__()

@ -133,6 +143,8 @@ class BaseHandler(logging.Handler):
        Emit a log record. Returns a list of zero or more
        implementation-specific objects for tests.
        """
        if not record.name.startswith('awx.analytics') and record.levelno < LEVEL_MAPPING[self.lvl]:
            return []
        if self._skip_log(record.name):
            return []
        try:
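The gate added to emit() is easier to verify standalone. This sketch extracts just that condition (the function name is mine, not AWX's): analytics loggers always pass, everything else must meet the configured level.

import logging

LEVEL_MAPPING = {
    'DEBUG': logging.DEBUG,
    'INFO': logging.INFO,
    'WARNING': logging.WARNING,
    'ERROR': logging.ERROR,
    'CRITICAL': logging.CRITICAL,
}

def should_emit(record, lvl='WARNING'):
    # Mirrors the new check in BaseHandler.emit.
    if record.name.startswith('awx.analytics'):
        return True
    return record.levelno >= LEVEL_MAPPING[lvl]

rec = logging.LogRecord('awx', logging.INFO, __file__, 0, 'msg', None, None)
print(should_emit(rec, 'WARNING'))  # False -- matches test_base_logging_handler_ignore_low_severity_msg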
@ -187,6 +187,8 @@ def _get_all_unique_togethers(model):
            ret.append(uts)
        else:
            ret.extend(uts)
        soft_uts = getattr(model_to_backtrack, 'SOFT_UNIQUE_TOGETHER', [])
        ret.extend(soft_uts)
        for parent_class in model_to_backtrack.__bases__:
            if issubclass(parent_class, models.Model) and\
                    hasattr(parent_class, '_meta') and\
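The idea, reduced to a runnable sketch with fake stand-ins for a Django model: SOFT_UNIQUE_TOGETHER is a plain class attribute (not enforced by the database) that the named-URL graph now treats like Meta.unique_together.

class FakeMeta:
    unique_together = ()

class FakeModel:
    _meta = FakeMeta
    SOFT_UNIQUE_TOGETHER = [('name', 'fk_a')]

def get_all_unique_togethers(model):
    # Condensed version of the lookup the diff extends.
    ret = list(model._meta.unique_together or [])
    ret.extend(getattr(model, 'SOFT_UNIQUE_TOGETHER', []))
    return ret

print(get_all_unique_togethers(FakeModel))  # [('name', 'fk_a')]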
16 awx/main/utils/pglock.py (new file)
@ -0,0 +1,16 @@
# Copyright (c) 2017 Ansible by Red Hat
# All Rights Reserved.

from contextlib import contextmanager

from django_pglocks import advisory_lock as django_pglocks_advisory_lock
from django.db import connection


@contextmanager
def advisory_lock(*args, **kwargs):
    if connection.vendor == 'postgresql':
        with django_pglocks_advisory_lock(*args, **kwargs) as internal_lock:
            yield internal_lock
    else:
        yield True
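A hedged usage sketch (the lock id is made up, and this needs a configured Django/AWX environment to actually run): on PostgreSQL the wrapper delegates to django_pglocks; on any other backend it is a no-op that always "acquires".

from awx.main.utils.pglock import advisory_lock

# With wait=False, django_pglocks yields a boolean indicating whether
# the lock was obtained; the non-postgres branch always yields True.
with advisory_lock('cleanup-job', wait=False) as acquired:
    if acquired:
        pass  # do the mutually exclusive work here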
@ -3,22 +3,25 @@
# The following variables will be set by the runner of this playbook:
# src: /tmp/some/path/private_data_dir/

- hosts: all
- name: Poll for status of active job.
  hosts: all
  gather_facts: false

  tasks:

  - shell: "tower-expect is-alive {{src}}"
  - name: Determine if daemon process is alive.
    shell: "tower-expect is-alive {{src}}"
    register: is_alive
    ignore_errors: true

  - name: copy artifacts from the isolated host
  - name: Copy artifacts from the isolated host.
    synchronize:
      src: "{{src}}/artifacts/"
      dest: "{{src}}/artifacts/"
      mode: pull
      recursive: yes

  - fail:
  - name: Fail if previous check determined that process is not alive.
    fail:
      msg: "isolated task is still running"
    when: "is_alive.rc == 0"

@ -4,7 +4,8 @@
# cleanup_dirs: ['/tmp/path/private_data_dir/', '/tmp//path/proot_temp_dir/']
# private_data_dir: '/tmp/path/private_data_dir/'

- hosts: all
- name: Clean up from isolated job run.
  hosts: all
  gather_facts: false

  tasks:

@ -1,5 +1,6 @@
---
- hosts: all
- name: Periodic background status check of isolated instances.
  hosts: all
  gather_facts: false

  tasks:

@ -5,18 +5,20 @@
# dest: /tmp/some/path/
# proot_temp_dir: /tmp/some/path

- hosts: all
- name: Prepare data, dispatch job in isolated environment.
  hosts: all
  gather_facts: false
  vars_prompt:
  - prompt: "Secret"
    name: "secret"
    private: yes

  tasks:

  - name: create a proot/bwrap temp dir (if necessary)
    synchronize:
      src: "{{proot_temp_dir}}"
      dest: "/tmp"
      dest: "{{dest}}"
    when: proot_temp_dir is defined

  - name: synchronize job environment with isolated host
@ -33,6 +33,7 @@ import os
import memcache
import json
import datetime
import base64
from dateutil import parser
from dateutil.tz import tzutc

@ -56,10 +57,10 @@ class CacheModule(BaseCacheModule):
        return '{}'.format(self._inventory_id)

    def translate_host_key(self, host_name):
        return '{}-{}'.format(self._inventory_id, host_name)
        return '{}-{}'.format(self._inventory_id, base64.b64encode(host_name))

    def translate_modified_key(self, host_name):
        return '{}-{}-modified'.format(self._inventory_id, host_name)
        return '{}-{}-modified'.format(self._inventory_id, base64.b64encode(host_name))

    def get(self, key):
        host_key = self.translate_host_key(key)

@ -104,8 +105,7 @@ class CacheModule(BaseCacheModule):
        return False

    def delete(self, key):
        self.mc.delete(self.translate_host_key(key))
        self.mc.delete(self.translate_modified_key(key))
        self.set(key, {})

    def flush(self):
        host_names = self.mc.get(self.host_names_key)
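Why base64-encode the host name: memcached keys may not contain spaces or control characters, but Ansible host names can. A standalone sketch with made-up values:

import base64

inventory_id = 5
host_name = 'host name with spaces'
# encode/decode keeps this runnable on Python 3; the plugin itself
# (Python 2) passes the str to b64encode directly.
key = '{}-{}'.format(inventory_id, base64.b64encode(host_name.encode()).decode())
print(key)  # 5-aG9zdCBuYW1lIHdpdGggc3BhY2Vz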
@ -24,6 +24,14 @@ def main():
    module = AnsibleModule(
        argument_spec = dict()
    )
    try:
        version = subprocess.check_output(
            ['tower-expect', '--version'],
            stderr=subprocess.STDOUT
        ).strip()
    except subprocess.CalledProcessError as e:
        module.fail_json(msg=str(e))
        return
    # Duplicated with awx.main.utils.common.get_system_task_capacity
    try:
        out = subprocess.check_output(['free', '-m'])

@ -36,7 +44,7 @@ def main():
    cap = 50 + ((int(total_mem_value) / 1024) - 2) * 75

    # Module never results in a change
    module.exit_json(changed=False, capacity=cap)
    module.exit_json(changed=False, capacity=cap, version=version)


if __name__ == '__main__':
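The capacity formula, shown standalone: 50 units plus 75 per GiB of RAM beyond 2 GiB, where total_mem_value is the megabyte figure parsed from `free -m`. (Integer division with // here mirrors Python 2's `/` in the source.)

def capacity(total_mem_value):
    return 50 + ((int(total_mem_value) // 1024) - 2) * 75

print(capacity(4096))  # 200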
@ -594,7 +594,9 @@ AWX_PROOT_SHOW_PATHS = []
# Number of jobs to show as part of the job template history
AWX_JOB_TEMPLATE_HISTORY = 10

# The directory in which bubblewrap will create new temporary directories for its root
# The directory in which Tower will create new temporary directories for job
# execution and isolation (such as credential files and custom
# inventory scripts).
# Note: This setting may be overridden by database settings.
AWX_PROOT_BASE_PATH = "/tmp"

@ -611,6 +613,9 @@ AWX_ISOLATED_CHECK_INTERVAL = 30
# The timeout (in seconds) for launching jobs on isolated nodes
AWX_ISOLATED_LAUNCH_TIMEOUT = 600

# Ansible connection timeout (in seconds) for communicating with isolated instances
AWX_ISOLATED_CONNECTION_TIMEOUT = 10

# The time (in seconds) between the periodic isolated heartbeat status check
AWX_ISOLATED_PERIODIC_CHECK = 600

@ -912,6 +917,7 @@ TOWER_SETTINGS_MANIFEST = {}
LOG_AGGREGATOR_ENABLED = False
LOG_AGGREGATOR_TCP_TIMEOUT = 5
LOG_AGGREGATOR_VERIFY_CERT = True
LOG_AGGREGATOR_LEVEL = 'INFO'

# The number of retry attempts for websocket session establishment
# If you're encountering issues establishing websockets in clustered Tower,

@ -1008,6 +1014,15 @@ LOGGING = {
            'backupCount': 5,
            'formatter': 'simple',
        },
        'management_playbooks': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filters': ['require_debug_false'],
            'filename': os.path.join(LOG_ROOT, 'management_playbooks.log'),
            'maxBytes': 1024 * 1024 * 5,  # 5 MB
            'backupCount': 5,
            'formatter': 'simple',
        },
        'fact_receiver': {
            'level': 'WARNING',
            'class': 'logging.handlers.RotatingFileHandler',

@ -1066,10 +1081,13 @@ LOGGING = {
        },
        'awx.main': {
            'handlers': ['null']
        },
        'awx.main.commands.run_callback_receiver': {
        }, 'awx.main.commands.run_callback_receiver': {
            'handlers': ['callback_receiver'],
        },
        'awx.isolated.manager.playbooks': {
            'handlers': ['management_playbooks'],
            'propagate': False
        },
        'awx.main.commands.inventory_import': {
            'handlers': ['inventory_import'],
            'propagate': False
@ -24,6 +24,9 @@ from defaults import *  # NOQA
LOGGING['handlers']['console']['()'] = 'awx.main.utils.handlers.ColorHandler'
COLOR_LOGS = True

# Pipe management playbook output to console
LOGGING['loggers']['awx.isolated.manager.playbooks']['propagate'] = True

ALLOWED_HOSTS = ['*']

mimetypes.add_type("image/svg+xml", ".svg", True)

@ -54,6 +54,7 @@ LOGGING['handlers']['tower_warnings']['filename'] = '/var/log/tower/tower.log'
LOGGING['handlers']['callback_receiver']['filename'] = '/var/log/tower/callback_receiver.log'
LOGGING['handlers']['task_system']['filename'] = '/var/log/tower/task_system.log'
LOGGING['handlers']['fact_receiver']['filename'] = '/var/log/tower/fact_receiver.log'
LOGGING['handlers']['management_playbooks']['filename'] = '/var/log/tower/management_playbooks.log'
LOGGING['handlers']['system_tracking_migrations']['filename'] = '/var/log/tower/tower_system_tracking_migrations.log'
LOGGING['handlers']['rbac_migrations']['filename'] = '/var/log/tower/tower_rbac_migrations.log'
@ -270,7 +270,7 @@ register(
    field_class=fields.LDAPSearchField,
    default=[],
    label=_('LDAP Group Search'),
    help_text=_('Users in Tower are mapped to organizations based on their '
    help_text=_('Users are mapped to organizations based on their '
                'membership in LDAP groups. This setting defines the LDAP search '
                'query to find groups. Note that this, unlike the user search '
                'above, does not support LDAPSearchUnion.'),
@ -1,13 +0,0 @@
<html>
  <head>
    <title>Ansible Tower: Secondary</title>
    <meta http-equiv="refresh" content="{{ redirect_seconds }}; url=//{{ primary.hostname }}/" />
  </head>
  <body>
    <h1>Ansible Tower</h1>
    <p>This Ansible Tower server (version {{ version }}) is in secondary
      mode. The current master is {{ primary.hostname }}.
    </p>
    <p>This page will redirect in {{ redirect_seconds }} seconds.</p>
  </body>
</html>
@ -1,21 +1,19 @@
const DEFAULT_ORGANIZATION_PLACEHOLDER = 'SELECT AN ORGANIZATION';

function AddCredentialsController (models, $state) {
function AddCredentialsController (models, $state, strings) {
    let vm = this || {};

    let me = models.me;
    let credential = models.credential;
    let credentialType = models.credentialType;
    let organization = models.organization;

    vm.panelTitle = 'NEW CREDENTIAL';
    vm.mode = 'add';
    vm.strings = strings.credentials;

    vm.panelTitle = vm.strings[vm.mode].PANEL_TITLE;

    vm.tab = {
        details: {
            _active: true
        },
        permissions: {
            _disabled: true
        }
        details: { _active: true },
        permissions: { _disabled: true }
    };

    vm.form = credential.createFormSchema('post', {

@ -24,9 +22,13 @@ function AddCredentialsController (models, $state) {

    vm.form.organization._resource = 'organization';
    vm.form.organization._route = 'credentials.add.organization';

    vm.form.organization._model = organization;
    vm.form.organization._placeholder = vm.strings.inputs.ORGANIZATION_PLACEHOLDER;

    vm.form.credential_type._resource = 'credential_type';
    vm.form.credential_type._route = 'credentials.add.credentialType';
    vm.form.credential_type._model = credentialType;
    vm.form.credential_type._placeholder = vm.strings.inputs.CREDENTIAL_TYPE_PLACEHOLDER;

    vm.form.inputs = {
        _get: id => {

@ -52,7 +54,8 @@ function AddCredentialsController (models, $state) {

AddCredentialsController.$inject = [
    'resolvedModels',
    '$state'
    '$state',
    'CredentialsStrings'
];

export default AddCredentialsController;
@ -2,8 +2,8 @@
<at-panel-heading>{{ vm.panelTitle }}</at-panel-heading>

<at-tab-group>
    <at-tab state="vm.tab.details">Details</at-tab>
    <at-tab state="vm.tab.permissions">Permissions</at-tab>
    <at-tab state="vm.tab.details">{{ vm.strings.tab.DETAILS }}</at-tab>
    <at-tab state="vm.tab.permissions">{{ vm.strings.tab.PERMISSIONS }}</at-tab>
</at-tab-group>

<at-panel-body>

@ -17,7 +17,7 @@
<at-input-lookup col="4" tab="4" state="vm.form.credential_type"></at-input-lookup>

<at-input-group col="4" tab="5" state="vm.form.inputs">
    Type Details
    {{ vm.strings.inputs.GROUP_TITLE }}
</at-input-group>

<at-action-group col="12" pos="right">

@ -29,11 +29,11 @@
</at-panel>

<at-panel ng-if="$state.current.name.includes('permissions')">
    <at-panel-heading>CREDENTIALS PERMISSIONS</at-panel-heading>
    <at-panel-heading>{{ vm.strings.permissions.TITLE }}</at-panel-heading>

    <at-tab-group>
        <at-tab state="vm.tab.details">Details</at-tab>
        <at-tab state="vm.tab.permissions">Permissions</at-tab>
        <at-tab state="vm.tab.details">{{ vm.strings.tab.DETAILS }}</at-tab>
        <at-tab state="vm.tab.permissions">{{ vm.strings.tab.PERMISSIONS }}</at-tab>
    </at-tab-group>

    <at-panel-body>
34 awx/ui/client/features/credentials/credentials.strings.js (new file)
@ -0,0 +1,34 @@
function CredentialsStrings (BaseString) {
    BaseString.call(this, 'credentials');

    let t = this.t;
    let ns = this.credentials;

    ns.state = {
        ADD_BREADCRUMB_LABEL: t('CREATE CREDENTIAL'),
        EDIT_BREADCRUMB_LABEL: t('EDIT CREDENTIAL')
    };

    ns.tab = {
        DETAILS: t('Details'),
        PERMISSIONS: t('Permissions')
    };

    ns.inputs = {
        GROUP_TITLE: t('Type Details'),
        ORGANIZATION_PLACEHOLDER: t('SELECT AN ORGANIZATION'),
        CREDENTIAL_TYPE_PLACEHOLDER: t('SELECT A CREDENTIAL TYPE')
    };

    ns.add = {
        PANEL_TITLE: t('NEW CREDENTIAL')
    };

    ns.permissions = {
        TITLE: t('CREDENTIALS PERMISSIONS')
    };
}

CredentialsStrings.$inject = ['BaseStringService'];

export default CredentialsStrings;
@ -1,12 +1,15 @@
const DEFAULT_ORGANIZATION_PLACEHOLDER = 'SELECT AN ORGANIZATION';

function EditCredentialsController (models, $state, $scope) {
function EditCredentialsController (models, $state, $scope, strings) {
    let vm = this || {};

    let me = models.me;
    let credential = models.credential;
    let credentialType = models.credentialType;
    let selectedCredentialType = credentialType.getById(credential.get('credential_type'));
    let organization = models.organization;
    let selectedCredentialType = models.selectedCredentialType;

    vm.mode = 'edit';
    vm.strings = strings.credentials;
    vm.panelTitle = credential.get('name');

    vm.tab = {
        details: {

@ -33,21 +36,23 @@ function EditCredentialsController (models, $state, $scope) {
    // Only exists for permissions compatibility
    $scope.credential_obj = credential.get();

    vm.panelTitle = credential.get('name');

    vm.form = credential.createFormSchema('put', {
        omit: ['user', 'team', 'inputs']
    });

    vm.form.organization._resource = 'organization';
    vm.form.organization._model = organization;
    vm.form.organization._route = 'credentials.edit.organization';
    vm.form.organization._value = credential.get('summary_fields.organization.id');
    vm.form.organization._displayValue = credential.get('summary_fields.organization.name');
    vm.form.organization._placeholder = vm.strings.inputs.ORGANIZATION_PLACEHOLDER;

    vm.form.credential_type._resource = 'credential_type';
    vm.form.credential_type._model = credentialType;
    vm.form.credential_type._route = 'credentials.edit.credentialType';
    vm.form.credential_type._value = selectedCredentialType.id;
    vm.form.credential_type._displayValue = selectedCredentialType.name;
    vm.form.credential_type._value = selectedCredentialType.get('id');
    vm.form.credential_type._displayValue = selectedCredentialType.get('name');
    vm.form.credential_type._placeholder = vm.strings.inputs.CREDENTIAL_TYPE_PLACEHOLDER;

    vm.form.inputs = {
        _get (id) {

@ -80,7 +85,8 @@ function EditCredentialsController (models, $state, $scope) {
EditCredentialsController.$inject = [
    'resolvedModels',
    '$state',
    '$scope'
    '$scope',
    'CredentialsStrings'
];

export default EditCredentialsController;
@ -1,23 +1,38 @@
import LegacyCredentials from './legacy.credentials';
import AddController from './add-credentials.controller.js';
import EditController from './edit-credentials.controller.js';
import { N_ } from '../../src/i18n';
import AddController from './add-credentials.controller';
import EditController from './edit-credentials.controller';
import CredentialsStrings from './credentials.strings';

function CredentialsResolve ($q, $stateParams, Me, Credential, CredentialType) {
function CredentialsResolve ($q, $stateParams, Me, Credential, CredentialType, Organization) {
    let id = $stateParams.credential_id;
    let models;

    let promises = {
        me: new Me('get'),
        credentialType: new CredentialType('get')
        credentialType: new CredentialType('get'),
        organization: new Organization('get')
    };

    if (id) {
        promises.credential = new Credential(['get', 'options'], [id, id]);
    } else {
    if (!id) {
        promises.credential = new Credential('options');

        return $q.all(promises)
    }

    return $q.all(promises);
    promises.credential = new Credential(['get', 'options'], [id, id]);

    return $q.all(promises)
        .then(_models_ => {
            models = _models_;
            let credentialTypeId = models.credential.get('credential_type');

            return models.credentialType.graft(credentialTypeId);
        })
        .then(selectedCredentialType => {
            models.selectedCredentialType = selectedCredentialType;

            return models;
        });
}

CredentialsResolve.$inject = [

@ -25,19 +40,23 @@ CredentialsResolve.$inject = [
    '$stateParams',
    'MeModel',
    'CredentialModel',
    'CredentialTypeModel'
    'CredentialTypeModel',
    'OrganizationModel'
];

function CredentialsConfig ($stateExtenderProvider, legacyProvider, pathProvider) {
function CredentialsConfig ($stateExtenderProvider, legacyProvider, pathProvider, stringProvider) {
    let path = pathProvider.$get();
    let stateExtender = $stateExtenderProvider.$get();
    let legacy = legacyProvider.$get();
    let strings = stringProvider.$get();

    strings = strings.credentials.state;

    stateExtender.addState({
        name: 'credentials.add',
        route: '/add',
        ncyBreadcrumb: {
            label: N_('CREATE CREDENTIALS')
            label: strings.ADD_BREADCRUMB_LABEL
        },
        views: {
            'add@credentials': {

@ -55,7 +74,7 @@ function CredentialsConfig ($stateExtenderProvider, legacyProvider, pathProvider
        name: 'credentials.edit',
        route: '/:credential_id',
        ncyBreadcrumb: {
            label: N_('EDIT')
            label: strings.EDIT_BREADCRUMB_LABEL
        },
        views: {
            'edit@credentials': {

@ -81,7 +100,8 @@ function CredentialsConfig ($stateExtenderProvider, legacyProvider, pathProvider
CredentialsConfig.$inject = [
    '$stateExtenderProvider',
    'LegacyCredentialsServiceProvider',
    'PathServiceProvider'
    'PathServiceProvider',
    'CredentialsStringsProvider'
];

angular

@ -89,4 +109,5 @@ angular
    .config(CredentialsConfig)
    .controller('AddController', AddController)
    .controller('EditController', EditController)
    .service('LegacyCredentialsService', LegacyCredentials);
    .service('LegacyCredentialsService', LegacyCredentials)
    .service('CredentialsStrings', CredentialsStrings);
@ -113,7 +113,7 @@ function LegacyCredentialsService (pathService) {
        },
        ncyBreadcrumb: {
            parent: 'credentials.edit',
            label: 'PERMISSIONS'
            label: N_('PERMISSIONS')
        },
        views: {
            'related': {

@ -336,7 +336,7 @@ function LegacyCredentialsService (pathService) {
                return this.credentialType;

            default:
                throw new Error(`Legacy state configuration for ${name} does not exist`);
                throw new Error(N_(`Legacy state configuration for ${name} does not exist`));
        };
    };
}

@ -60,7 +60,7 @@ table, tbody {
    height: 40px;
    font-size: 14px;
    color: @list-item;
    border-bottom: 1px solid @default-white-button-bord;
    border-bottom: 1px solid @default-border;
}

.List-tableRow:last-of-type {

@ -176,6 +176,27 @@ table, tbody {
    text-transform: uppercase;
}

.List-exitHolder {
    justify-content: flex-end;
    display: flex;
}

.List-exit {
    cursor: pointer;
    padding: 0px;
    border: none;
    height: 20px;
    font-size: 20px;
    background-color: @default-bg;
    color: @d7grey;
    transition: color 0.2s;
    line-height: 1;
}

.List-exit:hover {
    color: @default-icon;
}

.List-actionHolder {
    justify-content: flex-end;
    display: flex;
49 awx/ui/client/lib/components/components.strings.js (new file)
@ -0,0 +1,49 @@
function ComponentsStrings (BaseString) {
    BaseString.call(this, 'components');

    let t = this.t;
    let ns = this.components;

    ns.REPLACE = t('REPLACE');
    ns.REVERT = t('REVERT');
    ns.ENCRYPTED = t('ENCRYPTED');
    ns.OPTIONS = t('OPTIONS');
    ns.SHOW = t('SHOW');
    ns.HIDE = t('HIDE');

    ns.message = {
        REQUIRED_INPUT_MISSING: t('Please enter a value.'),
        INVALID_INPUT: t('Invalid input for this type.')
    };

    ns.form = {
        SUBMISSION_ERROR_TITLE: t('Unable to Submit'),
        SUBMISSION_ERROR_MESSAGE: t('Unexpected server error. View the console for more information'),
        SUBMISSION_ERROR_PREFACE: t('Unexpected Error')
    };

    ns.group = {
        UNSUPPORTED_ERROR_PREFACE: t('Unsupported input type')
    };

    ns.label = {
        PROMPT_ON_LAUNCH: t('Prompt on launch')
    };

    ns.select = {
        UNSUPPORTED_TYPE_ERROR: t('Unsupported display model type'),
        EMPTY_PLACEHOLDER: t('NO OPTIONS AVAILABLE')
    };

    ns.textarea = {
        SSH_KEY_HINT: t('HINT: Drag and drop an SSH private key file on the field below.')
    };

    ns.lookup = {
        NOT_FOUND: t('That value was not found. Please enter or select a valid value.')
    };
}

ComponentsStrings.$inject = ['BaseStringService'];

export default ComponentsStrings;
@ -5,7 +5,7 @@ function link (scope, element, attrs, controllers) {
    actionController.init(formController, element, scope);
}

function atFormActionController ($state) {
function atFormActionController ($state, strings) {
    let vm = this || {};

    let element;

@ -36,21 +36,21 @@ function atFormActionController ($state) {
    };

    vm.setCancelDefaults = () => {
        scope.text = 'CANCEL';
        scope.text = strings.CANCEL;
        scope.fill = 'Hollow';
        scope.color = 'default';
        scope.action = () => $state.go(scope.to || '^');
    };

    vm.setSaveDefaults = () => {
        scope.text = 'SAVE';
        scope.text = strings.SAVE;
        scope.fill = '';
        scope.color = 'success';
        scope.action = () => form.submit();
    };
}

atFormAction.$inject = ['$state'];
atFormActionController.$inject = ['$state', 'ComponentsStrings'];

function atFormAction (pathService) {
    return {

@ -8,13 +8,15 @@ function atFormLink (scope, el, attrs, controllers) {
    formController.init(scope, form);
}

function AtFormController (eventService) {
function AtFormController (eventService, strings) {
    let vm = this || {};

    let scope;
    let modal;
    let form;

    strings = strings.components.forms;

    vm.components = [];
    vm.state = {
        isValid: false,

@ -99,6 +101,8 @@ function AtFormController (eventService) {

    if (!handled) {
        let message;
        let title = strings.SUBMISSION_ERROR_TITLE;
        let preface = strings.SUBMISSION_ERROR_PREFACE;

        if (typeof err.data === 'object') {
            message = JSON.stringify(err.data);

@ -106,13 +110,13 @@ function AtFormController (eventService) {
            message = err.data;
        }

        modal.show('Unable to Submit', `Unexpected Error: ${message}`);
        modal.show(title, `${preface}: ${message}`);
    }
};

vm.handleUnexpectedError = err => {
    let title = 'Unable to Submit';
    let message = 'Unexpected server error. View the console for more information';
    let title = strings.SUBMISSION_ERROR_TITLE;
    let message = strings.SUBMISSION_ERROR_MESSAGE;

    modal.show(title, message);

@ -190,7 +194,7 @@ function AtFormController (eventService) {
    };
}

AtFormController.$inject = ['EventService'];
AtFormController.$inject = ['EventService', 'ComponentsStrings'];

function atForm (pathService) {
    return {
@ -8,6 +8,7 @@ import inputLabel from './input/label.directive';
import inputLookup from './input/lookup.directive';
import inputMessage from './input/message.directive';
import inputSecret from './input/secret.directive';
import inputSelect from './input/select.directive';
import inputText from './input/text.directive';
import inputTextarea from './input/textarea.directive';
import inputTextareaSecret from './input/textarea-secret.directive';

@ -20,6 +21,7 @@ import tab from './tabs/tab.directive';
import tabGroup from './tabs/group.directive';

import BaseInputController from './input/base.controller';
import ComponentsStrings from './components.strings';

angular
    .module('at.lib.components', [])

@ -33,6 +35,7 @@ angular
    .directive('atInputLookup', inputLookup)
    .directive('atInputMessage', inputMessage)
    .directive('atInputSecret', inputSecret)
    .directive('atInputSelect', inputSelect)
    .directive('atInputText', inputText)
    .directive('atInputTextarea', inputTextarea)
    .directive('atInputTextareaSecret', inputTextareaSecret)

@ -43,6 +46,7 @@ angular
    .directive('atPopover', popover)
    .directive('atTab', tab)
    .directive('atTabGroup', tabGroup)
    .service('BaseInputController', BaseInputController);
    .service('BaseInputController', BaseInputController)
    .service('ComponentsStrings', ComponentsStrings);
@ -1,17 +1,19 @@
const REQUIRED_INPUT_MISSING_MESSAGE = 'Please enter a value.';
const DEFAULT_INVALID_INPUT_MESSAGE = 'Invalid input for this type.';
const PROMPT_ON_LAUNCH_VALUE = 'ASK';
const ENCRYPTED_VALUE = '$encrypted$';
function BaseInputController (strings) {
    // Default values are universal. Don't translate.
    const PROMPT_ON_LAUNCH_VALUE = 'ASK';
    const ENCRYPTED_VALUE = '$encrypted$';

function BaseInputController () {
    return function extend (type, scope, element, form) {
        let vm = this;

        vm.strings = strings;

        scope.state = scope.state || {};

        scope.state._touched = false;
        scope.state._required = scope.state.required || false;
        scope.state._isValid = scope.state.isValid || false;
        scope.state._disabled = scope.state.disabled || false;
        scope.state._isValid = scope.state._isValid || false;
        scope.state._disabled = scope.state._disabled || false;
        scope.state._activeModel = '_value';

        if (scope.state.ask_at_runtime) {

@ -43,17 +45,19 @@ function BaseInputController () {
        let isValid = true;
        let message = '';

        if (scope.state._required && !scope.state._value) {
            isValid = false;
            message = REQUIRED_INPUT_MISSING_MESSAGE;
        if (scope.state._value || scope.state._displayValue) {
            scope.state._touched = true;
        }

        if (scope.state.validate) {
        if (scope.state._required && !scope.state._value && !scope.state._displayValue) {
            isValid = false;
            message = vm.strings.components.message.REQUIRED_INPUT_MISSING;
        } else if (scope.state._validate) {
            let result = scope.state._validate(scope.state._value);

            if (!result.isValid) {
                isValid = false;
                message = result.message || DEFAULT_INVALID_INPUT_MESSAGE;
                message = result.message || vm.strings.components.message.INVALID_INPUT;
            }
        }

@ -66,7 +70,7 @@ function BaseInputController () {
    vm.check = () => {
        let result = vm.validate();

        if (result.isValid !== scope.state._isValid) {
        if (scope.state._touched || !scope.state._required) {
            scope.state._rejected = !result.isValid;
            scope.state._isValid = result.isValid;
            scope.state._message = result.message;

@ -79,14 +83,14 @@ function BaseInputController () {
        scope.state._isBeingReplaced = !scope.state._isBeingReplaced;

        if (!scope.state._isBeingReplaced) {
            scope.state._buttonText = 'REPLACE';
            scope.state._buttonText = vm.strings.components.REPLACE;
            scope.state._disabled = true;
            scope.state._enableToggle = true;
            scope.state._value = scope.state._preEditValue;
            scope.state._activeModel = '_displayValue';
            scope.state._placeholder = 'ENCRYPTED';
            scope.state._placeholder = vm.strings.components.ENCRYPTED;
        } else {
            scope.state._buttonText = 'REVERT';
            scope.state._buttonText = vm.strings.components.REVERT;
            scope.state._disabled = false;
            scope.state._enableToggle = false;
            scope.state._activeModel = '_value';

@ -118,4 +122,6 @@ function BaseInputController () {
    };
}

BaseInputController.$inject = ['ComponentsStrings'];

export default BaseInputController;
@ -15,7 +15,7 @@ function AtInputCheckboxController (baseInputController) {
    vm.init = (scope, element, form) => {
        baseInputController.call(vm, 'input', scope, element, form);
        scope.label = scope.state.label;
        scope.state.label = 'OPTIONS';
        scope.state.label = vm.strings.components.OPTIONS;

        vm.check();
    };

@ -34,14 +34,14 @@ function AtInputGroupController ($scope, $compile) {
    };

    vm.update = () => {
        if (!vm.isValidSource()) {
            return;
        }

        if (state._group) {
            vm.clear();
        }

        if (!vm.isValidSource()) {
            return;
        }

        state._value = source._value;

        let inputs = state._get(source._value);

@ -101,7 +101,8 @@ function AtInputGroupController ($scope, $compile) {
            config._data = input.choices;
            config._exp = 'index as choice for (index, choice) in state._data';
        } else {
            throw new Error('Unsupported input type: ' + input.type)
            let preface = vm.strings.components.UNSUPPORTED_ERROR_PREFACE;
            throw new Error(`${preface}: ${input.type}`);
        }

        return config;

@ -158,6 +159,8 @@ function AtInputGroupController ($scope, $compile) {
    vm.clear = () => {
        form.deregisterInputGroup(state._group);
        element.innerHTML = '';
        state._group = undefined;
        state._value = undefined;
    };
}

@ -1,4 +1,4 @@
<div ng-show="state._value" class="col-sm-12 at-InputGroup">
<div ng-show="state._group" class="col-sm-12 at-InputGroup">
    <div class="at-InputGroup-border"></div>
    <div class="at-InputGroup-inset">
        <div class="row">

@ -8,7 +8,7 @@
<input type="checkbox"
       ng-model="state._promptOnLaunch"
       ng-change="vm.togglePromptOnLaunch()" />
<p>Prompt on launch</p>
<p>{{ vm.strings.components.label.PROMPT_ON_LAUNCH }}</p>
</label>
</div>
</label>
@ -20,12 +20,13 @@ function AtInputLookupController (baseInputController, $state, $stateParams) {
    scope = _scope_;

    scope.$watch(scope.state._resource, vm.watchResource);
    scope.state._validate = vm.checkOnInput;

    vm.check();
};

vm.watchResource = () => {
    if (scope[scope.state._resource]) {
    if (scope[scope.state._resource] !== scope.state._value) {
        scope.state._value = scope[scope.state._resource];
        scope.state._displayValue = scope[`${scope.state._resource}_name`];

@ -33,15 +34,43 @@ function AtInputLookupController (baseInputController, $state, $stateParams) {
    }
};

vm.search = () => {
vm.lookup = () => {
    let params = {};

    if (scope.state._value) {
    if (scope.state._value && scope.state._isValid) {
        params.selected = scope.state._value;
    }

    $state.go(scope.state._route, params);
};

vm.reset = () => {
    scope.state._value = undefined;
    scope[scope.state._resource] = undefined;
};

vm.checkOnInput = () => {
    if (!scope.state._touched) {
        return { isValid: true };
    }

    let result = scope.state._model.match('get', 'name', scope.state._displayValue);

    if (result) {
        scope[scope.state._resource] = result.id;
        scope.state._value = result.id;
        scope.state._displayValue = result.name;

        return { isValid: true };
    }

    vm.reset();

    return {
        isValid: false,
        message: vm.strings.components.lookup.NOT_FOUND
    };
};
}

AtInputLookupController.$inject = [

@ -6,13 +6,13 @@
<span class="input-group-btn">
    <button class="btn at-ButtonHollow--default at-Input-button"
            ng-disabled="state._disabled || form.disabled"
            ng-click="vm.search()">
            ng-click="vm.lookup()">
        <i class="fa fa-search"></i>
    </button>
</span>
<input type="text"
       class="form-control at-Input"
       ng-class="{ 'at-Input--rejected': state.rejected }"
       ng-class="{ 'at-Input--rejected': state._rejected }"
       ng-model="state._displayValue"
       ng-attr-tabindex="{{ tab || undefined }}"
       ng-attr-placeholder="{{::state._placeholder || undefined }}"

@ -20,13 +20,13 @@ function AtInputSecretController (baseInputController) {
    scope = _scope_;

    if (!scope.state._value || scope.state._promptOnLaunch) {
        scope.state._buttonText = 'SHOW';
        scope.state._buttonText = vm.strings.components.SHOW;
        scope.type = 'password';

        vm.toggle = vm.toggleShowHide;
    } else {
        scope.state._buttonText = 'REPLACE';
        scope.state._placeholder = 'ENCRYPTED';
        scope.state._buttonText = vm.strings.components.REPLACE;
        scope.state._placeholder = vm.strings.components.ENCRYPTED;
        vm.toggle = vm.toggleRevertReplace;
    }

@ -36,10 +36,10 @@ function AtInputSecretController (baseInputController) {
    vm.toggleShowHide = () => {
        if (scope.type === 'password') {
            scope.type = 'text';
            scope.state._buttonText = 'HIDE';
            scope.state._buttonText = vm.strings.components.HIDE;
        } else {
            scope.type = 'password';
            scope.state._buttonText = 'SHOW';
            scope.state._buttonText = vm.strings.components.SHOW;
        }
    };
}
@ -1,5 +1,3 @@
const DEFAULT_EMPTY_PLACEHOLDER = 'NO OPTIONS AVAILABLE';

function atInputSelectLink (scope, element, attrs, controllers) {
    let formController = controllers[0];
    let inputController = controllers[1];

@ -11,7 +9,7 @@ function atInputSelectLink (scope, element, attrs, controllers) {
    inputController.init(scope, element, formController);
}

function AtInputSelectController (baseInputController, eventService) {
    let vm = this || {};

    let scope;

@ -29,7 +27,7 @@ function AtInputSelectController (baseInputController, eventService) {

    if (!scope.state._data || scope.state._data.length === 0) {
        scope.state._disabled = true;
        scope.state._placeholder = DEFAULT_EMPTY_PLACEHOLDER;
        scope.state._placeholder = vm.strings.components.EMPTY_PLACEHOLDER;
    }

    vm.setListeners();

@ -67,7 +65,7 @@ function AtInputSelectController (baseInputController, eventService) {
    } else if (scope.state._format === 'grouped-object') {
        scope.displayModel = scope.state._value[scope.state._display];
    } else {
        throw new Error('Unsupported display model type');
        throw new Error(vm.strings.components.UNSUPPORTED_TYPE_ERROR);
    }
};

@ -1,5 +1,3 @@
const DEFAULT_HINT = 'HINT: Drag and drop an SSH private key file on the field below.';

function atInputTextareaSecretLink (scope, element, attrs, controllers) {
    let formController = controllers[0];
    let inputController = controllers[1];

@ -28,13 +26,13 @@ function AtInputTextareaSecretController (baseInputController, eventService) {

    if (scope.state.format === 'ssh_private_key') {
        scope.ssh = true;
        scope.state._hint = scope.state._hint || DEFAULT_HINT;
        scope.state._hint = scope.state._hint || vm.strings.components.textarea.SSH_KEY_HINT;
        input = element.find('input')[0];
    }

    if (scope.state._value) {
        scope.state._buttonText = 'REPLACE';
        scope.state._placeholder = 'ENCRYPTED';
        scope.state._buttonText = vm.strings.components.REPLACE;
        scope.state._placeholder = vm.strings.components.ENCRYPTED;
    } else {
        if (scope.state.format === 'ssh_private_key') {
            vm.listeners = vm.setFileListeners(textarea, input);

@ -54,7 +52,7 @@ function AtInputTextareaSecretController (baseInputController, eventService) {
            vm.listeners = vm.setFileListeners(textarea, input);
        } else {
            scope.state._displayHint = false;
            scope.state._placeholder = 'ENCRYPTED';
            scope.state._placeholder = vm.strings.components.ENCRYPTED;
            eventService.remove(vm.listeners);
        }
    };

@ -92,7 +90,11 @@ function AtInputTextareaSecretController (baseInputController, eventService) {
    };
}

AtInputTextareaSecretController.$inject = ['BaseInputController', 'EventService'];
AtInputTextareaSecretController.$inject = [
    'BaseInputController',
    'EventService',
    'ComponentsStrings'
];

function atInputTextareaSecret (pathService) {
    return {
@ -23,7 +23,7 @@
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn at-ButtonHollow--default"
|
||||
ng-click="vm.hide()">
|
||||
OK
|
||||
{{ vm.strings.OK }}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@ -18,16 +18,8 @@ function AtTabGroupController ($state) {
|
||||
};
|
||||
|
||||
vm.register = tab => {
|
||||
|
||||
tab.active = true;
|
||||
/*
|
||||
* if (vm.tabs.length === 0) {
|
||||
* tab.active = true;
|
||||
* } else {
|
||||
* tab.disabled = true;
|
||||
* }
|
||||
*
|
||||
*/
|
||||
|
||||
vm.tabs.push(tab);
|
||||
};
|
||||
}
|
||||
|
||||
@ -12,8 +12,10 @@ function request (method, resource) {
|
||||
}
|
||||
|
||||
function httpGet (resource) {
|
||||
this.method = this.method || 'GET';
|
||||
|
||||
let req = {
|
||||
method: 'GET',
|
||||
method: this.method,
|
||||
url: this.path
|
||||
};
|
||||
|
||||
@ -85,6 +87,26 @@ function get (keys) {
|
||||
return this.find('get', keys);
|
||||
}
|
||||
|
||||
function match (method, key, value) {
|
||||
let model = this.model[method.toUpperCase()];
|
||||
|
||||
if (!model) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!model.results) {
|
||||
if (model[key] === value) {
|
||||
return model;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
let result = model.results.filter(result => result[key] === value);
|
||||
|
||||
return result.length === 0 ? null : result[0];
|
||||
}
|
||||
|
||||
function find (method, keys) {
|
||||
let value = this.model[method.toUpperCase()];
|
||||
|
||||
@ -138,6 +160,7 @@ function BaseModel (path) {
    this.get = get;
    this.options = options;
    this.find = find;
    this.match = match;
    this.normalizePath = normalizePath;
    this.getById = getById;
    this.request = request;
@ -26,11 +26,18 @@ function mergeInputProperties (type) {
    });
}

function graft (id) {
    let data = this.getById(id);

    return new CredentialTypeModel('get', data);
}

function CredentialTypeModel (method, id) {
    BaseModel.call(this, 'credential_types');

    this.categorizeByKind = categorizeByKind.bind(this);
    this.mergeInputProperties = mergeInputProperties.bind(this);
    this.graft = graft.bind(this);

    return this.request(method, id)
        .then(() => this);
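graft() carves one record out of the parent's cached collection and primes a fresh model instance with it, avoiding a second round-trip. Since the constructor returns this.request(...).then(() => this), both the parent and the grafted instance are consumed as promises. A sketch (the id is hypothetical, and it assumes request() accepts already-fetched data the way this call passes it):

    new CredentialTypeModel('get')
        .then(credentialTypes => credentialTypes.graft(1))
        .then(machineType => {
            // machineType is a standalone CredentialTypeModel primed
            // with the cached record whose id is 1.
        });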
23
awx/ui/client/lib/services/base-string.service.js
Normal file
@ -0,0 +1,23 @@
let i18n;

function BaseStringService (namespace) {
    let t = i18n._;

    this.t = t;
    this[namespace] = {};

    this.CANCEL = t('CANCEL');
    this.SAVE = t('SAVE');
    this.OK = t('OK');
}


function BaseStringServiceLoader (_i18n_) {
    i18n = _i18n_;

    return BaseStringService;
}

BaseStringServiceLoader.$inject = ['i18n'];

export default BaseStringServiceLoader;
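BaseStringService is meant to be called as a base constructor: it binds the translate helper and seeds the strings every namespace shares (CANCEL, SAVE, OK), and each consumer hangs its own strings off this[namespace]. A hypothetical consumer following that pattern; the ComponentsStrings service referenced by the textarea controller earlier is not itself part of this diff, so the keys shown are inferred from its lookups:

    function ComponentsStrings (BaseString) {
        // Inherit t(), CANCEL/SAVE/OK, and an empty 'components' namespace.
        BaseString.call(this, 'components');

        let t = this.t;

        this.components.REPLACE = t('REPLACE');
        this.components.ENCRYPTED = t('ENCRYPTED');
        this.components.textarea = {
            SSH_KEY_HINT: t('HINT: Drag and drop an SSH private key file on the field below.')
        };
    }

    ComponentsStrings.$inject = ['BaseStringService'];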
@ -1,7 +1,9 @@
import EventService from './event.service';
import PathService from './path.service';
import BaseStringService from './base-string.service';

angular
    .module('at.lib.services', [])
    .service('EventService', EventService)
    .service('PathService', PathService);
    .service('PathService', PathService)
    .service('BaseStringService', BaseStringService);
@ -34,6 +34,13 @@
.BreadCrumb-menuLink:hover {
    color: @bc-link-icon-focus;
}
.BreadCrumb-menuLink {
    .BreadCrumb-menuLinkImage.fa-refresh {
        &:hover {
            color: @default-link;
        }
    }
}
.BreadCrumb-menuLinkImage {
    font-size: 18px;
    color: @bc-link-icon;
@ -12,6 +12,7 @@ export default

            scope.showActivityStreamButton = false;
            scope.showRefreshButton = false;
            scope.alwaysShowRefreshButton = false;
            scope.loadingLicense = true;

            scope.$on("$stateChangeSuccess", function updateActivityStreamButton(event, toState, toParams, fromState, fromParams) {
@ -48,6 +49,7 @@ export default
                }

                scope.showRefreshButton = (streamConfig && streamConfig.refreshButton) ? true : false;
                scope.alwaysShowRefreshButton = (streamConfig && streamConfig.alwaysShowRefreshButton) ? true : false;
            });

            // scope.$on('featuresLoaded', function(){
@ -8,7 +8,7 @@
    data-trigger="hover"
    data-container="body"
    ng-hide="loadingLicense || licenseMissing"
    ng-if="socketStatus === 'error' && showRefreshButton"
    ng-if="(socketStatus === 'error' && showRefreshButton) || alwaysShowRefreshButton"
    ng-click="refresh()">
    <i class="BreadCrumb-menuLinkImage fa fa-refresh"
        alt="Refresh the page">
@ -392,7 +392,11 @@ export default [
                }
                else {
                    // Everything else
                    payload[key] = $scope[key];
                    if (key !== 'LOG_AGGREGATOR_TCP_TIMEOUT' ||
                        ($scope.LOG_AGGREGATOR_PROTOCOL === 'https' ||
                            $scope.LOG_AGGREGATOR_PROTOCOL === 'tcp')) {
                        payload[key] = $scope[key];
                    }
                }
            });
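The guard withholds exactly one key: LOG_AGGREGATOR_TCP_TIMEOUT only reaches the payload when the chosen protocol actually opens a TCP connection, and every other key passes unconditionally. The predicate, pulled out for readability (an illustrative refactoring, not code from this commit):

    function shouldInclude (key, protocol) {
        // All keys pass except the TCP timeout, which needs a
        // TCP-based protocol (tcp or https) to be meaningful.
        return key !== 'LOG_AGGREGATOR_TCP_TIMEOUT' ||
            protocol === 'https' || protocol === 'tcp';
    }

    shouldInclude('LOG_AGGREGATOR_HOST', 'udp');         // true
    shouldInclude('LOG_AGGREGATOR_TCP_TIMEOUT', 'udp');  // false
    shouldInclude('LOG_AGGREGATOR_TCP_TIMEOUT', 'tcp');  // true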
@ -171,6 +171,10 @@ export default [
        $scope.$parent.LOG_AGGREGATOR_TYPE = _.find($scope.$parent.LOG_AGGREGATOR_TYPE_options, { value: $scope.$parent.LOG_AGGREGATOR_TYPE });
    }

    if($scope.$parent.LOG_AGGREGATOR_PROTOCOL !== null) {
        $scope.$parent.LOG_AGGREGATOR_PROTOCOL = _.find($scope.$parent.LOG_AGGREGATOR_PROTOCOL_options, { value: $scope.$parent.LOG_AGGREGATOR_PROTOCOL });
    }

    if(flag !== undefined){
        dropdownRendered = flag;
    }
@ -183,6 +187,7 @@ export default [
        placeholder: i18n._('Select types'),
    });
    $scope.$parent.configuration_logging_template_form.LOG_AGGREGATOR_TYPE.$setPristine();
    $scope.$parent.configuration_logging_template_form.LOG_AGGREGATOR_PROTOCOL.$setPristine();
    }
}
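The _.find calls upgrade the raw string the API hands back into the option object the select widget binds to, using lodash's object-shorthand predicate. With illustrative options (the real option list is defined elsewhere, not in this diff):

    let LOG_AGGREGATOR_PROTOCOL_options = [
        { label: 'HTTPS', value: 'https' },
        { label: 'TCP', value: 'tcp' },
        { label: 'UDP', value: 'udp' }
    ];

    // { value: 'tcp' } matches the first element whose value property
    // equals 'tcp', so the scope ends up holding the full option object.
    _.find(LOG_AGGREGATOR_PROTOCOL_options, { value: 'tcp' });
    // -> { label: 'TCP', value: 'tcp' }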
@ -42,6 +42,20 @@
        },
        LOG_AGGREGATOR_ENABLED: {
            type: 'toggleSwitch',
        },
        LOG_AGGREGATOR_PROTOCOL: {
            type: 'select',
            reset: 'LOG_AGGREGATOR_PROTOCOL',
            ngOptions: 'type.label for type in LOG_AGGREGATOR_PROTOCOL_options track by type.value'
        },
        LOG_AGGREGATOR_TCP_TIMEOUT: {
            type: 'text',
            reset: 'LOG_AGGREGATOR_TCP_TIMEOUT',
            ngShow: 'LOG_AGGREGATOR_PROTOCOL.value === "tcp" || LOG_AGGREGATOR_PROTOCOL.value === "https"',
            awRequiredWhen: {
                reqExpression: "LOG_AGGREGATOR_PROTOCOL.value === 'tcp' || LOG_AGGREGATOR_PROTOCOL.value === 'https'",
                init: "false"
            },
        }
    },
@ -1,6 +1,7 @@
/* jshint ignore:start */

var sprintf = require('sprintf-js').sprintf;
let defaultLanguage = 'en_US';

/**
 * @ngdoc method
@ -24,7 +25,12 @@ export default
            $window.navigator.userLanguage ||
            '';
        var langUrl = langInfo.replace('-', '_');
        //gettextCatalog.debug = true;

        if (langUrl === defaultLanguage) {
            return;
        }

        // gettextCatalog.debug = true;
        gettextCatalog.setCurrentLanguage(langInfo);
        gettextCatalog.loadRemote('/static/languages/' + langUrl + '.json');
    };
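Browsers report locales with a hyphen (en-US) while the catalog files on disk use underscores, and the new early return skips fetching a catalog when the normalized value is already the default. Worked through:

    let langInfo = 'fr-FR';                    // as reported by the browser
    let langUrl = langInfo.replace('-', '_');  // 'fr_FR'
    // proceeds to load /static/languages/fr_FR.json

    langUrl = 'en-US'.replace('-', '_');       // 'en_US'
    // equals defaultLanguage, so the function returns before
    // any remote catalog request is made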
@ -0,0 +1,28 @@
@import "../../shared/branding/colors.default.less";

capacity-bar {

    width: 50%;
    margin-right: 10px;
    min-width: 100px;

    .CapacityBar {
        background-color: @default-bg;
        display: flex;
        flex: 0 0 auto;
        height: 10px;
        border: 1px solid @default-link;
        width: 100%;
        border-radius: 100vw;
        overflow: hidden;
    }

    .CapacityBar-remaining {
        background-color: @default-link;
        flex: 0 0 auto;
    }

    .CapacityBar-consumed {
        flex: 0 0 auto;
    }
}
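These rules style a custom capacity-bar element whose inner track is a flex row clipped by the rounded border; the consumed and remaining children are expected to split the track by their flex bases. A hypothetical template the rules would apply to (the directive's actual markup is not included in this diff, and the percentages are placeholders a controller would presumably compute from consumed and total capacity):

    <capacity-bar>
        <div class="CapacityBar">
            <div class="CapacityBar-consumed" style="flex: 0 0 35%"></div>
            <div class="CapacityBar-remaining" style="flex: 0 0 65%"></div>
        </div>
    </capacity-bar>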