mirror of https://github.com/ansible/awx.git
synced 2026-01-11 10:00:01 -03:30
Merge remote-tracking branch 'tower/release_3.2.2' into devel
commit 9dbcc5934e
Makefile (2 changes)
@@ -607,7 +607,7 @@ clean-elk:
	docker rm tools_kibana_1

psql-container:
-	docker run -it --net tools_default --rm postgres:9.4.1 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
+	docker run -it --net tools_default --rm postgres:9.6 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'

VERSION:
	@echo $(VERSION_TARGET) > $@
@@ -166,7 +166,13 @@ class FieldLookupBackend(BaseFilterBackend):
        elif isinstance(field, models.BooleanField):
            return to_python_boolean(value)
        elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
-           return self.to_python_related(value)
+           try:
+               return self.to_python_related(value)
+           except ValueError:
+               raise ParseError(_('Invalid {field_name} id: {field_id}').format(
+                   field_name=getattr(field, 'name', 'related field'),
+                   field_id=value)
+               )
        else:
            return field.to_python(value)

@@ -243,11 +249,10 @@ class FieldLookupBackend(BaseFilterBackend):
        # Search across related objects.
        if key.endswith('__search'):
            for value in values:
-               for search_term in force_text(value).replace(',', ' ').split():
-                   search_value, new_keys = self.value_to_python(queryset.model, key, search_term)
-                   assert isinstance(new_keys, list)
-                   for new_key in new_keys:
-                       search_filters.append((new_key, search_value))
+               search_value, new_keys = self.value_to_python(queryset.model, key, force_text(value))
+               assert isinstance(new_keys, list)
+               for new_key in new_keys:
+                   search_filters.append((new_key, search_value))
            continue

        # Custom chain__ and or__ filters, mutually exclusive (both can
@@ -21,7 +21,7 @@ from django.utils.translation import ugettext_lazy as _

# Django REST Framework
from rest_framework.authentication import get_authorization_header
-from rest_framework.exceptions import PermissionDenied
+from rest_framework.exceptions import PermissionDenied, AuthenticationFailed
from rest_framework import generics
from rest_framework.response import Response
from rest_framework import status
@@ -38,9 +38,10 @@ from awx.api.metadata import SublistAttachDetatchMetadata

__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
           'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
+          'SubListDestroyAPIView',
           'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
           'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
-          'RetrieveUpdateDestroyAPIView', 'DestroyAPIView',
+          'RetrieveUpdateDestroyAPIView',
           'SubDetailAPIView',
           'ResourceAccessList',
           'ParentMixin',
@@ -115,6 +116,10 @@ class APIView(views.APIView):

        drf_request = super(APIView, self).initialize_request(request, *args, **kwargs)
        request.drf_request = drf_request
-       request.drf_request_user = getattr(drf_request, 'user', False)
+       try:
+           request.drf_request_user = getattr(drf_request, 'user', False)
+       except AuthenticationFailed:
+           request.drf_request_user = None
        return drf_request

    def finalize_response(self, request, response, *args, **kwargs):
@@ -140,7 +145,6 @@ class APIView(views.APIView):
        response['X-API-Query-Count'] = len(q_times)
        response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)

-       analytics_logger.info("api response", extra=dict(python_objects=dict(request=request, response=response)))
        return response

    def get_authenticate_header(self, request):
@@ -442,6 +446,41 @@ class SubListAPIView(ParentMixin, ListAPIView):
        return qs & sublist_qs


+class DestroyAPIView(generics.DestroyAPIView):
+
+    def has_delete_permission(self, obj):
+        return self.request.user.can_access(self.model, 'delete', obj)
+
+    def perform_destroy(self, instance, check_permission=True):
+        if check_permission and not self.has_delete_permission(instance):
+            raise PermissionDenied()
+        super(DestroyAPIView, self).perform_destroy(instance)
+
+
+class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):
+    """
+    Concrete view for deleting everything related by `relationship`.
+    """
+    check_sub_obj_permission = True
+
+    def destroy(self, request, *args, **kwargs):
+        instance_list = self.get_queryset()
+        if (not self.check_sub_obj_permission and
+                not request.user.can_access(self.parent_model, 'delete', self.get_parent_object())):
+            raise PermissionDenied()
+        self.perform_list_destroy(instance_list)
+        return Response(status=status.HTTP_204_NO_CONTENT)
+
+    def perform_list_destroy(self, instance_list):
+        if self.check_sub_obj_permission:
+            # Check permissions for all before deleting, avoiding half-deleted lists
+            for instance in instance_list:
+                if not self.has_delete_permission(instance):
+                    raise PermissionDenied()
+        for instance in instance_list:
+            self.perform_destroy(instance, check_permission=False)
+
+
class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
    # Base class for a sublist view that allows for creating subobjects
    # associated with the parent object.
@@ -680,22 +719,11 @@ class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):
    pass


-class RetrieveDestroyAPIView(RetrieveAPIView, generics.RetrieveDestroyAPIView):
-
-    def destroy(self, request, *args, **kwargs):
-        # somewhat lame that delete has to call it's own permissions check
-        obj = self.get_object()
-        if not request.user.can_access(self.model, 'delete', obj):
-            raise PermissionDenied()
-        obj.delete()
-        return Response(status=status.HTTP_204_NO_CONTENT)
-
-
-class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, RetrieveDestroyAPIView):
+class RetrieveDestroyAPIView(RetrieveAPIView, DestroyAPIView):
    pass


-class DestroyAPIView(GenericAPIView, generics.DestroyAPIView):
+class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
    pass

@@ -345,7 +345,9 @@ class BaseSerializer(serializers.ModelSerializer):
                continue
            summary_fields[fk] = OrderedDict()
            for field in related_fields:
-               if field == 'credential_type_id' and fk == 'credential' and self.version < 2:  # TODO: remove version check in 3.3
+               if (
+                       self.version < 2 and field == 'credential_type_id' and
+                       fk in ['credential', 'vault_credential']):  # TODO: remove version check in 3.3
                    continue

                fval = getattr(fkval, field, None)
@@ -1111,8 +1113,13 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):

    def get_related(self, obj):
        res = super(ProjectUpdateSerializer, self).get_related(obj)
+       try:
+           res.update(dict(
+               project = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk}),
+           ))
+       except ObjectDoesNotExist:
+           pass
        res.update(dict(
-           project = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk}),
            cancel = self.reverse('api:project_update_cancel', kwargs={'pk': obj.pk}),
            scm_inventory_updates = self.reverse('api:project_update_scm_inventory_updates', kwargs={'pk': obj.pk}),
            notifications = self.reverse('api:project_update_notifications_list', kwargs={'pk': obj.pk}),
@@ -1726,8 +1733,15 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri

    def get_related(self, obj):
        res = super(InventoryUpdateSerializer, self).get_related(obj)
+       try:
+           res.update(dict(
+               inventory_source = self.reverse(
+                   'api:inventory_source_detail', kwargs={'pk': obj.inventory_source.pk}
+               ),
+           ))
+       except ObjectDoesNotExist:
+           pass
        res.update(dict(
-           inventory_source = self.reverse('api:inventory_source_detail', kwargs={'pk': obj.inventory_source.pk}),
            cancel = self.reverse('api:inventory_update_cancel', kwargs={'pk': obj.pk}),
            notifications = self.reverse('api:inventory_update_notifications_list', kwargs={'pk': obj.pk}),
        ))
@@ -2125,7 +2139,7 @@ class CredentialSerializer(BaseSerializer):

    def to_internal_value(self, data):
        # TODO: remove when API v1 is removed
-       if 'credential_type' not in data:
+       if 'credential_type' not in data and self.version == 1:
            # If `credential_type` is not provided, assume the payload is a
            # v1 credential payload that specifies a `kind` and a flat list
            # of field values
@@ -2162,10 +2176,22 @@ class CredentialSerializer(BaseSerializer):

    def validate_credential_type(self, credential_type):
        if self.instance and credential_type.pk != self.instance.credential_type.pk:
-           raise ValidationError(
-               _('You cannot change the credential type of the credential, as it may break the functionality'
-                 ' of the resources using it.'),
-           )
+           for rel in (
+                   'ad_hoc_commands',
+                   'insights_inventories',
+                   'inventorysources',
+                   'inventoryupdates',
+                   'jobs',
+                   'jobtemplates',
+                   'projects',
+                   'projectupdates',
+                   'workflowjobnodes'
+           ):
+               if getattr(self.instance, rel).count() > 0:
+                   raise ValidationError(
+                       _('You cannot change the credential type of the credential, as it may break the functionality'
+                         ' of the resources using it.'),
+                   )
        return credential_type

@@ -2346,14 +2372,30 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
    def get_related(self, obj):
        res = super(JobOptionsSerializer, self).get_related(obj)
        res['labels'] = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk})
-       if obj.inventory:
-           res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
-       if obj.project:
-           res['project'] = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk})
-       if obj.credential:
-           res['credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.credential})
-       if obj.vault_credential:
-           res['vault_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.vault_credential})
+       try:
+           if obj.inventory:
+               res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
+       except ObjectDoesNotExist:
+           setattr(obj, 'inventory', None)
+       try:
+           if obj.project:
+               res['project'] = self.reverse('api:project_detail', kwargs={'pk': obj.project.pk})
+       except ObjectDoesNotExist:
+           setattr(obj, 'project', None)
+       try:
+           if obj.credential:
+               res['credential'] = self.reverse(
+                   'api:credential_detail', kwargs={'pk': obj.credential.pk}
+               )
+       except ObjectDoesNotExist:
+           setattr(obj, 'credential', None)
+       try:
+           if obj.vault_credential:
+               res['vault_credential'] = self.reverse(
+                   'api:credential_detail', kwargs={'pk': obj.vault_credential.pk}
+               )
+       except ObjectDoesNotExist:
+           setattr(obj, 'vault_credential', None)
        if self.version > 1:
            if isinstance(obj, UnifiedJobTemplate):
                res['extra_credentials'] = self.reverse(
@@ -2608,15 +2650,23 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
            notifications = self.reverse('api:job_notifications_list', kwargs={'pk': obj.pk}),
            labels = self.reverse('api:job_label_list', kwargs={'pk': obj.pk}),
        ))
-       if obj.job_template:
-           res['job_template'] = self.reverse('api:job_template_detail',
-                                              kwargs={'pk': obj.job_template.pk})
+       try:
+           if obj.job_template:
+               res['job_template'] = self.reverse('api:job_template_detail',
+                                                  kwargs={'pk': obj.job_template.pk})
+       except ObjectDoesNotExist:
+           setattr(obj, 'job_template', None)
        if (obj.can_start or True) and self.version == 1:  # TODO: remove in 3.3
            res['start'] = self.reverse('api:job_start', kwargs={'pk': obj.pk})
        if obj.can_cancel or True:
            res['cancel'] = self.reverse('api:job_cancel', kwargs={'pk': obj.pk})
-       if obj.project_update:
-           res['project_update'] = self.reverse('api:project_update_detail', kwargs={'pk': obj.project_update.pk})
+       try:
+           if obj.project_update:
+               res['project_update'] = self.reverse(
+                   'api:project_update_detail', kwargs={'pk': obj.project_update.pk}
+               )
+       except ObjectDoesNotExist:
+           pass
        res['create_schedule'] = self.reverse('api:job_create_schedule', kwargs={'pk': obj.pk})
        res['relaunch'] = self.reverse('api:job_relaunch', kwargs={'pk': obj.pk})
        return res
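Both serializer hunks above lean on the same Django behavior: accessing a ForeignKey attribute whose target row has been deleted raises ObjectDoesNotExist instead of returning None, which is why each link is built inside a try/except. A minimal sketch of the pattern, using a hypothetical `owner` relation rather than any model from this commit:

    from django.core.exceptions import ObjectDoesNotExist

    def related_url_or_none(obj):
        try:
            # obj.owner raises ObjectDoesNotExist when the row behind the
            # cached owner_id has been deleted out from under it.
            if obj.owner:
                return '/api/v2/owners/%d/' % obj.owner.pk
        except ObjectDoesNotExist:
            # Same recovery as the diff: null the stale attribute, skip the link.
            setattr(obj, 'owner', None)
        return None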
@@ -2756,8 +2806,10 @@ class JobRelaunchSerializer(BaseSerializer):

    def validate(self, attrs):
        obj = self.context.get('obj')
-       if not obj.credential:
-           raise serializers.ValidationError(dict(credential=[_("Credential not found or deleted.")]))
+       if not obj.credential and not obj.vault_credential:
+           raise serializers.ValidationError(
+               dict(credential=[_("Neither credential nor vault credential provided.")])
+           )
        if obj.project is None:
            raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")]))
        if obj.inventory is None or obj.inventory.pending_deletion:
@@ -3820,6 +3872,7 @@ class InstanceSerializer(BaseSerializer):

class InstanceGroupSerializer(BaseSerializer):

+   committed_capacity = serializers.SerializerMethodField()
    consumed_capacity = serializers.SerializerMethodField()
    percent_capacity_remaining = serializers.SerializerMethodField()
    jobs_running = serializers.SerializerMethodField()
@@ -3827,7 +3880,8 @@ class InstanceGroupSerializer(BaseSerializer):

    class Meta:
        model = InstanceGroup
-       fields = ("id", "type", "url", "related", "name", "created", "modified", "capacity", "consumed_capacity",
+       fields = ("id", "type", "url", "related", "name", "created", "modified",
+                 "capacity", "committed_capacity", "consumed_capacity",
                  "percent_capacity_remaining", "jobs_running", "instances", "controller")

    def get_related(self, obj):
@@ -3856,7 +3910,10 @@ class InstanceGroupSerializer(BaseSerializer):
        return self.context['capacity_map']

    def get_consumed_capacity(self, obj):
-       return self.get_capacity_dict()[obj.name]['consumed_capacity']
+       return self.get_capacity_dict()[obj.name]['running_capacity']
+
+   def get_committed_capacity(self, obj):
+       return self.get_capacity_dict()[obj.name]['committed_capacity']

    def get_percent_capacity_remaining(self, obj):
        if not obj.capacity:
@@ -3954,6 +4011,11 @@ class ActivityStreamSerializer(BaseSerializer):

        if fk == 'schedule':
            rel['unified_job_template'] = thisItem.unified_job_template.get_absolute_url(self.context.get('request'))
+       if obj.setting and obj.setting.get('category', None):
+           rel['setting'] = self.reverse(
+               'api:setting_singleton_detail',
+               kwargs={'category_slug': obj.setting['category']}
+           )
        return rel

    def _get_rel(self, obj, fk):
@@ -4005,6 +4067,8 @@ class ActivityStreamSerializer(BaseSerializer):
                                username = obj.actor.username,
                                first_name = obj.actor.first_name,
                                last_name = obj.actor.last_name)
+       if obj.setting:
+           summary_fields['setting'] = [obj.setting]
        return summary_fields

@@ -1,9 +1,9 @@
The resulting data structure contains:

    {
-       "count": 99,
-       "next": null,
-       "previous": null,
+       "count": 99,
+       "next": null,
+       "previous": null,
        "results": [
            ...
        ]
@@ -60,6 +60,10 @@ _Added in AWX 1.4_

    ?related__search=findme

+Note: If you want to provide more than one search term, please use multiple
+search fields with the same key, like `?related__search=foo&related__search=bar`.
+All search terms with the same key will be ORed together.
+
## Filtering

Any additional query string parameters may be used to filter the list of
@@ -70,7 +74,7 @@ in the specified value should be url-encoded. For example:

    ?field=value%20xyz

Fields may also span relations, only for fields and relationships defined in
-the database:
+the database:

    ?other__field=value

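One consequence of the filter change earlier in this diff: a `__search` value is now passed through as a single term, and multiple terms are expressed by repeating the key. A hedged client-side sketch (the host, path, and credentials are placeholders):

    import requests

    base = 'https://awx.example.com/api/v1/hosts/'
    # A value containing a comma is now treated as one verbatim term...
    requests.get(base, params={'related__search': 'foo,bar'}, auth=('user', 'pass'))
    # ...while passing a list repeats the key, and the terms are ORed together.
    requests.get(base, params={'related__search': ['foo', 'bar']}, auth=('user', 'pass'))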
awx/api/templates/api/sub_list_destroy_api_view.md (new file, 6 lines)
@@ -0,0 +1,6 @@
+{% include "api/sub_list_create_api_view.md" %}
+
+# Delete all {{ model_verbose_name_plural }} of this {{ parent_model_verbose_name|title }}:
+
+Make a DELETE request to this resource to delete all {{ model_verbose_name_plural }} shown in the list.
+The {{ parent_model_verbose_name|title }} will not be deleted by this request.
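A hedged sketch of the bulk delete this template documents, aimed at the InventorySourceHostsList endpoint that is switched to SubListDestroyAPIView further down in this diff (URL, id, and credentials are placeholders):

    import requests

    url = 'https://awx.example.com/api/v1/inventory_sources/42/hosts/'
    resp = requests.delete(url, auth=('user', 'pass'))
    assert resp.status_code == 204  # Response(status=status.HTTP_204_NO_CONTENT)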
awx/api/views.py (102 changes)
@@ -14,6 +14,7 @@ import logging
import requests
from base64 import b64encode
from collections import OrderedDict, Iterable
+import six

# Django
from django.conf import settings
@@ -72,6 +73,7 @@ from awx.main.utils import (
    extract_ansible_vars,
    decrypt_field,
)
+from awx.main.utils.encryption import encrypt_value
from awx.main.utils.filters import SmartFilter
from awx.main.utils.insights import filter_insights_api_response

@@ -1967,7 +1969,17 @@ class InventoryJobTemplateList(SubListAPIView):
        return qs.filter(inventory=parent)


-class HostList(ListCreateAPIView):
+class HostRelatedSearchMixin(object):
+
+    @property
+    def related_search_fields(self):
+        # Edge-case handle: https://github.com/ansible/ansible-tower/issues/7712
+        ret = super(HostRelatedSearchMixin, self).related_search_fields
+        ret.append('ansible_facts')
+        return ret
+
+
+class HostList(HostRelatedSearchMixin, ListCreateAPIView):

    always_allow_superuser = False
    model = Host
@@ -2004,7 +2016,7 @@ class HostAnsibleFactsDetail(RetrieveAPIView):
    new_in_api_v2 = True


-class InventoryHostsList(SubListCreateAttachDetachAPIView):
+class InventoryHostsList(HostRelatedSearchMixin, SubListCreateAttachDetachAPIView):

    model = Host
    serializer_class = HostSerializer
|
||||
return qs.exclude(pk__in=except_pks)
|
||||
|
||||
|
||||
class GroupHostsList(ControlledByScmMixin, SubListCreateAttachDetachAPIView):
|
||||
class GroupHostsList(HostRelatedSearchMixin,
|
||||
ControlledByScmMixin,
|
||||
SubListCreateAttachDetachAPIView):
|
||||
''' the list of hosts directly below a group '''
|
||||
|
||||
model = Host
|
||||
@@ -2301,7 +2315,7 @@ class GroupHostsList(ControlledByScmMixin, SubListCreateAttachDetachAPIView):
        return super(GroupHostsList, self).create(request, *args, **kwargs)


-class GroupAllHostsList(SubListAPIView):
+class GroupAllHostsList(HostRelatedSearchMixin, SubListAPIView):
    ''' the list of all hosts below a group, even including subgroups '''

    model = Host
@@ -2419,6 +2433,8 @@ class InventoryScriptView(RetrieveAPIView):
    def retrieve(self, request, *args, **kwargs):
        obj = self.get_object()
        hostname = request.query_params.get('host', '')
        hostvars = bool(request.query_params.get('hostvars', ''))
+       towervars = bool(request.query_params.get('towervars', ''))
+       show_all = bool(request.query_params.get('all', ''))
        if hostname:
            hosts_q = dict(name=hostname)
@@ -2607,23 +2623,25 @@ class InventorySourceNotificationTemplatesSuccessList(InventorySourceNotificatio
    relationship = 'notification_templates_success'


-class InventorySourceHostsList(SubListAPIView):
+class InventorySourceHostsList(HostRelatedSearchMixin, SubListDestroyAPIView):

    model = Host
    serializer_class = HostSerializer
    parent_model = InventorySource
    relationship = 'hosts'
    new_in_148 = True
+   check_sub_obj_permission = False
    capabilities_prefetch = ['inventory.admin']


-class InventorySourceGroupsList(SubListAPIView):
+class InventorySourceGroupsList(SubListDestroyAPIView):

    model = Group
    serializer_class = GroupSerializer
    parent_model = InventorySource
    relationship = 'groups'
    new_in_148 = True
+   check_sub_obj_permission = False


class InventorySourceUpdatesList(SubListAPIView):
@@ -2918,13 +2936,8 @@ class JobTemplateSurveySpec(GenericAPIView):
        if not feature_enabled('surveys'):
            raise LicenseForbids(_('Your license does not allow '
                                   'adding surveys.'))
-       survey_spec = obj.survey_spec
-       for pos, field in enumerate(survey_spec.get('spec', [])):
-           if field.get('type') == 'password':
-               if 'default' in field and field['default']:
-                   field['default'] = '$encrypted$'

-       return Response(survey_spec)
+       return Response(obj.display_survey_spec())

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
@@ -2937,7 +2950,14 @@

        if not request.user.can_access(self.model, 'change', obj, None):
            raise PermissionDenied()
-       new_spec = request.data
+       response = self._validate_spec_data(request.data, obj.survey_spec)
+       if response:
+           return response
+       obj.survey_spec = request.data
+       obj.save(update_fields=['survey_spec'])
+       return Response()
+
+   def _validate_spec_data(self, new_spec, old_spec):
        if "name" not in new_spec:
            return Response(dict(error=_("'name' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
        if "description" not in new_spec:
@@ -2949,9 +2969,9 @@
        if len(new_spec["spec"]) < 1:
            return Response(dict(error=_("'spec' doesn't contain any items.")), status=status.HTTP_400_BAD_REQUEST)

-       idx = 0
        variable_set = set()
-       for survey_item in new_spec["spec"]:
+       old_spec_dict = JobTemplate.pivot_spec(old_spec)
+       for idx, survey_item in enumerate(new_spec["spec"]):
            if not isinstance(survey_item, dict):
                return Response(dict(error=_("Survey question %s is not a json object.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
            if "type" not in survey_item:
@@ -2968,21 +2988,41 @@
            if "required" not in survey_item:
                return Response(dict(error=_("'required' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)

-           if survey_item["type"] == "password":
-               if survey_item.get("default") and survey_item["default"].startswith('$encrypted$'):
-                   if not obj.survey_spec:
-                       return Response(dict(error=_("$encrypted$ is reserved keyword and may not be used as a default for password {}.".format(str(idx)))),
-                                       status=status.HTTP_400_BAD_REQUEST)
-                   else:
-                       old_spec = obj.survey_spec
-                       for old_item in old_spec['spec']:
-                           if old_item['variable'] == survey_item['variable']:
-                               survey_item['default'] = old_item['default']
-           idx += 1
-
-       obj.survey_spec = new_spec
-       obj.save(update_fields=['survey_spec'])
-       return Response()
+           if survey_item["type"] == "password" and "default" in survey_item:
+               if not isinstance(survey_item['default'], six.string_types):
+                   return Response(dict(error=_(
+                       "Value {question_default} for '{variable_name}' expected to be a string."
+                   ).format(
+                       question_default=survey_item["default"], variable_name=survey_item["variable"])
+                   ), status=status.HTTP_400_BAD_REQUEST)
+
+           if ("default" in survey_item and isinstance(survey_item['default'], six.string_types) and
+                   survey_item['default'].startswith('$encrypted$')):
+               # Submission expects the existence of encrypted DB value to replace given default
+               if survey_item["type"] != "password":
+                   return Response(dict(error=_(
+                       "$encrypted$ is a reserved keyword for password question defaults, "
+                       "survey question {question_position} is type {question_type}."
+                   ).format(
+                       question_position=str(idx), question_type=survey_item["type"])
+                   ), status=status.HTTP_400_BAD_REQUEST)
+               old_element = old_spec_dict.get(survey_item['variable'], {})
+               encryptedish_default_exists = False
+               if 'default' in old_element:
+                   old_default = old_element['default']
+                   if isinstance(old_default, six.string_types):
+                       if old_default.startswith('$encrypted$'):
+                           encryptedish_default_exists = True
+                       elif old_default == "":  # unencrypted blank string is allowed as DB value as special case
+                           encryptedish_default_exists = True
+               if not encryptedish_default_exists:
+                   return Response(dict(error=_(
+                       "$encrypted$ is a reserved keyword, may not be used for new default in position {question_position}."
+                   ).format(question_position=str(idx))), status=status.HTTP_400_BAD_REQUEST)
+               survey_item['default'] = old_element['default']
+           elif survey_item["type"] == "password" and 'default' in survey_item:
+               # Submission provides new encrypted default
+               survey_item['default'] = encrypt_value(survey_item['default'])

    def delete(self, request, *args, **kwargs):
        obj = self.get_object()
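To make the new validation branches concrete, a small illustration of the two password-default cases they distinguish (variable names and values are invented):

    # A new plain-text default is encrypted on save via encrypt_value().
    new_password_q = {
        'variable': 'db_password', 'type': 'password',
        'question_name': 'DB password', 'required': True,
        'default': 'sekrit',
    }
    # Echoing '$encrypted$' back keeps the existing encrypted DB default
    # (or a blank string); if no such default exists, a 400 is returned.
    unchanged_password_q = dict(new_password_q, default='$encrypted$')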
@@ -4121,7 +4161,7 @@ class JobEventChildrenList(SubListAPIView):
    view_name = _('Job Event Children List')


-class JobEventHostsList(SubListAPIView):
+class JobEventHostsList(HostRelatedSearchMixin, SubListAPIView):

    model = Host
    serializer_class = HostSerializer
@@ -4141,7 +4181,7 @@ class BaseJobEventsList(SubListAPIView):
    search_fields = ('stdout',)

    def finalize_response(self, request, response, *args, **kwargs):
-       response['X-UI-Max-Events'] = settings.RECOMMENDED_MAX_EVENTS_DISPLAY_HEADER
+       response['X-UI-Max-Events'] = settings.MAX_UI_JOB_EVENTS
        return super(BaseJobEventsList, self).finalize_response(request, response, *args, **kwargs)

@@ -1,6 +1,7 @@
# Python
import logging
import urlparse
+from collections import OrderedDict

# Django
from django.core.validators import URLValidator
@@ -139,6 +140,8 @@ class KeyValueField(DictField):
        ret = super(KeyValueField, self).to_internal_value(data)
        for value in data.values():
            if not isinstance(value, six.string_types + six.integer_types + (float,)):
+               if isinstance(value, OrderedDict):
+                   value = dict(value)
                self.fail('invalid_child', input=value)
        return ret

@@ -120,6 +120,9 @@ class SettingsRegistry(object):
    def is_setting_read_only(self, setting):
        return bool(self._registry.get(setting, {}).get('read_only', False))

+   def get_setting_category(self, setting):
+       return self._registry.get(setting, {}).get('category_slug', None)
+
    def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
        from rest_framework.fields import empty
        field_kwargs = {}
@@ -87,8 +87,10 @@ class SettingSingletonSerializer(serializers.Serializer):
            if self.instance and not hasattr(self.instance, key):
                continue
            extra_kwargs = {}
-           # Make LICENSE read-only here; update via /api/v1/config/ only.
-           if key == 'LICENSE':
+           # Make LICENSE and AWX_ISOLATED_KEY_GENERATION read-only here;
+           # LICENSE is only updated via /api/v1/config/
+           # AWX_ISOLATED_KEY_GENERATION is only set/unset via the setup playbook
+           if key in ('LICENSE', 'AWX_ISOLATED_KEY_GENERATION'):
                extra_kwargs['read_only'] = True
            field = settings_registry.get_setting_field(key, mixin_class=SettingFieldMixin, for_user=bool(category_slug == 'user'), **extra_kwargs)
            fields[key] = field
@@ -9,7 +9,10 @@ import shutil
# RedBaron
from redbaron import RedBaron, indent

-__all__ = ['comment_assignments']
+# AWX
+from awx.conf.registry import settings_registry
+
+__all__ = ['comment_assignments', 'conf_to_dict']


def comment_assignments(patterns, assignment_names, dry_run=True, backup_suffix='.old'):
@@ -103,6 +106,13 @@ def comment_assignments_in_file(filename, assignment_names, dry_run=True, backup
    return '\n'.join(diff_lines)


+def conf_to_dict(obj):
+    return {
+        'category': settings_registry.get_setting_category(obj.key),
+        'name': obj.key,
+    }
+
+
if __name__ == '__main__':
    pattern = os.path.join(os.path.dirname(__file__), '..', 'settings', 'local_*.py')
    diffs = comment_assignments(pattern, ['AUTH_LDAP_ORGANIZATION_MAP'])
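A hedged sketch of what the new conf_to_dict() helper yields; it only needs an object with a `key` attribute, and the category comes from the registry lookup added above:

    class FakeSetting(object):  # hypothetical stand-in for a Setting row
        key = 'AWX_ISOLATED_KEY_GENERATION'

    conf_to_dict(FakeSetting())
    # -> {'category': <its registered category_slug>, 'name': 'AWX_ISOLATED_KEY_GENERATION'}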
File diff suppressed because it is too large (several files)
@@ -12,6 +12,7 @@ from django.db.models import Q, Prefetch
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
+from django.core.exceptions import ObjectDoesNotExist

# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied, ValidationError
@@ -31,7 +32,7 @@ from awx.conf.license import LicenseForbids, feature_enabled

__all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_errors',
           'user_accessible_objects', 'consumer_access',
-          'user_admin_role', 'StateConflict',]
+          'user_admin_role', 'ActiveJobConflict',]

logger = logging.getLogger('awx.main.access')

@@ -71,9 +72,15 @@ def get_object_from_data(field, Model, data, obj=None):
        raise ParseError(_("Bad data found in related field %s." % field))


-class StateConflict(ValidationError):
+class ActiveJobConflict(ValidationError):
    status_code = 409

+   def __init__(self, active_jobs):
+       super(ActiveJobConflict, self).__init__({
+           "conflict": _("Resource is being used by running jobs."),
+           "active_jobs": active_jobs
+       })
+

def register_access(model_class, access_class):
    access_registry[model_class] = access_class
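Given the __init__ above, a delete attempt that hits running jobs now produces a 409 whose body is shaped like this (the ids are illustrative):

    conflict_body = {
        "conflict": "Resource is being used by running jobs.",
        "active_jobs": [
            {"type": "job", "id": 123},
            {"type": "inventory_update", "id": 456},
        ],
    }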
@@ -568,8 +575,7 @@ class OrganizationAccess(BaseAccess):
        active_jobs.extend([dict(type="inventory_update", id=o.id)
                            for o in InventoryUpdate.objects.filter(inventory_source__inventory__organization=obj, status__in=ACTIVE_STATES)])
        if len(active_jobs) > 0:
-           raise StateConflict({"conflict": _("Resource is being used by running jobs"),
-                                "active_jobs": active_jobs})
+           raise ActiveJobConflict(active_jobs)
        return True

    def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
@@ -662,8 +668,7 @@ class InventoryAccess(BaseAccess):
        active_jobs.extend([dict(type="ad_hoc_command", id=o.id)
                            for o in AdHocCommand.objects.filter(inventory=obj, status__in=ACTIVE_STATES)])
        if len(active_jobs) > 0:
-           raise StateConflict({"conflict": _("Resource is being used by running jobs"),
-                                "active_jobs": active_jobs})
+           raise ActiveJobConflict(active_jobs)
        return True

    def can_run_ad_hoc_commands(self, obj):
@@ -788,8 +793,7 @@ class GroupAccess(BaseAccess):
        active_jobs.extend([dict(type="inventory_update", id=o.id)
                            for o in InventoryUpdate.objects.filter(inventory_source__in=obj.inventory_sources.all(), status__in=ACTIVE_STATES)])
        if len(active_jobs) > 0:
-           raise StateConflict({"conflict": _("Resource is being used by running jobs"),
-                                "active_jobs": active_jobs})
+           raise ActiveJobConflict(active_jobs)
        return True

    def can_start(self, obj, validate_license=True):
@@ -839,8 +843,7 @@ class InventorySourceAccess(BaseAccess):
            return False
        active_jobs_qs = InventoryUpdate.objects.filter(inventory_source=obj, status__in=ACTIVE_STATES)
        if active_jobs_qs.exists():
-           raise StateConflict({"conflict": _("Resource is being used by running jobs"),
-                                "active_jobs": [dict(type="inventory_update", id=o.id) for o in active_jobs_qs.all()]})
+           raise ActiveJobConflict([dict(type="inventory_update", id=o.id) for o in active_jobs_qs.all()])
        return True

    @check_superuser
@@ -1090,8 +1093,7 @@ class ProjectAccess(BaseAccess):
        active_jobs.extend([dict(type="project_update", id=o.id)
                            for o in ProjectUpdate.objects.filter(project=obj, status__in=ACTIVE_STATES)])
        if len(active_jobs) > 0:
-           raise StateConflict({"conflict": _("Resource is being used by running jobs"),
-                                "active_jobs": active_jobs})
+           raise ActiveJobConflict(active_jobs)
        return True

    @check_superuser
@@ -1124,8 +1126,11 @@ class ProjectUpdateAccess(BaseAccess):

    def can_start(self, obj, validate_license=True):
        # for relaunching
-       if obj and obj.project:
-           return self.user in obj.project.update_role
+       try:
+           if obj and obj.project:
+               return self.user in obj.project.update_role
+       except ObjectDoesNotExist:
+           pass
        return False

    @check_superuser
@@ -1265,8 +1270,7 @@ class JobTemplateAccess(BaseAccess):
        active_jobs = [dict(type="job", id=o.id)
                       for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
        if len(active_jobs) > 0:
-           raise StateConflict({"conflict": _("Resource is being used by running jobs"),
-                                "active_jobs": active_jobs})
+           raise ActiveJobConflict(active_jobs)
        return True

    @check_superuser
@@ -1771,8 +1775,7 @@ class WorkflowJobTemplateAccess(BaseAccess):
        active_jobs = [dict(type="workflow_job", id=o.id)
                       for o in obj.workflow_jobs.filter(status__in=ACTIVE_STATES)]
        if len(active_jobs) > 0:
-           raise StateConflict({"conflict": _("Resource is being used by running jobs"),
-                                "active_jobs": active_jobs})
+           raise ActiveJobConflict(active_jobs)
        return True

@@ -5,7 +5,7 @@ import re

from django.utils.translation import ugettext_lazy as _

-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'satellite6', 'cloudforms')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'cloudforms', 'tower')
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
PRIVILEGE_ESCALATION_METHODS = [
    ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),
@@ -6,6 +6,7 @@ import copy
import json
import re
+import six
import urllib

from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError
@@ -352,6 +353,7 @@ class SmartFilterField(models.TextField):
        # https://docs.python.org/2/library/stdtypes.html#truth-value-testing
        if not value:
            return None
+       value = urllib.unquote(value)
        try:
            SmartFilter().query_from_string(value)
        except RuntimeError, e:
@@ -173,6 +173,7 @@ class AnsibleInventoryLoader(object):
    def load(self):
        base_args = self.get_base_args()
        logger.info('Reading Ansible inventory source: %s', self.source)
+
        data = self.command_to_json(base_args + ['--list'])

        # TODO: remove after we run custom scripts through ansible-inventory
@@ -225,6 +226,7 @@ def load_inventory_source(source, group_filter_re=None,
    '''
    # Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
    # good naming conventions
+   source = source.replace('rhv.py', 'ovirt4.py')
    source = source.replace('satellite6.py', 'foreman.py')
    source = source.replace('vmware.py', 'vmware_inventory.py')
    if not os.path.exists(source):
@@ -600,27 +602,20 @@ class Command(BaseCommand):

    def _update_inventory(self):
        '''
-       Update/overwrite variables from "all" group. If importing from a
-       cloud source attached to a specific group, variables will be set on
-       the base group, otherwise they will be set on the whole inventory.
+       Update inventory variables from "all" group.
        '''
-       # FIXME: figure out how "all" variables are handled in the new inventory source system
+       # TODO: We disable variable overwrite here in case user-defined inventory variables get
+       # mangled. But we still need to figure out a better way of processing multiple inventory
+       # update variables mixing with each other.
        all_obj = self.inventory
-       all_name = 'inventory'
        db_variables = all_obj.variables_dict
-       if self.overwrite_vars:
-           db_variables = self.all_group.variables
-       else:
-           db_variables.update(self.all_group.variables)
+       db_variables.update(self.all_group.variables)
        if db_variables != all_obj.variables_dict:
            all_obj.variables = json.dumps(db_variables)
            all_obj.save(update_fields=['variables'])
-           if self.overwrite_vars:
-               logger.info('%s variables replaced from "all" group', all_name.capitalize())
-           else:
-               logger.info('%s variables updated from "all" group', all_name.capitalize())
+           logger.info('Inventory variables updated from "all" group')
        else:
-           logger.info('%s variables unmodified', all_name.capitalize())
+           logger.info('Inventory variables unmodified')

    def _create_update_groups(self):
        '''
@@ -3,13 +3,14 @@

# Python
import logging
+import os
import signal
+import time
from uuid import UUID
from multiprocessing import Process
from multiprocessing import Queue as MPQueue
from Queue import Empty as QueueEmpty
from Queue import Full as QueueFull
-import os

from kombu import Connection, Exchange, Queue
from kombu.mixins import ConsumerMixin
@@ -18,7 +19,8 @@ from kombu.mixins import ConsumerMixin
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection as django_connection
-from django.db import DatabaseError
+from django.db import DatabaseError, OperationalError
+from django.db.utils import InterfaceError, InternalError
from django.core.cache import cache as django_cache

# AWX
@@ -39,6 +41,9 @@ class WorkerSignalHandler:


class CallbackBrokerWorker(ConsumerMixin):

+   MAX_RETRIES = 2
+
    def __init__(self, connection, use_workers=True):
        self.connection = connection
        self.worker_queues = []
@@ -133,13 +138,40 @@ class CallbackBrokerWorker(ConsumerMixin):
                logger.info('Body: {}'.format(
                    highlight(pformat(body, width=160), PythonLexer(), Terminal256Formatter(style='friendly'))
                ))
            try:
+
+               def _save_event_data():
                    if 'job_id' in body:
                        JobEvent.create_from_data(**body)
                    elif 'ad_hoc_command_id' in body:
                        AdHocCommandEvent.create_from_data(**body)
-           except DatabaseError as e:
-               logger.error('Database Error Saving Job Event: {}'.format(e))
+
+               job_identifier = 'unknown job'
+               if 'job_id' in body:
+                   job_identifier = body['job_id']
+               elif 'ad_hoc_command_id' in body:
+                   job_identifier = body['ad_hoc_command_id']
+
+               retries = 0
+               while retries <= self.MAX_RETRIES:
+                   try:
+                       _save_event_data()
+                       break
+                   except (OperationalError, InterfaceError, InternalError) as e:
+                       if retries >= self.MAX_RETRIES:
+                           logger.exception('Worker could not re-establish database connectivity, shutting down gracefully: Job {}'.format(job_identifier))
+                           os.kill(os.getppid(), signal.SIGINT)
+                           return
+                       delay = 60 * retries
+                       logger.exception('Database Error Saving Job Event, retry #{i} in {delay} seconds:'.format(
+                           i=retries + 1,
+                           delay=delay
+                       ))
+                       django_connection.close()
+                       time.sleep(delay)
+                       retries += 1
+                   except DatabaseError as e:
+                       logger.exception('Database Error Saving Job Event for Job {}'.format(job_identifier))
+                       break
            except Exception as exc:
                import traceback
                tb = traceback.format_exc()
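The retry loop above sleeps delay = 60 * retries between attempts, so with MAX_RETRIES = 2 the schedule is: fail, retry immediately; fail, retry after 60 seconds; fail again, signal the parent. A back-of-envelope check:

    MAX_RETRIES = 2
    for retries in range(MAX_RETRIES):
        print('attempt %d failed -> wait %ds, reconnect, retry' % (retries + 1, 60 * retries))
    print('attempt %d failed -> SIGINT the parent and shut down' % (MAX_RETRIES + 1))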
awx/main/management/commands/test_isolated_connection.py (new file, 50 lines)
@@ -0,0 +1,50 @@
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+from optparse import make_option
+
+from django.conf import settings
+from django.core.management.base import BaseCommand, CommandError
+
+from awx.main.expect import run
+
+
+class Command(BaseCommand):
+    """Tests SSH connectivity between a controller and target isolated node"""
+    help = 'Tests SSH connectivity between a controller and target isolated node'
+
+    option_list = BaseCommand.option_list + (
+        make_option('--hostname', dest='hostname', type='string',
+                    help='Hostname of an isolated node'),
+    )
+
+    def handle(self, *args, **options):
+        hostname = options.get('hostname')
+        if not hostname:
+            raise CommandError("--hostname is a required argument")
+
+        try:
+            path = tempfile.mkdtemp(prefix='awx_isolated_ssh', dir=settings.AWX_PROOT_BASE_PATH)
+            args = [
+                'ansible', 'all', '-i', '{},'.format(hostname), '-u',
+                settings.AWX_ISOLATED_USERNAME, '-T5', '-m', 'shell',
+                '-a', 'hostname', '-vvv'
+            ]
+            if all([
+                getattr(settings, 'AWX_ISOLATED_KEY_GENERATION', False) is True,
+                getattr(settings, 'AWX_ISOLATED_PRIVATE_KEY', None)
+            ]):
+                ssh_key_path = os.path.join(path, '.isolated')
+                ssh_auth_sock = os.path.join(path, 'ssh_auth.sock')
+                run.open_fifo_write(ssh_key_path, settings.AWX_ISOLATED_PRIVATE_KEY)
+                args = run.wrap_args_with_ssh_agent(args, ssh_key_path, ssh_auth_sock)
+            try:
+                print ' '.join(args)
+                subprocess.check_call(args)
+            except subprocess.CalledProcessError as e:
+                sys.exit(e.returncode)
+        finally:
+            shutil.rmtree(path)
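A hedged invocation sketch for the new command; the hostname is a placeholder and awx-manage is the usual entry point for AWX management commands:

    import subprocess

    rc = subprocess.call(['awx-manage', 'test_isolated_connection',
                          '--hostname', 'isolated1.example.com'])
    # rc == 0 means the controller reached the isolated node over SSH.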
@@ -7,7 +7,7 @@ import logging

from django.db import models
from django.utils.timezone import now
-from django.db.models import Sum
+from django.db.models import Sum, Q
from django.conf import settings

from awx.main.utils.filters import SmartFilter
@@ -21,9 +21,9 @@ class HostManager(models.Manager):
    """Custom manager class for Hosts model."""

    def active_count(self):
-       """Return count of active, unique hosts for licensing."""
+       """Return count of active, unique hosts for licensing. Exclude ones sourced from another Tower."""
        try:
-           return self.order_by('name').distinct('name').count()
+           return self.filter(~Q(inventory_sources__source='tower')).order_by('name').distinct('name').count()
        except NotImplementedError:  # For unit tests only, SQLite doesn't support distinct('name')
            return len(set(self.values_list('name', flat=True)))

@@ -5,6 +5,10 @@ import logging
import threading
import uuid
import six
+import time
+import cProfile
+import pstats
+import os

from django.conf import settings
from django.contrib.auth.models import User
@@ -25,6 +29,38 @@ from awx.conf import fields, register

logger = logging.getLogger('awx.main.middleware')
analytics_logger = logging.getLogger('awx.analytics.activity_stream')
+perf_logger = logging.getLogger('awx.analytics.performance')
+
+
+class TimingMiddleware(threading.local):
+
+    dest = '/var/lib/awx/profile'
+
+    def process_request(self, request):
+        self.start_time = time.time()
+        if settings.AWX_REQUEST_PROFILE:
+            self.prof = cProfile.Profile()
+            self.prof.enable()
+
+    def process_response(self, request, response):
+        total_time = time.time() - self.start_time
+        response['X-API-Total-Time'] = '%0.3fs' % total_time
+        if settings.AWX_REQUEST_PROFILE:
+            self.prof.disable()
+            cprofile_file = self.save_profile_file(request)
+            response['cprofile_file'] = cprofile_file
+        perf_logger.info('api response times', extra=dict(python_objects=dict(request=request, response=response)))
+        return response
+
+    def save_profile_file(self, request):
+        if not os.path.isdir(self.dest):
+            os.makedirs(self.dest)
+        filename = '%.3fs-%s' % (pstats.Stats(self.prof).total_tt, uuid.uuid4())
+        filepath = os.path.join(self.dest, filename)
+        with open(filepath, 'w') as f:
+            f.write('%s %s\n' % (request.method, request.get_full_path()))
+            pstats.Stats(self.prof, stream=f).sort_stats('cumulative').print_stats()
+        return filepath


class ActivityStreamMiddleware(threading.local):
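A hedged sketch of reading the artifacts TimingMiddleware writes when settings.AWX_REQUEST_PROFILE is on; per the dest attribute above, files land in /var/lib/awx/profile and are named '<total_tt>s-<uuid>':

    import glob

    for path in sorted(glob.glob('/var/lib/awx/profile/*')):
        with open(path) as f:
            print(path, '->', f.readline().strip())  # '<METHOD> <full path>'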
@@ -8,14 +8,9 @@ from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
import awx.main.fields
-import jsonfield.fields
-
-
-def update_dashed_host_variables(apps, schema_editor):
-    Host = apps.get_model('main', 'Host')
-    for host in Host.objects.filter(variables='---'):
-        host.variables = ''
-        host.save()
+
+import _squashed
+from _squashed_30 import SQUASHED_30


class Migration(migrations.Migration):
@@ -27,13 +22,7 @@ class Migration(migrations.Migration):
        (b'main', '0025_v300_update_rbac_parents'),
        (b'main', '0026_v300_credential_unique'),
        (b'main', '0027_v300_team_migrations'),
-       (b'main', '0028_v300_org_team_cascade'),
-       (b'main', '0029_v302_add_ask_skip_tags'),
-       (b'main', '0030_v302_job_survey_passwords'),
-       (b'main', '0031_v302_migrate_survey_passwords'),
-       (b'main', '0032_v302_credential_permissions_update'),
-       (b'main', '0033_v303_v245_host_variable_fix'),]
+       (b'main', '0028_v300_org_team_cascade')] + _squashed.replaces(SQUASHED_30, applied=True)

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
@@ -130,27 +119,4 @@
            field=models.ForeignKey(related_name='teams', to='main.Organization'),
            preserve_default=False,
        ),
-       # add ask skip tags
-       migrations.AddField(
-           model_name='jobtemplate',
-           name='ask_skip_tags_on_launch',
-           field=models.BooleanField(default=False),
-       ),
-       # job survery passwords
-       migrations.AddField(
-           model_name='job',
-           name='survey_passwords',
-           field=jsonfield.fields.JSONField(default={}, editable=False, blank=True),
-       ),
-       # RBAC credential permission updates
-       migrations.AlterField(
-           model_name='credential',
-           name='admin_role',
-           field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
-       ),
-       migrations.AlterField(
-           model_name='credential',
-           name='use_role',
-           field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
-       ),
-   ]
+   ] + _squashed.operations(SQUASHED_30, applied=True)

@@ -8,6 +8,9 @@ import django.db.models.deletion
import awx.main.models.workflow
import awx.main.fields

+import _squashed
+from _squashed_30 import SQUASHED_30
+

class Migration(migrations.Migration):

@@ -15,11 +18,11 @@
        ('main', '0003_squashed_v300_v303_updates'),
    ]

-   replaces = [
+   replaces = _squashed.replaces(SQUASHED_30) + [
        (b'main', '0034_v310_release'),
    ]

-   operations = [
+   operations = _squashed.operations(SQUASHED_30) + [
        # Create ChannelGroup table
        migrations.CreateModel(
            name='ChannelGroup',
@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
-from django.db import migrations

+from django.db import migrations, models
+import _squashed
+from _squashed_31 import SQUASHED_31


class Migration(migrations.Migration):
@@ -10,28 +12,5 @@
        ('main', '0004_squashed_v310_release'),
    ]

-   replaces = [
-       (b'main', '0035_v310_remove_tower_settings'),
-   ]
-
-   operations = [
-       # Remove Tower settings, these settings are now in separate awx.conf app.
-       migrations.RemoveField(
-           model_name='towersettings',
-           name='user',
-       ),
-       migrations.DeleteModel(
-           name='TowerSettings',
-       ),
-
-       migrations.AlterField(
-           model_name='project',
-           name='scm_type',
-           field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
-       ),
-       migrations.AlterField(
-           model_name='projectupdate',
-           name='scm_type',
-           field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
-       ),
-   ]
+   replaces = _squashed.replaces(SQUASHED_31)
+   operations = _squashed.operations(SQUASHED_31)

@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('main', '0005_squashed_v310_v313_updates'),
-    ]
-
-    replaces = [
-        (b'main', '0036_v311_insights'),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name='project',
-            name='scm_type',
-            field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
-        ),
-        migrations.AlterField(
-            model_name='projectupdate',
-            name='scm_type',
-            field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
-        ),
-    ]
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ('main', '0005a_squashed_v310_v313_updates'),
-    ]
-
-    replaces = [
-        (b'main', '0037_v313_instance_version'),
-    ]
-
-    operations = [
-        # Remove Tower settings, these settings are now in separate awx.conf app.
-        migrations.AddField(
-            model_name='instance',
-            name='version',
-            field=models.CharField(max_length=24, blank=True),
-        ),
-    ]
@@ -6,7 +6,13 @@ from __future__ import unicode_literals
from psycopg2.extensions import AsIs

# Django
-from django.db import migrations, models
+from django.db import (
+    connection,
+    migrations,
+    models,
+    OperationalError,
+    ProgrammingError
+)
from django.conf import settings
import taggit.managers

@@ -15,12 +21,24 @@ import awx.main.fields
from awx.main.models import Host


+def replaces():
+    squashed = ['0005a_squashed_v310_v313_updates', '0005b_squashed_v310_v313_updates']
+    try:
+        recorder = migrations.recorder.MigrationRecorder(connection)
+        result = recorder.migration_qs.filter(app='main').filter(name__in=squashed).all()
+        return [('main', m.name) for m in result]
+    except (OperationalError, ProgrammingError):
+        return []
+
+
class Migration(migrations.Migration):

    dependencies = [
-       ('main', '0005b_squashed_v310_v313_updates'),
+       ('main', '0005_squashed_v310_v313_updates'),
    ]

+   replaces = replaces()
+
    operations = [
        # Release UJT unique_together constraint
        migrations.AlterUniqueTogether(
@@ -6,6 +6,7 @@ from __future__ import unicode_literals
from django.db import migrations, models

# AWX
+from awx.main.migrations import ActivityStreamDisabledMigration
from awx.main.migrations import _inventory_source as invsrc
from awx.main.migrations import _migration_utils as migration_utils
from awx.main.migrations import _reencrypt as reencrypt
@@ -15,7 +16,7 @@ from awx.main.migrations import _azure_credentials as azurecreds
import awx.main.fields


-class Migration(migrations.Migration):
+class Migration(ActivityStreamDisabledMigration):

    dependencies = [
        ('main', '0006_v320_release'),
@@ -0,0 +1,20 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations
+import awx.main.fields
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0008_v320_drop_v1_credential_fields'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='activitystream',
+            name='setting',
+            field=awx.main.fields.JSONField(default=dict, blank=True),
+        ),
+    ]
awx/main/migrations/0010_v322_add_ovirt4_tower_inventory.py (new file, 28 lines)
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+# AWX
+from awx.main.migrations import _credentialtypes as credentialtypes
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0009_v322_add_setting_field_for_activity_stream'),
+    ]
+
+    operations = [
+        migrations.RunPython(credentialtypes.create_rhv_tower_credtype),
+        migrations.AlterField(
+            model_name='inventorysource',
+            name='source',
+            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]),
+        ),
+        migrations.AlterField(
+            model_name='inventoryupdate',
+            name='source',
+            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'rhv', 'Red Hat Virtualization'), (b'tower', 'Ansible Tower'), (b'custom', 'Custom Script')]),
+        ),
+    ]
19
awx/main/migrations/0011_v322_encrypt_survey_passwords.py
Normal file
19
awx/main/migrations/0011_v322_encrypt_survey_passwords.py
Normal file
@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations
from awx.main.migrations import ActivityStreamDisabledMigration
from awx.main.migrations import _reencrypt as reencrypt
from awx.main.migrations import _migration_utils as migration_utils


class Migration(ActivityStreamDisabledMigration):

    dependencies = [
        ('main', '0010_v322_add_ovirt4_tower_inventory'),
    ]

    operations = [
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        migrations.RunPython(reencrypt.encrypt_survey_passwords),
    ]
18
awx/main/migrations/0012_v322_update_cred_types.py
Normal file
@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# AWX
from awx.main.migrations import _credentialtypes as credentialtypes

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0011_v322_encrypt_survey_passwords'),
    ]

    operations = [
        migrations.RunPython(credentialtypes.add_azure_cloud_environment_field),
    ]
@ -11,7 +11,7 @@ from awx.main.migrations._multi_cred import migrate_to_multi_cred
class Migration(migrations.Migration):

    dependencies = [
        ('main', '0008_v320_drop_v1_credential_fields'),
        ('main', '0012_v322_update_cred_types'),
    ]

    operations = [
@ -13,7 +13,7 @@ from awx.main.migrations._scan_jobs import remove_scan_type_nodes
class Migration(migrations.Migration):

    dependencies = [
        ('main', '0009_v330_multi_credential'),
        ('main', '0013_v330_multi_credential'),
    ]

    operations = [
@ -13,7 +13,7 @@ from awx.main.migrations._reencrypt import blank_old_start_args
class Migration(migrations.Migration):

    dependencies = [
        ('main', '0010_saved_launchtime_configs'),
        ('main', '0014_v330_saved_launchtime_configs'),
    ]

    operations = [
@ -10,7 +10,7 @@ import django.db.models.deletion
class Migration(migrations.Migration):

    dependencies = [
        ('main', '0011_blank_start_args'),
        ('main', '0015_v330_blank_start_args'),
    ]

    operations = [
@ -8,7 +8,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('main', '0012_non_blank_workflow'),
        ('main', '0016_v330_non_blank_workflow'),
    ]

    operations = [
@ -1,2 +1,12 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.

from django.db.migrations import Migration


class ActivityStreamDisabledMigration(Migration):

    def apply(self, project_state, schema_editor, collect_sql=False):
        from awx.main.signals import disable_activity_stream
        with disable_activity_stream():
            return Migration.apply(self, project_state, schema_editor, collect_sql)
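Any data migration can opt into this behavior just by inheriting from the class; a hypothetical usage sketch (migration 0011 in this changeset follows exactly this shape):

from django.db import migrations
from awx.main.migrations import ActivityStreamDisabledMigration


def forwards(apps, schema_editor):
    pass  # data changes made here emit no activity-stream entries


class Migration(ActivityStreamDisabledMigration):

    dependencies = [('main', '0010_v322_add_ovirt4_tower_inventory')]

    operations = [migrations.RunPython(forwards)]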
@ -178,3 +178,14 @@ def add_vault_id_field(apps, schema_editor):
    vault_credtype = CredentialType.objects.get(kind='vault')
    vault_credtype.inputs = CredentialType.defaults.get('vault')().inputs
    vault_credtype.save()


def create_rhv_tower_credtype(apps, schema_editor):
    CredentialType.setup_tower_managed_defaults()


def add_azure_cloud_environment_field(apps, schema_editor):
    azure_rm_credtype = CredentialType.objects.get(kind='cloud',
                                                   name='Microsoft Azure Resource Manager')
    azure_rm_credtype.inputs = CredentialType.defaults.get('azure_rm')().inputs
    azure_rm_credtype.save()
@ -1,6 +1,7 @@
import logging

from django.db.models import Q
import six

logger = logging.getLogger('awx.main.migrations')

@ -38,8 +39,10 @@ def rename_inventory_sources(apps, schema_editor):
                                  Q(deprecated_group__inventory__organization=org)).distinct().all()):

        inventory = invsrc.deprecated_group.inventory if invsrc.deprecated_group else invsrc.inventory
        name = '{0} - {1} - {2}'.format(invsrc.name, inventory.name, i)
        logger.debug("Renaming InventorySource({0}) {1} -> {2}".format(invsrc.pk, invsrc.name, name))
        name = six.text_type('{0} - {1} - {2}').format(invsrc.name, inventory.name, i)
        logger.debug(six.text_type("Renaming InventorySource({0}) {1} -> {2}").format(
            invsrc.pk, invsrc.name, name
        ))
        invsrc.name = name
        invsrc.save()
@ -1,5 +1,7 @@
import logging
import json
from django.utils.translation import ugettext_lazy as _
import six

from awx.conf.migrations._reencrypt import (
    decrypt_field,
@ -65,7 +67,6 @@ def _credentials(apps):
            credential.save()


def _unified_jobs(apps):
    UnifiedJob = apps.get_model('main', 'UnifiedJob')
    for uj in UnifiedJob.objects.all():
@ -91,3 +92,53 @@ def blank_old_start_args(apps, schema_editor):
            logger.debug('Blanking job args for %s', uj.pk)
            uj.start_args = ''
            uj.save()


def encrypt_survey_passwords(apps, schema_editor):
    _encrypt_survey_passwords(
        apps.get_model('main', 'Job'),
        apps.get_model('main', 'JobTemplate'),
        apps.get_model('main', 'WorkflowJob'),
        apps.get_model('main', 'WorkflowJobTemplate'),
    )


def _encrypt_survey_passwords(Job, JobTemplate, WorkflowJob, WorkflowJobTemplate):
    from awx.main.utils.encryption import encrypt_value
    for _type in (JobTemplate, WorkflowJobTemplate):
        for jt in _type.objects.exclude(survey_spec={}):
            changed = False
            if jt.survey_spec.get('spec', []):
                for field in jt.survey_spec['spec']:
                    if field.get('type') == 'password' and field.get('default', ''):
                        default = field['default']
                        if default.startswith('$encrypted$'):
                            if default == '$encrypted$':
                                # If you have a survey_spec with a literal
                                # '$encrypted$' as the default, you have
                                # encountered a known bug in awx/Tower
                                # https://github.com/ansible/ansible-tower/issues/7800
                                logger.error(
                                    '{}.pk={} survey_spec has ambiguous $encrypted$ default for {}, needs attention...'.format(jt, jt.pk, field['variable'])
                                )
                                field['default'] = ''
                                changed = True
                            continue
                        field['default'] = encrypt_value(field['default'], pk=None)
                        changed = True
            if changed:
                jt.save()

    for _type in (Job, WorkflowJob):
        for job in _type.objects.defer('result_stdout_text').exclude(survey_passwords={}).iterator():
            changed = False
            for key in job.survey_passwords:
                if key in job.extra_vars:
                    extra_vars = json.loads(job.extra_vars)
                    if not extra_vars.get(key, '') or extra_vars[key].startswith('$encrypted$'):
                        continue
                    extra_vars[key] = encrypt_value(extra_vars[key], pk=None)
                    job.extra_vars = json.dumps(extra_vars)
                    changed = True
            if changed:
                job.save()
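A rough sketch of the round-trip this migration relies on, assuming the awx.main.utils.encryption helpers behave as they are used here and in the mixins.py hunk below:

from awx.main.utils.encryption import encrypt_value, decrypt_value, get_encryption_key

plaintext = 'SUPERSECRET'
stored = encrypt_value(plaintext, pk=None)  # opaque '$encrypted$...' blob
assert stored.startswith('$encrypted$')
assert decrypt_value(get_encryption_key('value', pk=None), stored) == plaintext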
63
awx/main/migrations/_squashed.py
Normal file
@ -0,0 +1,63 @@
from itertools import chain
from django.db import (
    connection,
    migrations,
    OperationalError,
    ProgrammingError,
)


def squash_data(squashed):
    '''Returns a tuple of the squashed keys and the key position at which to
    begin processing the replace and operation lists'''

    cm = current_migration()
    squashed_keys = sorted(squashed.keys())
    if cm is None:
        return squashed_keys, 0

    try:
        key_index = squashed_keys.index(cm.name) + 1
    except ValueError:
        key_index = 0
    return squashed_keys, key_index


def current_migration(exclude_squashed=True):
    '''Get the latest non-squashed migration'''
    try:
        recorder = migrations.recorder.MigrationRecorder(connection)
        migration_qs = recorder.migration_qs.filter(app='main')
        if exclude_squashed:
            migration_qs = migration_qs.exclude(name__contains='squashed')
        return migration_qs.latest('id')
    except (recorder.Migration.DoesNotExist, OperationalError, ProgrammingError):
        return None


def replaces(squashed, applied=False):
    '''Build a list of replacement migrations based on the most recent non-squashed migration
    and the provided dictionary of SQUASHED migrations. If the most recent non-squashed migration
    is not present anywhere in the SQUASHED dictionary, assume they have all been applied.

    If applied is True, this will instead return a list of all the migrations that have already
    been applied.
    '''
    squashed_keys, key_index = squash_data(squashed)
    if applied:
        return [(b'main', key) for key in squashed_keys[:key_index]]
    return [(b'main', key) for key in squashed_keys[key_index:]]


def operations(squashed, applied=False):
    '''Build a list of migration operations based on the most recent non-squashed migration
    and the provided dictionary of squashed migrations. If the most recent non-squashed migration
    is not present anywhere in the `squashed` dictionary, assume they have all been applied.

    If applied is True, this will instead return a list of all the operations that have
    already been applied.
    '''
    squashed_keys, key_index = squash_data(squashed)
    op_keys = squashed_keys[:key_index] if applied else squashed_keys[key_index:]
    ops = [squashed[op_key] for op_key in op_keys]
    return [op for op in chain.from_iterable(ops)]
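A hypothetical sketch of how a squashed migration module might consume these helpers, mirroring the SQUASHED_30 dictionary defined below (not code from this commit):

from django.db import migrations

from awx.main.migrations import _squashed
from awx.main.migrations._squashed_30 import SQUASHED_30


class Migration(migrations.Migration):

    # Replace only the squashed migrations this database has not yet applied,
    # and emit only the operations that still need to run.
    replaces = _squashed.replaces(SQUASHED_30)
    operations = _squashed.operations(SQUASHED_30)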
60
awx/main/migrations/_squashed_30.py
Normal file
@ -0,0 +1,60 @@
from django.db import (
    migrations,
    models,
)
import jsonfield.fields
import awx.main.fields

from awx.main.migrations import _save_password_keys
from awx.main.migrations import _migration_utils as migration_utils


def update_dashed_host_variables(apps, schema_editor):
    Host = apps.get_model('main', 'Host')
    for host in Host.objects.filter(variables='---'):
        host.variables = ''
        host.save()


SQUASHED_30 = {
    '0029_v302_add_ask_skip_tags': [
        # add ask skip tags
        migrations.AddField(
            model_name='jobtemplate',
            name='ask_skip_tags_on_launch',
            field=models.BooleanField(default=False),
        ),
    ],
    '0030_v302_job_survey_passwords': [
        # job survey passwords
        migrations.AddField(
            model_name='job',
            name='survey_passwords',
            field=jsonfield.fields.JSONField(default={}, editable=False, blank=True),
        ),
    ],
    '0031_v302_migrate_survey_passwords': [
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        migrations.RunPython(_save_password_keys.migrate_survey_passwords),
    ],
    '0032_v302_credential_permissions_update': [
        # RBAC credential permission updates
        migrations.AlterField(
            model_name='credential',
            name='admin_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'singleton:system_administrator', b'organization.admin_role'], to='main.Role', null=b'True'),
        ),
        migrations.AlterField(
            model_name='credential',
            name='use_role',
            field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=[b'admin_role'], to='main.Role', null=b'True'),
        ),
    ],
    '0033_v303_v245_host_variable_fix': [
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        migrations.RunPython(update_dashed_host_variables),
    ],
}


__all__ = ['SQUASHED_30']
50
awx/main/migrations/_squashed_31.py
Normal file
@ -0,0 +1,50 @@
from django.db import (
    migrations,
    models,
)

SQUASHED_31 = {
    '0035_v310_remove_tower_settings': [
        # Remove Tower settings; these settings now live in the separate awx.conf app.
        migrations.RemoveField(
            model_name='towersettings',
            name='user',
        ),
        migrations.DeleteModel(
            name='TowerSettings',
        ),

        migrations.AlterField(
            model_name='project',
            name='scm_type',
            field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
        ),
        migrations.AlterField(
            model_name='projectupdate',
            name='scm_type',
            field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
        ),
    ],
    '0036_v311_insights': [
        migrations.AlterField(
            model_name='project',
            name='scm_type',
            field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
        ),
        migrations.AlterField(
            model_name='projectupdate',
            name='scm_type',
            field=models.CharField(default=b'', choices=[(b'', 'Manual'), (b'git', 'Git'), (b'hg', 'Mercurial'), (b'svn', 'Subversion'), (b'insights', 'Red Hat Insights')], max_length=8, blank=True, help_text='Specifies the source control system used to store the project.', verbose_name='SCM Type'),
        ),
    ],
    '0037_v313_instance_version': [
        # Add a version field to Instance.
        migrations.AddField(
            model_name='instance',
            name='version',
            field=models.CharField(max_length=24, blank=True),
        ),
    ],
}

__all__ = ['SQUASHED_31']
@ -3,6 +3,7 @@

# Tower
from awx.api.versioning import reverse
from awx.main.fields import JSONField

# Django
from django.db import models
@ -66,6 +67,8 @@ class ActivityStream(models.Model):
    role = models.ManyToManyField("Role", blank=True)
    instance_group = models.ManyToManyField("InstanceGroup", blank=True)

    setting = JSONField(blank=True)

    def get_absolute_url(self, request=None):
        return reverse('api:activity_stream_detail', kwargs={'pk': self.pk}, request=request)
@ -50,7 +50,7 @@ PROJECT_UPDATE_JOB_TYPE_CHOICES = [
    (PERM_INVENTORY_CHECK, _('Check')),
]

CLOUD_INVENTORY_SOURCES = ['ec2', 'vmware', 'gce', 'azure_rm', 'openstack', 'custom', 'satellite6', 'cloudforms', 'scm',]
CLOUD_INVENTORY_SOURCES = ['ec2', 'vmware', 'gce', 'azure_rm', 'openstack', 'rhv', 'custom', 'satellite6', 'cloudforms', 'scm', 'tower',]

VERBOSITY_CHOICES = [
    (0, '0 (Normal)'),
@ -288,7 +288,10 @@ class PrimordialModel(CreatedModifiedModel):
                continue
            if not (self.pk and self.pk == obj.pk):
                errors.append(
                    '%s with this (%s) combination already exists.' % (model.__name__, ', '.join(ut))
                    '%s with this (%s) combination already exists.' % (
                        model.__name__,
                        ', '.join(set(ut) - {'polymorphic_ctype'})
                    )
                )
        if errors:
            raise ValidationError(errors)
@ -3,6 +3,7 @@
from collections import OrderedDict
import functools
import json
import logging
import operator
import os
import stat
@ -35,6 +36,8 @@ from awx.main.utils import encrypt_field

__all__ = ['Credential', 'CredentialType', 'V1Credential']

logger = logging.getLogger('awx.main.models.credential')


class V1Credential(object):

@ -59,7 +62,9 @@ class V1Credential(object):
        ('gce', 'Google Compute Engine'),
        ('azure_rm', 'Microsoft Azure Resource Manager'),
        ('openstack', 'OpenStack'),
        ('rhv', 'Red Hat Virtualization'),
        ('insights', 'Insights'),
        ('tower', 'Ansible Tower'),
    ]
    FIELDS = {
        'kind': models.CharField(
@ -413,8 +418,8 @@ class CredentialType(CommonModelNameNotUnique):
    ENV_BLACKLIST = set((
        'VIRTUAL_ENV', 'PATH', 'PYTHONPATH', 'PROOT_TMP_DIR', 'JOB_ID',
        'INVENTORY_ID', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID',
        'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'TOWER_HOST',
        'AWX_HOST', 'MAX_EVENT_RES', 'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE',
        'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',
        'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE',
        'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS', 'FACT_QUEUE',
    ))

@ -498,6 +503,11 @@ class CredentialType(CommonModelNameNotUnique):
        for default in cls.defaults.values():
            default_ = default()
            if persisted:
                if CredentialType.objects.filter(name=default_.name, kind=default_.kind).count():
                    continue
            logger.debug(_(
                "adding %s credential type" % default_.name
            ))
            default_.save()

    @classmethod
@ -1009,6 +1019,12 @@ def azure_rm(cls):
            'id': 'tenant',
            'label': 'Tenant ID',
            'type': 'string'
        }, {
            'id': 'cloud_environment',
            'label': 'Azure Cloud Environment',
            'type': 'string',
            'help_text': ('Environment variable AZURE_CLOUD_ENVIRONMENT when'
                          ' using Azure GovCloud or Azure stack.')
        }],
        'required': ['subscription'],
    }
@ -1041,3 +1057,89 @@ def insights(cls):
            },
        },
    )


@CredentialType.default
def rhv(cls):
    return cls(
        kind='cloud',
        name='Red Hat Virtualization',
        managed_by_tower=True,
        inputs={
            'fields': [{
                'id': 'host',
                'label': 'Host (Authentication URL)',
                'type': 'string',
                'help_text': ('The host to authenticate with.')
            }, {
                'id': 'username',
                'label': 'Username',
                'type': 'string'
            }, {
                'id': 'password',
                'label': 'Password',
                'type': 'string',
                'secret': True,
            }, {
                'id': 'ca_file',
                'label': 'CA File',
                'type': 'string',
                'help_text': ('Absolute file path to the CA file to use (optional)')
            }],
            'required': ['host', 'username', 'password'],
        },
        injectors={
            # The duplication here is intentional; the ovirt4 inventory plugin
            # writes a .ini file for authentication, while the ansible modules for
            # ovirt4 use a separate authentication process that supports
            # environment variables; by injecting both, we support both
            'file': {
                'template': '\n'.join([
                    '[ovirt]',
                    'ovirt_url={{host}}',
                    'ovirt_username={{username}}',
                    'ovirt_password={{password}}',
                    '{% if ca_file %}ovirt_ca_file={{ca_file}}{% endif %}'])
            },
            'env': {
                'OVIRT_INI_PATH': '{{tower.filename}}',
                'OVIRT_URL': '{{host}}',
                'OVIRT_USERNAME': '{{username}}',
                'OVIRT_PASSWORD': '{{password}}'
            }
        },
    )


@CredentialType.default
def tower(cls):
    return cls(
        kind='cloud',
        name='Ansible Tower',
        managed_by_tower=True,
        inputs={
            'fields': [{
                'id': 'host',
                'label': 'Ansible Tower Hostname',
                'type': 'string',
                'help_text': ('The Ansible Tower base URL to authenticate with.')
            }, {
                'id': 'username',
                'label': 'Username',
                'type': 'string'
            }, {
                'id': 'password',
                'label': 'Password',
                'type': 'string',
                'secret': True,
            }],
            'required': ['host', 'username', 'password'],
        },
        injectors={
            'env': {
                'TOWER_HOST': '{{host}}',
                'TOWER_USERNAME': '{{username}}',
                'TOWER_PASSWORD': '{{password}}',
            }
        },
    )
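The injectors mapping drives both file and environment injection at job runtime; the {{...}} placeholders are Jinja-style templates filled from the credential's inputs. A rough, illustrative rendering of the env injectors (this is a sketch, not AWX's actual injection code):

from jinja2 import Template

inputs = {'host': 'https://rhv.example.com', 'username': 'admin', 'password': 's3cret'}
env_injectors = {
    'OVIRT_URL': '{{host}}',
    'OVIRT_USERNAME': '{{username}}',
    'OVIRT_PASSWORD': '{{password}}',
}
# Render each template against the credential inputs to build the job env.
env = {k: Template(v).render(**inputs) for k, v in env_injectors.items()}
# env['OVIRT_URL'] == 'https://rhv.example.com'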
@ -399,8 +399,13 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin):
        active_hosts = self.hosts
        failed_hosts = active_hosts.filter(has_active_failures=True)
        active_groups = self.groups
        if self.kind == 'smart':
            active_groups = active_groups.none()
        failed_groups = active_groups.filter(has_active_failures=True)
        active_inventory_sources = self.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
        if self.kind == 'smart':
            active_inventory_sources = self.inventory_sources.none()
        else:
            active_inventory_sources = self.inventory_sources.filter(source__in=CLOUD_INVENTORY_SOURCES)
        failed_inventory_sources = active_inventory_sources.filter(last_job_failed=True)
        computed_fields = {
            'has_active_failures': bool(failed_hosts.count()),
@ -417,6 +422,8 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin):
        for field, value in computed_fields.items():
            if getattr(iobj, field) != value:
                setattr(iobj, field, value)
                # update in-memory object
                setattr(self, field, value)
            else:
                computed_fields.pop(field)
        if computed_fields:
@ -464,6 +471,10 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin):
    def save(self, *args, **kwargs):
        self._update_host_smart_inventory_memeberships()
        super(Inventory, self).save(*args, **kwargs)
        if (self.kind == 'smart' and 'host_filter' in kwargs.get('update_fields', ['host_filter']) and
                connection.vendor != 'sqlite'):
            # Minimal update of host_count for smart inventory host filter changes
            self.update_computed_fields(update_groups=False, update_hosts=False)

    def delete(self, *args, **kwargs):
        self._update_host_smart_inventory_memeberships()
@ -937,6 +948,8 @@ class InventorySourceOptions(BaseModel):
        ('satellite6', _('Red Hat Satellite 6')),
        ('cloudforms', _('Red Hat CloudForms')),
        ('openstack', _('OpenStack')),
        ('rhv', _('Red Hat Virtualization')),
        ('tower', _('Ansible Tower')),
        ('custom', _('Custom Script')),
    ]

@ -1185,6 +1198,16 @@ class InventorySourceOptions(BaseModel):
        """Red Hat CloudForms region choices (not implemented)"""
        return [('all', 'All')]

    @classmethod
    def get_rhv_region_choices(self):
        """No region support"""
        return [('all', 'All')]

    @classmethod
    def get_tower_region_choices(self):
        """No region support"""
        return [('all', 'All')]

    def clean_credential(self):
        if not self.source:
            return None
@ -1256,7 +1279,7 @@ class InventorySourceOptions(BaseModel):
                raise ValidationError(_('Invalid filter expression: %(filter)s') %
                                      {'filter': ', '.join(invalid_filters)})
            return instance_filters
        elif self.source == 'vmware':
        elif self.source in ('vmware', 'tower'):
            return instance_filters
        else:
            return ''
@ -1,6 +1,6 @@
# Python
import json
from copy import copy
from copy import copy, deepcopy

# Django
from django.db import models
@ -14,6 +14,7 @@ from awx.main.models.rbac import (
    Role, RoleAncestorEntry, get_roles_on_resource
)
from awx.main.utils import parse_yaml_or_json
from awx.main.utils.encryption import decrypt_value, get_encryption_key
from awx.main.fields import JSONField, AskForField


@ -141,21 +142,27 @@ class SurveyJobTemplateMixin(models.Model):
        else:
            runtime_extra_vars = {}

        # Overwrite with job template extra vars with survey default vars
        # Overwrite job template extra vars with survey default vars
        if self.survey_enabled and 'spec' in self.survey_spec:
            for survey_element in self.survey_spec.get("spec", []):
                default = survey_element.get('default')
                variable_key = survey_element.get('variable')

                if survey_element.get('type') == 'password':
                    if variable_key in runtime_extra_vars and default:
                    if variable_key in runtime_extra_vars:
                        kw_value = runtime_extra_vars[variable_key]
                        if kw_value.startswith('$encrypted$') and kw_value != default:
                            runtime_extra_vars[variable_key] = default
                        if kw_value == '$encrypted$':
                            runtime_extra_vars.pop(variable_key)

                if default is not None:
                    data = {variable_key: default}
                    errors = self._survey_element_validation(survey_element, data)
                    decrypted_default = default
                    if (
                        survey_element['type'] == "password" and
                        isinstance(decrypted_default, basestring) and
                        decrypted_default.startswith('$encrypted$')
                    ):
                        decrypted_default = decrypt_value(get_encryption_key('value', pk=None), decrypted_default)
                    errors = self._survey_element_validation(survey_element, {variable_key: decrypted_default})
                    if not errors:
                        survey_defaults[variable_key] = default
        extra_vars.update(survey_defaults)
@ -167,7 +174,20 @@ class SurveyJobTemplateMixin(models.Model):
        return create_kwargs

    def _survey_element_validation(self, survey_element, data):
        # Don't apply validation to the `$encrypted$` placeholder; the decrypted
        # default (if any) will be validated against instead
        errors = []

        if (survey_element['type'] == "password"):
            password_value = data.get(survey_element['variable'])
            if (
                isinstance(password_value, basestring) and
                password_value == '$encrypted$'
            ):
                if survey_element.get('default') is None and survey_element['required']:
                    errors.append("'%s' value missing" % survey_element['variable'])
                return errors

        if survey_element['variable'] not in data and survey_element['required']:
            errors.append("'%s' value missing" % survey_element['variable'])
        elif survey_element['type'] in ["textarea", "text", "password"]:
@ -272,6 +292,40 @@ class SurveyJobTemplateMixin(models.Model):

        return (accepted, rejected, errors)

    @staticmethod
    def pivot_spec(spec):
        '''
        Utility method that will return a dictionary keyed off variable names
        '''
        pivoted = {}
        for element_data in spec.get('spec', []):
            if 'variable' in element_data:
                pivoted[element_data['variable']] = element_data
        return pivoted
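A quick usage sketch for pivot_spec with hypothetical survey data:

spec = {'spec': [
    {'variable': 'secret_value', 'type': 'password', 'default': ''},
    {'variable': 'region', 'type': 'text', 'default': 'us-east-1'},
]}
# Pivot the list of survey elements into a dict keyed by variable name.
pivoted = SurveyJobTemplateMixin.pivot_spec(spec)
assert pivoted['region']['default'] == 'us-east-1'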
    def survey_variable_validation(self, data):
        errors = []
        if not self.survey_enabled:
            return errors
        if 'name' not in self.survey_spec:
            errors.append("'name' missing from survey spec.")
        if 'description' not in self.survey_spec:
            errors.append("'description' missing from survey spec.")
        for survey_element in self.survey_spec.get("spec", []):
            errors += self._survey_element_validation(survey_element, data)
        return errors

    def display_survey_spec(self):
        '''
        Hide encrypted default passwords in survey specs
        '''
        survey_spec = deepcopy(self.survey_spec) if self.survey_spec else {}
        for field in survey_spec.get('spec', []):
            if field.get('type') == 'password':
                if 'default' in field and field['default']:
                    field['default'] = '$encrypted$'
        return survey_spec


class SurveyJobMixin(models.Model):
    class Meta:
@ -296,6 +350,20 @@ class SurveyJobMixin(models.Model):
        else:
            return self.extra_vars

    def decrypted_extra_vars(self):
        '''
        Decrypts fields marked as passwords in survey.
        '''
        if self.survey_passwords:
            extra_vars = json.loads(self.extra_vars)
            for key in self.survey_passwords:
                value = extra_vars.get(key)
                if value and isinstance(value, basestring) and value.startswith('$encrypted$'):
                    extra_vars[key] = decrypt_value(get_encryption_key('value', pk=None), value)
            return json.dumps(extra_vars)
        else:
            return self.extra_vars
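Callers elsewhere in this changeset (RunJob.build_args and the survey tests below) use it like this; `job` is assumed to be a saved Job whose survey recorded password variables:

import json

decrypted = json.loads(job.decrypted_extra_vars())
# password-typed keys come back as plaintext; everything else is untouched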
class TaskManagerUnifiedJobMixin(models.Model):
    class Meta:
@ -312,6 +380,9 @@ class TaskManagerJobMixin(TaskManagerUnifiedJobMixin):
    class Meta:
        abstract = True

    def get_jobs_fail_chain(self):
        return [self.project_update] if self.project_update else []

    def dependent_jobs_finished(self):
        for j in self.dependent_jobs.all():
            if j.status in ['pending', 'waiting', 'running']:
@ -91,6 +91,40 @@ class Schedule(CommonModel, LaunchTimeConfig):
        help_text=_("The next time that the scheduled action will run.")
    )

    # extra_data is actually a string with a JSON payload in it. This
    # is technically OK because a string is valid JSON. One day we will
    # enforce non-string JSON.
    def _clean_extra_data_system_jobs(self):
        extra_data = self.extra_data
        if not isinstance(extra_data, dict):
            try:
                extra_data = json.loads(self.extra_data)
            except Exception:
                raise ValidationError(_("Expected JSON"))

        if extra_data and 'days' in extra_data:
            try:
                if type(extra_data['days']) is bool:
                    raise ValueError
                if float(extra_data['days']) != int(extra_data['days']):
                    raise ValueError
                days = int(extra_data['days'])
                if days < 0:
                    raise ValueError
            except ValueError:
                raise ValidationError(_("days must be a positive integer."))
        return self.extra_data

    def clean_extra_data(self):
        if not self.unified_job_template:
            return self.extra_data

        # Compare class by string name because it's hard to import SystemJobTemplate
        if type(self.unified_job_template).__name__ != 'SystemJobTemplate':
            return self.extra_data

        return self._clean_extra_data_system_jobs()

    def __unicode__(self):
        return u'%s_t%s_%s_%s' % (self.name, self.unified_job_template.id, self.id, self.next_run)
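Illustrative inputs for the 'days' validation above (hypothetical extra_data values fed through _clean_extra_data_system_jobs):

'{"days": 30}'    # valid: a non-negative integer
'{"days": 1.5}'   # rejected: float(1.5) != int(1.5)
'{"days": true}'  # rejected: booleans are explicitly excluded
'{"days": -1}'    # rejected: negative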
@ -34,7 +34,7 @@ from django_celery_results.models import TaskResult
from awx.main.models.base import * # noqa
from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin
from awx.main.utils import (
    decrypt_field, _inventory_updates,
    encrypt_value, decrypt_field, _inventory_updates,
    copy_model_by_class, copy_m2m_relationships,
    get_type_for_model, parse_yaml_or_json
)
@ -345,6 +345,16 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
        '''
        new_job_passwords = kwargs.pop('survey_passwords', {})
        eager_fields = kwargs.pop('_eager_fields', None)

        # automatically encrypt survey fields
        if hasattr(self, 'survey_spec') and getattr(self, 'survey_enabled', False):
            password_list = self.survey_password_variables()
            for key in kwargs.get('extra_vars', {}):
                if key in password_list:
                    kwargs['extra_vars'][key] = encrypt_value(
                        kwargs['extra_vars'][key]
                    )

        unified_job_class = self._get_unified_job_class()
        fields = self._get_unified_job_field_names()
        unallowed_fields = set(kwargs.keys()) - set(fields)
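A sketch of the launch-time flow this hunk implements, using the names as they appear above (survey_password_variables() is assumed to return the password-typed variable names):

extra_vars = {'secret_value': 'SUPERSECRET', 'region': 'us-east-1'}
password_list = ['secret_value']
for key in extra_vars:
    if key in password_list:
        extra_vars[key] = encrypt_value(extra_vars[key])
# only 'secret_value' is replaced with an opaque '$encrypted$...' blob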
@ -26,6 +26,8 @@ from awx.main.fields import is_implicit_parent

from awx.main.consumers import emit_channel_notification

from awx.conf.utils import conf_to_dict

__all__ = []

logger = logging.getLogger('awx.main.signals')
@ -284,7 +286,12 @@ def _update_host_last_jhs(host):
    except IndexError:
        jhs = None
    update_fields = []
    last_job = jhs.job if jhs else None
    try:
        last_job = jhs.job if jhs else None
    except Job.DoesNotExist:
        # The job (and its summaries) have already been/are currently being
        # deleted, so there's no need to update the host w/ a reference to it
        return
    if host.last_job != last_job:
        host.last_job = last_job
        update_fields.append('last_job')
@ -392,12 +399,15 @@ def activity_stream_create(sender, instance, created, **kwargs):
                                           object1=object1,
                                           changes=json.dumps(changes),
                                           actor=get_current_user_or_none())
    activity_entry.save()
    # TODO: Weird situation where cascade SETNULL doesn't work
    # it might actually be a good idea to remove all of these FK references since
    # we don't really use them anyway.
    if instance._meta.model_name != 'setting':  # Is not conf.Setting instance
        activity_entry.save()
        getattr(activity_entry, object1).add(instance)
    else:
        activity_entry.setting = conf_to_dict(instance)
        activity_entry.save()


def activity_stream_update(sender, instance, **kwargs):
@ -423,9 +433,12 @@ def activity_stream_update(sender, instance, **kwargs):
                                           object1=object1,
                                           changes=json.dumps(changes),
                                           actor=get_current_user_or_none())
    activity_entry.save()
    if instance._meta.model_name != 'setting':  # Is not conf.Setting instance
        activity_entry.save()
        getattr(activity_entry, object1).add(instance)
    else:
        activity_entry.setting = conf_to_dict(instance)
        activity_entry.save()


def activity_stream_delete(sender, instance, **kwargs):
@ -535,8 +548,8 @@ def get_current_user_from_drf_request(sender, **kwargs):
    drf_request on the underlying Django Request object.
    '''
    request = get_current_request()
    drf_request = getattr(request, 'drf_request', None)
    return (getattr(drf_request, 'user', False), 0)
    drf_request_user = getattr(request, 'drf_request_user', False)
    return (drf_request_user, 0)


@receiver(pre_delete, sender=Organization)
@ -432,13 +432,22 @@ def update_host_smart_inventory_memberships():
        smart_inventories = Inventory.objects.filter(kind='smart', host_filter__isnull=False, pending_deletion=False)
        SmartInventoryMembership.objects.all().delete()
        memberships = []
        changed_inventories = set([])
        for smart_inventory in smart_inventories:
            memberships.extend([SmartInventoryMembership(inventory_id=smart_inventory.id, host_id=host_id[0])
                                for host_id in smart_inventory.hosts.values_list('id')])
            add_for_inventory = [
                SmartInventoryMembership(inventory_id=smart_inventory.id, host_id=host_id[0])
                for host_id in smart_inventory.hosts.values_list('id')
            ]
            memberships.extend(add_for_inventory)
            if add_for_inventory:
                changed_inventories.add(smart_inventory)
        SmartInventoryMembership.objects.bulk_create(memberships)
    except IntegrityError as e:
        logger.error("Update Host Smart Inventory Memberships failed due to an exception: " + str(e))
        return
    # Update computed fields for changed inventories outside atomic action
    for smart_inventory in changed_inventories:
        smart_inventory.update_computed_fields(update_groups=False, update_hosts=False)
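A side note on the host_id[0] indexing above: values_list('id') yields one-tuples, while Django's flat=True form returns bare ids, so an equivalent sketch is:

ids = smart_inventory.hosts.values_list('id', flat=True)
memberships = [
    SmartInventoryMembership(inventory_id=smart_inventory.id, host_id=host_id)
    for host_id in ids
]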
@shared_task(bind=True, queue='tower', base=LogErrorsTask, max_retries=5)
@ -874,6 +883,12 @@ class BaseTask(LogErrorsTask):
        try:
            stdout_handle.flush()
            stdout_handle.close()
            # If stdout_handle was wrapped with event filter, log data
            if hasattr(stdout_handle, '_event_ct'):
                logger.info('%s finished running, producing %s events.',
                            instance.log_format, stdout_handle._event_ct)
            else:
                logger.info('%s finished running', instance.log_format)
        except Exception:
            pass
@ -1026,13 +1041,9 @@ class RunJob(BaseTask):
        env['ANSIBLE_STDOUT_CALLBACK'] = 'awx_display'
        env['TOWER_HOST'] = settings.TOWER_URL_BASE
        env['AWX_HOST'] = settings.TOWER_URL_BASE
        env['CALLBACK_QUEUE'] = settings.CALLBACK_QUEUE
        env['CALLBACK_CONNECTION'] = settings.CELERY_BROKER_URL
        env['REST_API_URL'] = settings.INTERNAL_API_URL
        env['REST_API_TOKEN'] = job.task_auth_token or ''
        env['CACHE'] = settings.CACHES['default']['LOCATION'] if 'LOCATION' in settings.CACHES['default'] else ''
        if getattr(settings, 'JOB_CALLBACK_DEBUG', False):
            env['JOB_CALLBACK_DEBUG'] = '2'
        elif settings.DEBUG:
            env['JOB_CALLBACK_DEBUG'] = '1'

        # Create a directory for ControlPath sockets that is unique to each
        # job and visible inside the proot environment (when enabled).
@ -1067,6 +1078,8 @@ class RunJob(BaseTask):
            env['AZURE_SUBSCRIPTION_ID'] = cloud_cred.subscription
            env['AZURE_AD_USER'] = cloud_cred.username
            env['AZURE_PASSWORD'] = decrypt_field(cloud_cred, 'password')
            if cloud_cred.inputs.get('cloud_environment', None):
                env['AZURE_CLOUD_ENVIRONMENT'] = cloud_cred.inputs['cloud_environment']
        elif cloud_cred and cloud_cred.kind == 'vmware':
            env['VMWARE_USER'] = cloud_cred.username
            env['VMWARE_PASSWORD'] = decrypt_field(cloud_cred, 'password')
@ -1159,7 +1172,7 @@ class RunJob(BaseTask):
        if kwargs.get('display', False) and job.job_template:
            extra_vars.update(json.loads(job.display_extra_vars()))
        else:
            extra_vars.update(job.extra_vars_dict)
            extra_vars.update(json.loads(job.decrypted_extra_vars()))
        args.extend(['-e', json.dumps(extra_vars)])

        # Add path to playbook (relative to project.local_path).
@ -1252,10 +1265,12 @@ class RunJob(BaseTask):
                task_instance.run(local_project_sync.id)
                job = self.update_model(job.pk, scm_revision=job.project.scm_revision)
            except Exception:
                job = self.update_model(job.pk, status='failed',
                                        job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
                                                         ('project_update', local_project_sync.name, local_project_sync.id)))
                raise
                local_project_sync.refresh_from_db()
                if local_project_sync.status != 'canceled':
                    job = self.update_model(job.pk, status='failed',
                                            job_explanation=('Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' %
                                                             ('project_update', local_project_sync.name, local_project_sync.id)))
                    raise

        if job.use_fact_cache and not kwargs.get('isolated'):
            job.start_job_fact_cache()
@ -1327,6 +1342,9 @@ class RunProjectUpdate(BaseTask):
        env['ANSIBLE_ASK_PASS'] = str(False)
        env['ANSIBLE_BECOME_ASK_PASS'] = str(False)
        env['DISPLAY'] = ''  # Prevent stupid password popup when running tests.
        # give ansible a hint about the intended tmpdir to work around issues
        # like https://github.com/ansible/ansible/issues/30064
        env['TMP'] = settings.AWX_PROOT_BASE_PATH
        return env

    def _build_scm_url_extra_vars(self, project_update, **kwargs):
@ -1519,11 +1537,11 @@ class RunProjectUpdate(BaseTask):
            except InventoryUpdate.DoesNotExist:
                logger.warning('%s Dependent inventory update deleted during execution.', project_update.log_format)
                continue
            if project_update.cancel_flag or local_inv_update.cancel_flag:
                if not project_update.cancel_flag:
                    self.update_model(project_update.pk, cancel_flag=True, job_explanation=_(
                        'Dependent inventory update {} was canceled.'.format(local_inv_update.name)))
                break  # Stop rest of updates if project or inventory update was canceled
            if project_update.cancel_flag:
                logger.info('Project update {} was canceled while updating dependent inventories.'.format(project_update.log_format))
                break
            if local_inv_update.cancel_flag:
                logger.info('Continuing to process project dependencies after {} was canceled'.format(local_inv_update.log_format))
            if local_inv_update.status == 'successful':
                inv_src.scm_last_revision = scm_revision
                inv_src.save(update_fields=['scm_last_revision'])
@ -1864,11 +1882,24 @@ class RunInventoryUpdate(BaseTask):
            env['AZURE_AD_USER'] = passwords.get('source_username', '')
            env['AZURE_PASSWORD'] = passwords.get('source_password', '')
            env['AZURE_INI_PATH'] = cloud_credential
            if inventory_update.credential and \
                    inventory_update.credential.inputs.get('cloud_environment', None):
                env['AZURE_CLOUD_ENVIRONMENT'] = inventory_update.credential.inputs['cloud_environment']
        elif inventory_update.source == 'gce':
            env['GCE_EMAIL'] = passwords.get('source_username', '')
            env['GCE_PROJECT'] = passwords.get('source_project', '')
            env['GCE_PEM_FILE_PATH'] = cloud_credential
            env['GCE_ZONE'] = inventory_update.source_regions if inventory_update.source_regions != 'all' else ''

            # by default, the GCE inventory source caches results on disk for
            # 5 minutes; disable this behavior
            cp = ConfigParser.ConfigParser()
            cp.add_section('cache')
            cp.set('cache', 'cache_max_age', '0')
            handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None))
            cp.write(os.fdopen(handle, 'w'))
            os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)
            env['GCE_INI_PATH'] = path
        elif inventory_update.source == 'openstack':
            env['OS_CLIENT_CONFIG_FILE'] = cloud_credential
        elif inventory_update.source == 'satellite6':
@ -1879,6 +1910,9 @@ class RunInventoryUpdate(BaseTask):
            for env_k in inventory_update.source_vars_dict:
                if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLACKLIST:
                    env[str(env_k)] = unicode(inventory_update.source_vars_dict[env_k])
        elif inventory_update.source == 'tower':
            env['TOWER_INVENTORY'] = inventory_update.instance_filters
            env['TOWER_LICENSE_TYPE'] = get_licenser().validate()['license_type']
        elif inventory_update.source == 'file':
            raise NotImplementedError('Cannot update file sources through the task system.')
        # add private_data_files
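For reference, the ConfigParser block in the GCE branch above writes a throwaway ini file equivalent to:

[cache]
cache_max_age = 0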
@ -2066,14 +2100,10 @@ class RunAdHocCommand(BaseTask):
        env['ANSIBLE_CALLBACK_PLUGINS'] = plugin_dir
        env['ANSIBLE_LOAD_CALLBACK_PLUGINS'] = '1'
        env['ANSIBLE_STDOUT_CALLBACK'] = 'minimal'  # Hardcoded by Ansible for ad-hoc commands (either minimal or oneline).
        env['CALLBACK_QUEUE'] = settings.CALLBACK_QUEUE
        env['CALLBACK_CONNECTION'] = settings.CELERY_BROKER_URL
        env['REST_API_URL'] = settings.INTERNAL_API_URL
        env['REST_API_TOKEN'] = ad_hoc_command.task_auth_token or ''
        env['ANSIBLE_SFTP_BATCH_MODE'] = 'False'
        env['CACHE'] = settings.CACHES['default']['LOCATION'] if 'LOCATION' in settings.CACHES['default'] else ''
        if getattr(settings, 'JOB_CALLBACK_DEBUG', False):
            env['JOB_CALLBACK_DEBUG'] = '2'
        elif settings.DEBUG:
            env['JOB_CALLBACK_DEBUG'] = '1'

        # Specify empty SSH args (should disable ControlPersist entirely for
        # ad hoc commands).
@ -2124,14 +2154,27 @@ class RunAdHocCommand(BaseTask):
        if ad_hoc_command.verbosity:
            args.append('-%s' % ('v' * min(5, ad_hoc_command.verbosity)))

        # Define special extra_vars for AWX, combine with ad_hoc_command.extra_vars
        extra_vars = {
            'tower_job_id': ad_hoc_command.pk,
            'awx_job_id': ad_hoc_command.pk,
        }
        if ad_hoc_command.created_by:
            extra_vars.update({
                'tower_user_id': ad_hoc_command.created_by.pk,
                'tower_user_name': ad_hoc_command.created_by.username,
                'awx_user_id': ad_hoc_command.created_by.pk,
                'awx_user_name': ad_hoc_command.created_by.username,
            })

        if ad_hoc_command.extra_vars_dict:
            redacted_extra_vars, removed_vars = extract_ansible_vars(ad_hoc_command.extra_vars_dict)
            if removed_vars:
                raise ValueError(_(
                    "{} are prohibited from use in ad hoc commands."
                ).format(", ".join(removed_vars)))

        args.extend(['-e', json.dumps(ad_hoc_command.extra_vars_dict)])
            extra_vars.update(ad_hoc_command.extra_vars_dict)
        args.extend(['-e', json.dumps(extra_vars)])

        args.extend(['-m', ad_hoc_command.module_name])
        args.extend(['-a', ad_hoc_command.module_args])
@ -5,6 +5,7 @@ from awx.api.versioning import reverse
from awx.main.middleware import ActivityStreamMiddleware
from awx.main.models.activity_stream import ActivityStream
from awx.main.access import ActivityStreamAccess
from awx.conf.models import Setting


def mock_feature_enabled(feature):
@ -47,6 +48,26 @@ def test_basic_fields(monkeypatch, organization, get, user, settings):
    assert response.data['summary_fields']['organization'][0]['name'] == 'test-org'


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_ctint_activity_stream(monkeypatch, get, user, settings):
    Setting.objects.create(key="FOO", value="bar")
    settings.ACTIVITY_STREAM_ENABLED = True
    u = user('admin', True)
    activity_stream = ActivityStream.objects.filter(setting={'name': 'FOO', 'category': None}).latest('pk')
    activity_stream.actor = u
    activity_stream.save()

    aspk = activity_stream.pk
    url = reverse('api:activity_stream_detail', kwargs={'pk': aspk})
    response = get(url, user('admin', True))

    assert response.status_code == 200
    assert 'summary_fields' in response.data
    assert 'setting' in response.data['summary_fields']
    assert response.data['summary_fields']['setting'][0]['name'] == 'FOO'


@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
@pytest.mark.django_db
def test_middleware_actor_added(monkeypatch, post, get, user, settings):
@ -4,7 +4,9 @@ import re
import mock  # noqa
import pytest

from awx.main.models.credential import Credential, CredentialType
from awx.main.models import (AdHocCommand, Credential, CredentialType, Job, JobTemplate,
                             Inventory, InventorySource, Project,
                             WorkflowJobNode)
from awx.main.utils import decrypt_field
from awx.api.versioning import reverse

@ -12,6 +14,17 @@ EXAMPLE_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nxyz==\n-----END PRIVATE KEY-
EXAMPLE_ENCRYPTED_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nProc-Type: 4,ENCRYPTED\nxyz==\n-----END PRIVATE KEY-----'


@pytest.mark.django_db
def test_idempotent_credential_type_setup():
    assert CredentialType.objects.count() == 0
    CredentialType.setup_tower_managed_defaults()
    total = CredentialType.objects.count()
    assert total > 0

    CredentialType.setup_tower_managed_defaults()
    assert CredentialType.objects.count() == total


@pytest.mark.django_db
@pytest.mark.parametrize('kind, total', [
    ('ssh', 1), ('net', 0)
@ -575,7 +588,7 @@ def test_create_org_credential_as_admin(post, organization, org_admin, credentia
    params['name'] = 'Some name'
    params['organization'] = organization.id
    response = post(
        reverse('api:credential_list'),
        reverse('api:credential_list', kwargs={'version': version}),
        params,
        org_admin
    )
@ -591,7 +604,7 @@ def test_credential_detail(post, get, organization, org_admin, credentialtype_ss
    params['name'] = 'Some name'
    params['organization'] = organization.id
    response = post(
        reverse('api:credential_list'),
        reverse('api:credential_list', kwargs={'version': version}),
        params,
        org_admin
    )
@ -1410,7 +1423,17 @@ def test_field_removal(put, organization, admin, credentialtype_ssh, version, pa


@pytest.mark.django_db
def test_credential_type_immutable_in_v2(patch, organization, admin, credentialtype_ssh, credentialtype_aws):
@pytest.mark.parametrize('relation, related_obj', [
    ['ad_hoc_commands', AdHocCommand()],
    ['insights_inventories', Inventory()],
    ['inventorysources', InventorySource()],
    ['jobs', Job()],
    ['jobtemplates', JobTemplate()],
    ['projects', Project()],
    ['workflowjobnodes', WorkflowJobNode()],
])
def test_credential_type_mutability(patch, organization, admin, credentialtype_ssh,
                                    credentialtype_aws, relation, related_obj):
    cred = Credential(
        credential_type=credentialtype_ssh,
        name='Best credential ever',
@ -1422,19 +1445,39 @@ def test_credential_type_immutable_in_v2(patch, organization, admin, credentialt
    )
    cred.save()

    related_obj.save()
    getattr(cred, relation).add(related_obj)

    def _change_credential_type():
        return patch(
            reverse('api:credential_detail', kwargs={'version': 'v2', 'pk': cred.pk}),
            {
                'credential_type': credentialtype_aws.pk,
                'inputs': {
                    'username': u'jim',
                    'password': u'pass'
                }
            },
            admin
        )

    response = _change_credential_type()
    assert response.status_code == 400
    expected = ['You cannot change the credential type of the credential, '
                'as it may break the functionality of the resources using it.']
    assert response.data['credential_type'] == expected

    response = patch(
        reverse('api:credential_detail', kwargs={'version': 'v2', 'pk': cred.pk}),
        {
            'credential_type': credentialtype_aws.pk,
            'inputs': {
                'username': u'jim',
                'password': u'pass'
            }
        },
        {'name': 'Worst credential ever'},
        admin
    )
    assert response.status_code == 400
    assert 'credential_type' in response.data
    assert response.status_code == 200
    assert Credential.objects.get(pk=cred.pk).name == 'Worst credential ever'

    related_obj.delete()
    response = _change_credential_type()
    assert response.status_code == 200


@pytest.mark.django_db
@ -60,3 +60,34 @@ def test_proxy_ip_whitelist(get, patch, admin):
                          REMOTE_HOST='my.proxy.example.org',
                          HTTP_X_FROM_THE_LOAD_BALANCER='some-actual-ip')
    assert middleware.environ['HTTP_X_FROM_THE_LOAD_BALANCER'] == 'some-actual-ip'


@pytest.mark.django_db
class TestDeleteViews:
    def test_sublist_delete_permission_check(self, inventory_source, host, rando, delete):
        inventory_source.hosts.add(host)
        inventory_source.inventory.read_role.members.add(rando)
        delete(
            reverse(
                'api:inventory_source_hosts_list',
                kwargs={'version': 'v2', 'pk': inventory_source.pk}
            ), user=rando, expect=403
        )

    def test_sublist_delete_functionality(self, inventory_source, host, rando, delete):
        inventory_source.hosts.add(host)
        inventory_source.inventory.admin_role.members.add(rando)
        delete(
            reverse(
                'api:inventory_source_hosts_list',
                kwargs={'version': 'v2', 'pk': inventory_source.pk}
            ), user=rando, expect=204
        )
        assert inventory_source.hosts.count() == 0

    def test_destroy_permission_check(self, job_factory, system_auditor, delete):
        job = job_factory()
        resp = delete(
            job.get_absolute_url(), user=system_auditor
        )
        assert resp.status_code == 403
@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
import pytest
import mock

@ -236,6 +237,51 @@ def test_create_inventory_smart_inventory_sources(post, get, inventory, admin_us
    assert jdata['count'] == 0


@pytest.mark.django_db
def test_urlencode_host_filter(post, admin_user, organization):
    """
    Host filters saved on the model must correspond to the same result
    as when that host_filter is used in the URL as a querystring.
    That means that url-encoded patterns like %22 for quotes
    must be unescaped as the string is saved to the model.

    The expected host filter in this test would match a host such as:
    inventory.hosts.create(
        ansible_facts={"ansible_distribution_version": "7.4"}
    )
    """
    # Create smart inventory with host filter that corresponds to querystring
    post(
        reverse('api:inventory_list'),
        data={
            'name': 'smart inventory', 'kind': 'smart',
            'organization': organization.pk,
            'host_filter': 'ansible_facts__ansible_distribution_version=%227.4%22'
        },
        user=admin_user,
        expect=201
    )
    # Assert that the saved version of the host filter has unescaped quotes
    si = Inventory.objects.get(name='smart inventory')
    assert si.host_filter == 'ansible_facts__ansible_distribution_version="7.4"'


@pytest.mark.django_db
def test_host_filter_unicode(post, admin_user, organization):
    post(
        reverse('api:inventory_list'),
        data={
            'name': 'smart inventory', 'kind': 'smart',
            'organization': organization.pk,
            'host_filter': u'ansible_facts__ansible_distribution=レッドハット'
        },
        user=admin_user,
        expect=201
    )
    si = Inventory.objects.get(name='smart inventory')
    assert si.host_filter == u'ansible_facts__ansible_distribution=レッドハット'


@pytest.mark.parametrize("role_field,expected_status_code", [
    (None, 403),
    ('admin_role', 201),
@ -304,3 +304,19 @@ def test_isolated_keys_readonly(get, patch, delete, admin, key, expected):

    delete(url, user=admin)
    assert getattr(settings, key) == 'secret'


@pytest.mark.django_db
def test_isolated_key_flag_readonly(get, patch, delete, admin):
    settings.AWX_ISOLATED_KEY_GENERATION = True
    url = reverse('api:setting_singleton_detail', kwargs={'category_slug': 'jobs'})
    resp = get(url, user=admin)
    assert resp.data['AWX_ISOLATED_KEY_GENERATION'] is True

    patch(url, user=admin, data={
        'AWX_ISOLATED_KEY_GENERATION': False
    })
    assert settings.AWX_ISOLATED_KEY_GENERATION is True

    delete(url, user=admin)
    assert settings.AWX_ISOLATED_KEY_GENERATION is True
@ -111,6 +111,241 @@ def test_survey_spec_sucessful_creation(survey_spec_factory, job_template, post,
    assert updated_jt.survey_spec == survey_input_data


@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
@pytest.mark.django_db
@pytest.mark.parametrize('with_default', [True, False])
@pytest.mark.parametrize('value, status', [
    ('SUPERSECRET', 201),
    (['some', 'invalid', 'list'], 400),
    ({'some-invalid': 'dict'}, 400),
    (False, 400)
])
def test_survey_spec_passwords_are_encrypted_on_launch(job_template_factory, post, admin_user, with_default, value, status):
    objects = job_template_factory('jt', organization='org1', project='prj',
                                   inventory='inv', credential='cred')
    job_template = objects.job_template
    job_template.survey_enabled = True
    job_template.save()
    input_data = {
        'description': 'A survey',
        'spec': [{
            'index': 0,
            'question_name': 'What is your password?',
            'required': True,
            'variable': 'secret_value',
            'type': 'password'
        }],
        'name': 'my survey'
    }
    if with_default:
        input_data['spec'][0]['default'] = 'some-default'
    post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
         data=input_data, user=admin_user, expect=200)
    resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
                dict(extra_vars=dict(secret_value=value)), admin_user, expect=status)

    if status == 201:
        job = Job.objects.get(pk=resp.data['id'])
        assert json.loads(job.extra_vars)['secret_value'].startswith('$encrypted$')
        assert json.loads(job.decrypted_extra_vars()) == {
            'secret_value': value
        }
    else:
        assert "for 'secret_value' expected to be a string." in json.dumps(resp.data)


@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
@pytest.mark.django_db
def test_survey_spec_passwords_with_empty_default(job_template_factory, post, admin_user):
    objects = job_template_factory('jt', organization='org1', project='prj',
                                   inventory='inv', credential='cred')
    job_template = objects.job_template
    job_template.survey_enabled = True
    job_template.save()
    input_data = {
        'description': 'A survey',
        'spec': [{
            'index': 0,
            'question_name': 'What is your password?',
            'required': False,
            'variable': 'secret_value',
            'type': 'password',
            'default': ''
        }],
        'name': 'my survey'
    }
    post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
         data=input_data, user=admin_user, expect=200)

    resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
                {}, admin_user, expect=201)
    job = Job.objects.get(pk=resp.data['id'])
    assert json.loads(job.extra_vars)['secret_value'] == ''
    assert json.loads(job.decrypted_extra_vars()) == {
        'secret_value': ''
    }


@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
@pytest.mark.django_db
@pytest.mark.parametrize('default, launch_value, expected_extra_vars, status', [
    ['', '$encrypted$', {'secret_value': ''}, 201],
    ['', 'y', {'secret_value': 'y'}, 201],
    ['', 'y' * 100, None, 400],
    [None, '$encrypted$', {}, 201],
    [None, 'y', {'secret_value': 'y'}, 201],
    [None, 'y' * 100, {}, 400],
    ['x', '$encrypted$', {'secret_value': 'x'}, 201],
    ['x', 'y', {'secret_value': 'y'}, 201],
    ['x', 'y' * 100, {}, 400],
    ['x' * 100, '$encrypted$', {}, 201],
    ['x' * 100, 'y', {'secret_value': 'y'}, 201],
    ['x' * 100, 'y' * 100, {}, 400],
])
|
||||
def test_survey_spec_passwords_with_default_optional(job_template_factory, post, admin_user,
|
||||
default, launch_value,
|
||||
expected_extra_vars, status):
|
||||
objects = job_template_factory('jt', organization='org1', project='prj',
|
||||
inventory='inv', credential='cred')
|
||||
job_template = objects.job_template
|
||||
job_template.survey_enabled = True
|
||||
job_template.save()
|
||||
input_data = {
|
||||
'description': 'A survey',
|
||||
'spec': [{
|
||||
'index': 0,
|
||||
'question_name': 'What is your password?',
|
||||
'required': False,
|
||||
'variable': 'secret_value',
|
||||
'type': 'password',
|
||||
'max': 3
|
||||
}],
|
||||
'name': 'my survey'
|
||||
}
|
||||
if default is not None:
|
||||
input_data['spec'][0]['default'] = default
|
||||
post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
|
||||
data=input_data, user=admin_user, expect=200)
|
||||
|
||||
resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
|
||||
data={'extra_vars': {'secret_value': launch_value}}, user=admin_user, expect=status)
|
||||
|
||||
if status == 201:
|
||||
job = Job.objects.get(pk=resp.data['job'])
|
||||
assert json.loads(job.decrypted_extra_vars()) == expected_extra_vars
|
||||
if default:
|
||||
assert default not in json.loads(job.extra_vars).values()
|
||||
assert launch_value not in json.loads(job.extra_vars).values()
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize('default, launch_value, expected_extra_vars, status', [
|
||||
['', '$encrypted$', {'secret_value': ''}, 201],
|
||||
[None, '$encrypted$', {}, 400],
|
||||
[None, 'y', {'secret_value': 'y'}, 201],
|
||||
])
|
||||
def test_survey_spec_passwords_with_default_required(job_template_factory, post, admin_user,
|
||||
default, launch_value,
|
||||
expected_extra_vars, status):
|
||||
objects = job_template_factory('jt', organization='org1', project='prj',
|
||||
inventory='inv', credential='cred')
|
||||
job_template = objects.job_template
|
||||
job_template.survey_enabled = True
|
||||
job_template.save()
|
||||
input_data = {
|
||||
'description': 'A survey',
|
||||
'spec': [{
|
||||
'index': 0,
|
||||
'question_name': 'What is your password?',
|
||||
'required': True,
|
||||
'variable': 'secret_value',
|
||||
'type': 'password',
|
||||
'max': 3
|
||||
}],
|
||||
'name': 'my survey'
|
||||
}
|
||||
if default is not None:
|
||||
input_data['spec'][0]['default'] = default
|
||||
post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
|
||||
data=input_data, user=admin_user, expect=200)
|
||||
|
||||
resp = post(reverse('api:job_template_launch', kwargs={'pk': job_template.pk}),
|
||||
data={'extra_vars': {'secret_value': launch_value}}, user=admin_user, expect=status)
|
||||
|
||||
if status == 201:
|
||||
job = Job.objects.get(pk=resp.data['job'])
|
||||
assert json.loads(job.decrypted_extra_vars()) == expected_extra_vars
|
||||
if default:
|
||||
assert default not in json.loads(job.extra_vars).values()
|
||||
assert launch_value not in json.loads(job.extra_vars).values()
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize('default, status', [
|
||||
('SUPERSECRET', 200),
|
||||
(['some', 'invalid', 'list'], 400),
|
||||
({'some-invalid': 'dict'}, 400),
|
||||
(False, 400)
|
||||
])
|
||||
def test_survey_spec_default_passwords_are_encrypted(job_template, post, admin_user, default, status):
|
||||
job_template.survey_enabled = True
|
||||
job_template.save()
|
||||
input_data = {
|
||||
'description': 'A survey',
|
||||
'spec': [{
|
||||
'index': 0,
|
||||
'question_name': 'What is your password?',
|
||||
'required': True,
|
||||
'variable': 'secret_value',
|
||||
'default': default,
|
||||
'type': 'password'
|
||||
}],
|
||||
'name': 'my survey'
|
||||
}
|
||||
resp = post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
|
||||
data=input_data, user=admin_user, expect=status)
|
||||
|
||||
if status == 200:
|
||||
updated_jt = JobTemplate.objects.get(pk=job_template.pk)
|
||||
assert updated_jt.survey_spec['spec'][0]['default'].startswith('$encrypted$')
|
||||
|
||||
job = updated_jt.create_unified_job()
|
||||
assert json.loads(job.extra_vars)['secret_value'].startswith('$encrypted$')
|
||||
assert json.loads(job.decrypted_extra_vars()) == {
|
||||
'secret_value': default
|
||||
}
|
||||
else:
|
||||
assert "for 'secret_value' expected to be a string." in str(resp.data)
|
||||
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
def test_survey_spec_default_passwords_encrypted_on_update(job_template, post, put, admin_user):
|
||||
input_data = {
|
||||
'description': 'A survey',
|
||||
'spec': [{
|
||||
'index': 0,
|
||||
'question_name': 'What is your password?',
|
||||
'required': True,
|
||||
'variable': 'secret_value',
|
||||
'default': 'SUPERSECRET',
|
||||
'type': 'password'
|
||||
}],
|
||||
'name': 'my survey'
|
||||
}
|
||||
post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
|
||||
data=input_data, user=admin_user, expect=200)
|
||||
updated_jt = JobTemplate.objects.get(pk=job_template.pk)
|
||||
|
||||
# simulate a survey field edit where we're not changing the default value
|
||||
input_data['spec'][0]['default'] = '$encrypted$'
|
||||
post(url=reverse('api:job_template_survey_spec', kwargs={'pk': job_template.id}),
|
||||
data=input_data, user=admin_user, expect=200)
|
||||
assert updated_jt.survey_spec == JobTemplate.objects.get(pk=job_template.pk).survey_spec
|
||||
|
||||
|
||||
# Tests related to survey content validation
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
|
||||
@ -25,9 +25,11 @@ def test_default_cred_types():
|
||||
'insights',
|
||||
'net',
|
||||
'openstack',
|
||||
'rhv',
|
||||
'satellite6',
|
||||
'scm',
|
||||
'ssh',
|
||||
'tower',
|
||||
'vault',
|
||||
'vmware',
|
||||
]
|
||||
|
||||
@ -13,7 +13,7 @@ from awx.main.access import (
|
||||
InventoryUpdateAccess,
|
||||
CustomInventoryScriptAccess,
|
||||
ScheduleAccess,
|
||||
StateConflict
|
||||
ActiveJobConflict
|
||||
)
|
||||
|
||||
|
||||
@ -21,7 +21,7 @@ from awx.main.access import (
|
||||
def test_running_job_protection(inventory, admin_user):
|
||||
AdHocCommand.objects.create(inventory=inventory, status='running')
|
||||
access = InventoryAccess(admin_user)
|
||||
with pytest.raises(StateConflict):
|
||||
with pytest.raises(ActiveJobConflict):
|
||||
access.can_delete(inventory)
|
||||
|
||||
|
||||
|
||||
@ -57,7 +57,21 @@ class TestWorkflowJobTemplateNodeAccess:
|
||||
# without access to the related job template, admin to the WFJT can
|
||||
# not change the prompted parameters
|
||||
access = WorkflowJobTemplateNodeAccess(org_admin)
|
||||
assert not access.can_change(wfjt_node, {'job_type': 'scan'})
|
||||
assert not access.can_change(wfjt_node, {'job_type': 'check'})
|
||||
|
||||
def test_node_edit_allowed(self, wfjt_node, org_admin):
|
||||
wfjt_node.unified_job_template.admin_role.members.add(org_admin)
|
||||
access = WorkflowJobTemplateNodeAccess(org_admin)
|
||||
assert access.can_change(wfjt_node, {'job_type': 'check'})
|
||||
|
||||
def test_prompted_resource_prevents_edit(self, wfjt_node, org_admin, machine_credential):
|
||||
# without access to prompted resources, admin to the WFJT can
|
||||
# not change the other prompted resources
|
||||
wfjt_node.unified_job_template.admin_role.members.add(org_admin)
|
||||
wfjt_node.credential = machine_credential
|
||||
wfjt_node.save()
|
||||
access = WorkflowJobTemplateNodeAccess(org_admin)
|
||||
assert not access.can_change(wfjt_node, {'inventory': 45})
|
||||
|
||||
def test_access_to_edit_non_JT(self, rando, workflow_job_template, organization, project):
|
||||
workflow_job_template.admin_role.members.add(rando)
|
||||
|
||||
@ -10,6 +10,10 @@ from django.apps import apps
|
||||
|
||||
from awx.main.models import (
|
||||
UnifiedJob,
|
||||
Job,
|
||||
JobTemplate,
|
||||
WorkflowJob,
|
||||
WorkflowJobTemplate,
|
||||
NotificationTemplate,
|
||||
Credential,
|
||||
)
|
||||
@ -20,9 +24,10 @@ from awx.main.migrations._reencrypt import (
|
||||
_notification_templates,
|
||||
_credentials,
|
||||
_unified_jobs,
|
||||
_encrypt_survey_passwords
|
||||
)
|
||||
|
||||
from awx.main.utils import decrypt_field
|
||||
from awx.main.utils import decrypt_field, get_encryption_key, decrypt_value
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@ -93,3 +98,62 @@ def test_unified_job_migration(old_enc, new_enc, value):
|
||||
# Exception if the encryption type of AESCBC is not properly skipped, ensures
|
||||
# our `startswith` calls don't have typos
|
||||
_unified_jobs(apps)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize("attr, cls", [
|
||||
['job_template', JobTemplate],
|
||||
['workflow_job_template', WorkflowJobTemplate]
|
||||
])
|
||||
def test_survey_default_password_encryption(attr, cls, request):
|
||||
factory = request.getfuncargvalue('{}_factory'.format(attr))
|
||||
jt = getattr(factory('jt'), attr)
|
||||
jt.survey_enabled = True
|
||||
jt.survey_spec = {
|
||||
'description': 'A survey',
|
||||
'spec': [{
|
||||
'index': 0,
|
||||
'question_name': 'What is your password?',
|
||||
'required': True,
|
||||
'variable': 'secret_value',
|
||||
'default': 'SUPERSECRET',
|
||||
'type': 'password'
|
||||
}],
|
||||
'name': 'my survey'
|
||||
}
|
||||
jt.save()
|
||||
|
||||
_encrypt_survey_passwords(Job, JobTemplate, WorkflowJob, WorkflowJobTemplate)
|
||||
spec = cls.objects.get(pk=jt.pk).survey_spec['spec']
|
||||
assert decrypt_value(get_encryption_key('value', pk=None), spec[0]['default']) == 'SUPERSECRET'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize("attr, cls", [
|
||||
['job_template', Job],
|
||||
['workflow_job_template', WorkflowJob]
|
||||
])
|
||||
def test_job_survey_vars_encryption(attr, cls, request):
|
||||
factory = request.getfuncargvalue('{}_factory'.format(attr))
|
||||
jt = getattr(factory('jt'), attr)
|
||||
jt.survey_enabled = True
|
||||
jt.survey_spec = {
|
||||
'description': 'A survey',
|
||||
'spec': [{
|
||||
'index': 0,
|
||||
'question_name': 'What is your password?',
|
||||
'required': True,
|
||||
'variable': 'secret_value',
|
||||
'default': '',
|
||||
'type': 'password'
|
||||
}],
|
||||
'name': 'my survey'
|
||||
}
|
||||
jt.save()
|
||||
job = jt.create_unified_job()
|
||||
job.extra_vars = json.dumps({'secret_value': 'SUPERSECRET'})
|
||||
job.save()
|
||||
|
||||
_encrypt_survey_passwords(Job, JobTemplate, WorkflowJob, WorkflowJobTemplate)
|
||||
job = cls.objects.get(pk=job.pk)
|
||||
assert json.loads(job.decrypted_extra_vars()) == {'secret_value': 'SUPERSECRET'}
|
||||
|
||||
@ -1,15 +1,16 @@
|
||||
import mock
|
||||
import pytest
|
||||
import requests
|
||||
from copy import deepcopy
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
from awx.api.views import (
|
||||
ApiVersionRootView,
|
||||
JobTemplateLabelList,
|
||||
JobTemplateSurveySpec,
|
||||
InventoryInventorySourcesUpdate,
|
||||
HostInsights,
|
||||
JobTemplateSurveySpec
|
||||
)
|
||||
|
||||
from awx.main.models import (
|
||||
@ -77,19 +78,6 @@ class TestJobTemplateLabelList:
|
||||
assert mixin_unattach.called_with(mock_request, None, None)
|
||||
|
||||
|
||||
class TestJobTemplateSurveySpec(object):
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
def test_get_password_type(self, mocker, mock_response_new):
|
||||
JobTemplate = namedtuple('JobTemplate', 'survey_spec')
|
||||
obj = JobTemplate(survey_spec={'spec':[{'type': 'password', 'default': 'my_default'}]})
|
||||
with mocker.patch.object(JobTemplateSurveySpec, 'get_object', return_value=obj):
|
||||
view = JobTemplateSurveySpec()
|
||||
response = view.get(mocker.MagicMock())
|
||||
assert response == mock_response_new
|
||||
# wish there was a better way to do this!
assert response.call_args[0][1]['spec'][0]['default'] == '$encrypted$'


class TestInventoryInventorySourcesUpdate:

@pytest.mark.parametrize("can_update, can_access, is_source, is_up_on_proj, expected", [
@ -220,3 +208,158 @@ class TestHostInsights():

assert resp.data['error'] == 'The Insights Credential for "inventory_name_here" was not found.'
assert resp.status_code == 404


class TestInventoryHostsList(object):

def test_host_list_smart_inventory(self, mocker):
Inventory = namedtuple('Inventory', ['kind', 'host_filter', 'hosts', 'organization_id'])
obj = Inventory(kind='smart', host_filter='localhost', hosts=HostManager(), organization_id=None)
obj.hosts.instance = obj

with mock.patch.object(InventoryHostsList, 'get_parent_object', return_value=obj):
with mock.patch('awx.main.utils.filters.SmartFilter.query_from_string') as mock_query:
view = InventoryHostsList()
view.get_queryset()
mock_query.assert_called_once_with('localhost')


class TestSurveySpecValidation:

def test_create_text_encrypted(self):
view = JobTemplateSurveySpec()
resp = view._validate_spec_data({
"name": "new survey",
"description": "foobar",
"spec": [
{
"question_description": "",
"min": 0,
"default": "$encrypted$",
"max": 1024,
"required": True,
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
"type": "text"
}
]
}, {})
assert resp.status_code == 400
assert '$encrypted$ is a reserved keyword for password question defaults' in str(resp.data['error'])

def test_change_encrypted_var_name(self):
view = JobTemplateSurveySpec()
old = {
"name": "old survey",
"description": "foobar",
"spec": [
{
"question_description": "",
"min": 0,
"default": "$encrypted$foooooooo",
"max": 1024,
"required": True,
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
"type": "password"
}
]
}
new = deepcopy(old)
new['spec'][0]['variable'] = 'openstack_username'
resp = view._validate_spec_data(new, old)
assert resp.status_code == 400
assert 'may not be used for new default' in str(resp.data['error'])

def test_use_saved_encrypted_default(self):
'''
Save is allowed, the $encrypted$ replacement is done
'''
view = JobTemplateSurveySpec()
old = {
"name": "old survey",
"description": "foobar",
"spec": [
{
"question_description": "",
"min": 0,
"default": "$encrypted$foooooooo",
"max": 1024,
"required": True,
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
"type": "password"
}
]
}
new = deepcopy(old)
new['spec'][0]['default'] = '$encrypted$'
new['spec'][0]['required'] = False
resp = view._validate_spec_data(new, old)
assert resp is None
assert new == {
"name": "old survey",
"description": "foobar",
"spec": [
{
"question_description": "",
"min": 0,
"default": "$encrypted$foooooooo", # default remained the same
"max": 1024,
"required": False, # only thing changed
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
"type": "password"
}
]
}

def test_use_saved_empty_string_default(self):
'''
Save is allowed, the $encrypted$ replacement is done with empty string
The empty string value for default is unencrypted,
unlike all other password questions
'''
view = JobTemplateSurveySpec()
old = {
"name": "old survey",
"description": "foobar",
"spec": [
{
"question_description": "",
"min": 0,
"default": "",
"max": 1024,
"required": True,
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
"type": "password"
}
]
}
new = deepcopy(old)
new['spec'][0]['default'] = '$encrypted$'
resp = view._validate_spec_data(new, old)
assert resp is None
assert new == {
"name": "old survey",
"description": "foobar",
"spec": [
{
"question_description": "",
"min": 0,
"default": "", # still has old unencrypted default
"max": 1024,
"required": True,
"choices": "",
"variable": "openshift_username",
"question_name": "OpenShift Username",
"type": "password"
}
]
}

@ -2,7 +2,7 @@ import tempfile
import json

import pytest

from awx.main.utils.encryption import encrypt_value
from awx.main.tasks import RunJob
from awx.main.models import (
Job,
@ -10,6 +10,8 @@ from awx.main.models import (
WorkflowJobTemplate
)

ENCRYPTED_SECRET = encrypt_value('secret')


@pytest.mark.survey
class SurveyVariableValidation:
@ -61,6 +63,7 @@ class SurveyVariableValidation:
@pytest.fixture
def job(mocker):
ret = mocker.MagicMock(**{
'decrypted_extra_vars.return_value': '{\"secret_key\": \"my_password\"}',
'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
'extra_vars_dict': {"secret_key": "my_password"},
'pk': 1, 'job_template.pk': 1, 'job_template.name': '',
@ -143,6 +146,15 @@ def test_update_kwargs_survey_invalid_default(survey_spec_factory):
assert json.loads(defaulted_extra_vars['extra_vars'])['var2'] == 2


@pytest.mark.survey
def test_display_survey_spec_encrypts_default(survey_spec_factory):
spec = survey_spec_factory('var2')
spec['spec'][0]['type'] = 'password'
spec['spec'][0]['default'] = 'some-default'
jt = JobTemplate(name="test-jt", survey_spec=spec, survey_enabled=True)
assert jt.display_survey_spec()['spec'][0]['default'] == '$encrypted$'


@pytest.mark.survey
@pytest.mark.parametrize("question_type,default,min,max,expect_use,expect_value", [
("text", "", 0, 0, True, ''), # default used
@ -183,6 +195,46 @@ def test_optional_survey_question_defaults(
assert 'c' not in defaulted_extra_vars['extra_vars']


@pytest.mark.survey
@pytest.mark.parametrize("question_type,default,maxlen,kwargs,expected", [
('text', None, 5, {}, {}),
('text', '', 5, {}, {'x': ''}),
('text', 'y', 5, {}, {'x': 'y'}),
('text', 'too-long', 5, {}, {}),
('password', None, 5, {}, {}),
('password', '', 5, {}, {'x': ''}),
('password', ENCRYPTED_SECRET, 5, {}, {}), # len(secret) == 6, invalid
('password', ENCRYPTED_SECRET, 10, {}, {'x': ENCRYPTED_SECRET}), # len(secret) < 10, valid
('password', None, 5, {'extra_vars': {'x': '$encrypted$'}}, {}),
('password', '', 5, {'extra_vars': {'x': '$encrypted$'}}, {'x': ''}),
('password', None, 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
('password', '', 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
('password', 'foo', 5, {'extra_vars': {'x': 'y'}}, {'x': 'y'}),
('password', None, 5, {'extra_vars': {'x': ''}}, {'x': ''}),
('password', '', 5, {'extra_vars': {'x': ''}}, {'x': ''}),
('password', 'foo', 5, {'extra_vars': {'x': ''}}, {'x': ''}),
('password', ENCRYPTED_SECRET, 5, {'extra_vars': {'x': '$encrypted$'}}, {}),
('password', ENCRYPTED_SECRET, 10, {'extra_vars': {'x': '$encrypted$'}}, {'x': ENCRYPTED_SECRET}),
])
def test_survey_encryption_defaults(survey_spec_factory, question_type, default, maxlen, kwargs, expected):
spec = survey_spec_factory([
{
"required": True,
"variable": "x",
"min": 0,
"max": maxlen,
"type": question_type
},
])
if default is not None:
spec['spec'][0]['default'] = default
else:
spec['spec'][0].pop('default', None)
jt = JobTemplate(name="test-jt", survey_spec=spec, survey_enabled=True)
extra_vars = json.loads(jt._update_unified_job_kwargs({}, kwargs).get('extra_vars'))
assert extra_vars == expected


@pytest.mark.survey
class TestWorkflowSurveys:
def test_update_kwargs_survey_defaults(self, survey_spec_factory):

@ -16,6 +16,7 @@ from django.conf import settings


from awx.main.models import (
AdHocCommand,
Credential,
CredentialType,
Inventory,
@ -26,10 +27,11 @@ from awx.main.models import (
Project,
ProjectUpdate,
UnifiedJob,
User
)

from awx.main import tasks
from awx.main.utils import encrypt_field
from awx.main.utils import encrypt_field, encrypt_value



@ -304,6 +306,32 @@ class TestGenericRun(TestJobExecution):
assert '--ro-bind %s %s' % (settings.ANSIBLE_VENV_PATH, settings.ANSIBLE_VENV_PATH) in ' '.join(args) # noqa
assert '--ro-bind %s %s' % (settings.AWX_VENV_PATH, settings.AWX_VENV_PATH) in ' '.join(args) # noqa

def test_created_by_extra_vars(self):
self.instance.created_by = User(pk=123, username='angry-spud')
self.task.run(self.pk)

assert self.run_pexpect.call_count == 1
call_args, _ = self.run_pexpect.call_args_list[0]
args, cwd, env, stdout = call_args
assert '"tower_user_id": 123,' in ' '.join(args)
assert '"tower_user_name": "angry-spud"' in ' '.join(args)
assert '"awx_user_id": 123,' in ' '.join(args)
assert '"awx_user_name": "angry-spud"' in ' '.join(args)

def test_survey_extra_vars(self):
self.instance.extra_vars = json.dumps({
'super_secret': encrypt_value('CLASSIFIED', pk=None)
})
self.instance.survey_passwords = {
'super_secret': '$encrypted$'
}
self.task.run(self.pk)

assert self.run_pexpect.call_count == 1
call_args, _ = self.run_pexpect.call_args_list[0]
args, cwd, env, stdout = call_args
assert '"super_secret": "CLASSIFIED"' in ' '.join(args)

def test_awx_task_env(self):
patch = mock.patch('awx.main.tasks.settings.AWX_TASK_ENV', {'FOO': 'BAR'})
patch.start()
@ -316,6 +344,35 @@ class TestGenericRun(TestJobExecution):
assert env['FOO'] == 'BAR'


class TestAdhocRun(TestJobExecution):

TASK_CLS = tasks.RunAdHocCommand

def get_instance(self):
return AdHocCommand(
pk=1,
created=datetime.utcnow(),
inventory=Inventory(pk=1),
status='new',
cancel_flag=False,
verbosity=3,
extra_vars={'awx_foo': 'awx-bar'}
)

def test_created_by_extra_vars(self):
self.instance.created_by = User(pk=123, username='angry-spud')
self.task.run(self.pk)

assert self.run_pexpect.call_count == 1
call_args, _ = self.run_pexpect.call_args_list[0]
args, cwd, env, stdout = call_args
assert '"tower_user_id": 123,' in ' '.join(args)
assert '"tower_user_name": "angry-spud"' in ' '.join(args)
assert '"awx_user_id": 123,' in ' '.join(args)
assert '"awx_user_name": "angry-spud"' in ' '.join(args)
assert '"awx_foo": "awx-bar' in ' '.join(args)


class TestIsolatedExecution(TestJobExecution):

REMOTE_HOST = 'some-isolated-host'
@ -670,7 +727,8 @@ class TestJobCredentials(TestJobExecution):
inputs = {
'subscription': 'some-subscription',
'username': 'bob',
'password': 'secret'
'password': 'secret',
'cloud_environment': 'foobar'
}
)
credential.inputs['password'] = encrypt_field(credential, 'password')
@ -685,6 +743,7 @@ class TestJobCredentials(TestJobExecution):
assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription'
assert env['AZURE_AD_USER'] == 'bob'
assert env['AZURE_PASSWORD'] == 'secret'
assert env['AZURE_CLOUD_ENVIRONMENT'] == 'foobar'

def test_vmware_credentials(self):
vmware = CredentialType.defaults['vmware']()
@ -738,6 +797,41 @@ class TestJobCredentials(TestJobExecution):
self.run_pexpect.side_effect = run_pexpect_side_effect
self.task.run(self.pk)

@pytest.mark.parametrize("ca_file", [None, '/path/to/some/file'])
def test_rhv_credentials(self, ca_file):
rhv = CredentialType.defaults['rhv']()
inputs = {
'host': 'some-ovirt-host.example.org',
'username': 'bob',
'password': 'some-pass',
}
if ca_file:
inputs['ca_file'] = ca_file
credential = Credential(
pk=1,
credential_type=rhv,
inputs=inputs
)
credential.inputs['password'] = encrypt_field(credential, 'password')
self.instance.extra_credentials.add(credential)

def run_pexpect_side_effect(*args, **kwargs):
args, cwd, env, stdout = args
config = ConfigParser.ConfigParser()
config.read(env['OVIRT_INI_PATH'])
assert config.get('ovirt', 'ovirt_url') == 'some-ovirt-host.example.org'
assert config.get('ovirt', 'ovirt_username') == 'bob'
assert config.get('ovirt', 'ovirt_password') == 'some-pass'
if ca_file:
assert config.get('ovirt', 'ovirt_ca_file') == ca_file
else:
with pytest.raises(ConfigParser.NoOptionError):
config.get('ovirt', 'ovirt_ca_file')
return ['successful', 0]

self.run_pexpect.side_effect = run_pexpect_side_effect
self.task.run(self.pk)

def test_net_credentials(self):
net = CredentialType.defaults['net']()
credential = Credential(
@ -1368,6 +1462,12 @@ class TestInventoryUpdateCredentials(TestJobExecution):
assert env['GCE_ZONE'] == expected_gce_zone
ssh_key_data = env['GCE_PEM_FILE_PATH']
assert open(ssh_key_data, 'rb').read() == self.EXAMPLE_PRIVATE_KEY

config = ConfigParser.ConfigParser()
config.read(env['GCE_INI_PATH'])
assert 'cache' in config.sections()
assert config.getint('cache', 'cache_max_age') == 0

return ['successful', 0]

self.run_pexpect.side_effect = run_pexpect_side_effect

@ -51,3 +51,24 @@ def test_encrypt_field_with_ask():
def test_encrypt_field_with_empty_value():
encrypted = encryption.encrypt_field(Setting(value=None), 'value')
assert encrypted is None


class TestSurveyReversibilityValue:
'''
Tests to enforce the contract with survey password question encrypted values
'''
_key = encryption.get_encryption_key('value', None)

def test_encrypt_empty_string(self):
assert encryption.encrypt_value('') == ''
# the reverse, decryption, does not work

def test_encrypt_encryption_key(self):
assert encryption.encrypt_value('$encrypted$') == '$encrypted$'
# the reverse, decryption, does not work

def test_encrypt_empty_string_twice(self):
# Encryption is idempotent
val = encryption.encrypt_value('foobar')
val2 = encryption.encrypt_value(val)
assert encryption.decrypt_value(self._key, val2) == 'foobar'

@ -729,6 +729,7 @@ def wrap_args_with_proot(args, cwd, **kwargs):
- /var/log/supervisor
'''
from django.conf import settings
cwd = os.path.realpath(cwd)
new_args = [getattr(settings, 'AWX_PROOT_CMD', 'bwrap'), '--unshare-pid', '--dev-bind', '/', '/']
hide_paths = [settings.AWX_PROOT_BASE_PATH]
if not kwargs.get('isolated'):
@ -847,6 +848,7 @@ class OutputEventFilter(object):
def __init__(self, fileobj=None, event_callback=None, raw_callback=None):
self._fileobj = fileobj
self._event_callback = event_callback
self._event_ct = 0
self._raw_callback = raw_callback
self._counter = 1
self._start_line = 0
@ -901,6 +903,7 @@ class OutputEventFilter(object):
self._start_line += n_lines
if self._event_callback:
self._event_callback(event_data)
self._event_ct += 1

if next_event_data.get('uuid', None):
self._current_event_data = next_event_data

@ -1,6 +1,7 @@
import base64
import hashlib
import logging
from collections import namedtuple

import six
from cryptography.fernet import Fernet, InvalidToken
@ -8,7 +9,8 @@ from cryptography.hazmat.backends import default_backend
from django.utils.encoding import smart_str


__all__ = ['get_encryption_key', 'encrypt_field', 'decrypt_field', 'decrypt_value']
__all__ = ['get_encryption_key', 'encrypt_value', 'encrypt_field',
'decrypt_field', 'decrypt_value']

logger = logging.getLogger('awx.main.utils.encryption')

@ -50,6 +52,11 @@ def get_encryption_key(field_name, pk=None):
return base64.urlsafe_b64encode(h.digest())


def encrypt_value(value, pk=None):
TransientField = namedtuple('TransientField', ['pk', 'value'])
return encrypt_field(TransientField(pk=pk, value=value), 'value')


def encrypt_field(instance, field_name, ask=False, subfield=None, skip_utf8=False):
'''
Return content of the given instance and field name encrypted.

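The hunk above introduces encrypt_value as a thin wrapper that reuses the field-oriented encrypt_field machinery by wrapping a bare value in a transient namedtuple. A minimal round-trip sketch, assuming the AWX utilities are importable and using the same ('value', pk=None) key derivation the migration tests above rely on (the secret is a placeholder):

from awx.main.utils.encryption import encrypt_value, decrypt_value, get_encryption_key

token = encrypt_value('hunter2')            # yields a '$encrypted$...' payload
key = get_encryption_key('value', pk=None)  # matches encrypt_value's default pk
assert decrypt_value(key, token) == 'hunter2'
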
@ -120,6 +120,7 @@ class LogstashFormatter(LogstashFormatterVersion1):
# exist if SQL_DEBUG is turned on in settings.
headers = [
(float, 'X-API-Time'), # may end with an 's' "0.33s"
(float, 'X-API-Total-Time'),
(int, 'X-API-Query-Count'),
(float, 'X-API-Query-Time'), # may also end with an 's'
(str, 'X-API-Node'),
@ -131,9 +132,11 @@ class LogstashFormatter(LogstashFormatterVersion1):
'path': request.path,
'path_info': request.path_info,
'query_string': request.META['QUERY_STRING'],
'data': request.data,
}

if hasattr(request, 'data'):
data_for_log['request']['data'] = request.data

return data_for_log

def get_extra_fields(self, record):

@ -16,4 +16,14 @@

- name: remove build artifacts
file: path="{{item}}" state=absent
register: result
with_items: "{{cleanup_dirs}}"
until: result|succeeded
ignore_errors: yes
retries: 3
delay: 5

- name: fail if build artifacts were not cleaned
fail:
msg: 'Unable to cleanup build artifacts'
when: not result|succeeded

262
awx/plugins/inventory/ovirt4.py
Executable file
@ -0,0 +1,262 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

"""
oVirt dynamic inventory script
=================================

Generates dynamic inventory file for oVirt.

The script will return the following attributes for each virtual machine:
- id
- name
- host
- cluster
- status
- description
- fqdn
- os_type
- template
- tags
- statistics
- devices

When run in --list mode, virtual machines are grouped by the following categories:
- cluster
- tag
- status

Note: If a virtual machine has more than one tag, it will appear in each of those
tag records.

Examples:
# Execute update of system on webserver virtual machine:

$ ansible -i contrib/inventory/ovirt4.py webserver -m yum -a "name=* state=latest"

# Get webserver virtual machine information:

$ contrib/inventory/ovirt4.py --host webserver

Author: Ondra Machacek (@machacekondra)
"""

import argparse
import os
import sys

from collections import defaultdict

try:
import ConfigParser as configparser
except ImportError:
import configparser

try:
import json
except ImportError:
import simplejson as json

try:
import ovirtsdk4 as sdk
import ovirtsdk4.types as otypes
except ImportError:
print('oVirt inventory script requires ovirt-engine-sdk-python >= 4.0.0')
sys.exit(1)


def parse_args():
"""
Create command line parser for oVirt dynamic inventory script.
"""
parser = argparse.ArgumentParser(
description='Ansible dynamic inventory script for oVirt.',
)
parser.add_argument(
'--list',
action='store_true',
default=True,
help='Get data of all virtual machines (default: True).',
)
parser.add_argument(
'--host',
help='Get data of virtual machines running on specified host.',
)
parser.add_argument(
'--pretty',
action='store_true',
default=False,
help='Pretty format (default: False).',
)
return parser.parse_args()


def create_connection():
"""
Create a connection to oVirt engine API.
"""
# Get the path of the configuration file, by default use
# 'ovirt.ini' file in script directory:
default_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'ovirt.ini',
)
config_path = os.environ.get('OVIRT_INI_PATH', default_path)

# Create parser and add ovirt section if it doesn't exist:
config = configparser.SafeConfigParser(
defaults={
'ovirt_url': None,
'ovirt_username': None,
'ovirt_password': None,
'ovirt_ca_file': None,
}
)
if not config.has_section('ovirt'):
config.add_section('ovirt')
config.read(config_path)

# Create a connection with options defined in ini file:
return sdk.Connection(
url=config.get('ovirt', 'ovirt_url'),
username=config.get('ovirt', 'ovirt_username'),
password=config.get('ovirt', 'ovirt_password'),
ca_file=config.get('ovirt', 'ovirt_ca_file'),
insecure=config.get('ovirt', 'ovirt_ca_file') is None,
)

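For reference, create_connection() reads its options from an ovirt.ini with the keys shown above; a sketch that writes one, where the endpoint, credentials, and CA path are all placeholders:

# All values below are placeholders for a real engine deployment.
with open('ovirt.ini', 'w') as f:
    f.write('[ovirt]\n')
    f.write('ovirt_url = https://engine.example.org/ovirt-engine/api\n')
    f.write('ovirt_username = admin@internal\n')
    f.write('ovirt_password = secret\n')
    f.write('ovirt_ca_file = /etc/pki/ovirt-engine/ca.pem\n')
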
def get_dict_of_struct(connection, vm):
"""
Transform SDK Vm Struct type to Python dictionary.
"""
if vm is None:
return dict()

vms_service = connection.system_service().vms_service()
clusters_service = connection.system_service().clusters_service()
vm_service = vms_service.vm_service(vm.id)
devices = vm_service.reported_devices_service().list()
tags = vm_service.tags_service().list()
stats = vm_service.statistics_service().list()
labels = vm_service.affinity_labels_service().list()
groups = clusters_service.cluster_service(
vm.cluster.id
).affinity_groups_service().list()

return {
'id': vm.id,
'name': vm.name,
'host': connection.follow_link(vm.host).name if vm.host else None,
'cluster': connection.follow_link(vm.cluster).name,
'status': str(vm.status),
'description': vm.description,
'fqdn': vm.fqdn,
'os_type': vm.os.type,
'template': connection.follow_link(vm.template).name,
'tags': [tag.name for tag in tags],
'affinity_labels': [label.name for label in labels],
'affinity_groups': [
group.name for group in groups
if vm.name in [vm.name for vm in connection.follow_link(group.vms)]
],
'statistics': dict(
(stat.name, stat.values[0].datum) for stat in stats
),
'devices': dict(
(device.name, [ip.address for ip in device.ips]) for device in devices if device.ips
),
'ansible_host': next((device.ips[0].address for device in devices if device.ips), None)
}


def get_data(connection, vm_name=None):
"""
Obtain data of `vm_name` if specified, otherwise obtain data of all vms.
"""
vms_service = connection.system_service().vms_service()
clusters_service = connection.system_service().clusters_service()

if vm_name:
vm = vms_service.list(search='name=%s' % vm_name) or [None]
data = get_dict_of_struct(
connection=connection,
vm=vm[0],
)
else:
vms = dict()
data = defaultdict(list)
for vm in vms_service.list():
name = vm.name
vm_service = vms_service.vm_service(vm.id)
cluster_service = clusters_service.cluster_service(vm.cluster.id)

# Add vm to vms dict:
vms[name] = get_dict_of_struct(connection, vm)

# Add vm to cluster group:
cluster_name = connection.follow_link(vm.cluster).name
data['cluster_%s' % cluster_name].append(name)

# Add vm to tag group:
tags_service = vm_service.tags_service()
for tag in tags_service.list():
data['tag_%s' % tag.name].append(name)

# Add vm to status group:
data['status_%s' % vm.status].append(name)

# Add vm to affinity group:
for group in cluster_service.affinity_groups_service().list():
if vm.name in [
v.name for v in connection.follow_link(group.vms)
]:
data['affinity_group_%s' % group.name].append(vm.name)

# Add vm to affinity label group:
affinity_labels_service = vm_service.affinity_labels_service()
for label in affinity_labels_service.list():
data['affinity_label_%s' % label.name].append(name)

data["_meta"] = {
'hostvars': vms,
}

return data

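An illustrative shape of the structure get_data() returns in --list mode, built from the group-key prefixes above; every name and id below is made up:

example_output = {
    'cluster_Default': ['webserver'],
    'tag_production': ['webserver'],
    'status_up': ['webserver'],
    '_meta': {
        'hostvars': {
            'webserver': {'id': '0001', 'cluster': 'Default', 'status': 'up'},
        },
    },
}
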
def main():
args = parse_args()
connection = create_connection()

print(
json.dumps(
obj=get_data(
connection=connection,
vm_name=args.host,
),
sort_keys=args.pretty,
indent=args.pretty * 2,
)
)

if __name__ == '__main__':
main()
129
awx/plugins/inventory/tower.py
Executable file
@ -0,0 +1,129 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
"""
Ansible Tower/AWX dynamic inventory script
==========================================

Generates dynamic inventory for Tower

Author: Matthew Jones (@matburt)
"""

import argparse
import re
import os
import sys
import json
import requests
from requests.auth import HTTPBasicAuth
from urlparse import urljoin


def parse_configuration():
"""
Create command line parser for the Ansible Tower dynamic inventory script.
"""
parser = argparse.ArgumentParser(
description='Ansible dynamic inventory script for Ansible Tower.',
)
parser.add_argument(
'--list',
action='store_true',
default=True,
help='Return all hosts known to Tower given a particular inventory',
)
parser.parse_args()
host_name = os.environ.get("TOWER_HOST", None)
username = os.environ.get("TOWER_USERNAME", None)
password = os.environ.get("TOWER_PASSWORD", None)
ignore_ssl = os.environ.get("TOWER_IGNORE_SSL", "1").lower() in ("1", "yes", "true")
inventory = os.environ.get("TOWER_INVENTORY", None)
license_type = os.environ.get("TOWER_LICENSE_TYPE", "enterprise")

errors = []
if not host_name:
errors.append("Missing TOWER_HOST in environment")
if not username:
errors.append("Missing TOWER_USERNAME in environment")
if not password:
errors.append("Missing TOWER_PASSWORD in environment")
if not inventory:
errors.append("Missing TOWER_INVENTORY in environment")
if errors:
raise RuntimeError("\n".join(errors))

return dict(tower_host=host_name,
tower_user=username,
tower_pass=password,
tower_inventory=inventory,
tower_license_type=license_type,
ignore_ssl=ignore_ssl)

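Since parse_configuration() is driven entirely by environment variables, an invocation sketch looks like this (host, credentials, and inventory id are all placeholders):

import os
import subprocess

os.environ.update({
    'TOWER_HOST': 'https://tower.example.org',  # placeholder
    'TOWER_USERNAME': 'admin',                  # placeholder
    'TOWER_PASSWORD': 'password',               # placeholder
    'TOWER_INVENTORY': '1',                     # placeholder inventory id
})
print(subprocess.check_output(['awx/plugins/inventory/tower.py', '--list']))
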
def read_tower_inventory(tower_host, tower_user, tower_pass, inventory, license_type, ignore_ssl=False):
if not re.match('(?:http|https)://', tower_host):
tower_host = "https://{}".format(tower_host)
inventory_url = urljoin(tower_host, "/api/v2/inventories/{}/script/?hostvars=1&towervars=1&all=1".format(inventory.replace('/', '')))
config_url = urljoin(tower_host, "/api/v2/config/")
try:
if license_type != "open":
config_response = requests.get(config_url,
auth=HTTPBasicAuth(tower_user, tower_pass),
verify=not ignore_ssl)
if config_response.ok:
source_type = config_response.json()['license_info']['license_type']
if not source_type == license_type:
raise RuntimeError("Tower server licenses must match: source: {} local: {}".format(source_type,
license_type))
else:
raise RuntimeError("Failed to validate the license of the remote Tower: {}".format(config_response.data))

response = requests.get(inventory_url,
auth=HTTPBasicAuth(tower_user, tower_pass),
verify=not ignore_ssl)
if response.ok:
return response.json()
json_reason = response.json()
reason = json_reason.get('detail', 'Retrieving Tower Inventory Failed')
except requests.ConnectionError, e:
reason = "Connection to remote host failed: {}".format(e)
except json.JSONDecodeError, e:
reason = "Failed to parse json from host: {}".format(e)
raise RuntimeError(reason)


def main():
config = parse_configuration()
inventory_hosts = read_tower_inventory(config['tower_host'],
config['tower_user'],
config['tower_pass'],
config['tower_inventory'],
config['tower_license_type'],
ignore_ssl=config['ignore_ssl'])
print(
json.dumps(
inventory_hosts
)
)


if __name__ == '__main__':
main()
@ -41,7 +41,8 @@ def main():
total_mem_value = out.split()[7]
if int(total_mem_value) <= 2048:
cap = 50
cap = 50 + ((int(total_mem_value) / 1024) - 2) * 75
else:
cap = 50 + ((int(total_mem_value) / 1024) - 2) * 75

# Module never results in a change
module.exit_json(changed=False, capacity=cap, version=version)

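A worked example of the capacity formula in the hunk above, under Python 2 integer division:

total_mem_value = 4096                               # 4 GB reported by free
cap = 50 + ((int(total_mem_value) / 1024) - 2) * 75  # 50 + (4 - 2) * 75
assert cap == 200
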
@ -164,7 +164,7 @@ STDOUT_MAX_BYTES_DISPLAY = 1048576

# Returned in the header on event api lists as a recommendation to the UI
# on how many events to display before truncating/hiding
RECOMMENDED_MAX_EVENTS_DISPLAY_HEADER = 4000
MAX_UI_JOB_EVENTS = 4000

# The maximum size of the ansible callback event's res data structure
# beyond this limit the value will be removed
@ -226,6 +226,7 @@ TEMPLATES = [
]

MIDDLEWARE_CLASSES = ( # NOQA
'awx.main.middleware.TimingMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
@ -517,7 +518,7 @@ SOCIAL_AUTH_INACTIVE_USER_URL = '/sso/inactive/'

SOCIAL_AUTH_RAISE_EXCEPTIONS = False
SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = False
SOCIAL_AUTH_SLUGIFY_USERNAMES = True
#SOCIAL_AUTH_SLUGIFY_USERNAMES = True
SOCIAL_AUTH_CLEAN_USERNAMES = True

SOCIAL_AUTH_SANITIZE_REDIRECTS = True
@ -819,6 +820,26 @@ OPENSTACK_HOST_FILTER = r'^.+$'
OPENSTACK_EXCLUDE_EMPTY_GROUPS = True
OPENSTACK_INSTANCE_ID_VAR = 'openstack.id'

# ---------------------
# ----- oVirt4 -----
# ---------------------
RHV_ENABLED_VAR = 'status'
RHV_ENABLED_VALUE = 'up'
RHV_GROUP_FILTER = r'^.+$'
RHV_HOST_FILTER = r'^.+$'
RHV_EXCLUDE_EMPTY_GROUPS = True
RHV_INSTANCE_ID_VAR = 'id'

# ---------------------
# ----- Tower -----
# ---------------------
TOWER_ENABLED_VAR = 'remote_tower_enabled'
TOWER_ENABLED_VALUE = 'true'
TOWER_GROUP_FILTER = r'^.+$'
TOWER_HOST_FILTER = r'^.+$'
TOWER_EXCLUDE_EMPTY_GROUPS = True
TOWER_INSTANCE_ID_VAR = 'remote_tower_id'

# ---------------------
# ----- Foreman -----
# ---------------------
@ -1122,8 +1143,12 @@ LOGGING = {
},
}
}
# Apply coloring to messages logged to the console
COLOR_LOGS = False

# https://github.com/django-polymorphic/django-polymorphic/issues/195
# FIXME: Disabling models.E006 warning until we can rename Project and InventorySource
SILENCED_SYSTEM_CHECKS = ['models.E006']

# Use middleware to get request statistics
AWX_REQUEST_PROFILE = False

@ -241,7 +241,8 @@ class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
"""
key = self.conf.get(conf_key, default_attribute)
value = attributes[key] if key in attributes else None
if isinstance(value, list):
# In certain implementations (like https://pagure.io/ipsilon) this value is a string, not a list
if isinstance(value, (list, tuple)):
value = value[0]
if conf_key in ('attr_first_name', 'attr_last_name', 'attr_username', 'attr_email') and value is None:
logger.warn("Could not map user detail '%s' from SAML attribute '%s'; "
@ -309,7 +310,7 @@ def _update_m2m_from_groups(user, ldap_user, rel, opts, remove=True):
should_add = True
if should_add:
rel.add(user)
elif remove:
elif remove and user in rel.all():
rel.remove(user)



@ -1,6 +1,7 @@
import ldap

from awx.sso.backends import LDAPSettings
from awx.sso.validators import validate_ldap_filter


def test_ldap_default_settings(mocker):
@ -15,4 +16,11 @@ def test_ldap_default_network_timeout(mocker):
from_db = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=from_db):
settings = LDAPSettings()
assert settings.CONNECTION_OPTIONS[ldap.OPT_NETWORK_TIMEOUT] == 30
assert settings.CONNECTION_OPTIONS == {
ldap.OPT_REFERRALS: 0,
ldap.OPT_NETWORK_TIMEOUT: 30
}


def test_ldap_filter_validator():
validate_ldap_filter('(test-uid=%(user)s)', with_user=True)

@ -47,7 +47,7 @@ def validate_ldap_filter(value, with_user=False):
dn_value = value.replace('%(user)s', 'USER')
else:
dn_value = value
if re.match(r'^\([A-Za-z0-9]+?=[^()]+?\)$', dn_value):
if re.match(r'^\([A-Za-z0-9-]+?=[^()]+?\)$', dn_value):
return
elif re.match(r'^\([&|!]\(.*?\)\)$', dn_value):
try:

@ -458,7 +458,6 @@
align-items: center;
justify-content: center;
border:1px solid @field-border;
border-right: 0px;
}

.Form-lookupButton:hover {
@ -470,7 +469,6 @@
.Form-lookupButton:active,
.Form-lookupButton:focus {
border: 1px solid @field-border;
border-right: 0px;
}

.CodeMirror {

@ -149,7 +149,7 @@ table.ui-datepicker-calendar {

.ui-widget-content {
background-image: none;
background-color: @default-secondary-bg;
background-color: @default-bg;

a,
a:visited,

@ -248,8 +248,7 @@ table, tbody {
}

.List-buttonDefault[disabled] {
color: @d7grey;
border-color: @d7grey;
opacity: 0.65;
}

.List-searchDropdown {

@ -72,7 +72,7 @@ function AtFormController (eventService, strings) {
const data = vm.components
.filter(component => component.category === 'input')
.reduce((values, component) => {
if (!component.state._value) {
if (component.state._value === undefined) {
return values;
}


@ -84,6 +84,7 @@ function AtInputLookupController (baseInputController, $q, $state) {
scope.state._touched = true;

if (scope.state._displayValue === '' && !scope.state._required) {
scope.state._value = null;
return vm.check({ isValid: true });
}


@ -62,13 +62,20 @@ function requestWithCache (config) {
* @yields {boolean} - Indicating a match has been found. If so, the results
* are set on the model.
*/
function search (params, config) {
function search (params = {}, config = {}) {
const req = {
method: 'GET',
url: this.path,
params
url: this.path
};

if (typeof params === 'string') {
req.url += `?${params}`;
} else if (Array.isArray(params)) {
|
||||
req.url += `?${params.join('&')}`;
|
||||
} else {
|
||||
req.params = params;
|
||||
}
|
||||
|
||||
return $http(req)
|
||||
.then(({ data }) => {
|
||||
if (!data.count) {
|
||||
|
||||
@@ -205,7 +205,8 @@ export default [
                 scope: $scope.$parent,
                 variable: name,
                 parse_variable: 'parseType',
-                field_id: form.formDef.name + '_' + name
+                field_id: form.formDef.name + '_' + name,
+                readOnly: $scope.$parent.configDataResolve[name] && $scope.$parent.configDataResolve[name].disabled ? true : false
             });
             $scope.parseTypeChange('parseType', name);
         }

@@ -37,8 +37,14 @@ export default ['Rest', 'Wait',
         callback: 'loadCredentialKindOptions'
     });

-    $scope.inputs_help_text = _.get(options, 'actions.POST.inputs.help_text', "Specification for credential type inputs");
-    $scope.injectors_help_text = _.get(options, 'actions.POST.injectors.help_text', "Specification for credential type injector");
+    const docs_url = 'https://docs.ansible.com/ansible-tower/latest/html/userguide/credential_types.html#getting-started-with-credential-types';
+    const docs_help_text = `<br><br><a href=${docs_url}>Getting Started with Credential Types</a>`;
+
+    const api_inputs_help_text = _.get(options, 'actions.POST.inputs.help_text', "Specification for credential type inputs.");
+    const api_injectors_help_text = _.get(options, 'actions.POST.injectors.help_text', "Specification for credential type injector.");
+
+    $scope.inputs_help_text = api_inputs_help_text + docs_help_text;
+    $scope.injectors_help_text = api_injectors_help_text + docs_help_text;

     if (!options.actions.POST) {
         $state.go("^");

@@ -36,8 +36,14 @@ export default ['Rest', 'Wait',
         callback: 'choicesReadyCredentialTypes'
     });

-    $scope.inputs_help_text = _.get(options, 'actions.POST.inputs.help_text', "Specification for credential type inputs");
-    $scope.injectors_help_text = _.get(options, 'actions.POST.injectors.help_text', "Specification for credential type injector");
+    const docs_url = 'https://docs.ansible.com/ansible-tower/latest/html/userguide/credential_types.html#getting-started-with-credential-types';
+    const docs_help_text = `<br><br><a href=${docs_url}>Getting Started with Credential Types</a>`;
+
+    const api_inputs_help_text = _.get(options, 'actions.POST.inputs.help_text', "Specification for credential type inputs.");
+    const api_injectors_help_text = _.get(options, 'actions.POST.injectors.help_text', "Specification for credential type injector.");
+
+    $scope.inputs_help_text = api_inputs_help_text + docs_help_text;
+    $scope.injectors_help_text = api_injectors_help_text + docs_help_text;

 });
 }
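
Both credential-type hunks above build the tooltip the same way: take the help text from the OPTIONS response (falling back to a static string) and append a documentation link. A condensed sketch of the shared pattern (lodash `_.get` as used above; the helper name is hypothetical):

    const docs_url = 'https://docs.ansible.com/ansible-tower/latest/html/userguide/credential_types.html#getting-started-with-credential-types';
    const DOCS_LINK = `<br><br><a href=${docs_url}>Getting Started with Credential Types</a>`;

    // _.get returns the fallback when the OPTIONS response omits the field,
    // so the tooltip is never empty on older API versions.
    function buildHelpText(options, path, fallback) {
        return _.get(options, path, fallback) + DOCS_LINK;
    }

    // buildHelpText(options, 'actions.POST.inputs.help_text',
    //               'Specification for credential type inputs.');
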
@@ -11,7 +11,7 @@ export default ['$scope', 'Rest', 'CredentialList', 'Prompt', 'ProcessErrors', '
     ProcessErrors, GetBasePath, Wait, $state, $filter, rbacUiControlService, Dataset,
     credentialType, i18n, Credential, CredentialsStrings) {

-    let credential = new Credential();
+    const credential = new Credential();

     var list = CredentialList,
         defaultUrl = GetBasePath('credentials');

@@ -48,9 +48,25 @@ export default ['$scope', 'Rest', 'CredentialList', 'Prompt', 'ProcessErrors', '
                 return;
             }

-            $scope[list.name].forEach(credential => {
-                credential.kind = credentialType.match('id', credential.credential_type).name;
-            });
+            const params = $scope[list.name]
+                .reduce((accumulator, credential) => {
+                    accumulator.push(credential.credential_type);
+
+                    return accumulator;
+                }, [])
+                .filter((id, i, array) => array.indexOf(id) === i)
+                .map(id => `or__id=${id}`);
+
+            credentialType.search(params)
+                .then(found => {
+                    if (!found) {
+                        return;
+                    }
+
+                    $scope[list.name].forEach(credential => {
+                        credential.kind = credentialType.match('id', credential.credential_type).name;
+                    });
+                });
         }

         // iterate over the list and add fields like type label, after the
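
The replacement above stops assuming every credential type is already in memory: it collects the distinct `credential_type` ids visible on the page, fetches only those via `or__id=` filters, and maps ids to names once the lookup resolves. The dedupe-and-map step in isolation (plain JavaScript, sample rows hypothetical; the chain condenses the reduce used above into a map):

    const rows = [
        { credential_type: 1 }, { credential_type: 2 }, { credential_type: 1 }
    ];

    const params = rows
        .map(row => row.credential_type)
        // keep only the first occurrence of each id
        .filter((id, i, array) => array.indexOf(id) === i)
        .map(id => `or__id=${id}`);

    // params → ['or__id=1', 'or__id=2'], ready for credentialType.search(params)
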
@@ -9,11 +9,12 @@
     'adjustGraphSize',
     'templateUrl',
     'i18n',
+    'moment',
     'jobStatusGraphData',
     JobStatusGraph
 ];

-function JobStatusGraph($window, adjustGraphSize, templateUrl, i18n, graphDataService) {
+function JobStatusGraph($window, adjustGraphSize, templateUrl, i18n, moment, graphDataService) {
     return {
         restrict: 'E',
         scope: {

@@ -72,11 +73,11 @@ function JobStatusGraph($window, adjustGraphSize, templateUrl, i18n, graphDataSe
                 }
             });

-            if(period==="day") {
-                timeFormat="%H:%M";
+            if(period === "day") {
+                timeFormat="H:M";
             }
             else {
-                timeFormat = '%m/%d';
+                timeFormat = "MMM D";
             }
             graphData.map(function(series) {
                 series.values = series.values.map(function(d) {

@@ -93,7 +94,8 @@ function JobStatusGraph($window, adjustGraphSize, templateUrl, i18n, graphDataSe
                 .useInteractiveGuideline(true) //We want nice looking tooltips and a guideline!
                 .showLegend(false) //Show the legend, allowing users to turn on/off line series.
                 .showYAxis(true) //Show the y-axis
-                .showXAxis(true); //Show the x-axis
+                .showXAxis(true) //Show the x-axis
+                .margin({ right: 32 });

             job_status_chart.interactiveLayer.tooltip.fixedTop(-10); //distance from the top of the chart to tooltip
             job_status_chart.interactiveLayer.tooltip.distance(-1); //distance from interactive line to tooltip

@@ -101,8 +103,15 @@ function JobStatusGraph($window, adjustGraphSize, templateUrl, i18n, graphDataSe
             job_status_chart.xAxis
                 .axisLabel(i18n._("TIME"))//.showMaxMin(true)
                 .tickFormat(function(d) {
-                    var dx = graphData[0].values[d] && graphData[0].values[d].x || 0;
-                    return dx ? d3.time.format(timeFormat)(new Date(Number(dx+'000'))) : '';
+                    const dx = graphData[0].values[d] && graphData[0].values[d].x || 0;
+
+                    if (!dx) {
+                        return '';
+                    }
+
+                    const tickDate = new Date(Number(dx + '000'));
+
+                    return moment(tickDate).format(timeFormat);
                 });

             job_status_chart.yAxis //Chart y-axis settings
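
The tick formatter above moves from `d3.time.format` patterns (`%H:%M`, `%m/%d`) to moment tokens, and the x values arrive as unix timestamps in seconds, hence the `+ '000'` concatenation before `new Date`. A standalone sketch of the conversion (moment assumed available as injected above; note that in moment tokens `mm` means minutes, while a bare `M` is the month number):

    const dx = 1512086400; // a unix timestamp in seconds (hypothetical value)

    // Appending '000' multiplies by 1000 without floating-point math,
    // producing the milliseconds the Date constructor expects.
    const tickDate = new Date(Number(dx + '000'));

    moment(tickDate).format('MMM D'); // e.g. "Dec 1", the non-"day" period format
    moment(tickDate).format('H:mm');  // e.g. "0:00", an hour:minute rendering
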
@@ -104,12 +104,14 @@ export default ['i18n', function(i18n) {
         smart_inventory: {
             mode: 'all',
             ngClick: "smartInventory()",
-            awToolTip: i18n._("Create a new Smart Inventory from search results."),
+            awToolTip: "{{ smartInventoryButtonTooltip }}",
+            dataTipWatch: 'smartInventoryButtonTooltip',
             actionClass: 'btn List-buttonDefault',
             buttonContent: i18n._('SMART INVENTORY'),
             ngShow: 'canAdd && (hosts.length > 0 || !(searchTags | isEmpty))',
             dataPlacement: "top",
-            ngDisabled: '!enableSmartInventoryButton'
+            ngDisabled: '!enableSmartInventoryButton',
+            showTipWhenDisabled: true
         }
     }
 };

@@ -7,7 +7,7 @@

 function HostsList($scope, HostsList, $rootScope, GetBasePath,
     rbacUiControlService, Dataset, $state, $filter, Prompt, Wait,
-    HostsService, SetStatus, canAdd, $transitions) {
+    HostsService, SetStatus, canAdd, $transitions, InventoryHostsStrings, HostsList) {

     let list = HostsList;

@@ -16,6 +16,7 @@ function HostsList($scope, HostsList, $rootScope, GetBasePath,
     function init(){
         $scope.canAdd = canAdd;
         $scope.enableSmartInventoryButton = false;
+        $scope.smartInventoryButtonTooltip = InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');

         // Search init
         $scope.list = list;

@@ -37,14 +38,16 @@
         if(trans.params('to') && trans.params('to').host_search) {
             let hasMoreThanDefaultKeys = false;
             angular.forEach(trans.params('to').host_search, function(value, key) {
-                if(key !== 'order_by' && key !== 'page_size') {
+                if(key !== 'order_by' && key !== 'page_size' && key !== 'page') {
                     hasMoreThanDefaultKeys = true;
                 }
             });
             $scope.enableSmartInventoryButton = hasMoreThanDefaultKeys ? true : false;
+            $scope.smartInventoryButtonTooltip = hasMoreThanDefaultKeys ? InventoryHostsStrings.get('smartinventorybutton.ENABLED_INSTRUCTIONS') : InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');
         }
         else {
             $scope.enableSmartInventoryButton = false;
+            $scope.smartInventoryButtonTooltip = InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');
         }
     });
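
The transition hook above enables the Smart Inventory button only when the incoming `host_search` params contain something beyond the defaults; adding `page` to the ignored keys keeps plain pagination from activating the button. The predicate in isolation (plain JavaScript; sample params hypothetical):

    var DEFAULT_KEYS = ['order_by', 'page_size', 'page'];

    // True only when the search params include a user-entered filter.
    function hasMoreThanDefaultKeys(hostSearch) {
        return Object.keys(hostSearch || {})
            .some(function(key) { return DEFAULT_KEYS.indexOf(key) === -1; });
    }

    hasMoreThanDefaultKeys({ order_by: 'name', page: 3 });                // false
    hasMoreThanDefaultKeys({ order_by: 'name', name__icontains: 'web' }); // true
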
@@ -83,20 +86,7 @@ function HostsList($scope, HostsList, $rootScope, GetBasePath,
     };

     $scope.smartInventory = function() {
-        // Gather up search terms and pass them to the add smart inventory form
-        let stateParamsCopy = angular.copy($state.params.host_search);
-        let defaults = _.find($state.$current.path, (step) => {
-            if(step && step.params && step.params.hasOwnProperty(`host_search`)){
-                return step.params.hasOwnProperty(`host_search`);
-            }
-        }).params[`host_search`].config.value;
-
-        // Strip defaults out of the state params copy
-        angular.forEach(Object.keys(defaults), function(value) {
-            delete stateParamsCopy[value];
-        });
-
-        $state.go('inventories.addSmartInventory', {hostfilter: JSON.stringify(stateParamsCopy)});
+        $state.go('inventories.addSmartInventory', {hostfilter: JSON.stringify({"host_filter":`${$state.params.host_search.host_filter}`})});
     };

     $scope.editInventory = function(host) {

@@ -114,5 +104,5 @@ function HostsList($scope, HostsList, $rootScope, GetBasePath,

 export default ['$scope', 'HostsList', '$rootScope', 'GetBasePath',
     'rbacUiControlService', 'Dataset', '$state', '$filter', 'Prompt', 'Wait',
-    'HostsService', 'SetStatus', 'canAdd', '$transitions', HostsList
+    'HostsService', 'SetStatus', 'canAdd', '$transitions', 'InventoryHostsStrings', HostsList
 ];
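
Rather than copying every non-default state param into the add form, `smartInventory()` above now forwards only the `host_filter` expression. A sketch of the payload it builds (the `host_search` value is a hypothetical example; `$state` is the ui-router service injected above):

    // e.g. $state.params.host_search = { host_filter: 'name__icontains=web' }
    var hostFilter = $state.params.host_search.host_filter;

    // The add-Smart-Inventory route receives one JSON-encoded param:
    // {"host_filter":"name__icontains=web"}
    $state.go('inventories.addSmartInventory', {
        hostfilter: JSON.stringify({ host_filter: `${hostFilter}` })
    });
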
@@ -141,9 +141,11 @@
                 } else {
                     $state.go($state.current, reloadListStateParams, {reload: true});
                 }
-                $('#group-delete-modal').modal('hide');
-                $('body').removeClass('modal-open');
-                $('.modal-backdrop').remove();
+                setTimeout(function(){
+                    $('#group-delete-modal').modal('hide');
+                    $('body').removeClass('modal-open');
+                    $('.modal-backdrop').remove();
+                }, 1000);
             });
             break;
         default:

@@ -153,9 +155,11 @@
                 } else {
                     $state.go($state.current, reloadListStateParams, {reload: true});
                 }
-                $('#group-delete-modal').modal('hide');
-                $('body').removeClass('modal-open');
-                $('.modal-backdrop').remove();
+                setTimeout(function(){
+                    $('#group-delete-modal').modal('hide');
+                    $('body').removeClass('modal-open');
+                    $('.modal-backdrop').remove();
+                }, 1000);
             });
     }
 };
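
Both hunks above wrap the same three jQuery/Bootstrap cleanup calls in a one-second `setTimeout`, so the modal teardown runs after the list state has reloaded behind it instead of racing the transition. The repeated block could live in a single helper; a sketch (helper name hypothetical, selector and delay taken from the hunks):

    // Hide the delete modal and clear Bootstrap's body/backdrop state once
    // the reloaded list has had time to render underneath it.
    function dismissGroupDeleteModal(delay) {
        setTimeout(function() {
            $('#group-delete-modal').modal('hide');
            $('body').removeClass('modal-open');
            $('.modal-backdrop').remove();
        }, delay || 1000);
    }
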
@@ -5,7 +5,7 @@
 <div class="Modal-title ng-binding">
     <span translate>Delete Group</span>
     <a href="" id="awp-promote" href=""
-        aw-pop-over="<dl><dt>Delete</dt><dd>Deletes groups and hosts associated with the group being deleted. If a group or host is associated with other groups, it will still exist within those groups. Otherwise, the associated groups and hosts will no longer appear in the inventory.</dd>\n<dt style='margin-top: 5px;'>Promote</dt><dd>Groups and hosts associated with the group being removed will be promoted root level. Note: groups already associated with other groups cannot be promoted.</dd></dl>\n" aw-tool-tip="Click for help"
+        aw-pop-over="<dl><dt>Delete</dt><dd>Deletes groups and hosts associated with the group being deleted. If a group or host is associated with other groups, it will still exist within those groups. Otherwise, the associated groups and hosts will no longer appear in the inventory.</dd><dt style='margin-top: 5px;'>Promote</dt><dd>Groups and hosts associated with the group being removed will be promoted root level. Note: groups already associated with other groups cannot be promoted.</dd></dl>" aw-tool-tip="Click for help"
         data-placement="right"
         data-container="body"
         data-title="Delete Group"

@@ -3,13 +3,13 @@
 <div class="modal-content Modal-content">
     <div class="Modal-header">
         <div class="Modal-title ng-binding">
-            <translate>Disassociate Group</translate>
+            <translate>Disassociate Group From Group</translate>
             <a href="" id="awp-promote" href=""
                 aw-pop-over="<dl><dt>Disassociate</dt><dd>Disassociates this group from the currently targeted parent group.</dd></dl>"
                 aw-tool-tip="Click for help"
                 data-placement="right"
                 data-container="body"
-                data-title="Disassociate Group"
+                data-title="Disassociate Group From Group"
                 class="help-link">
                 <i class="fa fa-question-circle"></i>
             </a>

@@ -3,13 +3,13 @@
 <div class="modal-content Modal-content">
     <div class="Modal-header">
         <div class="Modal-title ng-binding">
-            <translate>Disassociate Host</translate>
+            <translate>Disassociate Host From Group</translate>
             <a href="" id="awp-promote" href=""
                 aw-pop-over="<dl><dt>Disassociate</dt><dd>Disassociates this host from the currently targeted parent group.</dd></dl>"
                 aw-tool-tip="Click for help"
                 data-placement="right"
                 data-container="body"
-                data-title="Disassociate Host"
+                data-title="Disassociate Host From Group"
                 class="help-link">
                 <i class="fa fa-question-circle"></i>
             </a>
@@ -6,11 +6,11 @@

 export default ['$scope', 'NestedHostsListDefinition', '$rootScope', 'GetBasePath',
     'rbacUiControlService', 'Dataset', '$state', '$filter', 'Prompt', 'Wait',
-    'HostsService', 'SetStatus', 'canAdd', 'GroupsService', 'ProcessErrors', 'groupData', 'inventoryData',
+    'HostsService', 'SetStatus', 'canAdd', 'GroupsService', 'ProcessErrors', 'groupData', 'inventoryData', 'InventoryHostsStrings',
     '$transitions',
     function($scope, NestedHostsListDefinition, $rootScope, GetBasePath,
         rbacUiControlService, Dataset, $state, $filter, Prompt, Wait,
-        HostsService, SetStatus, canAdd, GroupsService, ProcessErrors, groupData, inventoryData,
+        HostsService, SetStatus, canAdd, GroupsService, ProcessErrors, groupData, inventoryData, InventoryHostsStrings,
         $transitions) {

         let list = NestedHostsListDefinition;

@@ -21,6 +21,7 @@ export default ['$scope', 'NestedHostsListDefinition', '$rootScope', 'GetBasePat
         $scope.canAdd = canAdd;
         $scope.enableSmartInventoryButton = false;
         $scope.disassociateFrom = groupData;
+        $scope.smartInventoryButtonTooltip = InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');

         // Search init
         $scope.list = list;

@@ -52,14 +53,16 @@ export default ['$scope', 'NestedHostsListDefinition', '$rootScope', 'GetBasePat
         if(trans.params('to') && trans.params('to').host_search) {
             let hasMoreThanDefaultKeys = false;
             angular.forEach(trans.params('to').host_search, function(value, key) {
-                if(key !== 'order_by' && key !== 'page_size') {
+                if(key !== 'order_by' && key !== 'page_size' && key !== 'page') {
                     hasMoreThanDefaultKeys = true;
                 }
             });
             $scope.enableSmartInventoryButton = hasMoreThanDefaultKeys ? true : false;
+            $scope.smartInventoryButtonTooltip = hasMoreThanDefaultKeys ? InventoryHostsStrings.get('smartinventorybutton.ENABLED_INSTRUCTIONS') : InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');
         }
         else {
             $scope.enableSmartInventoryButton = false;
+            $scope.smartInventoryButtonTooltip = InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');
         }
     });

@@ -7,10 +7,10 @@
 // import HostsService from './../hosts/host.service';
 export default ['$scope', 'ListDefinition', '$rootScope', 'GetBasePath',
     'rbacUiControlService', 'Dataset', '$state', '$filter', 'Prompt', 'Wait',
-    'HostsService', 'SetStatus', 'canAdd', 'i18n', '$transitions',
+    'HostsService', 'SetStatus', 'canAdd', 'i18n', 'InventoryHostsStrings', '$transitions',
     function($scope, ListDefinition, $rootScope, GetBasePath,
         rbacUiControlService, Dataset, $state, $filter, Prompt, Wait,
-        HostsService, SetStatus, canAdd, i18n, $transitions) {
+        HostsService, SetStatus, canAdd, i18n, InventoryHostsStrings, $transitions) {

         let list = ListDefinition;

@@ -19,6 +19,7 @@ export default ['$scope', 'ListDefinition', '$rootScope', 'GetBasePath',
     function init(){
         $scope.canAdd = canAdd;
         $scope.enableSmartInventoryButton = false;
+        $scope.smartInventoryButtonTooltip = InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');

         // Search init
         $scope.list = list;

@@ -45,14 +46,16 @@ export default ['$scope', 'ListDefinition', '$rootScope', 'GetBasePath',
         if(trans.params('to') && trans.params('to').host_search) {
             let hasMoreThanDefaultKeys = false;
             angular.forEach(trans.params('to').host_search, function(value, key) {
-                if(key !== 'order_by' && key !== 'page_size') {
+                if(key !== 'order_by' && key !== 'page_size' && key !== 'page') {
                     hasMoreThanDefaultKeys = true;
                 }
             });
             $scope.enableSmartInventoryButton = hasMoreThanDefaultKeys ? true : false;
+            $scope.smartInventoryButtonTooltip = hasMoreThanDefaultKeys ? InventoryHostsStrings.get('smartinventorybutton.ENABLED_INSTRUCTIONS') : InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');
         }
         else {
             $scope.enableSmartInventoryButton = false;
+            $scope.smartInventoryButtonTooltip = InventoryHostsStrings.get('smartinventorybutton.DISABLED_INSTRUCTIONS');
         }
     });

@@ -89,7 +92,12 @@ export default ['$scope', 'ListDefinition', '$rootScope', 'GetBasePath',
         $state.go('inventories.edit.hosts.add');
     };
     $scope.editHost = function(host){
-        $state.go('.edit', {inventory_id: host.inventory_id, host_id: host.id});
+        if($state.includes('inventories.edit.hosts')) {
+            $state.go('inventories.edit.hosts.edit', {host_id: host.id});
+        }
+        else if($state.includes('inventories.editSmartInventory.hosts')) {
+            $state.go('inventories.editSmartInventory.hosts.edit', {host_id: host.id});
+        }
     };
     $scope.goToInsights = function(host){
         $state.go('inventories.edit.hosts.edit.insights', {inventory_id: host.inventory_id, host_id:host.id});
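
`editHost` above can no longer hard-code a relative `.edit` target because the hosts list is reused under both the standard and Smart Inventory edit states; `$state.includes()` selects the matching absolute route. A sketch of the dispatch (ui-router's `$state` as injected above; `host` is a hypothetical list row):

    // $state.includes(name) is true when the current state is `name` or any
    // descendant of it, so the same list controller picks the edit route
    // that belongs to the state tree it is currently mounted in.
    function editHost(host) {
        if ($state.includes('inventories.edit.hosts')) {
            $state.go('inventories.edit.hosts.edit', { host_id: host.id });
        } else if ($state.includes('inventories.editSmartInventory.hosts')) {
            $state.go('inventories.editSmartInventory.hosts.edit', { host_id: host.id });
        }
    }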