Mirror of https://github.com/ansible/awx.git, synced 2026-01-16 12:20:45 -03:30
Merge branch 'release_3.3.0' into awx/devel
# Conflicts:
#	awx/ui/client/src/standard-out/standard-out-factories/main.js
#	awx/ui/package.json
commit 4fdf462b98
Makefile (13 lines changed)
@@ -72,7 +72,7 @@ UI_RELEASE_FLAG_FILE = awx/ui/.release_built

 I18N_FLAG_FILE = .i18n_built

-.PHONY: clean clean-tmp clean-venv requirements requirements_dev \
+.PHONY: awx-link clean clean-tmp clean-venv requirements requirements_dev \
 	develop refresh adduser migrate dbchange dbshell runserver celeryd \
 	receiver test test_unit test_ansible test_coverage coverage_html \
 	dev_build release_build release_clean sdist \
@@ -234,7 +234,7 @@ migrate:
 	if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	$(MANAGEMENT_COMMAND) migrate --noinput --fake-initial
+	$(MANAGEMENT_COMMAND) migrate --noinput

 # Run after making changes to the models to create a new migration.
 dbchange:
@@ -323,7 +323,7 @@ celeryd:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) -n celery@$(COMPOSE_HOST) --pidfile /tmp/celery_pid
+	celery worker -A awx -l DEBUG -B -Ofair --autoscale=100,4 --schedule=$(CELERY_SCHEDULE_FILE) --pidfile /tmp/celery_pid

 # Run to start the zeromq callback receiver
 receiver:
@@ -367,6 +367,11 @@ swagger: reports

 check: flake8 pep8 # pyflakes pylint

+awx-link:
+	cp -R /tmp/awx.egg-info /awx_devel/ || true
+	sed -i "s/placeholder/$(shell git describe --long | sed 's/\./\\./g')/" /awx_devel/awx.egg-info/PKG-INFO
+	cp /tmp/awx.egg-link /venv/awx/lib/python2.7/site-packages/awx.egg-link
+
 TEST_DIRS ?= awx/main/tests/unit awx/main/tests/functional awx/conf/tests awx/sso/tests

 # Run all API unit tests.
 test:
@@ -375,6 +380,8 @@ test:
 	fi; \
 	py.test $(TEST_DIRS)

+test_combined: test_ansible test
+
 test_unit:
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
@@ -10,6 +10,7 @@ register(
     'SESSION_COOKIE_AGE',
     field_class=fields.IntegerField,
     min_value=60,
+    max_value=30000000000, # approx 1,000 years, higher values give OverflowError
     label=_('Idle Time Force Log Out'),
     help_text=_('Number of seconds that a user is inactive before they will need to login again.'),
     category=_('Authentication'),
@@ -3,12 +3,14 @@

 # Django
 from django.utils.translation import ugettext_lazy as _
+from django.core.exceptions import ObjectDoesNotExist

 # Django REST Framework
 from rest_framework import serializers

 # AWX
 from awx.conf import fields
+from awx.main.models import Credential

 __all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'VerbatimField']

@@ -87,3 +89,20 @@ class OAuth2ProviderField(fields.DictField):
         if invalid_flags:
             self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags)))
         return data
+
+
+class DeprecatedCredentialField(serializers.IntegerField):
+
+    def __init__(self, **kwargs):
+        kwargs['allow_null'] = True
+        kwargs['default'] = None
+        kwargs['min_value'] = 1
+        kwargs['help_text'] = 'This resource has been deprecated and will be removed in a future release'
+        super(DeprecatedCredentialField, self).__init__(**kwargs)
+
+    def to_internal_value(self, pk):
+        try:
+            Credential.objects.get(pk=pk)
+        except ObjectDoesNotExist:
+            raise serializers.ValidationError(_('Credential {} does not exist').format(pk))
+        return pk
@@ -23,7 +23,7 @@ from django.contrib.auth import views as auth_views

 # Django REST Framework
 from rest_framework.authentication import get_authorization_header
-from rest_framework.exceptions import PermissionDenied, AuthenticationFailed
+from rest_framework.exceptions import PermissionDenied, AuthenticationFailed, ParseError
 from rest_framework import generics
 from rest_framework.response import Response
 from rest_framework import status
@@ -165,6 +165,9 @@ class APIView(views.APIView):
             request.drf_request_user = getattr(drf_request, 'user', False)
         except AuthenticationFailed:
             request.drf_request_user = None
+        except ParseError as exc:
+            request.drf_request_user = None
+            self.__init_request_error__ = exc
         return drf_request

     def finalize_response(self, request, response, *args, **kwargs):
@@ -174,6 +177,8 @@
         if response.status_code >= 400:
             status_msg = "status %s received by user %s attempting to access %s from %s" % \
                          (response.status_code, request.user, request.path, request.META.get('REMOTE_ADDR', None))
+            if hasattr(self, '__init_request_error__'):
+                response = self.handle_exception(self.__init_request_error__)
             if response.status_code == 401:
                 logger.info(status_msg)
             else:
@@ -44,9 +44,9 @@ class Metadata(metadata.SimpleMetadata):
         if placeholder is not serializers.empty:
             field_info['placeholder'] = placeholder

-        # Update help text for common fields.
         serializer = getattr(field, 'parent', None)
-        if serializer:
+        if serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
+            # Update help text for common fields.
             field_help_text = {
                 'id': _('Database ID for this {}.'),
                 'name': _('Name of this {}.'),
@@ -59,10 +59,18 @@
                 'modified': _('Timestamp when this {} was last modified.'),
             }
             if field.field_name in field_help_text:
-                if hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
-                    opts = serializer.Meta.model._meta.concrete_model._meta
-                    verbose_name = smart_text(opts.verbose_name)
-                    field_info['help_text'] = field_help_text[field.field_name].format(verbose_name)
+                opts = serializer.Meta.model._meta.concrete_model._meta
+                verbose_name = smart_text(opts.verbose_name)
+                field_info['help_text'] = field_help_text[field.field_name].format(verbose_name)
+            # If field is not part of the model, then show it as non-filterable
+            else:
+                is_model_field = False
+                for model_field in serializer.Meta.model._meta.fields:
+                    if field.field_name == model_field.name:
+                        is_model_field = True
+                        break
+                if not is_model_field:
+                    field_info['filterable'] = False

         # Indicate if a field has a default value.
         # FIXME: Still isn't showing all default values?
@@ -1,7 +1,6 @@
 # Python
-from collections import OrderedDict
 import json
 import yaml

 # Django
 from django.conf import settings
@@ -13,36 +12,6 @@ from rest_framework import parsers
 from rest_framework.exceptions import ParseError


-class OrderedDictLoader(yaml.SafeLoader):
-    """
-    This yaml loader is used to deal with current pyYAML (3.12) not supporting
-    custom object pairs hook. Remove it when new version adds that support.
-    """
-
-    def construct_mapping(self, node, deep=False):
-        if isinstance(node, yaml.nodes.MappingNode):
-            self.flatten_mapping(node)
-        else:
-            raise yaml.constructor.ConstructorError(
-                None, None,
-                "expected a mapping node, but found %s" % node.id,
-                node.start_mark
-            )
-        mapping = OrderedDict()
-        for key_node, value_node in node.value:
-            key = self.construct_object(key_node, deep=deep)
-            try:
-                hash(key)
-            except TypeError as exc:
-                raise yaml.constructor.ConstructorError(
-                    "while constructing a mapping", node.start_mark,
-                    "found unacceptable key (%s)" % exc, key_node.start_mark
-                )
-            value = self.construct_object(value_node, deep=deep)
-            mapping[key] = value
-        return mapping
-
-
 class JSONParser(parsers.JSONParser):
     """
     Parses JSON-serialized data, preserving order of dictionary keys.
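Note: a loader subclass such as the removed OrderedDictLoader is wired in through PyYAML's standard Loader argument. A minimal usage sketch, for illustration only (the sample document is an assumption, not part of this change):

    import yaml

    # Illustrative only: parse a small document with the OrderedDictLoader
    # defined above; keys are collected by its construct_mapping override.
    data = yaml.load("a: 1\nb: 2\n", Loader=OrderedDictLoader)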
@@ -233,8 +233,5 @@ class InstanceGroupTowerPermission(ModelAccessPermission):
     def has_object_permission(self, request, view, obj):
         if request.method == 'DELETE' and obj.name == "tower":
             return False
-        if request.method in ['PATCH', 'PUT'] and obj.name == 'tower' and \
-                request and request.data and request.data.get('name', '') != 'tower':
-            return False
         return super(InstanceGroupTowerPermission, self).has_object_permission(request, view, obj)

@@ -5,6 +5,7 @@
 import copy
 import json
 import logging
+import operator
 import re
 import six
 import urllib
@@ -38,9 +39,14 @@ from rest_framework.utils.serializer_helpers import ReturnList
 from polymorphic.models import PolymorphicModel

 # AWX
-from awx.main.constants import SCHEDULEABLE_PROVIDERS, ANSI_SGR_PATTERN
+from awx.main.constants import (
+    SCHEDULEABLE_PROVIDERS,
+    ANSI_SGR_PATTERN,
+    ACTIVE_STATES,
+    TOKEN_CENSOR,
+    CHOICES_PRIVILEGE_ESCALATION_METHODS,
+)
 from awx.main.models import * # noqa
-from awx.main.constants import ACTIVE_STATES
 from awx.main.models.base import NEW_JOB_TYPE_CHOICES
 from awx.main.access import get_user_capabilities
 from awx.main.fields import ImplicitRoleField
@@ -56,12 +62,11 @@ from awx.main.validators import vars_validate_or_raise

 from awx.conf.license import feature_enabled
 from awx.api.versioning import reverse, get_request_version
-from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, VerbatimField
+from awx.api.fields import (BooleanNullField, CharNullField, ChoiceNullField,
+                            VerbatimField, DeprecatedCredentialField)

 logger = logging.getLogger('awx.api.serializers')

-DEPRECATED = 'This resource has been deprecated and will be removed in a future release'
-
 # Fields that should be summarized regardless of object type.
 DEFAULT_SUMMARY_FIELDS = ('id', 'name', 'description')# , 'created_by', 'modified_by')#, 'type')
@@ -942,7 +947,6 @@ class UserSerializer(BaseSerializer):
             roles = self.reverse('api:user_roles_list', kwargs={'pk': obj.pk}),
             activity_stream = self.reverse('api:user_activity_stream_list', kwargs={'pk': obj.pk}),
             access_list = self.reverse('api:user_access_list', kwargs={'pk': obj.pk}),
-            applications = self.reverse('api:o_auth2_application_list', kwargs={'pk': obj.pk}),
             tokens = self.reverse('api:o_auth2_token_list', kwargs={'pk': obj.pk}),
             authorized_tokens = self.reverse('api:user_authorized_token_list', kwargs={'pk': obj.pk}),
             personal_tokens = self.reverse('api:o_auth2_personal_token_list', kwargs={'pk': obj.pk}),
@@ -991,7 +995,7 @@ class UserAuthorizedTokenSerializer(BaseSerializer):
         model = OAuth2AccessToken
         fields = (
             '*', '-name', 'description', 'user', 'token', 'refresh_token',
-            'expires', 'scope', 'application',
+            'expires', 'scope', 'application'
         )
         read_only_fields = ('user', 'token', 'expires')

@@ -1001,7 +1005,7 @@ class UserAuthorizedTokenSerializer(BaseSerializer):
             if request.method == 'POST':
                 return obj.token
             else:
-                return '*************'
+                return TOKEN_CENSOR
         except ObjectDoesNotExist:
             return ''

@@ -1011,12 +1015,13 @@ class UserAuthorizedTokenSerializer(BaseSerializer):
             if request.method == 'POST':
                 return getattr(obj.refresh_token, 'token', '')
             else:
-                return '**************'
+                return TOKEN_CENSOR
         except ObjectDoesNotExist:
             return ''

     def create(self, validated_data):
-        validated_data['user'] = self.context['request'].user
+        current_user = self.context['request'].user
+        validated_data['user'] = current_user
         validated_data['token'] = generate_token()
         validated_data['expires'] = now() + timedelta(
             seconds=oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS
@@ -1025,7 +1030,7 @@ class UserAuthorizedTokenSerializer(BaseSerializer):
         obj.save()
         if obj.application is not None:
             RefreshToken.objects.create(
-                user=self.context['request'].user,
+                user=current_user,
                 token=generate_token(),
                 application=obj.application,
                 access_token=obj
@@ -1040,13 +1045,14 @@ class OAuth2ApplicationSerializer(BaseSerializer):
     class Meta:
         model = OAuth2Application
         fields = (
-            '*', 'description', 'user', 'client_id', 'client_secret', 'client_type',
-            'redirect_uris', 'authorization_grant_type', 'skip_authorization',
+            '*', 'description', '-user', 'client_id', 'client_secret', 'client_type',
+            'redirect_uris', 'authorization_grant_type', 'skip_authorization', 'organization'
         )
         read_only_fields = ('client_id', 'client_secret')
         read_only_on_update_fields = ('user', 'authorization_grant_type')
         extra_kwargs = {
-            'user': {'allow_null': False, 'required': True},
+            'user': {'allow_null': True, 'required': False},
+            'organization': {'allow_null': False},
             'authorization_grant_type': {'allow_null': False}
         }

@@ -1075,7 +1081,7 @@ class OAuth2ApplicationSerializer(BaseSerializer):
         return ret

     def _summary_field_tokens(self, obj):
-        token_list = [{'id': x.pk, 'token': '**************', 'scope': x.scope} for x in obj.oauth2accesstoken_set.all()[:10]]
+        token_list = [{'id': x.pk, 'token': TOKEN_CENSOR, 'scope': x.scope} for x in obj.oauth2accesstoken_set.all()[:10]]
         if has_model_field_prefetched(obj, 'oauth2accesstoken_set'):
             token_count = len(obj.oauth2accesstoken_set.all())
         else:
@@ -1095,6 +1101,7 @@ class OAuth2TokenSerializer(BaseSerializer):

     refresh_token = serializers.SerializerMethodField()
     token = serializers.SerializerMethodField()
+    ALLOWED_SCOPES = ['read', 'write']

     class Meta:
         model = OAuth2AccessToken
@@ -1103,6 +1110,10 @@ class OAuth2TokenSerializer(BaseSerializer):
             'application', 'expires', 'scope',
         )
         read_only_fields = ('user', 'token', 'expires')
+        extra_kwargs = {
+            'scope': {'allow_null': False, 'required': True},
+            'user': {'allow_null': False, 'required': True}
+        }

     def get_modified(self, obj):
         if obj is None:
@@ -1128,7 +1139,7 @@ class OAuth2TokenSerializer(BaseSerializer):
             if request.method == 'POST':
                 return obj.token
             else:
-                return '*************'
+                return TOKEN_CENSOR
         except ObjectDoesNotExist:
             return ''

@@ -1138,12 +1149,31 @@ class OAuth2TokenSerializer(BaseSerializer):
             if request.method == 'POST':
                 return getattr(obj.refresh_token, 'token', '')
             else:
-                return '**************'
+                return TOKEN_CENSOR
         except ObjectDoesNotExist:
             return ''

+    def _is_valid_scope(self, value):
+        if not value or (not isinstance(value, six.string_types)):
+            return False
+        words = value.split()
+        for word in words:
+            if words.count(word) > 1:
+                return False # do not allow duplicates
+            if word not in self.ALLOWED_SCOPES:
+                return False
+        return True
+
+    def validate_scope(self, value):
+        if not self._is_valid_scope(value):
+            raise serializers.ValidationError(_(
+                'Must be a simple space-separated string with allowed scopes {}.'
+            ).format(self.ALLOWED_SCOPES))
+        return value
+
     def create(self, validated_data):
-        validated_data['user'] = self.context['request'].user
+        current_user = self.context['request'].user
+        validated_data['user'] = current_user
         validated_data['token'] = generate_token()
         validated_data['expires'] = now() + timedelta(
             seconds=oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS
@@ -1154,7 +1184,7 @@ class OAuth2TokenSerializer(BaseSerializer):
         obj.save()
         if obj.application is not None:
             RefreshToken.objects.create(
-                user=obj.application.user if obj.application.user else None,
+                user=current_user,
                 token=generate_token(),
                 application=obj.application,
                 access_token=obj
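Note: the scope validation above accepts only a space-separated combination of the ALLOWED_SCOPES values, with no duplicates. A condensed re-implementation for illustration only (this is not the serializer's actual entry point):

    ALLOWED_SCOPES = ['read', 'write']

    def is_valid_scope(value):
        # Mirrors OAuth2TokenSerializer._is_valid_scope: a non-empty string
        # of unique words, each drawn from ALLOWED_SCOPES.
        words = value.split() if isinstance(value, str) else []
        return bool(words) and len(set(words)) == len(words) and \
            set(words) <= set(ALLOWED_SCOPES)

    assert is_valid_scope('read write')     # valid
    assert not is_valid_scope('read read')  # duplicates are rejected
    assert not is_valid_scope('admin')      # unknown scopes are rejected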
@@ -1176,10 +1206,13 @@ class OAuth2AuthorizedTokenSerializer(BaseSerializer):
     class Meta:
         model = OAuth2AccessToken
         fields = (
-            '*', '-name', 'description', 'user', 'token', 'refresh_token',
+            '*', '-name', 'description', '-user', 'token', 'refresh_token',
             'expires', 'scope', 'application',
         )
         read_only_fields = ('user', 'token', 'expires')
+        extra_kwargs = {
+            'scope': {'allow_null': False, 'required': True}
+        }

     def get_token(self, obj):
         request = self.context.get('request', None)
@@ -1187,7 +1220,7 @@ class OAuth2AuthorizedTokenSerializer(BaseSerializer):
             if request.method == 'POST':
                 return obj.token
             else:
-                return '*************'
+                return TOKEN_CENSOR
         except ObjectDoesNotExist:
             return ''

@@ -1197,12 +1230,13 @@ class OAuth2AuthorizedTokenSerializer(BaseSerializer):
             if request.method == 'POST':
                 return getattr(obj.refresh_token, 'token', '')
             else:
-                return '**************'
+                return TOKEN_CENSOR
         except ObjectDoesNotExist:
             return ''

     def create(self, validated_data):
-        validated_data['user'] = self.context['request'].user
+        current_user = self.context['request'].user
+        validated_data['user'] = current_user
         validated_data['token'] = generate_token()
         validated_data['expires'] = now() + timedelta(
             seconds=oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS
@@ -1213,7 +1247,7 @@ class OAuth2AuthorizedTokenSerializer(BaseSerializer):
         obj.save()
         if obj.application is not None:
             RefreshToken.objects.create(
-                user=obj.application.user if obj.application.user else None,
+                user=current_user,
                 token=generate_token(),
                 application=obj.application,
                 access_token=obj
@@ -1233,6 +1267,9 @@ class OAuth2PersonalTokenSerializer(BaseSerializer):
             'application', 'expires', 'scope',
         )
         read_only_fields = ('user', 'token', 'expires', 'application')
+        extra_kwargs = {
+            'scope': {'allow_null': False, 'required': True}
+        }

     def get_modified(self, obj):
         if obj is None:
@@ -1258,7 +1295,7 @@ class OAuth2PersonalTokenSerializer(BaseSerializer):
             if request.method == 'POST':
                 return obj.token
             else:
-                return '*************'
+                return TOKEN_CENSOR
         except ObjectDoesNotExist:
             return ''

@@ -1271,6 +1308,7 @@ class OAuth2PersonalTokenSerializer(BaseSerializer):
         validated_data['expires'] = now() + timedelta(
             seconds=oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS
         )
+        validated_data['application'] = None
         obj = super(OAuth2PersonalTokenSerializer, self).create(validated_data)
         obj.save()
         return obj
@@ -1293,6 +1331,7 @@ class OrganizationSerializer(BaseSerializer):
             admins = self.reverse('api:organization_admins_list', kwargs={'pk': obj.pk}),
             teams = self.reverse('api:organization_teams_list', kwargs={'pk': obj.pk}),
             credentials = self.reverse('api:organization_credential_list', kwargs={'pk': obj.pk}),
+            applications = self.reverse('api:organization_applications_list', kwargs={'pk': obj.pk}),
             activity_stream = self.reverse('api:organization_activity_stream_list', kwargs={'pk': obj.pk}),
             notification_templates = self.reverse('api:organization_notification_templates_list', kwargs={'pk': obj.pk}),
             notification_templates_any = self.reverse('api:organization_notification_templates_any_list', kwargs={'pk': obj.pk}),
@@ -1345,7 +1384,7 @@ class ProjectOptionsSerializer(BaseSerializer):
         if scm_type:
             attrs.pop('local_path', None)
         if 'local_path' in attrs and attrs['local_path'] not in valid_local_paths:
-            errors['local_path'] = 'Invalid path choice.'
+            errors['local_path'] = _('This path is already being used by another manual project.')

         if errors:
             raise serializers.ValidationError(errors)
@@ -1923,9 +1962,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):


 class InventorySourceOptionsSerializer(BaseSerializer):
-    credential = models.PositiveIntegerField(
-        blank=True, null=True, default=None,
-        help_text='This resource has been deprecated and will be removed in a future release')
+    credential = DeprecatedCredentialField()

     class Meta:
         fields = ('*', 'source', 'source_path', 'source_script', 'source_vars', 'credential',
@@ -2261,6 +2298,7 @@ class RoleSerializer(BaseSerializer):

     class Meta:
         model = Role
+        fields = ('*', '-created', '-modified')
         read_only_fields = ('id', 'role_field', 'description', 'name')

     def to_representation(self, obj):
@@ -2276,8 +2314,6 @@ class RoleSerializer(BaseSerializer):
             ret['summary_fields']['resource_type'] = get_type_for_model(content_model)
             ret['summary_fields']['resource_type_display_name'] = content_model._meta.verbose_name.title()

-        ret.pop('created')
-        ret.pop('modified')
         return ret

     def get_related(self, obj):
@@ -2465,6 +2501,9 @@ class CredentialTypeSerializer(BaseSerializer):
                 field['label'] = _(field['label'])
                 if 'help_text' in field:
                     field['help_text'] = _(field['help_text'])
+                if field['type'] == 'become_method':
+                    field.pop('type')
+                    field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
         return value

     def filter_field_metadata(self, fields, method):
@@ -2634,7 +2673,9 @@ class CredentialSerializer(BaseSerializer):
         for field in set(data.keys()) - valid_fields - set(credential_type.defined_fields):
             if data.get(field):
                 raise serializers.ValidationError(
-                    {"detail": _("'%s' is not a valid field for %s") % (field, credential_type.name)}
+                    {"detail": _("'{field_name}' is not a valid field for {credential_type_name}").format(
+                        field_name=field, credential_type_name=credential_type.name
+                    )}
                 )
         value.pop('kind', None)
         return value
@@ -2785,15 +2826,11 @@ class V1JobOptionsSerializer(BaseSerializer):
         model = Credential
         fields = ('*', 'cloud_credential', 'network_credential')

-    V1_FIELDS = {
-        'cloud_credential': models.PositiveIntegerField(blank=True, null=True, default=None, help_text=DEPRECATED),
-        'network_credential': models.PositiveIntegerField(blank=True, null=True, default=None, help_text=DEPRECATED),
-    }
+    V1_FIELDS = ('cloud_credential', 'network_credential',)

     def build_field(self, field_name, info, model_class, nested_depth):
         if field_name in self.V1_FIELDS:
-            return self.build_standard_field(field_name,
-                                             self.V1_FIELDS[field_name])
+            return (DeprecatedCredentialField, {})
         return super(V1JobOptionsSerializer, self).build_field(field_name, info, model_class, nested_depth)


@@ -2804,15 +2841,11 @@ class LegacyCredentialFields(BaseSerializer):
         model = Credential
         fields = ('*', 'credential', 'vault_credential')

-    LEGACY_FIELDS = {
-        'credential': models.PositiveIntegerField(blank=True, null=True, default=None, help_text=DEPRECATED),
-        'vault_credential': models.PositiveIntegerField(blank=True, null=True, default=None, help_text=DEPRECATED),
-    }
+    LEGACY_FIELDS = ('credential', 'vault_credential',)

     def build_field(self, field_name, info, model_class, nested_depth):
         if field_name in self.LEGACY_FIELDS:
-            return self.build_standard_field(field_name,
-                                             self.LEGACY_FIELDS[field_name])
+            return (DeprecatedCredentialField, {})
         return super(LegacyCredentialFields, self).build_field(field_name, info, model_class, nested_depth)


@@ -3045,6 +3078,11 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
         inventory = get_field_from_model_or_attrs('inventory')
         project = get_field_from_model_or_attrs('project')

+        if get_field_from_model_or_attrs('host_config_key') and not inventory:
+            raise serializers.ValidationError({'host_config_key': _(
+                "Cannot enable provisioning callback without an inventory set."
+            )})
+
         prompting_error_message = _("Must either set a default value or ask to prompt on launch.")
         if project is None:
             raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")})
@@ -3059,7 +3097,6 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
     def get_summary_fields(self, obj):
         summary_fields = super(JobTemplateSerializer, self).get_summary_fields(obj)
         all_creds = []
-        extra_creds = []
         if obj.pk:
             for cred in obj.credentials.all():
                 summarized_cred = {
@@ -3070,20 +3107,31 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
                     'credential_type_id': cred.credential_type_id
                 }
                 all_creds.append(summarized_cred)
-            if self.is_detail_view:
-                for summarized_cred in all_creds:
-                    if summarized_cred['kind'] in ('cloud', 'net'):
-                        extra_creds.append(summarized_cred)
-                    elif summarized_cred['kind'] == 'ssh':
-                        summary_fields['credential'] = summarized_cred
-                    elif summarized_cred['kind'] == 'vault':
-                        summary_fields['vault_credential'] = summarized_cred
+        # Organize credential data into multitude of deprecated fields
+        extra_creds = []
+        vault_credential = None
+        credential = None
+        for summarized_cred in all_creds:
+            if summarized_cred['kind'] in ('cloud', 'net'):
+                extra_creds.append(summarized_cred)
+            elif summarized_cred['kind'] == 'ssh':
+                credential = summarized_cred
+            elif summarized_cred['kind'] == 'vault':
+                vault_credential = summarized_cred
+        # Selectively apply those fields, depending on view deetails
+        if (self.is_detail_view or self.version == 1) and credential:
+            summary_fields['credential'] = credential
+        else:
+            # Credential could be an empty dictionary in this case
+            summary_fields.pop('credential', None)
+        if (self.is_detail_view or self.version == 1) and vault_credential:
+            summary_fields['vault_credential'] = vault_credential
+        else:
+            # vault credential could be empty dictionary
+            summary_fields.pop('vault_credential', None)
         if self.version > 1:
             if self.is_detail_view:
                 summary_fields['extra_credentials'] = extra_creds
-            else:
-                # Credential would be an empty dictionary in this case
-                summary_fields.pop('credential', None)
             summary_fields['credentials'] = all_creds
         return summary_fields

@@ -3163,7 +3211,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
             data.setdefault('project', job_template.project.pk)
             data.setdefault('playbook', job_template.playbook)
             if job_template.credential:
-                data.setdefault('credential', job_template.credential.pk)
+                data.setdefault('credential', job_template.credential)
             data.setdefault('forks', job_template.forks)
             data.setdefault('limit', job_template.limit)
             data.setdefault('verbosity', job_template.verbosity)
@@ -3210,11 +3258,12 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
         return summary_fields


-class JobCancelSerializer(JobSerializer):
+class JobCancelSerializer(BaseSerializer):

     can_cancel = serializers.BooleanField(read_only=True)

     class Meta:
+        model = Job
         fields = ('can_cancel',)


@@ -3669,9 +3718,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):


 class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):
-    credential = models.PositiveIntegerField(
-        blank=True, null=True, default=None,
-        help_text='This resource has been deprecated and will be removed in a future release')
+    credential = DeprecatedCredentialField()
     success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
     failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
     always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
@@ -3762,9 +3809,7 @@ class WorkflowJobTemplateNodeSerializer(LaunchConfigurationBaseSerializer):


 class WorkflowJobNodeSerializer(LaunchConfigurationBaseSerializer):
-    credential = models.PositiveIntegerField(
-        blank=True, null=True, default=None,
-        help_text='This resource has been deprecated and will be removed in a future release')
+    credential = DeprecatedCredentialField()
     success_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
     failure_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
     always_nodes = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
@@ -4505,7 +4550,12 @@ class InstanceSerializer(BaseSerializer):

     consumed_capacity = serializers.SerializerMethodField()
     percent_capacity_remaining = serializers.SerializerMethodField()
-    jobs_running = serializers.SerializerMethodField()
+    jobs_running = serializers.IntegerField(
+        help_text=_('Count of jobs in the running or waiting state that '
+                    'are targeted for this instance'),
+        read_only=True
+    )
+

     class Meta:
         model = Instance
@@ -4524,14 +4574,11 @@ class InstanceSerializer(BaseSerializer):
         return obj.consumed_capacity

     def get_percent_capacity_remaining(self, obj):
-        if not obj.capacity or obj.consumed_capacity == obj.capacity:
+        if not obj.capacity or obj.consumed_capacity >= obj.capacity:
             return 0.0
         else:
             return float("{0:.2f}".format(((float(obj.capacity) - float(obj.consumed_capacity)) / (float(obj.capacity))) * 100))

-    def get_jobs_running(self, obj):
-        return UnifiedJob.objects.filter(execution_node=obj.hostname, status__in=('running', 'waiting',)).count()
-

 class InstanceGroupSerializer(BaseSerializer):

@@ -4540,6 +4587,22 @@ class InstanceGroupSerializer(BaseSerializer):
     percent_capacity_remaining = serializers.SerializerMethodField()
     jobs_running = serializers.SerializerMethodField()
     instances = serializers.SerializerMethodField()
+    # NOTE: help_text is duplicated from field definitions, no obvious way of
+    # both defining field details here and also getting the field's help_text
+    policy_instance_percentage = serializers.IntegerField(
+        default=0, min_value=0, max_value=100, required=False, initial=0,
+        help_text=_("Minimum percentage of all instances that will be automatically assigned to "
+                    "this group when new instances come online.")
+    )
+    policy_instance_minimum = serializers.IntegerField(
+        default=0, min_value=0, required=False, initial=0,
+        help_text=_("Static minimum number of Instances that will be automatically assign to "
+                    "this group when new instances come online.")
+    )
+    policy_instance_list = serializers.ListField(
+        child=serializers.CharField(),
+        help_text=_("List of exact-match Instances that will be assigned to this group")
+    )

     class Meta:
         model = InstanceGroup
@@ -4556,6 +4619,14 @@ class InstanceGroupSerializer(BaseSerializer):
             res['controller'] = self.reverse('api:instance_group_detail', kwargs={'pk': obj.controller_id})
         return res

+    def validate_policy_instance_list(self, value):
+        for instance_name in value:
+            if value.count(instance_name) > 1:
+                raise serializers.ValidationError(_('Duplicate entry {}.').format(instance_name))
+            if not Instance.objects.filter(hostname=instance_name).exists():
+                raise serializers.ValidationError(_('{} is not a valid hostname of an existing instance.').format(instance_name))
+        return value
+
     def get_jobs_qs(self):
         # Store running jobs queryset in context, so it will be shared in ListView
         if 'running_jobs' not in self.context:
@@ -4582,9 +4653,12 @@ class InstanceGroupSerializer(BaseSerializer):
     def get_percent_capacity_remaining(self, obj):
         if not obj.capacity:
             return 0.0
+        consumed = self.get_consumed_capacity(obj)
+        if consumed >= obj.capacity:
+            return 0.0
         else:
             return float("{0:.2f}".format(
-                ((float(obj.capacity) - float(self.get_consumed_capacity(obj))) / (float(obj.capacity))) * 100)
+                ((float(obj.capacity) - float(consumed)) / (float(obj.capacity))) * 100)
             )

     def get_jobs_running(self, obj):
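Note: as a worked example of the capacity arithmetic in the hunk above (numbers are illustrative, not from this change): a group with capacity 200 and 150 units consumed reports (200 - 150) / 200 * 100 = 25.0 percent remaining, and anything consumed at or beyond capacity is clamped to 0.0:

    capacity, consumed = 200.0, 150.0
    # Same rounding as the serializer: format to two decimals, then back to float.
    percent_remaining = float("{0:.2f}".format(((capacity - consumed) / capacity) * 100))
    assert percent_remaining == 25.0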
@@ -4614,7 +4688,10 @@ class ActivityStreamSerializer(BaseSerializer):
             ('workflow_job_template_node', ('id', 'unified_job_template_id')),
             ('label', ('id', 'name', 'organization_id')),
             ('notification', ('id', 'status', 'notification_type', 'notification_template_id')),
-            ('access_token', ('id', 'token'))
+            ('o_auth2_access_token', ('id', 'user_id', 'description', 'application_id', 'scope')),
+            ('o_auth2_application', ('id', 'name', 'description')),
+            ('credential_type', ('id', 'name', 'description', 'kind', 'managed_by_tower')),
+            ('ad_hoc_command', ('id', 'name', 'status', 'limit'))
         ]
         return field_list

@@ -4656,6 +4733,10 @@ class ActivityStreamSerializer(BaseSerializer):

     def get_related(self, obj):
         rel = {}
+        VIEW_NAME_EXCEPTIONS = {
+            'custom_inventory_script': 'inventory_script_detail',
+            'o_auth2_access_token': 'o_auth2_token_detail'
+        }
         if obj.actor is not None:
             rel['actor'] = self.reverse('api:user_detail', kwargs={'pk': obj.actor.pk})
         for fk, __ in self._local_summarizable_fk_fields:
@@ -4669,18 +4750,11 @@ class ActivityStreamSerializer(BaseSerializer):
                     if getattr(thisItem, 'id', None) in id_list:
                         continue
                     id_list.append(getattr(thisItem, 'id', None))
-                    if fk == 'custom_inventory_script':
-                        rel[fk].append(self.reverse('api:inventory_script_detail', kwargs={'pk': thisItem.id}))
-                    elif fk == 'application':
-                        rel[fk].append(self.reverse(
-                            'api:o_auth2_application_detail', kwargs={'pk': thisItem.pk}
-                        ))
-                    elif fk == 'access_token':
-                        rel[fk].append(self.reverse(
-                            'api:o_auth2_token_detail', kwargs={'pk': thisItem.pk}
-                        ))
+                    if fk in VIEW_NAME_EXCEPTIONS:
+                        view_name = VIEW_NAME_EXCEPTIONS[fk]
                     else:
-                        rel[fk].append(self.reverse('api:' + fk + '_detail', kwargs={'pk': thisItem.id}))
+                        view_name = fk + '_detail'
+                    rel[fk].append(self.reverse('api:' + view_name, kwargs={'pk': thisItem.id}))

                 if fk == 'schedule':
                     rel['unified_job_template'] = thisItem.unified_job_template.get_absolute_url(self.context.get('request'))
@@ -4689,7 +4763,6 @@ class ActivityStreamSerializer(BaseSerializer):
                 'api:setting_singleton_detail',
                 kwargs={'category_slug': obj.setting['category']}
             )
-        rel['access_token'] = '*************'
         return rel

     def _get_rel(self, obj, fk):
@@ -4743,7 +4816,6 @@ class ActivityStreamSerializer(BaseSerializer):
                                              last_name = obj.actor.last_name)
         if obj.setting:
             summary_fields['setting'] = [obj.setting]
-        summary_fields['access_token'] = '*************'
         return summary_fields

@@ -60,9 +60,10 @@ _Added in AWX 1.4_

     ?related__search=findme

-Note: If you want to provide more than one search terms, please use multiple
+Note: If you want to provide more than one search term, multiple
 search fields with the same key, like `?related__search=foo&related__search=bar`,
-All search terms with the same key will be ORed together.
+will be ORed together. Terms separated by commas, like `?related__search=foo,bar`
+will be ANDed together.

 ## Filtering

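Note: to make the OR/AND distinction in the revised wording concrete, here is a minimal sketch using Python's requests library; the host, token, and endpoint are illustrative assumptions, not part of this change:

    import requests

    BASE = 'https://awx.example.com/api/v2'      # hypothetical AWX host
    HEADERS = {'Authorization': 'Bearer TOKEN'}  # hypothetical token

    # Repeating the key sends two search terms that are ORed together:
    # records matching "foo" OR "bar".
    r_or = requests.get(BASE + '/hosts/',
                        params=[('related__search', 'foo'), ('related__search', 'bar')],
                        headers=HEADERS)

    # Comma-separated terms under one key are ANDed together:
    # only records matching both "foo" AND "bar".
    r_and = requests.get(BASE + '/hosts/',
                         params={'related__search': 'foo,bar'},
                         headers=HEADERS)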
@@ -12,12 +12,6 @@ For example on `cleanup_jobs` and `cleanup_activitystream`:

 Which will act on data older than 30 days.

-For `cleanup_facts`:
-
-`{"extra_vars": {"older_than": "4w", "granularity": "3d"}}`
-
-Which will reduce the granularity of scan data to one scan per 3 days when the data is older than 4w.
-
 For `cleanup_activitystream` and `cleanup_jobs` commands, providing
 `"dry_run": true` inside of `extra_vars` will show items that will be
 removed without deleting them.
@@ -27,7 +21,6 @@ applicable either when running it from the command line or launching its
 system job template with empty `extra_vars`.

 - Defaults for `cleanup_activitystream`: days=90
-- Defaults for `cleanup_facts`: older_than="30d", granularity="1w"
- Defaults for `cleanup_jobs`: days=90

 If successful, the response status code will be 202. If the job cannot be
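Note: as an illustration of launching one of these cleanup jobs with `dry_run`, a hypothetical request might look like the sketch below; the hostname, credentials, and job template ID are assumptions, not values from this document:

    import requests

    # Assume system job template 1 is cleanup_jobs on this hypothetical host.
    url = 'https://awx.example.com/api/v2/system_job_templates/1/launch/'
    payload = {'extra_vars': {'days': 30, 'dry_run': True}}

    # A 202 status means the job was accepted; with dry_run the matching
    # records are reported but not deleted.
    resp = requests.post(url, json=payload, auth=('admin', 'password'))
    print(resp.status_code)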
@@ -21,6 +21,7 @@ from awx.api.views import (
     OrganizationInstanceGroupsList,
     OrganizationObjectRolesList,
     OrganizationAccessList,
+    OrganizationApplicationList,
 )


@@ -45,6 +46,7 @@ urls = [
     url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
     url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),
     url(r'^(?P<pk>[0-9]+)/access_list/$', OrganizationAccessList.as_view(), name='organization_access_list'),
+    url(r'^(?P<pk>[0-9]+)/applications/$', OrganizationApplicationList.as_view(), name='organization_applications_list'),
 ]

 __all__ = ['urls']

awx/api/views.py (110 lines changed)
@@ -77,6 +77,7 @@ from awx.main.utils import (
 from awx.main.utils.encryption import encrypt_value
 from awx.main.utils.filters import SmartFilter
 from awx.main.utils.insights import filter_insights_api_response
+from awx.main.redact import UriCleaner
 from awx.api.permissions import (
     JobTemplateCallbackPermission,
     TaskPermission,
@@ -203,6 +204,10 @@ class InstanceGroupMembershipMixin(object):

 class RelatedJobsPreventDeleteMixin(object):
+    def perform_destroy(self, obj):
+        self.check_related_active_jobs(obj)
+        return super(RelatedJobsPreventDeleteMixin, self).perform_destroy(obj)
+
     def check_related_active_jobs(self, obj):
         active_jobs = obj.get_active_jobs()
         if len(active_jobs) > 0:
             raise ActiveJobConflict(active_jobs)
@@ -213,7 +218,6 @@ class RelatedJobsPreventDeleteMixin(object):
             raise PermissionDenied(_(
                 'Related job {} is still processing events.'
             ).format(unified_job.log_format))
-        return super(RelatedJobsPreventDeleteMixin, self).perform_destroy(obj)


 class ApiRootView(APIView):
@@ -631,7 +635,7 @@ class InstanceDetail(RetrieveUpdateAPIView):

 class InstanceUnifiedJobsList(SubListAPIView):

-    view_name = _("Instance Running Jobs")
+    view_name = _("Instance Jobs")
     model = UnifiedJob
     serializer_class = UnifiedJobSerializer
     parent_model = Instance
@@ -667,6 +671,14 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP
     serializer_class = InstanceGroupSerializer
     permission_classes = (InstanceGroupTowerPermission,)

+    def destroy(self, request, *args, **kwargs):
+        instance = self.get_object()
+        if instance.controller is not None:
+            raise PermissionDenied(detail=_("Isolated Groups can not be removed from the API"))
+        if instance.controlled_groups.count():
+            raise PermissionDenied(detail=_("Instance Groups acting as a controller for an Isolated Group can not be removed from the API"))
+        return super(InstanceGroupDetail, self).destroy(request, *args, **kwargs)
+

 class InstanceGroupUnifiedJobsList(SubListAPIView):

@@ -995,6 +1007,8 @@ class OrganizationInventoriesList(SubListAPIView):
 class BaseUsersList(SubListCreateAttachDetachAPIView):
     def post(self, request, *args, **kwargs):
         ret = super(BaseUsersList, self).post( request, *args, **kwargs)
+        if ret.status_code != 201:
+            return ret
         try:
             if ret.data is not None and request.data.get('is_system_auditor', False):
                 # This is a faux-field that just maps to checking the system
@@ -1598,6 +1612,18 @@ class UserAuthorizedTokenList(SubListCreateAPIView):

     def get_queryset(self):
         return get_access_token_model().objects.filter(application__isnull=False, user=self.request.user)


+class OrganizationApplicationList(SubListCreateAPIView):
+
+    view_name = _("Organization OAuth2 Applications")
+
+    model = OAuth2Application
+    serializer_class = OAuth2ApplicationSerializer
+    parent_model = Organization
+    relationship = 'applications'
+    parent_key = 'organization'
+    swagger_topic = 'Authentication'
+
+
 class OAuth2PersonalTokenList(SubListCreateAPIView):
@@ -1669,14 +1695,8 @@ class UserRolesList(SubListAttachDetachAPIView):
         if not sub_id:
             return super(UserRolesList, self).post(request)

-        if sub_id == self.request.user.admin_role.pk:
-            raise PermissionDenied(_('You may not perform any action with your own admin_role.'))
-
         user = get_object_or_400(User, pk=self.kwargs['pk'])
         role = get_object_or_400(Role, pk=sub_id)
-        user_content_type = ContentType.objects.get_for_model(User)
-        if role.content_type == user_content_type:
-            raise PermissionDenied(_('You may not change the membership of a users admin_role'))

         credential_content_type = ContentType.objects.get_for_model(Credential)
         if role.content_type == credential_content_type:
@@ -2071,6 +2091,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, Retri
         obj = self.get_object()
         if not request.user.can_access(self.model, 'delete', obj):
             raise PermissionDenied()
+        self.check_related_active_jobs(obj) # related jobs mixin
         try:
             obj.schedule_deletion(getattr(request.user, 'id', None))
             return Response(status=status.HTTP_202_ACCEPTED)
@@ -2169,7 +2190,7 @@ class HostList(HostRelatedSearchMixin, ListCreateAPIView):
             return Response(dict(error=_(six.text_type(e))), status=status.HTTP_400_BAD_REQUEST)


-class HostDetail(ControlledByScmMixin, RetrieveUpdateDestroyAPIView):
+class HostDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, RetrieveUpdateDestroyAPIView):

     always_allow_superuser = False
     model = Host
@@ -3116,16 +3137,22 @@ class JobTemplateSurveySpec(GenericAPIView):
         return Response()

     def _validate_spec_data(self, new_spec, old_spec):
-        if "name" not in new_spec:
-            return Response(dict(error=_("'name' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
-        if "description" not in new_spec:
-            return Response(dict(error=_("'description' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
-        if "spec" not in new_spec:
-            return Response(dict(error=_("'spec' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
-        if not isinstance(new_spec["spec"], list):
-            return Response(dict(error=_("'spec' must be a list of items.")), status=status.HTTP_400_BAD_REQUEST)
-        if len(new_spec["spec"]) < 1:
-            return Response(dict(error=_("'spec' doesn't contain any items.")), status=status.HTTP_400_BAD_REQUEST)
+        schema_errors = {}
+        for field, expect_type, type_label in [
+                ('name', six.string_types, 'string'),
+                ('description', six.string_types, 'string'),
+                ('spec', list, 'list of items')]:
+            if field not in new_spec:
+                schema_errors['error'] = _("Field '{}' is missing from survey spec.").format(field)
+            elif not isinstance(new_spec[field], expect_type):
+                schema_errors['error'] = _("Expected {} for field '{}', received {} type.").format(
+                    type_label, field, type(new_spec[field]).__name__)
+
+        if isinstance(new_spec.get('spec', None), list) and len(new_spec["spec"]) < 1:
+            schema_errors['error'] = _("'spec' doesn't contain any items.")
+
+        if schema_errors:
+            return Response(schema_errors, status=status.HTTP_400_BAD_REQUEST)

         variable_set = set()
         old_spec_dict = JobTemplate.pivot_spec(old_spec)
@@ -3458,6 +3485,13 @@ class JobTemplateJobsList(SubListCreateAPIView):
     relationship = 'jobs'
     parent_key = 'job_template'

+    @property
+    def allowed_methods(self):
+        methods = super(JobTemplateJobsList, self).allowed_methods
+        if get_request_version(getattr(self, 'request', None)) > 1:
+            methods.remove('POST')
+        return methods
+

 class JobTemplateInstanceGroupsList(SubListAttachDetachAPIView):

@@ -4122,7 +4156,7 @@ class JobRelaunch(RetrieveAPIView):
             for p in needed_passwords:
                 data['credential_passwords'][p] = u''
         else:
-            data.pop('credential_passwords')
+            data.pop('credential_passwords', None)
         return data

     @csrf_exempt
@@ -4618,9 +4652,17 @@ class UnifiedJobList(ListAPIView):
     serializer_class = UnifiedJobListSerializer


-class StdoutANSIFilter(object):
+def redact_ansi(line):
+    # Remove ANSI escape sequences used to embed event data.
+    line = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', line)
+    # Remove ANSI color escape sequences.
+    return re.sub(r'\x1b[^m]*m', '', line)
+
+
+class StdoutFilter(object):

     def __init__(self, fileobj):
+        self._functions = []
         self.fileobj = fileobj
         self.extra_data = ''
         if hasattr(fileobj, 'close'):
@@ -4632,10 +4674,7 @@ class StdoutANSIFilter(object):
             line = self.fileobj.readline(size)
             if not line:
                 break
-            # Remove ANSI escape sequences used to embed event data.
-            line = re.sub(r'\x1b\[K(?:[A-Za-z0-9+/=]+\x1b\[\d+D)+\x1b\[K', '', line)
-            # Remove ANSI color escape sequences.
-            line = re.sub(r'\x1b[^m]*m', '', line)
+            line = self.process_line(line)
             data += line
             if size > 0 and len(data) > size:
                 self.extra_data = data[size:]
@@ -4644,6 +4683,14 @@ class StdoutANSIFilter(object):
         self.extra_data = ''
         return data

+    def register(self, func):
+        self._functions.append(func)
+
+    def process_line(self, line):
+        for func in self._functions:
+            line = func(line)
+        return line
+

 class UnifiedJobStdout(RetrieveAPIView):

@@ -4701,9 +4748,12 @@ class UnifiedJobStdout(RetrieveAPIView):
                     suffix='.ansi' if target_format == 'ansi_download' else ''
                 )
                 content_fd = unified_job.result_stdout_raw_handle(enforce_max_bytes=False)
+                redactor = StdoutFilter(content_fd)
                 if target_format == 'txt_download':
-                    content_fd = StdoutANSIFilter(content_fd)
-                response = HttpResponse(FileWrapper(content_fd), content_type='text/plain')
+                    redactor.register(redact_ansi)
+                if type(unified_job) == ProjectUpdate:
+                    redactor.register(UriCleaner.remove_sensitive)
+                response = HttpResponse(FileWrapper(redactor), content_type='text/plain')
                 response["Content-Disposition"] = 'attachment; filename="{}"'.format(filename)
                 return response
             else:
@@ -4882,12 +4932,6 @@ class RoleUsersList(SubListAttachDetachAPIView):

         user = get_object_or_400(User, pk=sub_id)
         role = self.get_parent_object()
-        if role == self.request.user.admin_role:
-            raise PermissionDenied(_('You may not perform any action with your own admin_role.'))
-
-        user_content_type = ContentType.objects.get_for_model(User)
-        if role.content_type == user_content_type:
-            raise PermissionDenied(_('You may not change the membership of a users admin_role'))

         credential_content_type = ContentType.objects.get_for_model(Credential)
         if role.content_type == credential_content_type:

@@ -28,6 +28,7 @@ import uuid
 from copy import copy

 # Ansible
+from ansible import constants as C
 from ansible.plugins.callback import CallbackBase
 from ansible.plugins.callback.default import CallbackModule as DefaultCallbackModule

@@ -126,16 +127,19 @@ class BaseCallbackModule(CallbackBase):
             task=(task.name or task.action),
             task_uuid=str(task._uuid),
             task_action=task.action,
+            task_args='',
         )
         try:
             task_ctx['task_path'] = task.get_path()
         except AttributeError:
             pass
-        if task.no_log:
-            task_ctx['task_args'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
-        else:
-            task_args = ', '.join(('%s=%s' % a for a in task.args.items()))
-            task_ctx['task_args'] = task_args
+
+        if C.DISPLAY_ARGS_TO_STDOUT:
+            if task.no_log:
+                task_ctx['task_args'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
+            else:
+                task_args = ', '.join(('%s=%s' % a for a in task.args.items()))
+                task_ctx['task_args'] = task_args
         if getattr(task, '_role', None):
             task_role = task._role._role_name
         else:
@@ -274,15 +278,14 @@ class BaseCallbackModule(CallbackBase):
         with self.capture_event_data('playbook_on_no_hosts_remaining'):
             super(BaseCallbackModule, self).v2_playbook_on_no_hosts_remaining()

-    def v2_playbook_on_notify(self, result, handler):
-        # NOTE: Not used by Ansible 2.x.
+    def v2_playbook_on_notify(self, handler, host):
+        # NOTE: Not used by Ansible < 2.5.
         event_data = dict(
-            host=result._host.get_name(),
-            task=result._task,
-            handler=handler,
+            host=host.get_name(),
+            handler=handler.get_name(),
         )
         with self.capture_event_data('playbook_on_notify', **event_data):
-            super(BaseCallbackModule, self).v2_playbook_on_notify(result, handler)
+            super(BaseCallbackModule, self).v2_playbook_on_notify(handler, host)

     '''
     ansible_stats is, retoractively, added in 2.2
@@ -315,6 +318,14 @@ class BaseCallbackModule(CallbackBase):
         with self.capture_event_data('playbook_on_stats', **event_data):
             super(BaseCallbackModule, self).v2_playbook_on_stats(stats)

+    @staticmethod
+    def _get_event_loop(task):
+        if hasattr(task, 'loop_with'):  # Ansible >=2.5
+            return task.loop_with
+        elif hasattr(task, 'loop'):  # Ansible <2.4
+            return task.loop
+        return None
+
     def v2_runner_on_ok(self, result):
         # FIXME: Display detailed results or not based on verbosity.

@@ -328,7 +339,7 @@ class BaseCallbackModule(CallbackBase):
             remote_addr=result._host.address,
             task=result._task,
             res=result._result,
-            event_loop=result._task.loop if hasattr(result._task, 'loop') else None,
+            event_loop=self._get_event_loop(result._task),
         )
         with self.capture_event_data('runner_on_ok', **event_data):
             super(BaseCallbackModule, self).v2_runner_on_ok(result)
@@ -341,7 +352,7 @@ class BaseCallbackModule(CallbackBase):
             res=result._result,
             task=result._task,
             ignore_errors=ignore_errors,
-            event_loop=result._task.loop if hasattr(result._task, 'loop') else None,
+            event_loop=self._get_event_loop(result._task),
         )
         with self.capture_event_data('runner_on_failed', **event_data):
             super(BaseCallbackModule, self).v2_runner_on_failed(result, ignore_errors)
@@ -351,7 +362,7 @@ class BaseCallbackModule(CallbackBase):
             host=result._host.get_name(),
             remote_addr=result._host.address,
             task=result._task,
-            event_loop=result._task.loop if hasattr(result._task, 'loop') else None,
+            event_loop=self._get_event_loop(result._task),
         )
         with self.capture_event_data('runner_on_skipped', **event_data):
             super(BaseCallbackModule, self).v2_runner_on_skipped(result)

@@ -28,6 +28,7 @@ CALLBACK = os.path.splitext(os.path.basename(__file__))[0]
 PLUGINS = os.path.dirname(__file__)
 with mock.patch.dict(os.environ, {'ANSIBLE_STDOUT_CALLBACK': CALLBACK,
                                   'ANSIBLE_CALLBACK_PLUGINS': PLUGINS}):
+    from ansible import __version__ as ANSIBLE_VERSION
     from ansible.cli.playbook import PlaybookCLI
     from ansible.executor.playbook_executor import PlaybookExecutor
     from ansible.inventory.manager import InventoryManager
@@ -35,7 +36,7 @@ with mock.patch.dict(os.environ, {'ANSIBLE_STDOUT_CALLBACK': CALLBACK,
     from ansible.vars.manager import VariableManager

 # Add awx/lib to sys.path so we can use the plugin
-path = os.path.abspath(os.path.join(PLUGINS, '..', '..'))
+path = os.path.abspath(os.path.join(PLUGINS, '..', '..', 'lib'))
 if path not in sys.path:
     sys.path.insert(0, path)

@@ -176,6 +177,19 @@ def test_callback_plugin_receives_events(executor, cache, event, playbook):
       when: item != "SENSITIVE-SKIPPED"
       failed_when: item == "SENSITIVE-FAILED"
       ignore_errors: yes
 '''}, # noqa, NOTE: with_items will be deprecated in 2.9
+    {'loop.yml': '''
+- name: loop tasks should be suppressed with no_log
+  connection: local
+  hosts: all
+  gather_facts: no
+  tasks:
+    - shell: echo {{ item }}
+      no_log: true
+      loop: [ "SENSITIVE", "SENSITIVE-SKIPPED", "SENSITIVE-FAILED" ]
+      when: item != "SENSITIVE-SKIPPED"
+      failed_when: item == "SENSITIVE-FAILED"
+      ignore_errors: yes
+'''}, # noqa
 ])
 def test_callback_plugin_no_log_filters(executor, cache, playbook):
@@ -186,14 +200,16 @@ def test_callback_plugin_no_log_filters(executor, cache, playbook):

 @pytest.mark.parametrize('playbook', [
     {'no_log_on_ok.yml': '''
-- name: args should not be logged when task-level no_log is set
+- name: args should not be logged when no_log is set at the task or module level
   connection: local
   hosts: all
   gather_facts: no
   tasks:
-    - shell: echo "SENSITIVE"
+    - shell: echo "PUBLIC"
+    - shell: echo "PRIVATE"
       no_log: true
+    - uri: url=https://example.org username="PUBLIC" password="PRIVATE"
+    - copy: content="PRIVATE" dest="/tmp/tmp_no_log"
 '''}, # noqa
 ])
 def test_callback_plugin_task_args_leak(executor, cache, playbook):
@@ -204,15 +220,15 @@ def test_callback_plugin_task_args_leak(executor, cache, playbook):

     # task 1
     assert events[2]['event'] == 'playbook_on_task_start'
-    assert 'SENSITIVE' in events[2]['event_data']['task_args']
     assert events[3]['event'] == 'runner_on_ok'
-    assert 'SENSITIVE' in events[3]['event_data']['task_args']

     # task 2 no_log=True
     assert events[4]['event'] == 'playbook_on_task_start'
-    assert events[4]['event_data']['task_args'] == "the output has been hidden due to the fact that 'no_log: true' was specified for this result" # noqa
     assert events[5]['event'] == 'runner_on_ok'
-    assert events[5]['event_data']['task_args'] == "the output has been hidden due to the fact that 'no_log: true' was specified for this result" # noqa
+    assert 'PUBLIC' in json.dumps(cache.items())
+    assert 'PRIVATE' not in json.dumps(cache.items())
+    # make sure playbook was successful, so all tasks were hit
+    assert not events[-1]['event_data']['failures'], 'Unexpected playbook execution failure'


 @pytest.mark.parametrize('playbook', [
@ -284,3 +300,54 @@ def test_callback_plugin_saves_custom_stats(executor, cache, playbook):
|
||||
assert json.load(f) == {'foo': 'bar'}
|
||||
finally:
|
||||
shutil.rmtree(os.path.join(private_data_dir))
|
||||
|
||||
|
||||
@pytest.mark.parametrize('playbook', [
|
||||
{'handle_playbook_on_notify.yml': '''
|
||||
- name: handle playbook_on_notify events properly
|
||||
connection: local
|
||||
hosts: all
|
||||
handlers:
|
||||
- name: my_handler
|
||||
debug: msg="My Handler"
|
||||
tasks:
|
||||
- debug: msg="My Task"
|
||||
changed_when: true
|
||||
notify:
|
||||
- my_handler
|
||||
'''}, # noqa
|
||||
])
|
||||
@pytest.mark.skipif(ANSIBLE_VERSION < '2.5', reason="v2_playbook_on_notify doesn't work before ansible 2.5")
|
||||
def test_callback_plugin_records_notify_events(executor, cache, playbook):
|
||||
executor.run()
|
||||
assert len(cache)
|
||||
notify_events = [x[1] for x in cache.items() if x[1]['event'] == 'playbook_on_notify']
|
||||
assert len(notify_events) == 1
|
||||
assert notify_events[0]['event_data']['handler'] == 'my_handler'
|
||||
assert notify_events[0]['event_data']['host'] == 'localhost'
|
||||
assert notify_events[0]['event_data']['task'] == 'debug'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('playbook', [
|
||||
{'no_log_module_with_var.yml': '''
|
||||
- name: ensure that module-level secrets are redacted
|
||||
connection: local
|
||||
hosts: all
|
||||
vars:
|
||||
- pw: SENSITIVE
|
||||
tasks:
|
||||
- uri:
|
||||
url: https://example.org
|
||||
user: john-jacob-jingleheimer-schmidt
|
||||
password: "{{ pw }}"
|
||||
'''}, # noqa
|
||||
])
|
||||
def test_module_level_no_log(executor, cache, playbook):
|
||||
# https://github.com/ansible/tower/issues/1101
|
||||
# It's possible for `no_log=True` to be defined at the _module_ level,
|
||||
# e.g., for the URI module password parameter
|
||||
# This test ensures that we properly redact those
|
||||
executor.run()
|
||||
assert len(cache)
|
||||
assert 'john-jacob-jingleheimer-schmidt' in json.dumps(cache.items())
|
||||
assert 'SENSITIVE' not in json.dumps(cache.items())
|
||||
|
||||
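For context on the module-level case this test exercises: Ansible modules can mark individual parameters as sensitive in their own argument_spec, and the engine censors those values before any callback plugin sees the result. A minimal sketch of such a declaration (hypothetical module, not part of this diff):

# Hypothetical Ansible module snippet illustrating parameter-level no_log.
# The no_log=True flag on `password` is what triggers the redaction the test
# above asserts on; it is independent of task-level `no_log`.
from ansible.module_utils.basic import AnsibleModule

module = AnsibleModule(
    argument_spec=dict(
        url=dict(type='str', required=True),
        user=dict(type='str'),
        password=dict(type='str', no_log=True),  # censored in events/logs
    )
)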
@ -33,8 +33,7 @@ from awx.main.models.mixins import ResourceMixin
from awx.conf.license import LicenseForbids, feature_enabled

__all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_errors',
           'user_accessible_objects', 'consumer_access',
           'user_admin_role',]
           'user_accessible_objects', 'consumer_access',]

logger = logging.getLogger('awx.main.access')

@ -78,18 +77,6 @@ def register_access(model_class, access_class):
    access_registry[model_class] = access_class


@property
def user_admin_role(self):
    role = Role.objects.get(
        content_type=ContentType.objects.get_for_model(User),
        object_id=self.id,
        role_field='admin_role'
    )
    # Trick the user.admin_role so that the signal filtering for RBAC activity stream works as intended.
    role.parents = [org.admin_role.pk for org in self.organizations]
    return role


def user_accessible_objects(user, role_name):
    return ResourceMixin._accessible_objects(User, user, role_name)

@ -344,14 +331,13 @@ class BaseAccess(object):
            if 'write' not in getattr(self.user, 'oauth_scopes', ['write']):
                user_capabilities[display_method] = False  # Read tokens cannot take any actions
                continue
            elif display_method == 'copy' and isinstance(obj, JobTemplate):
            elif display_method in ['copy', 'start', 'schedule'] and isinstance(obj, JobTemplate):
                if obj.validation_errors:
                    user_capabilities[display_method] = False
                    continue
            elif isinstance(obj, (WorkflowJobTemplate, WorkflowJob)):
                if not feature_enabled('workflows'):
                    user_capabilities[display_method] = (display_method == 'delete')
                    continue
            elif isinstance(obj, (WorkflowJobTemplate, WorkflowJob)) and (not feature_enabled('workflows')):
                user_capabilities[display_method] = (display_method == 'delete')
                continue
            elif display_method == 'copy' and isinstance(obj, WorkflowJobTemplate) and obj.organization_id is None:
                user_capabilities[display_method] = self.user.is_superuser
                continue
@ -395,7 +381,7 @@ class BaseAccess(object):
            elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob, CustomInventoryScript)):
                user_capabilities['delete'] = user_capabilities['edit']
                continue
            elif display_method == 'copy' and isinstance(obj, (Group, Host, CustomInventoryScript)):
            elif display_method == 'copy' and isinstance(obj, (Group, Host)):
                user_capabilities['copy'] = user_capabilities['edit']
                continue

@ -469,15 +455,6 @@ class InstanceGroupAccess(BaseAccess):
    def can_change(self, obj, data):
        return self.user.is_superuser

    def can_delete(self, obj):
        return self.user.is_superuser

    def can_attach(self, obj, sub_obj, relationship, *args, **kwargs):
        return self.user.is_superuser

    def can_unattach(self, obj, sub_obj, relationship, *args, **kwargs):
        return self.user.is_superuser


class UserAccess(BaseAccess):
    '''
@ -539,12 +516,42 @@ class UserAccess(BaseAccess):
            return False
        return bool(self.user == obj or self.can_admin(obj, data))

    def user_membership_roles(self, u):
        return Role.objects.filter(
            content_type=ContentType.objects.get_for_model(Organization),
            role_field__in=[
                'admin_role', 'member_role',
                'execute_role', 'project_admin_role', 'inventory_admin_role',
                'credential_admin_role', 'workflow_admin_role',
                'notification_admin_role'
            ],
            members=u
        )

    def is_all_org_admin(self, u):
        return not self.user_membership_roles(u).exclude(
            ancestors__in=self.user.roles.filter(role_field='admin_role')
        ).exists()

    def user_is_orphaned(self, u):
        return not self.user_membership_roles(u).exists()

    @check_superuser
    def can_admin(self, obj, data):
    def can_admin(self, obj, data, allow_orphans=False):
        if not settings.MANAGE_ORGANIZATION_AUTH:
            return False
        return Organization.objects.filter(Q(member_role__members=obj) | Q(admin_role__members=obj),
                                           Q(admin_role__members=self.user)).exists()
        if obj.is_superuser or obj.is_system_auditor:
            # must be superuser to admin users with system roles
            return False
        if self.user_is_orphaned(obj):
            if not allow_orphans:
                # in these cases only superusers can modify orphan users
                return False
            return not obj.roles.all().exclude(
                content_type=ContentType.objects.get_for_model(User)
            ).filter(ancestors__in=self.user.roles.all()).exists()
        else:
            return self.is_all_org_admin(obj)

    def can_delete(self, obj):
        if obj == self.user:
@ -580,69 +587,77 @@ class UserAccess(BaseAccess):

class OAuth2ApplicationAccess(BaseAccess):
    '''
    I can read, change or delete OAuth applications when:
    I can read, change or delete OAuth 2 applications when:
     - I am a superuser.
     - I am the admin of the organization of the user of the application.
     - I am the user of the application.
    I can create OAuth applications when:
     - I am a user in the organization of the application.
    I can create OAuth 2 applications when:
     - I am a superuser.
     - I am the admin of the organization of the user of the application.
     - I am the admin of the organization of the application.
    '''

    model = OAuth2Application
    select_related = ('user',)

    def filtered_queryset(self):
        accessible_users = User.objects.filter(
            pk__in=self.user.admin_of_organizations.values('member_role__members')
        ) | User.objects.filter(pk=self.user.pk)
        return self.model.objects.filter(user__in=accessible_users)
        return self.model.objects.filter(organization__in=self.user.organizations)

    def can_change(self, obj, data):
        return self.can_read(obj)
        return self.user.is_superuser or self.check_related('organization', Organization, data, obj=obj,
                                                            role_field='admin_role', mandatory=True)

    def can_delete(self, obj):
        return self.can_read(obj)
        return self.user.is_superuser or obj.organization in self.user.admin_of_organizations

    def can_add(self, data):
        if self.user.is_superuser:
            return True
        user = get_object_from_data('user', User, data)
        if not user:
            return False
        return set(self.user.admin_of_organizations.all()) & set(user.organizations.all())
            return True
        if not data:
            return Organization.accessible_objects(self.user, 'admin_role').exists()
        return self.check_related('organization', Organization, data, role_field='admin_role', mandatory=True)


class OAuth2TokenAccess(BaseAccess):
    '''
    I can read, change or delete an OAuth2 token when:
    I can read, change or delete an app token when:
     - I am a superuser.
     - I am the admin of the organization of the user of the token.
     - I am the admin of the organization of the application of the token.
     - I am the user of the token.
    I can create an OAuth token when:
    I can create an OAuth2 app token when:
     - I have the read permission of the related application.
    I can read, change or delete a personal token when:
     - I am the user of the token
     - I am the superuser
    I can create an OAuth2 Personal Access Token when:
     - I am a user. But I can only create a PAT for myself.
    '''

    model = OAuth2AccessToken

    select_related = ('user', 'application')

    def filtered_queryset(self):
        accessible_users = User.objects.filter(
            pk__in=self.user.admin_of_organizations.values('member_role__members')
        ) | User.objects.filter(pk=self.user.pk)
        return self.model.objects.filter(user__in=accessible_users)

    def can_change(self, obj, data):
        return self.can_read(obj)


    def filtered_queryset(self):
        org_access_qs = Organization.objects.filter(
            Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
        return self.model.objects.filter(application__organization__in=org_access_qs) | self.model.objects.filter(user__id=self.user.pk)

    def can_delete(self, obj):
        return self.can_read(obj)
        if (self.user.is_superuser) | (obj.user == self.user):
            return True
        elif not obj.application:
            return False
        return self.user in obj.application.organization.admin_role

    def can_change(self, obj, data):
        return self.can_delete(obj)

    def can_add(self, data):
        app = get_object_from_data('application', OAuth2Application, data)
        if not app:
            return True
        return OAuth2ApplicationAccess(self.user).can_read(app)
        if 'application' in data:
            app = get_object_from_data('application', OAuth2Application, data)
            if app is None:
                return True
            return OAuth2ApplicationAccess(self.user).can_read(app)
        return True

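The access rules above are normally exercised through the `check_user_access` helper exported from this module rather than by instantiating the access classes directly. A rough usage sketch, under the assumption that the helper dispatches 'add' to `OAuth2ApplicationAccess.can_add` (the user and organization values are made up for illustration):

# Illustrative only: resolve whether some_user may create an OAuth 2
# application in organization 42, per OAuth2ApplicationAccess.can_add above.
from awx.main.access import check_user_access
from awx.main.models import OAuth2Application

allowed = check_user_access(
    some_user,                  # hypothetical User instance
    OAuth2Application, 'add',
    {'organization': 42},       # checked against admin_role membership
)
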
class OrganizationAccess(BaseAccess):
@ -1450,24 +1465,7 @@ class JobAccess(BaseAccess):

        if not data:  # So the browseable API will work
            return True
        if not self.user.is_superuser:
            return False


        add_data = dict(data.items())

        # If a job template is provided, the user should have read access to it.
        if data and data.get('job_template', None):
            job_template = get_object_from_data('job_template', JobTemplate, data)
            add_data.setdefault('inventory', job_template.inventory.pk)
            add_data.setdefault('project', job_template.project.pk)
            add_data.setdefault('job_type', job_template.job_type)
            if job_template.credential:
                add_data.setdefault('credential', job_template.credential.pk)
        else:
            job_template = None

        return True
        return self.user.is_superuser

    def can_change(self, obj, data):
        return (obj.status == 'new' and
@ -1861,7 +1859,7 @@ class WorkflowJobTemplateAccess(BaseAccess):
        if self.user.is_superuser:
            return True

        return (self.check_related('organization', Organization, data, role_field='workflow_admin_field', obj=obj) and
        return (self.check_related('organization', Organization, data, role_field='workflow_admin_role', obj=obj) and
                self.user in obj.admin_role)

    def can_delete(self, obj):
@ -2080,7 +2078,7 @@ class ProjectUpdateEventAccess(BaseAccess):

    def filtered_queryset(self):
        return self.model.objects.filter(
            Q(project_update__in=ProjectUpdate.accessible_pk_qs(self.user, 'read_role')))
            Q(project_update__project__in=Project.accessible_pk_qs(self.user, 'read_role')))

    def can_add(self, data):
        return False
@ -2101,7 +2099,7 @@ class InventoryUpdateEventAccess(BaseAccess):

    def filtered_queryset(self):
        return self.model.objects.filter(
            Q(inventory_update__in=InventoryUpdate.accessible_pk_qs(self.user, 'read_role')))
            Q(inventory_update__inventory_source__inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role')))

    def can_add(self, data):
        return False
@ -2375,7 +2373,7 @@ class ActivityStreamAccess(BaseAccess):
    model = ActivityStream
    prefetch_related = ('organization', 'user', 'inventory', 'host', 'group',
                        'inventory_update', 'credential', 'credential_type', 'team',
                        'ad_hoc_command',
                        'ad_hoc_command', 'o_auth2_application', 'o_auth2_access_token',
                        'notification_template', 'notification', 'label', 'role', 'actor',
                        'schedule', 'custom_inventory_script', 'unified_job_template',
                        'workflow_job_template_node',)
@ -2418,9 +2416,13 @@ class ActivityStreamAccess(BaseAccess):
        jt_set = JobTemplate.accessible_objects(self.user, 'read_role')
        team_set = Team.accessible_objects(self.user, 'read_role')
        wfjt_set = WorkflowJobTemplate.accessible_objects(self.user, 'read_role')
        app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
        token_set = OAuth2TokenAccess(self.user).filtered_queryset()

        return qs.filter(
            Q(ad_hoc_command__inventory__in=inventory_set) |
            Q(o_auth2_application__in=app_set) |
            Q(o_auth2_access_token__in=token_set) |
            Q(user__in=auditing_orgs.values('member_role__members')) |
            Q(user=self.user) |
            Q(organization__in=auditing_orgs) |
@ -2523,6 +2525,14 @@ class RoleAccess(BaseAccess):
        if not check_user_access(self.user, sub_obj_resource.__class__, 'read', sub_obj_resource):
            return False

        # Being a user in the member_role or admin_role of an organization grants
        # administrators of that Organization the ability to edit that user. To prevent
        # unwanted escalations, let's ensure that the Organization administrator has the ability
        # to admin the user being added to the role.
        if isinstance(obj.content_object, Organization) and obj.role_field in ['member_role', 'admin_role']:
            if not UserAccess(self.user).can_admin(sub_obj, None, allow_orphans=True):
                return False

        if isinstance(obj.content_object, ResourceMixin) and \
           self.user in obj.content_object.admin_role:
            return True

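The guard added to RoleAccess above is easiest to see with a concrete, hypothetical scenario (the two user objects below are made up):

# Hypothetical walk-through of the RoleAccess escalation guard above.
# org_admin administers Org A; target_user is a system auditor.
access = UserAccess(org_admin)

# can_admin refuses because target_user holds a system role, so attaching
# target_user to Org A's admin_role via RoleAccess is likewise refused.
assert access.can_admin(target_user, None, allow_orphans=True) is False
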
@ -135,6 +135,27 @@ register(
    required=False,
)

register(
    'ALLOW_JINJA_IN_EXTRA_VARS',
    field_class=fields.ChoiceField,
    choices=[
        ('always', _('Always')),
        ('never', _('Never')),
        ('template', _('Only On Job Template Definitions')),
    ],
    required=True,
    label=_('When can extra variables contain Jinja templates?'),
    help_text=_(
        'Ansible allows variable substitution via the Jinja2 templating '
        'language for --extra-vars. This poses a potential security '
        'risk where Tower users with the ability to specify extra vars at job '
        'launch time can use Jinja2 templates to run arbitrary Python. It is '
        'recommended that this value be set to "template" or "never".'
    ),
    category=_('Jobs'),
    category_slug='jobs',
)

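To make the risk described in that help text concrete, here is the general shape of a malicious extra variable (purely illustrative; the lookup target is made up):

# Illustrative only. If Jinja templating of extra vars is allowed, a value
# like this is rendered by Ansible at job runtime and can read arbitrary
# data on the controller host via template lookups.
extra_vars = {
    'innocuous_looking_var': "{{ lookup('pipe', 'cat /etc/passwd') }}",
}
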
register(
    'AWX_PROOT_ENABLED',
    field_class=fields.BooleanField,
@ -341,7 +362,8 @@ register(
    label=_('Per-Host Ansible Fact Cache Timeout'),
    help_text=_('Maximum time, in seconds, that stored Ansible facts are considered valid since '
                'the last time they were modified. Only valid, non-stale, facts will be accessible by '
                'a playbook. Note, this does not influence the deletion of ansible_facts from the database.'),
                'a playbook. Note, this does not influence the deletion of ansible_facts from the database. '
                'Use a value of 0 to indicate that no timeout should be imposed.'),
    category=_('Jobs'),
    category_slug='jobs',
)

@ -15,7 +15,11 @@ CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'sate
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
PRIVILEGE_ESCALATION_METHODS = [
    ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),
    ('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas'))]
    ('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas')),
    ('enable', _('Enable')), ('doas', _('Doas')),
]
CHOICES_PRIVILEGE_ESCALATION_METHODS = [('', _('None'))] + PRIVILEGE_ESCALATION_METHODS
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL
TOKEN_CENSOR = '************'

@ -1,6 +1,9 @@
# Copyright (c) 2018 Ansible by Red Hat
# All Rights Reserved.

import six


# Celery does not respect exception type when using a serializer different than pickle;
# and awx uses the json serializer
# https://github.com/celery/celery/issues/3586
@ -9,7 +12,7 @@
class _AwxTaskError():
    def build_exception(self, task, message=None):
        if message is None:
            message = "Execution error running {}".format(task.log_format)
            message = six.text_type("Execution error running {}").format(task.log_format)
        e = Exception(message)
        e.task = task
        e.is_awx_task_error = True
@ -17,7 +20,7 @@ class _AwxTaskError():

    def TaskCancel(self, task, rc):
        """Canceled flag caused run_pexpect to kill the job run"""
        message = "{} was canceled (rc={})".format(task.log_format, rc)
        message = six.text_type("{} was canceled (rc={})").format(task.log_format, rc)
        e = self.build_exception(task, message)
        e.rc = rc
        e.awx_task_error_type = "TaskCancel"
@ -25,7 +28,7 @@ class _AwxTaskError():

    def TaskError(self, task, rc):
        """Userspace error (non-zero exit code) in run_pexpect subprocess"""
        message = "{} encountered an error (rc={}), please see task stdout for details.".format(task.log_format, rc)
        message = six.text_type("{} encountered an error (rc={}), please see task stdout for details.").format(task.log_format, rc)
        e = self.build_exception(task, message)
        e.rc = rc
        e.awx_task_error_type = "TaskError"

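Because the json serializer collapses these back into plain Exception instances on the consumer side (per the celery issue cited above), callers have to duck-type on the attributes build_exception sets rather than on the exception class. A hedged sketch of what that consumption looks like:

# Sketch of consuming the errors above after celery has round-tripped them
# through the json serializer; only the attached attributes survive.
def handle_task_failure(exc):
    if getattr(exc, 'is_awx_task_error', False):
        # e.g. "TaskCancel" or "TaskError", set by _AwxTaskError above
        kind = getattr(exc, 'awx_task_error_type', 'unknown')
        rc = getattr(exc, 'rc', None)
        print('AWX task failed ({}, rc={}): {}'.format(kind, rc, exc))
    else:
        raise exc  # not ours; re-raise
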
@ -101,7 +101,7 @@ def run_pexpect(args, cwd, env, logfile,

    child = pexpect.spawn(
        args[0], args[1:], cwd=cwd, env=env, ignore_sighup=True,
        encoding='utf-8', echo=False,
        encoding='utf-8', echo=False, use_poll=True
    )
    child.logfile_read = logfile
    canceled = False

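For background on enabling use_poll here: pexpect defaults to select(), which cannot watch file descriptors numbered 1024 or higher, while poll() has no such ceiling; that matters for a long-running service holding many sockets open. A minimal standalone sketch of the same pattern (the command is illustrative):

# Minimal sketch: poll()-backed pexpect avoids the select() FD_SETSIZE
# (1024) limit in processes with many open descriptors.
import pexpect

child = pexpect.spawn('echo', ['hello'], encoding='utf-8',
                      echo=False, use_poll=True)  # use_poll needs a recent pexpect (assumption: >= 4.6)
child.expect(pexpect.EOF)
print(child.before.strip())  # -> hello
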
@ -4,12 +4,13 @@
# Python
import copy
import json
import operator
import re
import six
import urllib

from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError
from jinja2.exceptions import UndefinedError, TemplateSyntaxError

# Django
from django.core import exceptions as django_exceptions
@ -42,19 +43,24 @@ from rest_framework import serializers

# AWX
from awx.main.utils.filters import SmartFilter
from awx.main.utils.encryption import encrypt_value, decrypt_value, get_encryption_key
from awx.main.validators import validate_ssh_private_key
from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role
from awx.main.constants import CHOICES_PRIVILEGE_ESCALATION_METHODS
from awx.main import utils


__all__ = ['AutoOneToOneField', 'ImplicitRoleField', 'JSONField', 'SmartFilterField']
__all__ = ['AutoOneToOneField', 'ImplicitRoleField', 'JSONField',
           'SmartFilterField', 'update_role_parentage_for_instance',
           'is_implicit_parent']


# Provide a (better) custom error message for enum jsonschema validation
def __enum_validate__(validator, enums, instance, schema):
    if instance not in enums:
        yield jsonschema.exceptions.ValidationError(
            _("'%s' is not one of ['%s']") % (instance, "', '".join(enums))
            _("'{value}' is not one of ['{allowed_values}']").format(
                value=instance, allowed_values="', '".join(enums))
        )


@ -180,6 +186,23 @@ def is_implicit_parent(parent_role, child_role):
    return False


def update_role_parentage_for_instance(instance):
    '''update_role_parentage_for_instance
    updates the parents listing for all the roles
    of a given instance if they have changed
    '''
    for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
        cur_role = getattr(instance, implicit_role_field.name)
        new_parents = implicit_role_field._resolve_parent_roles(instance)
        cur_role.parents.set(new_parents)
        new_parents_list = list(new_parents)
        new_parents_list.sort()
        new_parents_json = json.dumps(new_parents_list)
        if cur_role.implicit_parents != new_parents_json:
            cur_role.implicit_parents = new_parents_json
            cur_role.save()

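A quick sketch of where a helper like update_role_parentage_for_instance is useful (the team and organization objects below are hypothetical): after an object is re-homed to a different organization, its implicit roles need their parent links recomputed.

# Hypothetical usage of the helper above: re-home a team, then let the
# helper re-point its implicit roles (admin_role, member_role, ...) at the
# new organization's roles.
team.organization = other_org
team.save()
update_role_parentage_for_instance(team)
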
class ImplicitRoleDescriptor(ForwardManyToOneDescriptor):
    pass

@ -273,43 +296,37 @@ class ImplicitRoleField(models.ForeignKey):
        Role_ = utils.get_current_apps().get_model('main', 'Role')
        ContentType_ = utils.get_current_apps().get_model('contenttypes', 'ContentType')
        ct_id = ContentType_.objects.get_for_model(instance).id

        Model = utils.get_current_apps().get_model('main', instance.__class__.__name__)
        latest_instance = Model.objects.get(pk=instance.pk)

        with batch_role_ancestor_rebuilding():
            # Create any missing role objects
            missing_roles = []
            for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
                cur_role = getattr(instance, implicit_role_field.name, None)
            for implicit_role_field in getattr(latest_instance.__class__, '__implicit_role_fields'):
                cur_role = getattr(latest_instance, implicit_role_field.name, None)
                if cur_role is None:
                    missing_roles.append(
                        Role_(
                            role_field=implicit_role_field.name,
                            content_type_id=ct_id,
                            object_id=instance.id
                            object_id=latest_instance.id
                        )
                    )

            if len(missing_roles) > 0:
                Role_.objects.bulk_create(missing_roles)
                updates = {}
                role_ids = []
                for role in Role_.objects.filter(content_type_id=ct_id, object_id=instance.id):
                    setattr(instance, role.role_field, role)
                for role in Role_.objects.filter(content_type_id=ct_id, object_id=latest_instance.id):
                    setattr(latest_instance, role.role_field, role)
                    updates[role.role_field] = role.id
                    role_ids.append(role.id)
                type(instance).objects.filter(pk=instance.pk).update(**updates)
                type(latest_instance).objects.filter(pk=latest_instance.pk).update(**updates)
                Role.rebuild_role_ancestor_list(role_ids, [])

            # Update parentage if necessary
            for implicit_role_field in getattr(instance.__class__, '__implicit_role_fields'):
                cur_role = getattr(instance, implicit_role_field.name)
                original_parents = set(json.loads(cur_role.implicit_parents))
                new_parents = implicit_role_field._resolve_parent_roles(instance)
                cur_role.parents.remove(*list(original_parents - new_parents))
                cur_role.parents.add(*list(new_parents - original_parents))
                new_parents_list = list(new_parents)
                new_parents_list.sort()
                new_parents_json = json.dumps(new_parents_list)
                if cur_role.implicit_parents != new_parents_json:
                    cur_role.implicit_parents = new_parents_json
                    cur_role.save()
            update_role_parentage_for_instance(latest_instance)
            instance.refresh_from_db()


    def _resolve_parent_roles(self, instance):
@ -391,7 +408,25 @@ class JSONSchemaField(JSONBField):
            error.message = re.sub(r'\bu(\'|")', r'\1', error.message)

            if error.validator == 'pattern' and 'error' in error.schema:
                error.message = error.schema['error'] % error.instance
                error.message = six.text_type(error.schema['error']).format(instance=error.instance)
            elif error.validator == 'type':
                expected_type = error.validator_value
                if expected_type == 'object':
                    expected_type = 'dict'
                if error.path:
                    error.message = _(
                        '{type} provided in relative path {path}, expected {expected_type}'
                    ).format(path=list(error.path), type=type(error.instance).__name__,
                             expected_type=expected_type)
                else:
                    error.message = _(
                        '{type} provided, expected {expected_type}'
                    ).format(path=list(error.path), type=type(error.instance).__name__,
                             expected_type=expected_type)
            elif error.validator == 'additionalProperties' and hasattr(error, 'path'):
                error.message = _(
                    'Schema validation error in relative path {path} ({error})'
                ).format(path=list(error.path), error=error.message)
            errors.append(error)

        if errors:
@ -474,6 +509,9 @@ class CredentialInputField(JSONSchemaField):
        properties = {}
        for field in model_instance.credential_type.inputs.get('fields', []):
            field = field.copy()
            if field['type'] == 'become_method':
                field.pop('type')
                field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
            properties[field['id']] = field
            if field.get('choices', []):
                field['enum'] = field['choices'][:]
@ -523,7 +561,7 @@ class CredentialInputField(JSONSchemaField):
            format_checker=self.format_checker
        ).iter_errors(decrypted_values):
            if error.validator == 'pattern' and 'error' in error.schema:
                error.message = error.schema['error'] % error.instance
                error.message = six.text_type(error.schema['error']).format(instance=error.instance)
            if error.validator == 'dependencies':
                # replace the default error messaging w/ a better i18n string
                # I wish there was a better way to determine the parameters of
@ -617,7 +655,7 @@ class CredentialTypeInputField(JSONSchemaField):
                'items': {
                    'type': 'object',
                    'properties': {
                        'type': {'enum': ['string', 'boolean']},
                        'type': {'enum': ['string', 'boolean', 'become_method']},
                        'format': {'enum': ['ssh_private_key']},
                        'choices': {
                            'type': 'array',
@ -628,7 +666,7 @@ class CredentialTypeInputField(JSONSchemaField):
                        'id': {
                            'type': 'string',
                            'pattern': '^[a-zA-Z_]+[a-zA-Z0-9_]*$',
                            'error': '%s is an invalid variable name',
                            'error': '{instance} is an invalid variable name',
                        },
                        'label': {'type': 'string'},
                        'help_text': {'type': 'string'},
@ -678,10 +716,22 @@ class CredentialTypeInputField(JSONSchemaField):
                # If no type is specified, default to string
                field['type'] = 'string'

            if field['type'] == 'become_method':
                if not model_instance.managed_by_tower:
                    raise django_exceptions.ValidationError(
                        _('become_method is a reserved type name'),
                        code='invalid',
                        params={'value': value},
                    )
                else:
                    field.pop('type')
                    field['choices'] = CHOICES_PRIVILEGE_ESCALATION_METHODS

            for key in ('choices', 'multiline', 'format', 'secret',):
                if key in field and field['type'] != 'string':
                    raise django_exceptions.ValidationError(
                        _('%s not allowed for %s type (%s)' % (key, field['type'], field['id'])),
                        _('{sub_key} not allowed for {element_type} type ({element_id})'.format(
                            sub_key=key, element_type=field['type'], element_id=field['id'])),
                        code='invalid',
                        params={'value': value},
                    )
@ -778,7 +828,15 @@ class CredentialTypeInjectorField(JSONSchemaField):
                ).from_string(tmpl).render(valid_namespace)
            except UndefinedError as e:
                raise django_exceptions.ValidationError(
                    _('%s uses an undefined field (%s)') % (key, e),
                    _('{sub_key} uses an undefined field ({error_msg})').format(
                        sub_key=key, error_msg=e),
                    code='invalid',
                    params={'value': value},
                )
            except TemplateSyntaxError as e:
                raise django_exceptions.ValidationError(
                    _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(
                        sub_key=key, type=type_, error_msg=e),
                    code='invalid',
                    params={'value': value},
                )
@ -801,3 +859,16 @@ class AskForField(models.BooleanField):
            # self.name will be set by the model metaclass, not this field
            raise Exception('Corresponding allows_field cannot be accessed until model is initialized.')
        return self._allows_field


class OAuth2ClientSecretField(models.CharField):

    def get_db_prep_value(self, value, connection, prepared=False):
        return super(OAuth2ClientSecretField, self).get_db_prep_value(
            encrypt_value(value), connection, prepared
        )

    def from_db_value(self, value, expression, connection, context):
        if value and value.startswith('$encrypted$'):
            return decrypt_value(get_encryption_key('value', pk=None), value)
        return value

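The field above gives transparent at-rest encryption: values are encrypted on their way into the database and decrypted on the way out, so application code only ever handles plaintext. A rough picture of the round-trip (the create call is illustrative, not a real signature):

# Sketch of the transparent round-trip provided by OAuth2ClientSecretField.
app = OAuth2Application.objects.create(...)   # hypothetical create call
secret = app.client_secret                    # plaintext in Python code
# In the database, the stored column value is the encrypted form, which is
# expected to carry the '$encrypted$' prefix that from_db_value() checks.
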
@ -403,9 +403,7 @@ class Command(BaseCommand):
            _eager_fields=dict(
                job_args=json.dumps(sys.argv),
                job_env=dict(os.environ.items()),
                job_cwd=os.getcwd(),
                execution_node=settings.CLUSTER_HOST_ID,
                instance_group=InstanceGroup.objects.get(name='tower'))
                job_cwd=os.getcwd())
        )

        # FIXME: Wait or raise error if inventory is being updated by another

@ -2,7 +2,6 @@
# All Rights Reserved

from awx.main.models import Instance
from awx.main.utils.pglock import advisory_lock
from django.conf import settings

from django.db import transaction
@ -27,15 +26,12 @@ class Command(BaseCommand):
    def _register_hostname(self, hostname):
        if not hostname:
            return
        with advisory_lock('instance_registration_%s' % hostname):
            instance = Instance.objects.filter(hostname=hostname)
            if instance.exists():
                print("Instance already registered {}".format(instance[0].hostname))
                return
            instance = Instance(uuid=self.uuid, hostname=hostname)
            instance.save()
        print('Successfully registered instance {}'.format(hostname))
        self.changed = True
        (changed, instance) = Instance.objects.register(uuid=self.uuid, hostname=hostname)
        if changed:
            print('Successfully registered instance {}'.format(hostname))
        else:
            print("Instance already registered {}".format(instance.hostname))
        self.changed = changed

    @transaction.atomic
    def handle(self, **options):

@ -1,6 +1,7 @@
# Copyright (c) 2017 Ansible Tower by Red Hat
# All Rights Reserved.
import sys
import six

from awx.main.utils.pglock import advisory_lock
from awx.main.models import Instance, InstanceGroup
@ -8,6 +9,13 @@ from awx.main.models import Instance, InstanceGroup
from django.core.management.base import BaseCommand, CommandError


class InstanceNotFound(Exception):
    def __init__(self, message, changed, *args, **kwargs):
        self.message = message
        self.changed = changed
        super(InstanceNotFound, self).__init__(*args, **kwargs)


class Command(BaseCommand):

    def add_arguments(self, parser):
@ -22,51 +30,95 @@ class Command(BaseCommand):
        parser.add_argument('--instance_minimum', dest='instance_minimum', type=int, default=0,
                            help='The minimum number of instances that will be retained for this group from available instances')

    def get_create_update_instance_group(self, queuename, instance_percent, instance_min):
        ig = InstanceGroup.objects.filter(name=queuename)
        created = False
        changed = False

        (ig, created) = InstanceGroup.objects.get_or_create(name=queuename)
        if ig.policy_instance_percentage != instance_percent:
            ig.policy_instance_percentage = instance_percent
            changed = True
        if ig.policy_instance_minimum != instance_min:
            ig.policy_instance_minimum = instance_min
            changed = True

        return (ig, created, changed)

    def update_instance_group_controller(self, ig, controller):
        changed = False
        control_ig = None

        if controller:
            control_ig = InstanceGroup.objects.filter(name=controller).first()

        if control_ig and ig.controller_id != control_ig.pk:
            ig.controller = control_ig
            ig.save()
            changed = True

        return (control_ig, changed)

    def add_instances_to_group(self, ig, hostname_list):
        changed = False

        instance_list_unique = set([x.strip() for x in hostname_list if x])
        instances = []
        for inst_name in instance_list_unique:
            instance = Instance.objects.filter(hostname=inst_name)
            if instance.exists():
                instances.append(instance[0])
            else:
                raise InstanceNotFound(six.text_type("Instance does not exist: {}").format(inst_name), changed)

        ig.instances = instances

        instance_list_before = set(ig.policy_instance_list)
        instance_list_after = set(instance_list_unique)
        if len(instance_list_before) != len(instance_list_after) or \
                len(set(instance_list_before) - set(instance_list_after)) != 0:
            changed = True

        ig.policy_instance_list = list(instance_list_unique)
        ig.save()
        return (instances, changed)

    def handle(self, **options):
        instance_not_found_err = None
        queuename = options.get('queuename')
        if not queuename:
            raise CommandError("Specify `--queuename` to use this command.")
        changed = False
        ctrl = options.get('controller')
        inst_per = options.get('instance_percent')
        inst_min = options.get('instance_minimum')
        hostname_list = []
        if options.get('hostnames'):
            hostname_list = options.get('hostnames').split(",")

        with advisory_lock('instance_group_registration_%s' % queuename):
            ig = InstanceGroup.objects.filter(name=queuename)
            control_ig = None
            if options.get('controller'):
                control_ig = InstanceGroup.objects.filter(name=options.get('controller')).first()
            if ig.exists():
                print("Instance Group already registered {}".format(ig[0].name))
                ig = ig[0]
                if control_ig and ig.controller_id != control_ig.pk:
                    ig.controller = control_ig
                    ig.save()
                    print("Set controller group {} on {}.".format(control_ig.name, ig.name))
                    changed = True
            else:
                print("Creating instance group {}".format(queuename))
                ig = InstanceGroup(name=queuename,
                                   policy_instance_percentage=options.get('instance_percent'),
                                   policy_instance_minimum=options.get('instance_minimum'))
                if control_ig:
                    ig.controller = control_ig
                ig.save()
                changed = True
            hostname_list = []
            if options.get('hostnames'):
                hostname_list = options.get('hostnames').split(",")
            instance_list = [x.strip() for x in hostname_list if x]
            for inst_name in instance_list:
                instance = Instance.objects.filter(hostname=inst_name)
                if instance.exists() and instance[0] not in ig.instances.all():
                    ig.instances.add(instance[0])
                    print("Added instance {} to {}".format(instance[0].hostname, ig.name))
                    changed = True
                elif not instance.exists():
                    print("Instance does not exist: {}".format(inst_name))
                    if changed:
                        print('(changed: True)')
                    sys.exit(1)
                else:
                    print("Instance already registered {}".format(instance[0].hostname))
            ig.policy_instance_list = instance_list
            ig.save()
        if changed:
            print('(changed: True)')
            (ig, created, changed) = self.get_create_update_instance_group(queuename, inst_per, inst_min)
            if created:
                print(six.text_type("Creating instance group {}".format(ig.name)))
            elif not created:
                print(six.text_type("Instance Group already registered {}").format(ig.name))

            if ctrl:
                (ig_ctrl, changed) = self.update_instance_group_controller(ig, ctrl)
                if changed:
                    print(six.text_type("Set controller group {} on {}.").format(ctrl, queuename))

            try:
                (instances, changed) = self.add_instances_to_group(ig, hostname_list)
                for i in instances:
                    print(six.text_type("Added instance {} to {}").format(i.hostname, ig.name))
            except InstanceNotFound as e:
                instance_not_found_err = e

        if changed:
            print('(changed: True)')

        if instance_not_found_err:
            print(instance_not_found_err.message)
            sys.exit(1)


@ -8,6 +8,7 @@ from django.db import models
from django.conf import settings

from awx.main.utils.filters import SmartFilter
from awx.main.utils.pglock import advisory_lock

__all__ = ['HostManager', 'InstanceManager', 'InstanceGroupManager']

@ -86,6 +87,24 @@ class InstanceManager(models.Manager):
            return node[0]
        raise RuntimeError("No instance found with the current cluster host id")

    def register(self, uuid=None, hostname=None):
        if not uuid:
            uuid = settings.SYSTEM_UUID
        if not hostname:
            hostname = settings.CLUSTER_HOST_ID
        with advisory_lock('instance_registration_%s' % hostname):
            instance = self.filter(hostname=hostname)
            if instance.exists():
                return (False, instance[0])
            instance = self.create(uuid=uuid, hostname=hostname)
        return (True, instance)

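register() is written to be idempotent: the advisory lock serializes concurrent registrations, and the boolean in the returned tuple reports whether a row was actually created. Typical use, mirroring how the register_instance management command above consumes it (the hostname is illustrative):

# Illustrative call; the (changed, instance) tuple is the contract shown
# in the management command diff above.
changed, instance = Instance.objects.register(hostname='awx-node-1')
if changed:
    print('Successfully registered instance {}'.format(instance.hostname))
else:
    print('Instance already registered {}'.format(instance.hostname))
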
    def get_or_register(self):
        if settings.AWX_AUTO_DEPROVISION_INSTANCES:
            return self.register()
        else:
            return (False, self.me())

    def active_count(self):
        """Return count of active Tower nodes for licensing."""
        return self.all().count()
@ -94,6 +113,9 @@ class InstanceManager(models.Manager):
        # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
        return "tower"

    def all_non_isolated(self):
        return self.exclude(rampart_groups__controller__isnull=False)


class InstanceGroupManager(models.Manager):
    """A custom manager class for the Instance model.
@ -156,8 +178,6 @@ class InstanceGroupManager(models.Manager):
            if t.status == 'waiting' or not t.execution_node:
                # Subtract capacity from any peer groups that share instances
                if not t.instance_group:
                    logger.warning('Excluded %s from capacity algorithm '
                                   '(missing instance_group).', t.log_format)
                    impacted_groups = []
                elif t.instance_group.name not in ig_ig_mapping:
                    # Waiting job in group with 0 capacity has no collateral impact

@ -7,11 +7,12 @@ from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion

# TODO: Squash all of these migrations with '0024_v330_add_oauth_activity_stream_registrar'

class Migration(migrations.Migration):

    dependencies = [
        ('main', '0024_v330_add_oauth_activity_stream_registrar'),
        ('main', '0025_v330_add_oauth_activity_stream_registrar'),
    ]

    operations = [
@ -8,7 +8,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('main', '0025_v330_delete_authtoken'),
        ('main', '0026_v330_delete_authtoken'),
    ]

    operations = [
@ -10,7 +10,7 @@ from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('main', '0026_v330_emitted_events'),
        ('main', '0027_v330_emitted_events'),
    ]

    operations = [
23
awx/main/migrations/0030_v330_modify_application.py
Normal file
@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-16 20:25
from __future__ import unicode_literals

import awx.main.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0028_v330_add_tower_verify'),
    ]

    operations = [
        migrations.AddField(
            model_name='oauth2application',
            name='organization',
            field=models.ForeignKey(help_text='Organization containing this application.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='applications', to='main.Organization'),
        ),
    ]
22
awx/main/migrations/0031_v330_encrypt_oauth2_secret.py
Normal file
@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-03 20:48
from __future__ import unicode_literals

import awx.main.fields
from django.db import migrations
import oauth2_provider.generators


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0030_v330_modify_application'),
    ]

    operations = [
        migrations.AlterField(
            model_name='oauth2application',
            name='client_secret',
            field=awx.main.fields.OAuth2ClientSecretField(blank=True, db_index=True, default=oauth2_provider.generators.generate_client_secret, max_length=1024),
        ),
    ]
21
awx/main/migrations/0032_v330_polymorphic_delete.py
Normal file
@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-06 13:44
from __future__ import unicode_literals

import awx.main.utils.polymorphic
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0031_v330_encrypt_oauth2_secret'),
    ]

    operations = [
        migrations.AlterField(
            model_name='unifiedjob',
            name='instance_group',
            field=models.ForeignKey(blank=True, default=None, help_text='The Rampart/Instance group the job was run under', null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, to='main.InstanceGroup'),
        ),
    ]
50
awx/main/migrations/0033_v330_oauth_help_text.py
Normal file
@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-11 15:54
from __future__ import unicode_literals

import awx.main.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import oauth2_provider.generators

# TODO: Squash all of these migrations with '0024_v330_add_oauth_activity_stream_registrar'

class Migration(migrations.Migration):

    dependencies = [
        ('main', '0032_v330_polymorphic_delete'),
    ]

    operations = [
        migrations.AlterField(
            model_name='oauth2accesstoken',
            name='scope',
            field=models.TextField(blank=True, help_text="Allowed scopes, further restricts user's permissions."),
        ),
        migrations.AlterField(
            model_name='oauth2accesstoken',
            name='user',
            field=models.ForeignKey(blank=True, help_text='The user representing the token owner', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='main_oauth2accesstoken', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='oauth2application',
            name='authorization_grant_type',
            field=models.CharField(choices=[(b'authorization-code', 'Authorization code'), (b'implicit', 'Implicit'), (b'password', 'Resource owner password-based'), (b'client-credentials', 'Client credentials')], help_text='The Grant type the user must use to acquire tokens for this application.', max_length=32),
        ),
        migrations.AlterField(
            model_name='oauth2application',
            name='client_secret',
            field=awx.main.fields.OAuth2ClientSecretField(blank=True, db_index=True, default=oauth2_provider.generators.generate_client_secret, help_text='Used for more stringent verification of access to an application when creating a token.', max_length=1024),
        ),
        migrations.AlterField(
            model_name='oauth2application',
            name='client_type',
            field=models.CharField(choices=[(b'confidential', 'Confidential'), (b'public', 'Public')], help_text='Set to Public or Confidential depending on how secure the client device is.', max_length=32),
        ),
        migrations.AlterField(
            model_name='oauth2application',
            name='skip_authorization',
            field=models.BooleanField(default=False, help_text='Set True to skip authorization step for completely trusted applications.'),
        ),
    ]
22
awx/main/migrations/0034_v330_delete_user_role.py
Normal file
@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-02 19:18
from __future__ import unicode_literals

from django.db import migrations

from awx.main.migrations import ActivityStreamDisabledMigration
from awx.main.migrations._rbac import delete_all_user_roles, rebuild_role_hierarchy
from awx.main.migrations import _migration_utils as migration_utils


class Migration(ActivityStreamDisabledMigration):

    dependencies = [
        ('main', '0033_v330_oauth_help_text'),
    ]

    operations = [
        migrations.RunPython(migration_utils.set_current_apps_for_migrations),
        migrations.RunPython(delete_all_user_roles),
        migrations.RunPython(rebuild_role_hierarchy),
    ]
21
awx/main/migrations/0035_v330_more_oauth2_help_text.py
Normal file
@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-17 18:36
from __future__ import unicode_literals

from django.db import migrations, models

# TODO: Squash all of these migrations with '0024_v330_add_oauth_activity_stream_registrar'

class Migration(migrations.Migration):

    dependencies = [
        ('main', '0034_v330_delete_user_role'),
    ]

    operations = [
        migrations.AlterField(
            model_name='oauth2accesstoken',
            name='scope',
            field=models.TextField(blank=True, help_text="Allowed scopes, further restricts user's permissions. Must be a simple space-separated string with allowed scopes ['read', 'write']."),
        ),
    ]
@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# AWX
from awx.main.migrations import _credentialtypes as credentialtypes

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0035_v330_more_oauth2_help_text'),
    ]

    operations = [
        migrations.RunPython(credentialtypes.remove_become_methods),
    ]
18
awx/main/migrations/0037_v330_remove_legacy_fact_cleanup.py
Normal file
@ -0,0 +1,18 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# AWX
from awx.main.migrations._scan_jobs import remove_legacy_fact_cleanup

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0036_v330_credtype_remove_become_methods'),
    ]

    operations = [
        migrations.RunPython(remove_legacy_fact_cleanup),
    ]
@ -197,3 +197,9 @@ def add_azure_cloud_environment_field(apps, schema_editor):
                                                       name='Microsoft Azure Resource Manager')
    azure_rm_credtype.inputs = CredentialType.defaults.get('azure_rm')().inputs
    azure_rm_credtype.save()


def remove_become_methods(apps, schema_editor):
    become_credtype = CredentialType.objects.filter(kind='ssh', managed_by_tower=True).first()
    become_credtype.inputs = CredentialType.defaults.get('ssh')().inputs
    become_credtype.save()

@ -500,3 +500,12 @@ def infer_credential_org_from_team(apps, schema_editor):
            _update_credential_parents(cred.deprecated_team.organization, cred)
        except IntegrityError:
            logger.info("Organization<{}> credential for old Team<{}> credential already created".format(cred.deprecated_team.organization.pk, cred.pk))


def delete_all_user_roles(apps, schema_editor):
    ContentType = apps.get_model('contenttypes', "ContentType")
    Role = apps.get_model('main', "Role")
    User = apps.get_model('auth', "User")
    user_content_type = ContentType.objects.get_for_model(User)
    for role in Role.objects.filter(content_type=user_content_type).iterator():
        role.delete()

@ -102,3 +102,11 @@ def remove_scan_type_nodes(apps, schema_editor):
            prompts.pop('job_type')
            node.char_prompts = prompts
            node.save()


def remove_legacy_fact_cleanup(apps, schema_editor):
    SystemJobTemplate = apps.get_model('main', 'SystemJobTemplate')
    for job in SystemJobTemplate.objects.filter(job_type='cleanup_facts').all():
        for sched in job.schedules.all():
            sched.delete()
        job.delete()

@ -56,7 +56,6 @@ User.add_to_class('get_queryset', get_user_queryset)
User.add_to_class('can_access', check_user_access)
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
User.add_to_class('accessible_objects', user_accessible_objects)
User.add_to_class('admin_role', user_admin_role)


@property

@ -66,7 +66,6 @@ class ActivityStream(models.Model):
    label = models.ManyToManyField("Label", blank=True)
    role = models.ManyToManyField("Role", blank=True)
    instance_group = models.ManyToManyField("InstanceGroup", blank=True)

    o_auth2_application = models.ManyToManyField("OAuth2Application", blank=True)
    o_auth2_access_token = models.ManyToManyField("OAuth2AccessToken", blank=True)


@ -256,6 +256,7 @@ class PrimordialModel(CreatedModifiedModel):

    def save(self, *args, **kwargs):
        update_fields = kwargs.get('update_fields', [])
        fields_are_specified = bool(update_fields)
        user = get_current_user()
        if user and not user.id:
            user = None
@ -263,9 +264,14 @@ class PrimordialModel(CreatedModifiedModel):
            self.created_by = user
            if 'created_by' not in update_fields:
                update_fields.append('created_by')
        self.modified_by = user
        if 'modified_by' not in update_fields:
            update_fields.append('modified_by')
        # Update modified_by if not called with update_fields, or if any
        # editable fields are present in update_fields
        if (
                (not fields_are_specified) or
                any(getattr(self._meta.get_field(name), 'editable', True) for name in update_fields)):
            self.modified_by = user
            if 'modified_by' not in update_fields:
                update_fields.append('modified_by')
        super(PrimordialModel, self).save(*args, **kwargs)

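The effect of the update_fields guard above, in hypothetical terms: a save() restricted purely to non-editable bookkeeping fields no longer bumps modified_by, while an unrestricted save still does.

# Hypothetical illustration of the modified_by guard added above; `obj` and
# its fields are made up for the example.
obj.save(update_fields=['status'])  # editable field -> modified_by updated
obj.save()                          # no update_fields -> modified_by updated
# A save touching only fields declared editable=False on the model would
# skip the modified_by bookkeeping entirely.
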
def clean_description(self):
|
||||
|
||||
@ -2,13 +2,12 @@
|
||||
# All Rights Reserved.
|
||||
from collections import OrderedDict
|
||||
import functools
|
||||
import json
|
||||
import logging
|
||||
import operator
|
||||
import os
|
||||
import re
|
||||
import stat
|
||||
import tempfile
|
||||
import six
|
||||
|
||||
# Jinja2
|
||||
from jinja2 import Template
|
||||
@ -21,11 +20,11 @@ from django.utils.encoding import force_text
|
||||
|
||||
# AWX
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
|
||||
from awx.main.fields import (ImplicitRoleField, CredentialInputField,
|
||||
CredentialTypeInputField,
|
||||
CredentialTypeInjectorField)
|
||||
from awx.main.utils import decrypt_field
|
||||
from awx.main.utils.safe_yaml import safe_dump
|
||||
from awx.main.validators import validate_ssh_private_key
|
||||
from awx.main.models.base import * # noqa
|
||||
from awx.main.models.mixins import ResourceMixin
|
||||
@ -34,6 +33,7 @@ from awx.main.models.rbac import (
|
||||
ROLE_SINGLETON_SYSTEM_AUDITOR,
|
||||
)
|
||||
from awx.main.utils import encrypt_field
|
||||
from awx.main.constants import CHOICES_PRIVILEGE_ESCALATION_METHODS
|
||||
from . import injectors as builtin_injectors
|
||||
|
||||
__all__ = ['Credential', 'CredentialType', 'V1Credential', 'build_safe_env']
|
||||
@ -164,7 +164,7 @@ class V1Credential(object):
|
||||
max_length=32,
|
||||
blank=True,
|
||||
default='',
|
||||
choices=[('', _('None'))] + PRIVILEGE_ESCALATION_METHODS,
|
||||
choices=CHOICES_PRIVILEGE_ESCALATION_METHODS,
|
||||
help_text=_('Privilege escalation method.')
|
||||
),
|
||||
'become_username': models.CharField(
|
||||
@@ -415,9 +415,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
        type_alias = self.credential_type_id
        if self.kind == 'vault' and self.inputs.get('vault_id', None):
            if display:
                fmt_str = '{} (id={})'
                fmt_str = six.text_type('{} (id={})')
            else:
                fmt_str = '{}_{}'
                fmt_str = six.text_type('{}_{}')
            return fmt_str.format(type_alias, self.inputs.get('vault_id'))
        return str(type_alias)
@@ -445,6 +445,7 @@ class CredentialType(CommonModelNameNotUnique):
        'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',
        'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE',
        'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS', 'FACT_QUEUE',
        'AWX_HOST', 'PROJECT_REVISION'
    ))

    class Meta:
@@ -514,7 +515,7 @@ class CredentialType(CommonModelNameNotUnique):
            if field['id'] == field_id:
                if 'choices' in field:
                    return field['choices'][0]
                return {'string': '', 'boolean': False}[field['type']]
                return {'string': '', 'boolean': False, 'become_method': ''}[field['type']]

    @classmethod
    def default(cls, f):
@@ -630,7 +631,7 @@ class CredentialType(CommonModelNameNotUnique):
            data = Template(file_tmpl).render(**namespace)
            _, path = tempfile.mkstemp(dir=private_data_dir)
            with open(path, 'w') as f:
                f.write(data)
                f.write(data.encode('utf-8'))
            os.chmod(path, stat.S_IRUSR | stat.S_IWUSR)

            # determine if filename indicates single file or many
@@ -651,25 +652,20 @@ class CredentialType(CommonModelNameNotUnique):
        if 'INVENTORY_UPDATE_ID' not in env:
            # awx-manage inventory_update does not support extra_vars via -e
            extra_vars = {}
            safe_extra_vars = {}
            for var_name, tmpl in self.injectors.get('extra_vars', {}).items():
                extra_vars[var_name] = Template(tmpl).render(**namespace)
                safe_extra_vars[var_name] = Template(tmpl).render(**safe_namespace)

            def build_extra_vars_file(vars, private_dir):
                handle, path = tempfile.mkstemp(dir = private_dir)
                f = os.fdopen(handle, 'w')
                f.write(json.dumps(vars))
                f.write(safe_dump(vars))
                f.close()
                os.chmod(path, stat.S_IRUSR)
                return path

            path = build_extra_vars_file(extra_vars, private_data_dir)
            if extra_vars:
                path = build_extra_vars_file(extra_vars, private_data_dir)
                args.extend(['-e', '@%s' % path])

            if safe_extra_vars:
                path = build_extra_vars_file(safe_extra_vars, private_data_dir)
                safe_args.extend(['-e', '@%s' % path])
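The injector above renders each extra_vars template and hands the result to the command line as '-e @<path>'. A self-contained sketch of the same temp-file pattern (json.dumps stands in here for AWX's safe_dump helper):

    import json
    import os
    import stat
    import tempfile

    def write_vars_file(extra_vars, private_dir):
        # Owner-read-only file inside the job's private data directory.
        handle, path = tempfile.mkstemp(dir=private_dir)
        with os.fdopen(handle, 'w') as f:
            f.write(json.dumps(extra_vars))
        os.chmod(path, stat.S_IRUSR)
        return path

    # args.extend(['-e', '@%s' % write_vars_file({'region': 'us-east-1'}, job_dir)])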
@@ -706,8 +702,7 @@ def ssh(cls):
    }, {
        'id': 'become_method',
        'label': 'Privilege Escalation Method',
        'choices': map(operator.itemgetter(0),
                       V1Credential.FIELDS['become_method'].choices),
        'type': 'become_method',
        'help_text': ('Specify a method for "become" operations. This is '
                      'equivalent to specifying the --become-method '
                      'Ansible parameter.')
@@ -2,7 +2,7 @@ import datetime
import logging

from django.conf import settings
from django.db import models
from django.db import models, DatabaseError
from django.utils.dateparse import parse_datetime
from django.utils.timezone import utc
from django.utils.translation import ugettext_lazy as _
@@ -15,6 +15,8 @@ from awx.main.utils import ignore_inventory_computed_fields

analytics_logger = logging.getLogger('awx.analytics.job_events')

logger = logging.getLogger('awx.main.models.events')


__all__ = ['JobEvent', 'ProjectUpdateEvent', 'AdHocCommandEvent',
           'InventoryUpdateEvent', 'SystemJobEvent']
@@ -235,12 +237,6 @@ class BasePlaybookEvent(CreatedModifiedModel):
            if res.get('changed', False):
                self.changed = True
                updated_fields.add('changed')
            # If we're not in verbose mode, wipe out any module arguments.
            invocation = res.get('invocation', None)
            if isinstance(invocation, dict) and self.job_verbosity == 0 and 'module_args' in invocation:
                event_data['res']['invocation']['module_args'] = ''
                self.event_data = event_data
                updated_fields.add('event_data')
        if self.event == 'playbook_on_stats':
            try:
                failures_dict = event_data.get('failures', {})
@@ -329,7 +325,10 @@ class BasePlaybookEvent(CreatedModifiedModel):

                hostnames = self._hostnames()
                self._update_host_summary_from_stats(hostnames)
                self.job.inventory.update_computed_fields()
                try:
                    self.job.inventory.update_computed_fields()
                except DatabaseError:
                    logger.exception('Computed fields database error saving event {}'.format(self.pk))


@@ -447,6 +446,9 @@ class JobEvent(BasePlaybookEvent):

    def _update_host_summary_from_stats(self, hostnames):
        with ignore_inventory_computed_fields():
            if not self.job or not self.job.inventory:
                logger.info('Event {} missing job or inventory, host summaries not updated'.format(self.pk))
                return
            qs = self.job.inventory.hosts.filter(name__in=hostnames)
            job = self.job
            for host in hostnames:
@@ -85,6 +85,10 @@ class Instance(models.Model):
        # NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
        return "awx"

    @property
    def jobs_running(self):
        return UnifiedJob.objects.filter(execution_node=self.hostname, status__in=('running', 'waiting',)).count()

    def is_lost(self, ref_time=None, isolated=False):
        if ref_time is None:
            ref_time = now()
@@ -188,9 +192,8 @@ class JobOrigin(models.Model):

@receiver(post_save, sender=InstanceGroup)
def on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs):
    if created:
        from awx.main.tasks import apply_cluster_membership_policies
        connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
    from awx.main.tasks import apply_cluster_membership_policies
    connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())


@receiver(post_save, sender=Instance)
@@ -233,7 +233,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
            return {}
        else:
            all_group = data.setdefault('all', dict())
            smart_hosts_qs = self.hosts.all()
            smart_hosts_qs = self.hosts.filter(**hosts_q).all()
            smart_hosts = list(smart_hosts_qs.values_list('name', flat=True))
            all_group['hosts'] = smart_hosts
    else:
@@ -517,7 +517,7 @@ class SmartInventoryMembership(BaseModel):
    host = models.ForeignKey('Host', related_name='+', on_delete=models.CASCADE)


class Host(CommonModelNameNotUnique):
class Host(CommonModelNameNotUnique, RelatedJobsMixin):
    '''
    A managed node
    '''
@@ -703,6 +703,12 @@ class Host(CommonModelNameNotUnique):
        self._update_host_smart_inventory_memeberships()
        super(Host, self).delete(*args, **kwargs)

    '''
    RelatedJobsMixin
    '''
    def _get_related_jobs(self):
        return self.inventory._get_related_jobs()


class Group(CommonModelNameNotUnique, RelatedJobsMixin):
    '''
@@ -538,7 +538,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
        for virtualenv in (
            self.job_template.custom_virtualenv if self.job_template else None,
            self.project.custom_virtualenv,
            self.project.organization.custom_virtualenv
            self.project.organization.custom_virtualenv if self.project.organization else None
        ):
            if virtualenv:
                return virtualenv
@@ -6,9 +6,13 @@ from django.core.validators import RegexValidator
from django.db import models
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from django.conf import settings

# Django OAuth Toolkit
from oauth2_provider.models import AbstractApplication, AbstractAccessToken
from oauth2_provider.generators import generate_client_secret

from awx.main.fields import OAuth2ClientSecretField


DATA_URI_RE = re.compile(r'.*')  # FIXME
@@ -21,6 +25,24 @@ class OAuth2Application(AbstractApplication):
    class Meta:
        app_label = 'main'
        verbose_name = _('application')

    CLIENT_CONFIDENTIAL = "confidential"
    CLIENT_PUBLIC = "public"
    CLIENT_TYPES = (
        (CLIENT_CONFIDENTIAL, _("Confidential")),
        (CLIENT_PUBLIC, _("Public")),
    )

    GRANT_AUTHORIZATION_CODE = "authorization-code"
    GRANT_IMPLICIT = "implicit"
    GRANT_PASSWORD = "password"
    GRANT_CLIENT_CREDENTIALS = "client-credentials"
    GRANT_TYPES = (
        (GRANT_AUTHORIZATION_CODE, _("Authorization code")),
        (GRANT_IMPLICIT, _("Implicit")),
        (GRANT_PASSWORD, _("Resource owner password-based")),
        (GRANT_CLIENT_CREDENTIALS, _("Client credentials")),
    )

    description = models.TextField(
        default='',
@@ -31,6 +53,34 @@ class OAuth2Application(AbstractApplication):
        editable=False,
        validators=[RegexValidator(DATA_URI_RE)],
    )
    organization = models.ForeignKey(
        'Organization',
        related_name='applications',
        help_text=_('Organization containing this application.'),
        on_delete=models.CASCADE,
        null=True,
    )
    client_secret = OAuth2ClientSecretField(
        max_length=1024,
        blank=True,
        default=generate_client_secret,
        db_index=True,
        help_text=_('Used for more stringent verification of access to an application when creating a token.')
    )
    client_type = models.CharField(
        max_length=32,
        choices=CLIENT_TYPES,
        help_text=_('Set to Public or Confidential depending on how secure the client device is.')
    )
    skip_authorization = models.BooleanField(
        default=False,
        help_text=_('Set True to skip authorization step for completely trusted applications.')
    )
    authorization_grant_type = models.CharField(
        max_length=32,
        choices=GRANT_TYPES,
        help_text=_('The Grant type the user must use for acquire tokens for this application.')
    )


class OAuth2AccessToken(AbstractAccessToken):
@@ -39,6 +89,14 @@ class OAuth2AccessToken(AbstractAccessToken):
        app_label = 'main'
        verbose_name = _('access token')

    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        blank=True,
        null=True,
        related_name="%(app_label)s_%(class)s",
        help_text=_('The user representing the token owner')
    )
    description = models.CharField(
        max_length=200,
        default='',
@@ -49,6 +107,10 @@ class OAuth2AccessToken(AbstractAccessToken):
        default=None,
        editable=False,
    )
    scope = models.TextField(
        blank=True,
        help_text=_('Allowed scopes, further restricts user\'s permissions. Must be a simple space-separated string with allowed scopes [\'read\', \'write\'].')
    )

    def is_valid(self, scopes=None):
        valid = super(OAuth2AccessToken, self).is_valid(scopes)
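Per the help_text above, a token's scope is a plain space-separated string limited to 'read' and 'write'. A hedged sketch of how such a value could be checked (this is illustrative, not AWX's actual validator):

    def scope_is_valid(scope):
        # 'read write' -> {'read', 'write'}; reject anything else.
        requested = set(scope.split())
        return bool(requested) and requested <= {'read', 'write'}

    # scope_is_valid('read write')  -> True
    # scope_is_valid('read admin')  -> False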
@@ -97,7 +97,7 @@ class Schedule(CommonModel, LaunchTimeConfig):
    @classmethod
    def rrulestr(cls, rrule, **kwargs):
        """
        Apply our own custom rrule parsing logic to support TZID=
        Apply our own custom rrule parsing requirements
        """
        kwargs['forceset'] = True
        x = dateutil.rrule.rrulestr(rrule, **kwargs)
@@ -108,15 +108,6 @@ class Schedule(CommonModel, LaunchTimeConfig):
                    'A valid TZID must be provided (e.g., America/New_York)'
                )

        if r._dtstart and r._until:
            # If https://github.com/dateutil/dateutil/pull/634 ever makes
            # it into a python-dateutil release, we could remove this block.
            if all((
                r._dtstart.tzinfo != dateutil.tz.tzlocal(),
                r._until.tzinfo != dateutil.tz.tzutc(),
            )):
                raise ValueError('RRULE UNTIL values must be specified in UTC')

        if 'MINUTELY' in rrule or 'HOURLY' in rrule:
            try:
                first_event = x[0]
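The forceset=True flag used above makes dateutil always return an rruleset, which is why x[0] can safely be read as the first occurrence. A small standalone example (the rule string is illustrative):

    import dateutil.rrule

    rules = dateutil.rrule.rrulestr(
        'DTSTART:20180601T120000Z\nRRULE:FREQ=DAILY;COUNT=3',
        forceset=True,
    )
    first_event = rules[0]  # datetime of the first scheduled run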
@@ -38,6 +38,7 @@ from awx.main.utils import (
    copy_model_by_class, copy_m2m_relationships,
    get_type_for_model, parse_yaml_or_json
)
from awx.main.utils import polymorphic
from awx.main.constants import ACTIVE_STATES, CAN_CANCEL
from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.consumers import emit_channel_notification
@@ -89,9 +90,6 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio

    ALL_STATUS_CHOICES = OrderedDict(PROJECT_STATUS_CHOICES + INVENTORY_SOURCE_STATUS_CHOICES + JOB_TEMPLATE_STATUS_CHOICES + DEPRECATED_STATUS_CHOICES).items()

    # NOTE: Working around a django-polymorphic issue: https://github.com/django-polymorphic/django-polymorphic/issues/229
    base_manager_name = 'base_objects'

    class Meta:
        app_label = 'main'
        # unique_together here is intentionally commented out. Please make sure sub-classes of this model
@@ -265,14 +263,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
            if field not in update_fields:
                update_fields.append(field)
        # Do the actual save.
        try:
            super(UnifiedJobTemplate, self).save(*args, **kwargs)
        except ValueError:
            # A fix for https://trello.com/c/S4rU1F21
            # Does not resolve the root cause. Tis merely a bandaid.
            if 'scm_delete_on_next_update' in update_fields:
                update_fields.remove('scm_delete_on_next_update')
                super(UnifiedJobTemplate, self).save(*args, **kwargs)
        super(UnifiedJobTemplate, self).save(*args, **kwargs)


    def _get_current_status(self):
@@ -536,9 +527,6 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique

    PASSWORD_FIELDS = ('start_args',)

    # NOTE: Working around a django-polymorphic issue: https://github.com/django-polymorphic/django-polymorphic/issues/229
    base_manager_name = 'base_objects'

    class Meta:
        app_label = 'main'

@@ -669,7 +657,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
        blank=True,
        null=True,
        default=None,
        on_delete=models.SET_NULL,
        on_delete=polymorphic.SET_NULL,
        help_text=_('The Rampart/Instance group the job was run under'),
    )
    credentials = models.ManyToManyField(
@@ -727,7 +715,10 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
    def _get_parent_instance(self):
        return getattr(self, self._get_parent_field_name(), None)

    def _update_parent_instance_no_save(self, parent_instance, update_fields=[]):
    def _update_parent_instance_no_save(self, parent_instance, update_fields=None):
        if update_fields is None:
            update_fields = []

        def parent_instance_set(key, val):
            setattr(parent_instance, key, val)
            if key not in update_fields:
@@ -1268,10 +1259,6 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
        if not all(opts.values()):
            return False

        # Sanity check: If we are running unit tests, then run synchronously.
        if getattr(settings, 'CELERY_UNIT_TEST', False):
            return self.start(None, None, **kwargs)

        # Save the pending status, and inform the SocketIO listener.
        self.update_fields(start_args=json.dumps(kwargs), status='pending')
        self.websocket_emit_status("pending")
@@ -389,10 +389,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
        return prompted_fields, rejected_fields, errors_dict

    def can_start_without_user_input(self):
        return not bool(
            self.variables_needed_to_start or
            self.node_templates_missing() or
            self.node_prompts_rejected())
        return not bool(self.variables_needed_to_start)

    def node_templates_missing(self):
        return [node.pk for node in self.workflow_job_template_nodes.filter(
@@ -477,7 +474,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio

    @property
    def preferred_instance_groups(self):
        return self.global_instance_groups
        return []

    '''
    A WorkflowJob is a virtual job. It doesn't result in a celery task.
@@ -6,8 +6,7 @@ REPLACE_STR = '$encrypted$'

class UriCleaner(object):
    REPLACE_STR = REPLACE_STR
    # https://regex101.com/r/sV2dO2/2
    SENSITIVE_URI_PATTERN = re.compile(ur'(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?\xab\xbb\u201c\u201d\u2018\u2019]))', re.MULTILINE)  # NOQA
    SENSITIVE_URI_PATTERN = re.compile(ur'(\w+:(\/?\/?)[^\s]+)', re.MULTILINE)  # NOQA

    @staticmethod
    def remove_sensitive(cleartext):
@@ -17,38 +16,46 @@ class UriCleaner(object):
            match = UriCleaner.SENSITIVE_URI_PATTERN.search(redactedtext, text_index)
            if not match:
                break
            o = urlparse.urlsplit(match.group(1))
            if not o.username and not o.password:
                if o.netloc and ":" in o.netloc:
                    # Handle the special case url http://username:password that can appear in SCM url
                    # on account of a bug? in ansible redaction
                    (username, password) = o.netloc.split(':')
            try:
                uri_str = match.group(1)
                # May raise a ValueError if invalid URI for one reason or another
                o = urlparse.urlsplit(uri_str)

                if not o.username and not o.password:
                    if o.netloc and ":" in o.netloc:
                        # Handle the special case url http://username:password that can appear in SCM url
                        # on account of a bug? in ansible redaction
                        (username, password) = o.netloc.split(':')
                    else:
                        text_index += len(match.group(1))
                        continue
                else:
                    text_index += len(match.group(1))
                    continue
                else:
                    username = o.username
                    password = o.password
                username = o.username
                password = o.password

            # Given a python MatchObject, with respect to redactedtext, find and
            # replace the first occurance of username and the first and second
            # occurance of password
                # Given a python MatchObject, with respect to redactedtext, find and
                # replace the first occurance of username and the first and second
                # occurance of password

            uri_str = redactedtext[match.start():match.end()]
            if username:
                uri_str = uri_str.replace(username, UriCleaner.REPLACE_STR, 1)
            # 2, just in case the password is $encrypted$
            if password:
                uri_str = uri_str.replace(password, UriCleaner.REPLACE_STR, 2)
                uri_str = redactedtext[match.start():match.end()]
                if username:
                    uri_str = uri_str.replace(username, UriCleaner.REPLACE_STR, 1)
                # 2, just in case the password is $encrypted$
                if password:
                    uri_str = uri_str.replace(password, UriCleaner.REPLACE_STR, 2)

            t = redactedtext[:match.start()] + uri_str
            text_index = len(t)
            if (match.end() < len(redactedtext)):
                t += redactedtext[match.end():]
                t = redactedtext[:match.start()] + uri_str
                text_index = len(t)
                if (match.end() < len(redactedtext)):
                    t += redactedtext[match.end():]

            redactedtext = t
            if text_index >= len(redactedtext):
                text_index = len(redactedtext) - 1
                redactedtext = t
                if text_index >= len(redactedtext):
                    text_index = len(redactedtext) - 1
            except ValueError:
                # Invalid URI, redact the whole URI to be safe
                redactedtext = redactedtext[:match.start()] + UriCleaner.REPLACE_STR + redactedtext[match.end():]
                text_index = match.start() + len(UriCleaner.REPLACE_STR)

        return redactedtext
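The rewritten remove_sensitive above keeps its original behavior (replace the first username occurrence and up to two password occurrences) but now redacts the whole match when urlsplit rejects it. A simplified Python 3 re-telling of the happy path, with an illustrative URL:

    from urllib.parse import urlsplit

    REPLACE_STR = '$encrypted$'

    def redact_uri(uri):
        o = urlsplit(uri)
        redacted = uri
        if o.username:
            redacted = redacted.replace(o.username, REPLACE_STR, 1)
        if o.password:
            # 2, just in case the password is $encrypted$
            redacted = redacted.replace(o.password, REPLACE_STR, 2)
        return redacted

    # redact_uri('https://alice:hunter2@scm.example.com/repo.git')
    # -> 'https://$encrypted$:$encrypted$@scm.example.com/repo.git'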
@@ -153,8 +153,7 @@ class TaskManager():
                queue_name = queue_name[1 if len(queue_name) > 1 else 0]
                queues[queue_name] = active_tasks
        else:
            if not hasattr(settings, 'CELERY_UNIT_TEST'):
                return (None, None)
            return (None, None)

        return (active_task_queues, queues)

@@ -260,7 +259,8 @@ class TaskManager():
        else:
            if type(task) is WorkflowJob:
                task.status = 'running'
                if not task.supports_isolation() and rampart_group.controller_id:
                logger.info('Transitioning %s to running status.', task.log_format)
            elif not task.supports_isolation() and rampart_group.controller_id:
                # non-Ansible jobs on isolated instances run on controller
                task.instance_group = rampart_group.controller
                logger.info('Submitting isolated %s to queue %s via %s.',
@@ -272,17 +272,22 @@ class TaskManager():
        task.celery_task_id = str(uuid.uuid4())
        task.save()

        self.consume_capacity(task, rampart_group.name)
        if rampart_group is not None:
            self.consume_capacity(task, rampart_group.name)

        def post_commit():
            task.websocket_emit_status(task.status)
            if task.status != 'failed':
                task.start_celery_task(opts, error_callback=error_handler, success_callback=success_handler, queue=rampart_group.name)
                if rampart_group is not None:
                    actual_queue=rampart_group.name
                else:
                    actual_queue=settings.CELERY_DEFAULT_QUEUE
                task.start_celery_task(opts, error_callback=error_handler, success_callback=success_handler, queue=actual_queue)

        connection.on_commit(post_commit)

    def process_running_tasks(self, running_tasks):
        map(lambda task: self.graph[task.instance_group.name]['graph'].add_job(task), running_tasks)
        map(lambda task: self.graph[task.instance_group.name]['graph'].add_job(task) if task.instance_group else None, running_tasks)

    def create_project_update(self, task):
        project_task = Project.objects.get(id=task.project_id).create_project_update(
@@ -448,6 +453,9 @@ class TaskManager():
                continue
            preferred_instance_groups = task.preferred_instance_groups
            found_acceptable_queue = False
            if isinstance(task, WorkflowJob):
                self.start_task(task, None, task.get_jobs_fail_chain())
                continue
            for rampart_group in preferred_instance_groups:
                remaining_capacity = self.get_remaining_capacity(rampart_group.name)
                if remaining_capacity <= 0:
@@ -3,7 +3,7 @@
import logging

# Celery
from celery import Task, shared_task
from celery import shared_task

# AWX
from awx.main.scheduler import TaskManager
@@ -15,23 +15,17 @@ logger = logging.getLogger('awx.main.scheduler')
# updated model, the call to schedule() may get stale data.


class LogErrorsTask(Task):
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        logger.exception('Task {} encountered exception.'.format(self.name), exc_info=exc)
        super(LogErrorsTask, self).on_failure(exc, task_id, args, kwargs, einfo)


@shared_task(base=LogErrorsTask)
@shared_task()
def run_job_launch(job_id):
    TaskManager().schedule()


@shared_task(base=LogErrorsTask)
@shared_task()
def run_job_complete(job_id):
    TaskManager().schedule()


@shared_task(base=LogErrorsTask)
@shared_task()
def run_task_manager():
    logger.debug("Running Tower task manager.")
    TaskManager().schedule()
@@ -9,7 +9,13 @@ import json

# Django
from django.conf import settings
from django.db.models.signals import post_save, pre_delete, post_delete, m2m_changed
from django.db.models.signals import (
    post_init,
    post_save,
    pre_delete,
    post_delete,
    m2m_changed,
)
from django.dispatch import receiver
from django.contrib.auth import SESSION_KEY
from django.utils import timezone
@@ -25,10 +31,14 @@ import six
from awx.main.models import * # noqa
from django.contrib.sessions.models import Session
from awx.api.serializers import * # noqa
from awx.main.constants import TOKEN_CENSOR
from awx.main.utils import model_instance_diff, model_to_dict, camelcase_to_underscore
from awx.main.utils import ignore_inventory_computed_fields, ignore_inventory_group_removal, _inventory_updates
from awx.main.tasks import update_inventory_computed_fields
from awx.main.fields import is_implicit_parent
from awx.main.fields import (
    is_implicit_parent,
    update_role_parentage_for_instance,
)

from awx.main import consumers

@@ -162,39 +172,6 @@ def sync_superuser_status_to_rbac(instance, **kwargs):
        Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).members.remove(instance)


def create_user_role(instance, **kwargs):
    if not kwargs.get('created', True):
        return
    try:
        Role.objects.get(
            content_type=ContentType.objects.get_for_model(instance),
            object_id=instance.id,
            role_field='admin_role'
        )
    except Role.DoesNotExist:
        role = Role.objects.create(
            role_field='admin_role',
            content_object = instance,
        )
        role.members.add(instance)


def org_admin_edit_members(instance, action, model, reverse, pk_set, **kwargs):
    content_type = ContentType.objects.get_for_model(Organization)

    if reverse:
        return
    else:
        if instance.content_type == content_type and \
           instance.content_object.member_role.id == instance.id:
            items = model.objects.filter(pk__in=pk_set).all()
            for user in items:
                if action == 'post_add':
                    instance.content_object.admin_role.children.add(user.admin_role)
                if action == 'pre_remove':
                    instance.content_object.admin_role.children.remove(user.admin_role)


def rbac_activity_stream(instance, sender, **kwargs):
    user_type = ContentType.objects.get_for_model(User)
    # Only if we are associating/disassociating
@@ -223,6 +200,29 @@ def cleanup_detached_labels_on_deleted_parent(sender, instance, **kwargs):
            l.delete()


def set_original_organization(sender, instance, **kwargs):
    '''set_original_organization is used to set the original, or
    pre-save organization, so we can later determine if the organization
    field is dirty.
    '''
    instance.__original_org = instance.organization


def save_related_job_templates(sender, instance, **kwargs):
    '''save_related_job_templates loops through all of the
    job templates that use an Inventory or Project that have had their
    Organization updated. This triggers the rebuilding of the RBAC hierarchy
    and ensures the proper access restrictions.
    '''
    if sender not in (Project, Inventory):
        raise ValueError('This signal callback is only intended for use with Project or Inventory')

    if instance.__original_org != instance.organization:
        jtq = JobTemplate.objects.filter(**{sender.__name__.lower(): instance})
        for jt in jtq:
            update_role_parentage_for_instance(jt)
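Together these two handlers implement a simple dirty-field check: post_init stashes the organization each instance was loaded with, and post_save compares it against the current value. The generic shape of the pattern, with placeholder names (rebuild_dependents and SomeModel are not part of AWX):

    from django.db.models.signals import post_init, post_save

    def remember_original(sender, instance, **kwargs):
        instance._original_org = instance.organization

    def react_to_change(sender, instance, **kwargs):
        if instance._original_org != instance.organization:
            rebuild_dependents(instance)  # placeholder for the real work

    # post_init.connect(remember_original, sender=SomeModel)
    # post_save.connect(react_to_change, sender=SomeModel)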
def connect_computed_field_signals():
    post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
    post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
@@ -240,18 +240,19 @@ def connect_computed_field_signals():

connect_computed_field_signals()


post_init.connect(set_original_organization, sender=Project)
post_init.connect(set_original_organization, sender=Inventory)
post_save.connect(save_related_job_templates, sender=Project)
post_save.connect(save_related_job_templates, sender=Inventory)
post_save.connect(emit_job_event_detail, sender=JobEvent)
post_save.connect(emit_ad_hoc_command_event_detail, sender=AdHocCommandEvent)
post_save.connect(emit_project_update_event_detail, sender=ProjectUpdateEvent)
post_save.connect(emit_inventory_update_event_detail, sender=InventoryUpdateEvent)
post_save.connect(emit_system_job_event_detail, sender=SystemJobEvent)
m2m_changed.connect(rebuild_role_ancestor_list, Role.parents.through)
m2m_changed.connect(org_admin_edit_members, Role.members.through)
m2m_changed.connect(rbac_activity_stream, Role.members.through)
m2m_changed.connect(rbac_activity_stream, Role.parents.through)
post_save.connect(sync_superuser_status_to_rbac, sender=User)
post_save.connect(create_user_role, sender=User)
pre_delete.connect(cleanup_detached_labels_on_deleted_parent, sender=UnifiedJob)
pre_delete.connect(cleanup_detached_labels_on_deleted_parent, sender=UnifiedJobTemplate)

@@ -400,6 +401,14 @@ model_serializer_mapping = {
    AdHocCommand: AdHocCommandSerializer,
    NotificationTemplate: NotificationTemplateSerializer,
    Notification: NotificationSerializer,
    CredentialType: CredentialTypeSerializer,
    Schedule: ScheduleSerializer,
    Label: LabelSerializer,
    WorkflowJobTemplate: WorkflowJobTemplateSerializer,
    WorkflowJobTemplateNode: WorkflowJobTemplateNodeSerializer,
    WorkflowJob: WorkflowJobSerializer,
    OAuth2AccessToken: OAuth2TokenSerializer,
    OAuth2Application: OAuth2ApplicationSerializer,
}

@@ -419,7 +428,7 @@ def activity_stream_create(sender, instance, created, **kwargs):
        if 'extra_vars' in changes:
            changes['extra_vars'] = instance.display_extra_vars()
        if type(instance) == OAuth2AccessToken:
            changes['token'] = '*************'
            changes['token'] = TOKEN_CENSOR
        activity_entry = ActivityStream(
            operation='create',
            object1=object1,
@@ -620,12 +629,3 @@ def create_access_token_user_if_missing(sender, **kwargs):

post_save.connect(create_access_token_user_if_missing, sender=OAuth2AccessToken)


# @receiver(post_save, sender=User)
# def create_default_oauth_app(sender, **kwargs):
#     if kwargs.get('created', False):
#         user = kwargs['instance']
#         OAuth2Application.objects.create(
#             name='Default application for {}'.format(user.username),
#             user=user, client_type='confidential', redirect_uris='',
#             authorization_grant_type='password'
#         )
@@ -13,6 +13,7 @@ import os
import re
import shutil
import stat
import sys
import tempfile
import time
import traceback
@@ -55,8 +56,9 @@ from awx.main.queue import CallbackQueueDispatcher
from awx.main.expect import run, isolated_manager
from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url,
                            check_proot_installed, build_proot_temp_dir, get_licenser,
                            wrap_args_with_proot, OutputEventFilter, ignore_inventory_computed_fields,
                            wrap_args_with_proot, OutputEventFilter, OutputVerboseFilter, ignore_inventory_computed_fields,
                            ignore_inventory_group_removal, get_type_for_model, extract_ansible_vars)
from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
from awx.main.utils.reload import restart_local_services, stop_local_services
from awx.main.utils.pglock import advisory_lock
from awx.main.utils.ha import update_celery_worker_routes, register_celery_worker_queues
@@ -80,8 +82,8 @@ Try upgrading OpenSSH or providing your private key in an different format. \
logger = logging.getLogger('awx.main.tasks')


class LogErrorsTask(Task):
    def on_failure(self, exc, task_id, args, kwargs, einfo):
    def log_celery_failure(self, exc, task_id, args, kwargs, einfo):
        try:
            if getattr(exc, 'is_awx_task_error', False):
                # Error caused by user / tracked in job output
                logger.warning(six.text_type("{}").format(exc))
@@ -91,7 +93,12 @@ class LogErrorsTask(Task):
                                 .format(get_type_for_model(self.model), args[0]))
            else:
                logger.exception(six.text_type('Task {} encountered exception.').format(self.name), exc_info=exc)
        super(LogErrorsTask, self).on_failure(exc, task_id, args, kwargs, einfo)
        except Exception:
            # It's fairly critical that this code _not_ raise exceptions on logging
            # If you configure external logging in a way that _it_ fails, there's
            # not a lot we can do here; sys.stderr.write is a final hail mary
            _, _, tb = sys.exc_info()
            traceback.print_tb(tb)
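The except branch above exists because a failure handler that itself raises (for example, when an external log aggregator is down) would mask the original error. The same last-resort pattern in isolation:

    import sys
    import traceback

    def log_failure_safely(logger, message):
        try:
            logger.exception(message)
        except Exception:
            # The logger itself failed; dump what we can to stderr.
            traceback.print_tb(sys.exc_info()[2])
            sys.stderr.write(message + '\n')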
@celeryd_init.connect
@@ -116,7 +123,6 @@ def task_set_logger_pre_run(*args, **kwargs):
        cache.close()
        configure_external_logger(settings, is_startup=False)
    except Exception:
        # General exception because LogErrorsTask not used with celery signals
        logger.exception('Encountered error on initial log configuration.')


@@ -129,11 +135,10 @@ def inform_cluster_of_shutdown(*args, **kwargs):
        logger.warning(six.text_type('Normal shutdown signal for instance {}, '
                                     'removed self from capacity pool.').format(this_inst.hostname))
    except Exception:
        # General exception because LogErrorsTask not used with celery signals
        logger.exception('Encountered problem with normal shutdown signal.')


@shared_task(bind=True, queue='tower_instance_router', base=LogErrorsTask)
@shared_task(bind=True, queue=settings.CELERY_DEFAULT_QUEUE)
def apply_cluster_membership_policies(self):
    with advisory_lock('cluster_policy_lock', wait=True):
        considered_instances = Instance.objects.all().order_by('id')
@@ -143,6 +148,7 @@ def apply_cluster_membership_policies(self):
        actual_instances = []
        Group = namedtuple('Group', ['obj', 'instances'])
        Node = namedtuple('Instance', ['obj', 'groups'])

        # Process policy instance list first, these will represent manually managed instances
        # that will not go through automatic policy determination
        for ig in InstanceGroup.objects.all():
@@ -183,7 +189,7 @@ def apply_cluster_membership_policies(self):
    handle_ha_toplogy_changes.apply([])


@shared_task(queue='tower_broadcast_all', bind=True, base=LogErrorsTask)
@shared_task(queue='tower_broadcast_all', bind=True)
def handle_setting_changes(self, setting_keys):
    orig_len = len(setting_keys)
    for i in range(orig_len):
@@ -202,9 +208,11 @@ def handle_setting_changes(self, setting_keys):
        restart_local_services(['uwsgi'])


@shared_task(bind=True, queue='tower_broadcast_all', base=LogErrorsTask)
@shared_task(bind=True, queue='tower_broadcast_all')
def handle_ha_toplogy_changes(self):
    instance = Instance.objects.me()
    (changed, instance) = Instance.objects.get_or_register()
    if changed:
        logger.info(six.text_type("Registered tower node '{}'").format(instance.hostname))
    logger.debug(six.text_type("Reconfigure celeryd queues task on host {}").format(self.request.hostname))
    awx_app = Celery('awx')
    awx_app.config_from_object('django.conf:settings')
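Instance.objects.me() is replaced throughout this file by get_or_register(), an AWX manager method returning a (changed, instance) pair so callers can log first-time registration. With stock Django the closest equivalent would be get_or_create, which returns the pair in the opposite order:

    # (instance, created) rather than (changed, instance):
    instance, created = Instance.objects.get_or_create(hostname=settings.CLUSTER_HOST_ID)
    if created:
        logger.info("Registered tower node '{}'".format(instance.hostname))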
@@ -234,7 +242,9 @@ def handle_ha_toplogy_worker_ready(sender, **kwargs):
def handle_update_celery_routes(sender=None, conf=None, **kwargs):
    conf = conf if conf else sender.app.conf
    logger.debug(six.text_type("Registering celery routes for {}").format(sender))
    instance = Instance.objects.me()
    (changed, instance) = Instance.objects.get_or_register()
    if changed:
        logger.info(six.text_type("Registered tower node '{}'").format(instance.hostname))
    added_routes = update_celery_worker_routes(instance, conf)
    logger.info(six.text_type("Workers on tower node '{}' added routes {} all routes are now {}")
                .format(instance.hostname, added_routes, conf.CELERY_ROUTES))
@@ -242,12 +252,14 @@ def handle_update_celery_routes(sender=None, conf=None, **kwargs):

@celeryd_after_setup.connect
def handle_update_celery_hostname(sender, instance, **kwargs):
    tower_instance = Instance.objects.me()
    (changed, tower_instance) = Instance.objects.get_or_register()
    if changed:
        logger.info(six.text_type("Registered tower node '{}'").format(tower_instance.hostname))
    instance.hostname = 'celery@{}'.format(tower_instance.hostname)
    logger.warn(six.text_type("Set hostname to {}").format(instance.hostname))


@shared_task(queue='tower', base=LogErrorsTask)
@shared_task(queue=settings.CELERY_DEFAULT_QUEUE)
def send_notifications(notification_list, job_id=None):
    if not isinstance(notification_list, list):
        raise TypeError("notification_list should be of type list")
@@ -259,6 +271,7 @@ def send_notifications(notification_list, job_id=None):
        job_actual.notifications.add(*notifications)

    for notification in notifications:
        update_fields = ['status', 'notifications_sent']
        try:
            sent = notification.notification_template.send(notification.subject, notification.body)
            notification.status = "successful"
@@ -267,11 +280,12 @@ def send_notifications(notification_list, job_id=None):
            logger.error(six.text_type("Send Notification Failed {}").format(e))
            notification.status = "failed"
            notification.error = smart_str(e)
            update_fields.append('error')
        finally:
            notification.save()
            notification.save(update_fields=update_fields)


@shared_task(bind=True, queue='tower', base=LogErrorsTask)
@shared_task(bind=True, queue=settings.CELERY_DEFAULT_QUEUE)
def run_administrative_checks(self):
    logger.warn("Running administrative checks.")
    if not settings.TOWER_ADMIN_ALERTS:
@@ -293,7 +307,7 @@ def run_administrative_checks(self):
                  fail_silently=True)


@shared_task(bind=True, base=LogErrorsTask)
@shared_task(bind=True)
def purge_old_stdout_files(self):
    nowtime = time.time()
    for f in os.listdir(settings.JOBOUTPUT_ROOT):
@@ -302,14 +316,18 @@ def purge_old_stdout_files(self):
            logger.info(six.text_type("Removing {}").format(os.path.join(settings.JOBOUTPUT_ROOT,f)))


@shared_task(bind=True, base=LogErrorsTask)
@shared_task(bind=True)
def cluster_node_heartbeat(self):
    logger.debug("Cluster node heartbeat task.")
    nowtime = now()
    instance_list = list(Instance.objects.filter(rampart_groups__controller__isnull=True).distinct())
    instance_list = list(Instance.objects.all_non_isolated())
    this_inst = None
    lost_instances = []

    (changed, instance) = Instance.objects.get_or_register()
    if changed:
        logger.info(six.text_type("Registered tower node '{}'").format(instance.hostname))

    for inst in list(instance_list):
        if inst.hostname == settings.CLUSTER_HOST_ID:
            this_inst = inst
@@ -371,7 +389,7 @@ def cluster_node_heartbeat(self):
            logger.exception(six.text_type('Error marking {} as lost').format(other_inst.hostname))


@shared_task(bind=True, base=LogErrorsTask)
@shared_task(bind=True)
def awx_isolated_heartbeat(self):
    local_hostname = settings.CLUSTER_HOST_ID
    logger.debug("Controlling node checking for any isolated management tasks.")
@@ -395,7 +413,7 @@ def awx_isolated_heartbeat(self):
        isolated_manager.IsolatedManager.health_check(isolated_instance_qs, awx_application_version)


@shared_task(bind=True, queue='tower', base=LogErrorsTask)
@shared_task(bind=True, queue=settings.CELERY_DEFAULT_QUEUE)
def awx_periodic_scheduler(self):
    run_now = now()
    state = TowerScheduleState.get_solo()
@@ -430,7 +448,7 @@ def awx_periodic_scheduler(self):
    state.save()


@shared_task(bind=True, queue='tower', base=LogErrorsTask)
@shared_task(bind=True, queue=settings.CELERY_DEFAULT_QUEUE)
def handle_work_success(self, result, task_actual):
    try:
        instance = UnifiedJob.get_instance_by_type(task_actual['type'], task_actual['id'])
@@ -444,7 +462,7 @@ def handle_work_success(self, result, task_actual):
    run_job_complete.delay(instance.id)


@shared_task(queue='tower', base=LogErrorsTask)
@shared_task(queue=settings.CELERY_DEFAULT_QUEUE)
def handle_work_error(task_id, *args, **kwargs):
    subtasks = kwargs.get('subtasks', None)
    logger.debug('Executing error task id %s, subtasks: %s' % (task_id, str(subtasks)))
@@ -485,7 +503,7 @@ def handle_work_error(task_id, *args, **kwargs):
        pass


@shared_task(queue='tower', base=LogErrorsTask)
@shared_task(queue=settings.CELERY_DEFAULT_QUEUE)
def update_inventory_computed_fields(inventory_id, should_update_hosts=True):
    '''
    Signal handler and wrapper around inventory.update_computed_fields to
@@ -505,7 +523,7 @@ def update_inventory_computed_fields(inventory_id, should_update_hosts=True):
        raise


@shared_task(queue='tower', base=LogErrorsTask)
@shared_task(queue=settings.CELERY_DEFAULT_QUEUE)
def update_host_smart_inventory_memberships():
    try:
        with transaction.atomic():
@@ -530,7 +548,7 @@ def update_host_smart_inventory_memberships():
            smart_inventory.update_computed_fields(update_groups=False, update_hosts=False)


@shared_task(bind=True, queue='tower', base=LogErrorsTask, max_retries=5)
@shared_task(bind=True, queue=settings.CELERY_DEFAULT_QUEUE, max_retries=5)
def delete_inventory(self, inventory_id, user_id):
    # Delete inventory as user
    if user_id is None:
@@ -575,7 +593,7 @@ def with_path_cleanup(f):
    return _wrapped


class BaseTask(LogErrorsTask):
class BaseTask(Task):
    name = None
    model = None
    event_model = None
@@ -710,7 +728,10 @@ class BaseTask(LogErrorsTask):
    def build_extra_vars_file(self, vars, **kwargs):
        handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None))
        f = os.fdopen(handle, 'w')
        f.write(json.dumps(vars))
        if settings.ALLOW_JINJA_IN_EXTRA_VARS == 'always':
            f.write(yaml.safe_dump(vars))
        else:
            f.write(safe_dump(vars, kwargs.get('safe_dict', {}) or None))
        f.close()
        os.chmod(path, stat.S_IRUSR)
        return path
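The branch above gates how extra vars are serialized: when ALLOW_JINJA_IN_EXTRA_VARS is 'always' a plain yaml.safe_dump is used, otherwise AWX's safe_dump escapes anything not vouched for by the caller-supplied safe_dict. Sketched as a standalone helper (safe_dump refers to awx.main.utils.safe_yaml.safe_dump imported above):

    def serialize_extra_vars(vars, allow_jinja, safe_dict=None):
        if allow_jinja == 'always':
            return yaml.safe_dump(vars)
        # Only keys present in safe_dict keep raw template syntax.
        return safe_dump(vars, safe_dict or None)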
@@ -724,7 +745,6 @@ class BaseTask(LogErrorsTask):
            raise RuntimeError(
                'a valid Python virtualenv does not exist at {}'.format(venv_path)
            )

        env.pop('PYTHONPATH', None) # default to none if no python_ver matches
        if os.path.isdir(os.path.join(venv_libdir, "python2.7")):
            env['PYTHONPATH'] = os.path.join(venv_libdir, "python2.7", "site-packages") + ":"
@@ -811,19 +831,26 @@ class BaseTask(LogErrorsTask):

    def get_stdout_handle(self, instance):
        '''
        Return an virtual file object for capturing stdout and events.
        Return an virtual file object for capturing stdout and/or events.
        '''
        dispatcher = CallbackQueueDispatcher()

        def event_callback(event_data):
            event_data.setdefault(self.event_data_key, instance.id)
            if 'uuid' in event_data:
                cache_event = cache.get('ev-{}'.format(event_data['uuid']), None)
                if cache_event is not None:
                    event_data.update(cache_event)
            dispatcher.dispatch(event_data)
        if isinstance(instance, (Job, AdHocCommand, ProjectUpdate)):
            def event_callback(event_data):
                event_data.setdefault(self.event_data_key, instance.id)
                if 'uuid' in event_data:
                    cache_event = cache.get('ev-{}'.format(event_data['uuid']), None)
                    if cache_event is not None:
                        event_data.update(cache_event)
                dispatcher.dispatch(event_data)

        return OutputEventFilter(event_callback)
            return OutputEventFilter(event_callback)
        else:
            def event_callback(event_data):
                event_data.setdefault(self.event_data_key, instance.id)
                dispatcher.dispatch(event_data)

            return OutputVerboseFilter(event_callback)

    def pre_run_hook(self, instance, **kwargs):
        '''
@@ -856,6 +883,7 @@ class BaseTask(LogErrorsTask):
        output_replacements = []
        extra_update_fields = {}
        event_ct = 0
        stdout_handle = None
        try:
            kwargs['isolated'] = isolated_host is not None
            self.pre_run_hook(instance, **kwargs)
@@ -966,15 +994,16 @@ class BaseTask(LogErrorsTask):
            )

        except Exception:
            if status != 'canceled':
                tb = traceback.format_exc()
                if settings.DEBUG:
                    logger.exception('%s Exception occurred while running task', instance.log_format)
            # run_pexpect does not throw exceptions for cancel or timeout
            # this could catch programming or file system errors
            tb = traceback.format_exc()
            logger.exception('%s Exception occurred while running task', instance.log_format)
        finally:
            try:
                stdout_handle.flush()
                stdout_handle.close()
                event_ct = getattr(stdout_handle, '_event_ct', 0)
                if stdout_handle:
                    stdout_handle.flush()
                    stdout_handle.close()
                    event_ct = getattr(stdout_handle, '_event_ct', 0)
                logger.info('%s finished running, producing %s events.',
                            instance.log_format, event_ct)
            except Exception:
@@ -997,7 +1026,7 @@ class BaseTask(LogErrorsTask):
        except Exception:
            logger.exception(six.text_type('{} Final run hook errored.').format(instance.log_format))
        instance.websocket_emit_status(status)
        if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'):
        if status != 'successful':
            # Raising an exception will mark the job as 'failed' in celery
            # and will stop a task chain from continuing to execute
            if status == 'canceled':
@@ -1135,7 +1164,6 @@ class RunJob(BaseTask):
        if not kwargs.get('isolated'):
            env['ANSIBLE_CALLBACK_PLUGINS'] = plugin_path
            env['ANSIBLE_STDOUT_CALLBACK'] = 'awx_display'
            env['TOWER_HOST'] = settings.TOWER_URL_BASE
            env['AWX_HOST'] = settings.TOWER_URL_BASE
        env['CACHE'] = settings.CACHES['default']['LOCATION'] if 'LOCATION' in settings.CACHES['default'] else ''

@@ -1195,7 +1223,7 @@ class RunJob(BaseTask):
        args = ['ansible-playbook', '-i', self.build_inventory(job, **kwargs)]
        if job.job_type == 'check':
            args.append('--check')
        args.extend(['-u', ssh_username])
        args.extend(['-u', sanitize_jinja(ssh_username)])
        if 'ssh_password' in kwargs.get('passwords', {}):
            args.append('--ask-pass')
        if job.become_enabled:
@@ -1203,9 +1231,9 @@ class RunJob(BaseTask):
        if job.diff_mode:
            args.append('--diff')
        if become_method:
            args.extend(['--become-method', become_method])
            args.extend(['--become-method', sanitize_jinja(become_method)])
        if become_username:
            args.extend(['--become-user', become_username])
            args.extend(['--become-user', sanitize_jinja(become_username)])
        if 'become_password' in kwargs.get('passwords', {}):
            args.append('--ask-become-pass')
@@ -1242,7 +1270,20 @@ class RunJob(BaseTask):
            extra_vars.update(json.loads(job.display_extra_vars()))
        else:
            extra_vars.update(json.loads(job.decrypted_extra_vars()))
        extra_vars_path = self.build_extra_vars_file(vars=extra_vars, **kwargs)

        # By default, all extra vars disallow Jinja2 template usage for
        # security reasons; top level key-values defined in JT.extra_vars, however,
        # are whitelisted as "safe" (because they can only be set by users with
        # higher levels of privilege - those that have the ability create and
        # edit Job Templates)
        safe_dict = {}
        if job.job_template and settings.ALLOW_JINJA_IN_EXTRA_VARS == 'template':
            safe_dict = job.job_template.extra_vars_dict
        extra_vars_path = self.build_extra_vars_file(
            vars=extra_vars,
            safe_dict=safe_dict,
            **kwargs
        )
        args.extend(['-e', '@%s' % (extra_vars_path)])

        # Add path to playbook (relative to project.local_path).
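Concretely, under the 'template' setting only top-level key-values already defined on the Job Template pass through unescaped; a launch-time value that happens to contain '{{ ... }}' is treated as inert text. An illustrative pairing (the values are made up):

    extra_vars = {
        'app_version': '1.4.2',                   # JT-defined
        'motd': '{{ lookup("env", "SECRET") }}',  # supplied at launch
    }
    safe_dict = {'app_version': '1.4.2'}          # from job.job_template.extra_vars_dict
    # build_extra_vars_file(vars=extra_vars, safe_dict=safe_dict, ...)
    # -> 'motd' is escaped, 'app_version' is written as-is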
@@ -2172,7 +2213,7 @@ class RunAdHocCommand(BaseTask):
        args = ['ansible', '-i', self.build_inventory(ad_hoc_command, **kwargs)]
        if ad_hoc_command.job_type == 'check':
            args.append('--check')
        args.extend(['-u', ssh_username])
        args.extend(['-u', sanitize_jinja(ssh_username)])
        if 'ssh_password' in kwargs.get('passwords', {}):
            args.append('--ask-pass')
        # We only specify sudo/su user and password if explicitly given by the
@@ -2180,9 +2221,9 @@ class RunAdHocCommand(BaseTask):
        if ad_hoc_command.become_enabled:
            args.append('--become')
        if become_method:
            args.extend(['--become-method', become_method])
            args.extend(['--become-method', sanitize_jinja(become_method)])
        if become_username:
            args.extend(['--become-user', become_username])
            args.extend(['--become-user', sanitize_jinja(become_username)])
        if 'become_password' in kwargs.get('passwords', {}):
            args.append('--ask-become-pass')

@@ -2206,7 +2247,7 @@ class RunAdHocCommand(BaseTask):
        args.extend(['-e', '@%s' % (extra_vars_path)])

        args.extend(['-m', ad_hoc_command.module_name])
        args.extend(['-a', ad_hoc_command.module_args])
        args.extend(['-a', sanitize_jinja(ad_hoc_command.module_args)])

        if ad_hoc_command.limit:
            args.append(ad_hoc_command.limit)
@@ -2255,19 +2296,14 @@ class RunSystemJob(BaseTask):
                json_vars = {}
            else:
                json_vars = json.loads(system_job.extra_vars)
            if 'days' in json_vars and system_job.job_type != 'cleanup_facts':
            if 'days' in json_vars:
                args.extend(['--days', str(json_vars.get('days', 60))])
            if 'dry_run' in json_vars and json_vars['dry_run'] and system_job.job_type != 'cleanup_facts':
            if 'dry_run' in json_vars and json_vars['dry_run']:
                args.extend(['--dry-run'])
            if system_job.job_type == 'cleanup_jobs':
                args.extend(['--jobs', '--project-updates', '--inventory-updates',
                             '--management-jobs', '--ad-hoc-commands', '--workflow-jobs',
                             '--notifications'])
            if system_job.job_type == 'cleanup_facts':
                if 'older_than' in json_vars:
                    args.extend(['--older_than', str(json_vars['older_than'])])
                if 'granularity' in json_vars:
                    args.extend(['--granularity', str(json_vars['granularity'])])
        except Exception:
            logger.exception(six.text_type("{} Failed to parse system job").format(system_job.log_format))
        return args
@@ -2299,7 +2335,7 @@ def _reconstruct_relationships(copy_mapping):
        new_obj.save()


@shared_task(bind=True, queue='tower', base=LogErrorsTask)
@shared_task(bind=True, queue=settings.CELERY_DEFAULT_QUEUE)
def deep_copy_model_obj(
    self, model_module, model_name, obj_pk, new_obj_pk,
    user_pk, sub_obj_list, permission_check_func=None
@ -1,687 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Python
|
||||
import base64
|
||||
import contextlib
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
import urllib
|
||||
import re
|
||||
import mock
|
||||
|
||||
# PyYAML
|
||||
import yaml
|
||||
|
||||
# Django
|
||||
import django.test
|
||||
from django.conf import settings, UserSettingsHolder
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.cache import cache
|
||||
from django.test.client import Client
|
||||
from django.test.utils import override_settings
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
# AWX
|
||||
from awx.main.models import * # noqa
|
||||
from awx.main.utils import get_ansible_version
|
||||
from awx.sso.backends import LDAPSettings
|
||||
from awx.main.tests.URI import URI # noqa
|
||||
|
||||
TEST_PLAYBOOK = '''- hosts: mygroup
|
||||
gather_facts: false
|
||||
tasks:
|
||||
- name: woohoo
|
||||
command: test 1 = 1
|
||||
'''
|
||||
|
||||
|
||||
class QueueTestMixin(object):
|
||||
def start_queue(self):
|
||||
self.start_rabbit()
|
||||
receiver = CallbackReceiver()
|
||||
self.queue_process = Process(target=receiver.run_subscriber,
|
||||
args=(False,))
|
||||
self.queue_process.start()
|
||||
|
||||
def terminate_queue(self):
|
||||
if hasattr(self, 'queue_process'):
|
||||
self.queue_process.terminate()
|
||||
self.stop_rabbit()
|
||||
|
||||
def start_rabbit(self):
|
||||
if not getattr(self, 'redis_process', None):
|
||||
# Centos 6.5 redis is runnable by non-root user but is not in a normal users path by default
|
||||
env = dict(os.environ)
|
||||
env['PATH'] = '%s:/usr/sbin/' % env['PATH']
|
||||
env['RABBITMQ_NODENAME'] = 'towerunittest'
|
||||
env['RABBITMQ_NODE_PORT'] = '55672'
|
||||
self.redis_process = Popen('rabbitmq-server > /dev/null',
|
||||
shell=True, executable='/bin/bash',
|
||||
env=env)
|
||||
|
||||
def stop_rabbit(self):
|
||||
if getattr(self, 'redis_process', None):
|
||||
self.redis_process.kill()
|
||||
self.redis_process = None
|
||||
|
||||
|
||||
# The observed effect of not calling terminate_queue() if you call start_queue() are
|
||||
# an hang on test cleanup database delete. Thus, to ensure terminate_queue() is called
|
||||
# whenever start_queue() is called just inherit from this class when you want to use the queue.
|
||||
class QueueStartStopTestMixin(QueueTestMixin):
|
||||
def setUp(self):
|
||||
super(QueueStartStopTestMixin, self).setUp()
|
||||
self.start_queue()
|
||||
|
||||
def tearDown(self):
|
||||
super(QueueStartStopTestMixin, self).tearDown()
|
||||
self.terminate_queue()
|
||||
|
||||
|
||||
class MockCommonlySlowTestMixin(object):
|
||||
def __init__(self, *args, **kwargs):
|
||||
from awx.api import generics
|
||||
mock.patch.object(generics, 'get_view_description', return_value=None).start()
|
||||
super(MockCommonlySlowTestMixin, self).__init__(*args, **kwargs)
|
||||
|
||||
|
||||
ansible_version = get_ansible_version()
|
||||
|
||||
|
||||
class BaseTestMixin(MockCommonlySlowTestMixin):
    '''
    Mixin with shared code for use by all test cases.
    '''

    def setUp(self):
        super(BaseTestMixin, self).setUp()
        global ansible_version

        self.object_ctr = 0
        # Save sys.path before tests.
        self._sys_path = [x for x in sys.path]
        # Save os.environ before tests.
        self._environ = dict(os.environ.items())
        # Capture current directory to change back after each test.
        self._cwd = os.getcwd()
        # Capture list of temp files/directories created during tests.
        self._temp_paths = []
        self._current_auth = None
        self._user_passwords = {}
        self.ansible_version = ansible_version
        self.assertNotEqual(self.ansible_version, 'unknown')
        # Wrap settings so we can redefine them within each test.
        self._wrapped = settings._wrapped
        settings._wrapped = UserSettingsHolder(settings._wrapped)
        # Set all AUTH_LDAP_* settings to defaults to avoid using LDAP for
        # tests unless explicitly configured.
        for name, value in LDAPSettings.defaults.items():
            if name == 'SERVER_URI':
                value = ''
            setattr(settings, 'AUTH_LDAP_%s' % name, value)
        # Pass test database settings in environment for use by any management
        # commands that run from tests.
        for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'):
            os.environ['AWX_TEST_DATABASE_%s' % opt] = settings.DATABASES['default'][opt]
        # Set flag so that task chain works with unit tests.
        settings.CELERY_UNIT_TEST = True
        settings.SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
        settings.CELERY_BROKER_URL = 'redis://localhost:55672/'
        settings.CALLBACK_QUEUE = 'callback_tasks_unit'

        # Disable socket notifications for unit tests.
        settings.SOCKETIO_NOTIFICATION_PORT = None
        # Make temp job status directory for unit tests.
        job_status_dir = tempfile.mkdtemp()
        self._temp_paths.append(job_status_dir)
        settings.JOBOUTPUT_ROOT = os.path.abspath(job_status_dir)
        settings.CACHES = {
            'default': {
                'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
                'LOCATION': 'unittests'
            }
        }
        cache.clear()
        self._start_time = time.time()
    def tearDown(self):
        super(BaseTestMixin, self).tearDown()
        # Restore sys.path after tests.
        sys.path = self._sys_path
        # Restore os.environ after tests.
        for k, v in self._environ.items():
            if os.environ.get(k, None) != v:
                os.environ[k] = v
        for k, v in os.environ.items():
            if k not in self._environ.keys():
                del os.environ[k]
        # Restore current directory after each test.
        os.chdir(self._cwd)
        # Cleanup temp files/directories created during tests.
        for project_dir in self._temp_paths:
            if os.path.exists(project_dir):
                if os.path.isdir(project_dir):
                    shutil.rmtree(project_dir, True)
                else:
                    os.remove(project_dir)
        # Restore previous settings after each test.
        settings._wrapped = self._wrapped
    def unique_name(self, string):
        rnd_str = '____' + str(random.randint(1, 9999999))
        return __name__ + '-generated-' + string + rnd_str

    def assertElapsedLessThan(self, seconds):
        elapsed = time.time() - self._start_time
        self.assertTrue(elapsed < seconds, 'elapsed time of %0.3fs is greater than %0.3fs' % (elapsed, seconds))

    @contextlib.contextmanager
    def current_user(self, user_or_username, password=None):
        try:
            if isinstance(user_or_username, User):
                username = user_or_username.username
            else:
                username = user_or_username
            password = password or self._user_passwords.get(username)
            previous_auth = self._current_auth
            if username is None:
                self._current_auth = None
            else:
                self._current_auth = (username, password)
            yield
        finally:
            self._current_auth = previous_auth
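    # Illustrative sketch (not part of the original module): requests made
    # inside the block authenticate as the named user, and the previous auth
    # state is restored on exit, even if an assertion fails. The URL below is
    # invented for illustration.
    def _example_current_user_usage(self):
        with self.current_user('admin'):
            self.get('/api/v1/me/', expect=200)
        self.get('/api/v1/me/', expect=401)  # outside the block, no implicit auth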
    def make_user(self, username, password=None, super_user=False):
        user = None
        password = password or username
        if super_user:
            user = User.objects.create_superuser(username, "%s@example.com" % username, password)
        else:
            user = User.objects.create_user(username, "%s@example.com" % username, password)
        self._user_passwords[user.username] = password
        return user

    def make_organizations(self, created_by, count=1):
        results = []
        for x in range(0, count):
            results.append(self.make_organization(created_by=created_by, count=x))
        return results

    def make_organization(self, created_by, count=1):
        self.object_ctr = self.object_ctr + 1
        return Organization.objects.create(
            name="org%s-%s" % (count, self.object_ctr), description="org%s" % count, created_by=created_by
        )
    def make_project(self, name=None, description='', created_by=None,
                     playbook_content='', role_playbooks=None, unicode_prefix=True):
        if not name:
            name = self.unique_name('Project')

        if not os.path.exists(settings.PROJECTS_ROOT):
            os.makedirs(settings.PROJECTS_ROOT)
        # Create temp project directory.
        if unicode_prefix:
            tmp_prefix = u'\u2620tmp'
        else:
            tmp_prefix = 'tmp'
        project_dir = tempfile.mkdtemp(prefix=tmp_prefix, dir=settings.PROJECTS_ROOT)
        self._temp_paths.append(project_dir)
        # Create temp playbook in project (if playbook content is given).
        if playbook_content:
            handle, playbook_path = tempfile.mkstemp(suffix=u'\u2620.yml',
                                                     dir=project_dir)
            test_playbook_file = os.fdopen(handle, 'w')
            test_playbook_file.write(playbook_content.encode('utf-8'))
            test_playbook_file.close()
        # Role playbooks are specified as a dict of role name and the
        # content of tasks/main.yml playbook.
        role_playbooks = role_playbooks or {}
        for role_name, role_playbook_content in role_playbooks.items():
            role_tasks_dir = os.path.join(project_dir, 'roles', role_name, 'tasks')
            if not os.path.exists(role_tasks_dir):
                os.makedirs(role_tasks_dir)
            role_tasks_playbook_path = os.path.join(role_tasks_dir, 'main.yml')
            with open(role_tasks_playbook_path, 'w') as f:
                f.write(role_playbook_content)
        return Project.objects.create(
            name=name, description=description,
            local_path=os.path.basename(project_dir), created_by=created_by,
            # scm_type='git', default_playbook='foo.yml',
        )

    def make_projects(self, created_by, count=1, playbook_content='',
                      role_playbooks=None, unicode_prefix=False):
        results = []
        for x in range(0, count):
            self.object_ctr = self.object_ctr + 1
            results.append(self.make_project(
                name="proj%s-%s" % (x, self.object_ctr),
                description=u"proj%s" % x,
                created_by=created_by,
                playbook_content=playbook_content,
                role_playbooks=role_playbooks,
                unicode_prefix=unicode_prefix
            ))
        return results
    def decide_created_by(self, created_by=None):
        if created_by:
            return created_by
        if self.super_django_user:
            return self.super_django_user
        raise RuntimeError('please call setup_users() or specify a user')

    def make_inventory(self, organization=None, name=None, created_by=None):
        created_by = self.decide_created_by(created_by)
        if not organization:
            organization = self.make_organization(created_by=created_by)

        return Inventory.objects.create(name=name or self.unique_name('Inventory'), organization=organization, created_by=created_by)

    def make_job_template(self, name=None, created_by=None, organization=None, inventory=None, project=None, playbook=None, **kwargs):
        created_by = self.decide_created_by(created_by)
        if not inventory:
            inventory = self.make_inventory(organization=organization, created_by=created_by)
        if not organization:
            organization = inventory.organization
        if not project:
            project = self.make_project(self.unique_name('Project'), created_by=created_by, playbook_content=playbook if playbook else TEST_PLAYBOOK)

        if project and project.playbooks and len(project.playbooks) > 0:
            playbook = project.playbooks[0]
        else:
            raise RuntimeError('Expected project to have at least one playbook')

        if project not in organization.projects.all():
            organization.projects.add(project)

        opts = {
            'name': name or self.unique_name('JobTemplate'),
            'job_type': 'check',
            'inventory': inventory,
            'project': project,
            'host_config_key': settings.SYSTEM_UUID,
            'created_by': created_by,
            'playbook': playbook,
            'ask_credential_on_launch': True,
        }
        opts.update(kwargs)
        return JobTemplate.objects.create(**opts)
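    # Illustrative sketch (not part of the original module): the factory
    # helpers chain together, so make_job_template() fills in any missing
    # inventory, organization, and project with sane defaults.
    def _example_factory_chaining(self):
        jt = self.make_job_template(name='demo-jt', job_type='run')
        assert jt.inventory is not None
        assert jt.project is not None
        assert jt.playbook in jt.project.playbooks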
    def make_job(self, job_template=None, created_by=None, initial_state='new', **kwargs):
        created_by = self.decide_created_by(created_by)
        if not job_template:
            job_template = self.make_job_template(created_by=created_by)

        opts = {
            'created_by': created_by,
            'status': initial_state,
        }
        opts.update(kwargs)
        return job_template.create_job(**opts)
    def make_credential(self, **kwargs):
        opts = {
            'name': self.unique_name('Credential'),
            'kind': 'ssh',
            'user': self.super_django_user,
            'username': '',
            'ssh_key_data': '',
            'ssh_key_unlock': '',
            'password': '',
            'become_method': '',
            'become_username': '',
            'become_password': '',
            'vault_password': '',
        }
        opts.update(kwargs)
        user = opts['user']
        del opts['user']
        cred = Credential.objects.create(**opts)
        cred.admin_role.members.add(user)
        return cred

    def setup_instances(self):
        instance = Instance(uuid=settings.SYSTEM_UUID, hostname='127.0.0.1')
        instance.save()

    def setup_users(self, just_super_user=False):
        # Create the canonical test users.
        self.super_username = 'admin'
        self.super_password = 'admin'
        self.normal_username = 'normal'
        self.normal_password = 'normal'
        self.other_username = 'other'
        self.other_password = 'other'
        self.nobody_username = 'nobody'
        self.nobody_password = 'nobody'

        self.super_django_user = self.make_user(self.super_username, self.super_password, super_user=True)

        if not just_super_user:
            self.normal_django_user = self.make_user(self.normal_username, self.normal_password, super_user=False)
            self.other_django_user = self.make_user(self.other_username, self.other_password, super_user=False)
            self.nobody_django_user = self.make_user(self.nobody_username, self.nobody_password, super_user=False)

    def get_super_credentials(self):
        return (self.super_username, self.super_password)

    def get_normal_credentials(self):
        return (self.normal_username, self.normal_password)

    def get_other_credentials(self):
        return (self.other_username, self.other_password)

    def get_nobody_credentials(self):
        # This user has no permissions at all.
        return (self.nobody_username, self.nobody_password)

    def get_invalid_credentials(self):
        return ('random', 'combination')
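    # Illustrative sketch (not part of the original module): setup_users()
    # creates the canonical accounts, and the get_*_credentials() helpers
    # return (username, password) tuples accepted by the REST helpers below
    # as `auth`. The URL is invented for illustration.
    def _example_auth_tuples(self):
        self.setup_users()
        url = '/api/v1/some_resource/'
        self.get(url, expect=200, auth=self.get_super_credentials())
        self.get(url, expect=403, auth=self.get_nobody_credentials())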
    def _generic_rest(self, url, data=None, expect=204, auth=None, method=None,
                      data_type=None, accept=None, remote_addr=None,
                      return_response_object=False, client_kwargs=None):
        assert method is not None
        method_name = method.lower()
        client_kwargs = client_kwargs or {}
        if accept:
            client_kwargs['HTTP_ACCEPT'] = accept
        if remote_addr is not None:
            client_kwargs['REMOTE_ADDR'] = remote_addr
        auth = auth or self._current_auth
        if auth:
            # A dict is only used to test the case when both Authorization and
            # X-Auth-Token headers are passed.
            if isinstance(auth, dict):
                basic = auth.get('basic', ())
                if basic:
                    basic_auth = base64.b64encode('%s:%s' % (basic[0], basic[1]))
                    basic_auth = basic_auth.decode('ascii')
                    client_kwargs['HTTP_AUTHORIZATION'] = 'Basic %s' % basic_auth
                token = auth.get('token', '')
                if token and not basic:
                    client_kwargs['HTTP_AUTHORIZATION'] = 'Token %s' % token
                elif token:
                    client_kwargs['HTTP_X_AUTH_TOKEN'] = 'Token %s' % token
            elif isinstance(auth, (list, tuple)):
                #client.login(username=auth[0], password=auth[1])
                basic_auth = base64.b64encode('%s:%s' % (auth[0], auth[1]))
                basic_auth = basic_auth.decode('ascii')
                client_kwargs['HTTP_AUTHORIZATION'] = 'Basic %s' % basic_auth
            elif isinstance(auth, basestring):
                client_kwargs['HTTP_AUTHORIZATION'] = 'Token %s' % auth
        client = Client(**client_kwargs)
        method = getattr(client, method_name)
        response = None
        if method_name not in ('options', 'head', 'get', 'delete'):
            data_type = data_type or 'json'
            if data_type == 'json':
                response = method(url, json.dumps(data), 'application/json')
            elif data_type == 'yaml':
                response = method(url, yaml.safe_dump(data), 'application/yaml')
            elif data_type == 'form':
                response = method(url, urllib.urlencode(data), 'application/x-www-form-urlencoded')
            else:
                self.fail('Unsupported data_type %s' % data_type)
        else:
            response = method(url)

        self.assertFalse(response.status_code == 500 and expect != 500,
                         'Failed (500): %s' % force_text(response.content))
        if expect is not None:
            assert response.status_code == expect, u"expected status %s, got %s for url=%s as auth=%s: %s" % (
                expect, response.status_code, url, auth, force_text(response.content)
            )
        if method_name == 'head':
            self.assertFalse(response.content)
        if return_response_object:
            return response
        if response.status_code not in [204, 405] and method_name != 'head' and response.content:
            # No JSON responses in these, at least for now; 409 should probably
            # return some (FIXME).
            if response['Content-Type'].startswith('application/json'):
                obj = json.loads(force_text(response.content))
            elif response['Content-Type'].startswith('application/yaml'):
                obj = yaml.safe_load(force_text(response.content))
            elif response['Content-Type'].startswith('text/plain'):
                obj = {
                    'content': force_text(response.content)
                }
            elif response['Content-Type'].startswith('text/html'):
                obj = {
                    'content': force_text(response.content)
                }
            else:
                self.fail('Unsupported response content type %s' % response['Content-Type'])
        else:
            obj = {}

        # Create a new subclass of the object's type and attach the response
        # instance to it (to allow for checking response headers).
        if isinstance(obj, dict):
            return type('DICT', (dict,), {'response': response})(obj.items())
        elif isinstance(obj, (tuple, list)):
            return type('LIST', (list,), {'response': response})(iter(obj))
        else:
            return obj
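    # Aside (not part of the original module): the dynamic type() call above
    # is the standard trick of subclassing dict/list so an extra attribute can
    # ride along with the payload. In isolation the pattern behaves like this:
    #
    #     payload = {'id': 1}
    #     result = type('DICT', (dict,), {'response': resp})(payload.items())
    #     result['id']        -> 1
    #     result.response     -> resp (the original response object)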
    def options(self, url, expect=200, auth=None, accept=None,
                remote_addr=None):
        return self._generic_rest(url, data=None, expect=expect, auth=auth,
                                  method='options', accept=accept,
                                  remote_addr=remote_addr)

    def head(self, url, expect=200, auth=None, accept=None, remote_addr=None):
        return self._generic_rest(url, data=None, expect=expect, auth=auth,
                                  method='head', accept=accept,
                                  remote_addr=remote_addr)

    def get(self, url, expect=200, auth=None, accept=None, remote_addr=None, client_kwargs={}):
        return self._generic_rest(url, data=None, expect=expect, auth=auth,
                                  method='get', accept=accept,
                                  remote_addr=remote_addr,
                                  client_kwargs=client_kwargs)

    def post(self, url, data, expect=204, auth=None, data_type=None,
             accept=None, remote_addr=None, client_kwargs={}):
        return self._generic_rest(url, data=data, expect=expect, auth=auth,
                                  method='post', data_type=data_type,
                                  accept=accept,
                                  remote_addr=remote_addr,
                                  client_kwargs=client_kwargs)

    def put(self, url, data, expect=200, auth=None, data_type=None,
            accept=None, remote_addr=None):
        return self._generic_rest(url, data=data, expect=expect, auth=auth,
                                  method='put', data_type=data_type,
                                  accept=accept, remote_addr=remote_addr)

    def patch(self, url, data, expect=200, auth=None, data_type=None,
              accept=None, remote_addr=None):
        return self._generic_rest(url, data=data, expect=expect, auth=auth,
                                  method='patch', data_type=data_type,
                                  accept=accept, remote_addr=remote_addr)

    def delete(self, url, expect=201, auth=None, data_type=None, accept=None,
               remote_addr=None):
        return self._generic_rest(url, data=None, expect=expect, auth=auth,
                                  method='delete', accept=accept,
                                  remote_addr=remote_addr)

    def get_urls(self, collection_url, auth=None):
        # TODO: this test helper function doesn't support pagination
        data = self.get(collection_url, expect=200, auth=auth)
        return [item['url'] for item in data['results']]
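    # Illustrative sketch (not part of the original module): each verb helper
    # carries its own default `expect` status, so callers usually pass an
    # explicit value; `expect` is asserted inside _generic_rest(), failing the
    # test on any other status code.
    def _example_expected_statuses(self, url, data):
        self.post(url, data, expect=201)    # creation returns 201
        self.patch(url, data, expect=200)   # successful update returns the object
        self.delete(url, expect=204)        # deletion usually returns no content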
    def check_invalid_auth(self, url, data=None, methods=None):
        '''
        Check various methods of accessing the given URL with invalid
        authentication credentials.
        '''
        data = data or {}
        methods = methods or ('options', 'head', 'get')
        for auth in [(None,), ('invalid', 'password')]:
            with self.current_user(*auth):
                for method in methods:
                    f = getattr(self, method)
                    if method in ('post', 'put', 'patch'):
                        f(url, data, expect=401)
                    else:
                        f(url, expect=401)

    def check_pagination_and_size(self, data, desired_count, previous=False,
                                  next=False):
        self.assertTrue('results' in data)
        self.assertEqual(data['count'], desired_count)
        if previous:
            self.assertTrue(data['previous'])
        else:
            self.assertFalse(data['previous'])
        if next:
            self.assertTrue(data['next'])
        else:
            self.assertFalse(data['next'])

    def check_list_ids(self, data, queryset, check_order=False):
        data_ids = [x['id'] for x in data['results']]
        qs_ids = queryset.values_list('pk', flat=True)
        if check_order:
            self.assertEqual(tuple(data_ids), tuple(qs_ids))
        else:
            self.assertEqual(set(data_ids), set(qs_ids))

    def check_get_list(self, url, user, qs, fields=None, expect=200,
                       check_order=False, offset=None, limit=None):
        '''
        Check that the given list view URL returns results for the given user
        that match the given queryset.
        '''
        offset = offset or 0
        with self.current_user(user):
            if expect == 400:
                self.options(url, expect=200)
            else:
                self.options(url, expect=expect)
            self.head(url, expect=expect)
            response = self.get(url, expect=expect)
        if expect != 200:
            return
        total = qs.count()
        if limit is not None:
            if limit > 0:
                qs = qs[offset:offset + limit]
            else:
                qs = qs.none()
        self.check_pagination_and_size(response, total, offset > 0,
                                       limit and ((offset + limit) < total))
        self.check_list_ids(response, qs, check_order)
        if fields:
            for obj in response['results']:
                returned_fields = set(obj.keys())
                expected_fields = set(fields)
                msg = ''
                not_expected = returned_fields - expected_fields
                if not_expected:
                    msg += 'fields %s not expected ' % ', '.join(not_expected)
                not_returned = expected_fields - returned_fields
                if not_returned:
                    msg += 'fields %s not returned ' % ', '.join(not_returned)
                self.assertTrue(set(obj.keys()) <= set(fields), msg)
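    # Illustrative sketch (not part of the original module): membership and
    # ordering of a list view can be verified in one call. The URL and
    # queryset are invented for illustration.
    def _example_check_get_list(self):
        self.check_get_list('/api/v1/organizations/', self.normal_django_user,
                            Organization.objects.order_by('pk'), check_order=True)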
    def check_not_found(self, string, substr, description=None, word_boundary=False):
        if word_boundary:
            count = len(re.findall(r'\b%s\b' % re.escape(substr), string))
        else:
            # string.count() gives the number of occurrences; find() would
            # return an index (0 when the substring leads the string), which
            # is not a count and would wrongly pass the assertion below.
            count = string.count(substr)

        msg = ''
        if description:
            msg = 'Test "%s".\n' % description
        msg += '"%s" found in: "%s"' % (substr, string)
        self.assertEqual(count, 0, msg)

    def check_found(self, string, substr, count=-1, description=None, word_boundary=False):
        if word_boundary:
            count_actual = len(re.findall(r'\b%s\b' % re.escape(substr), string))
        else:
            count_actual = string.count(substr)

        msg = ''
        if description:
            msg = 'Test "%s".\n' % description
        if count == -1:
            self.assertTrue(count_actual > 0)
        else:
            msg += 'Found %d occurrences of "%s" instead of %d in: "%s"' % (count_actual, substr, count, string)
            self.assertEqual(count_actual, count, msg)
    def check_job_result(self, job, expected='successful', expect_stdout=True,
                         expect_traceback=False):
        msg = u'job status is %s, expected %s' % (job.status, expected)
        msg = u'%s\nargs:\n%s' % (msg, job.job_args)
        msg = u'%s\nenv:\n%s' % (msg, job.job_env)
        if job.result_traceback:
            msg = u'%s\ngot traceback:\n%s' % (msg, job.result_traceback)
        if job.result_stdout:
            msg = u'%s\ngot stdout:\n%s' % (msg, job.result_stdout)
        if isinstance(expected, (list, tuple)):
            self.assertTrue(job.status in expected)
        else:
            self.assertEqual(job.status, expected, msg)
        if expect_stdout:
            self.assertTrue(job.result_stdout)
        else:
            self.assertTrue(job.result_stdout in ('', 'stdout capture is missing'),
                            u'expected no stdout, got:\n%s' %
                            job.result_stdout)
        if expect_traceback:
            self.assertTrue(job.result_traceback)
        else:
            self.assertFalse(job.result_traceback,
                             u'expected no traceback, got:\n%s' %
                             job.result_traceback)

class BaseTest(BaseTestMixin, django.test.TestCase):
    '''
    Base class for unit tests.
    '''


class BaseTransactionTest(BaseTestMixin, django.test.TransactionTestCase):
    '''
    Base class for tests requiring transactions (or where the test database
    needs to be accessed by subprocesses).
    '''


@override_settings(CELERY_ALWAYS_EAGER=True,
                   CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
                   ANSIBLE_TRANSPORT='local')
class BaseLiveServerTest(BaseTestMixin, django.test.LiveServerTestCase):
    '''
    Base class for tests requiring a live test server.
    '''
    def setUp(self):
        super(BaseLiveServerTest, self).setUp()
        settings.INTERNAL_API_URL = self.live_server_url


@override_settings(CELERY_ALWAYS_EAGER=True,
                   CELERY_EAGER_PROPAGATES_EXCEPTIONS=True,
                   ANSIBLE_TRANSPORT='local',
                   DEBUG=True)
class BaseJobExecutionTest(BaseLiveServerTest):
    '''
    Base class for celery task tests.
    '''
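# Illustrative sketch (not part of the original module): a downstream test
# module typically needs no more than this -- BaseTest supplies the REST
# helpers, user factories, and settings isolation. The URL is invented for
# illustration.
class ExampleOrganizationsTest(BaseTest):

    def setUp(self):
        super(ExampleOrganizationsTest, self).setUp()
        self.setup_users()

    def test_list_requires_auth(self):
        self.check_invalid_auth('/api/v1/organizations/')
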
@ -1,3 +1,4 @@
import json
import mock
import pytest

@ -5,6 +6,14 @@ from awx.main.models import Credential, Job
from awx.api.versioning import reverse


@pytest.fixture
def ec2_source(inventory, project):
    with mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.update'):
        return inventory.inventory_sources.create(
            name='some_source', update_on_project_update=True, source='ec2',
            source_project=project, scm_last_revision=project.scm_revision)


@pytest.fixture
def job_template(job_template, project, inventory):
    job_template.playbook = 'helloworld.yml'
@ -34,6 +43,14 @@ def test_ssh_credential_access(get, job_template, admin, machine_credential):
    assert resp.data['summary_fields']['credential']['kind'] == 'ssh'


@pytest.mark.django_db
@pytest.mark.parametrize('key', ('credential', 'vault_credential', 'cloud_credential', 'network_credential'))
def test_invalid_credential_update(get, patch, job_template, admin, key):
    url = reverse('api:job_template_detail', kwargs={'pk': job_template.pk, 'version': 'v1'})
    resp = patch(url, {key: 999999}, admin, expect=400)
    assert 'Credential 999999 does not exist' in json.loads(resp.content)[key]


@pytest.mark.django_db
def test_ssh_credential_update(get, patch, job_template, admin, machine_credential):
    url = reverse('api:job_template_detail', kwargs={'pk': job_template.pk})
@ -362,3 +379,18 @@ def test_rbac_default_credential_usage(get, post, job_template, alice, machine_c
    new_cred.use_role.members.add(alice)
    url = reverse('api:job_template_launch', kwargs={'pk': job_template.pk})
    post(url, {'credential': new_cred.pk}, alice, expect=201)


@pytest.mark.django_db
def test_inventory_source_deprecated_credential(get, patch, admin, ec2_source, credential):
    url = reverse('api:inventory_source_detail', kwargs={'pk': ec2_source.pk})
    patch(url, {'credential': credential.pk}, admin, expect=200)
    resp = get(url, admin, expect=200)
    assert json.loads(resp.content)['credential'] == credential.pk


@pytest.mark.django_db
def test_inventory_source_invalid_deprecated_credential(patch, admin, ec2_source, credential):
    url = reverse('api:inventory_source_detail', kwargs={'pk': ec2_source.pk})
    resp = patch(url, {'credential': 999999}, admin, expect=400)
    assert 'Credential 999999 does not exist' in resp.content
@ -91,3 +91,13 @@ class TestDeleteViews:
            job.get_absolute_url(), user=system_auditor
        )
        assert resp.status_code == 403


@pytest.mark.django_db
def test_non_filterable_field(options, instance, admin_user):
    r = options(
        url=instance.get_absolute_url(),
        user=admin_user
    )
    field_info = r.data['actions']['GET']['percent_capacity_remaining']
    assert 'filterable' in field_info
@ -7,6 +7,13 @@ from awx.main.models import (
)


@pytest.fixture
def tower_instance_group():
    ig = InstanceGroup(name='tower')
    ig.save()
    return ig


@pytest.fixture
def instance_group(job_factory):
    ig = InstanceGroup(name="east")
@ -15,8 +22,8 @@ def instance_group(job_factory):


@pytest.fixture
def tower_instance_group():
    ig = InstanceGroup(name='tower')
def isolated_instance_group(instance_group):
    ig = InstanceGroup(name="iso", controller=instance_group)
    ig.save()
    return ig

@ -80,12 +87,22 @@ def test_delete_instance_group_jobs_running(delete, instance_group_jobs_running,


@pytest.mark.django_db
def test_delete_tower_instance_group_prevented(delete, options, tower_instance_group, user):
def test_modify_delete_tower_instance_group_prevented(delete, options, tower_instance_group, user, patch, put):
    url = reverse("api:instance_group_detail", kwargs={'pk': tower_instance_group.pk})
    super_user = user('bob', True)

    delete(url, None, super_user, expect=403)

    resp = options(url, None, super_user, expect=200)
    actions = ['GET', 'PUT']
    assert len(resp.data['actions'].keys()) == 2
    assert 'DELETE' not in resp.data['actions']
    for action in actions:
        assert action in resp.data['actions']
    assert 'GET' in resp.data['actions']
    assert 'PUT' in resp.data['actions']


@pytest.mark.django_db
def test_prevent_delete_iso_and_control_groups(delete, isolated_instance_group, admin):
    iso_url = reverse("api:instance_group_detail", kwargs={'pk': isolated_instance_group.pk})
    controller_url = reverse("api:instance_group_detail", kwargs={'pk': isolated_instance_group.controller.pk})
    delete(iso_url, None, admin, expect=403)
    delete(controller_url, None, admin, expect=403)
@ -13,6 +13,9 @@ from awx.main.migrations import _save_password_keys as save_password_keys
from django.conf import settings
from django.apps import apps

# DRF
from rest_framework.exceptions import ValidationError


@pytest.mark.django_db
@pytest.mark.parametrize(
@ -113,6 +116,51 @@ def test_create_v1_rbac_check(get, post, project, credential, net_credential, ra
    post(reverse('api:job_template_list', kwargs={'version': 'v1'}), base_kwargs, rando, expect=403)


# TODO: remove as each field tested has support removed
@pytest.mark.django_db
def test_jt_deprecated_summary_fields(
        project, inventory,
        machine_credential, net_credential, vault_credential,
        mocker):
    jt = JobTemplate.objects.create(
        project=project,
        inventory=inventory,
        playbook='helloworld.yml'
    )

    class MockView:
        kwargs = {}
        request = None

    class MockRequest:
        version = 'v1'
        user = None

    view = MockView()
    request = MockRequest()
    view.request = request
    serializer = JobTemplateSerializer(instance=jt, context={'view': view, 'request': request})

    for kwargs in [{}, {'pk': 1}]:  # detail vs. list view
        for version in ['v1', 'v2']:
            view.kwargs = kwargs
            request.version = version
            sf = serializer.get_summary_fields(jt)
            assert 'credential' not in sf
            assert 'vault_credential' not in sf

    jt.credentials.add(machine_credential, net_credential, vault_credential)

    view.kwargs = {'pk': 1}
    for version in ['v1', 'v2']:
        request.version = version
        sf = serializer.get_summary_fields(jt)
        assert 'credential' in sf
        assert sf['credential']  # not an empty dict
        assert 'vault_credential' in sf
        assert sf['vault_credential']


@pytest.mark.django_db
def test_extra_credential_creation(get, post, organization_factory, job_template_factory, credentialtype_aws):
    objs = organization_factory("org", superusers=['admin'])
@ -615,3 +663,16 @@ def test_job_template_unset_custom_virtualenv(get, patch, organization_factory,
    url = reverse('api:job_template_detail', kwargs={'pk': jt.id})
    resp = patch(url, {'custom_virtualenv': value}, user=objs.superusers.admin, expect=200)
    assert resp.data['custom_virtualenv'] is None


@pytest.mark.django_db
def test_callback_disallowed_null_inventory(project):
    jt = JobTemplate.objects.create(
        name='test-jt', inventory=None,
        ask_inventory_on_launch=True,
        project=project, playbook='helloworld.yml')
    serializer = JobTemplateSerializer(jt)
    assert serializer.instance == jt
    with pytest.raises(ValidationError) as exc:
        serializer.validate({'host_config_key': 'asdfbasecfeee'})
    assert 'Cannot enable provisioning callback without an inventory set' in str(exc)
@ -1,6 +1,9 @@
import pytest
import base64

from django.db import connection

from awx.main.utils.encryption import decrypt_value, get_encryption_key
from awx.api.versioning import reverse, drf_reverse
from awx.main.models.oauth import (OAuth2Application as Application,
                                   OAuth2AccessToken as AccessToken,
@ -19,44 +22,42 @@ def test_personal_access_token_creation(oauth_application, post, alice):
            oauth_application.client_id, oauth_application.client_secret
        ]))
    )
    resp_json = resp._container[0]
    assert 'access_token' in resp_json
    assert 'scope' in resp_json
    assert 'refresh_token' in resp_json


@pytest.mark.django_db
def test_oauth_application_create(admin, post):
def test_oauth_application_create(admin, organization, post):
    response = post(
        reverse('api:o_auth2_application_list'), {
            'name': 'test app',
            'user': admin.pk,
            'organization': organization.pk,
            'client_type': 'confidential',
            'authorization_grant_type': 'password',
        }, admin, expect=201
    )
    assert 'modified' in response.data
    assert 'updated' not in response.data
    assert 'user' in response.data['related']
    created_app = Application.objects.get(client_id=response.data['client_id'])
    assert created_app.name == 'test app'
    assert created_app.user == admin
    assert created_app.skip_authorization is False
    assert created_app.redirect_uris == ''
    assert created_app.client_type == 'confidential'
    assert created_app.authorization_grant_type == 'password'
    assert created_app.organization == organization


@pytest.mark.django_db
def test_oauth_application_update(oauth_application, patch, admin, alice):
def test_oauth_application_update(oauth_application, organization, patch, admin, alice):
    patch(
        reverse('api:o_auth2_application_detail', kwargs={'pk': oauth_application.pk}), {
            'name': 'Test app with immutable grant type and user',
            'organization': organization.pk,
            'redirect_uris': 'http://localhost/api/',
            'authorization_grant_type': 'implicit',
            'skip_authorization': True,
            'user': alice.pk,
        }, admin, expect=200
    )
    updated_app = Application.objects.get(client_id=oauth_application.client_id)
@ -64,7 +65,27 @@ def test_oauth_application_update(oauth_application, patch, admin, alice):
    assert updated_app.redirect_uris == 'http://localhost/api/'
    assert updated_app.skip_authorization is True
    assert updated_app.authorization_grant_type == 'password'
    assert updated_app.user == admin
    assert updated_app.organization == organization


@pytest.mark.django_db
def test_oauth_application_encryption(admin, organization, post):
    response = post(
        reverse('api:o_auth2_application_list'), {
            'name': 'test app',
            'organization': organization.pk,
            'client_type': 'confidential',
            'authorization_grant_type': 'password',
        }, admin, expect=201
    )
    pk = response.data.get('id')
    secret = response.data.get('client_secret')
    with connection.cursor() as cursor:
        encrypted = cursor.execute(
            'SELECT client_secret FROM main_oauth2application WHERE id={}'.format(pk)
        ).fetchone()[0]
    assert encrypted.startswith('$encrypted$')
    assert decrypt_value(get_encryption_key('value', pk=None), encrypted) == secret


@pytest.mark.django_db
@ -94,7 +115,7 @@ def test_oauth_token_create(oauth_application, get, post, admin):
    )
    assert response.data['summary_fields']['tokens']['count'] == 1
    assert response.data['summary_fields']['tokens']['results'][0] == {
        'id': token.pk, 'scope': token.scope, 'token': '**************'
        'id': token.pk, 'scope': token.scope, 'token': '************'
    }

@ -131,7 +131,7 @@ def test_organization_inventory_list(organization, inventory_factory, get, alice
    assert get(reverse('api:organization_inventories_list', kwargs={'pk': organization.id}), user=alice).data['count'] == 2
    assert get(reverse('api:organization_inventories_list', kwargs={'pk': organization.id}), user=bob).data['count'] == 1
    get(reverse('api:organization_inventories_list', kwargs={'pk': organization.id}), user=rando, expect=403)


@pytest.mark.django_db
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)

@ -3,8 +3,8 @@ import pytest
from awx.api.versioning import reverse
from django.test.client import RequestFactory

from awx.main.models import Role, Group, UnifiedJobTemplate, JobTemplate
from awx.main.access import access_registry
from awx.main.models import Role, Group, UnifiedJobTemplate, JobTemplate, WorkflowJobTemplate
from awx.main.access import access_registry, WorkflowJobTemplateAccess
from awx.main.utils import prefetch_page_capabilities
from awx.api.serializers import JobTemplateSerializer, UnifiedJobTemplateSerializer

@ -196,12 +196,6 @@ class TestAccessListCapabilities:
        direct_access_list = response.data['results'][0]['summary_fields']['direct_access']
        assert direct_access_list[0]['role']['user_capabilities']['unattach'] == 'foobar'

    def test_user_access_list_direct_access_capability(self, rando, get):
        "When a user views their own access list, they cannot unattach their admin role"
        response = get(reverse('api:user_access_list', kwargs={'pk': rando.id}), rando)
        direct_access_list = response.data['results'][0]['summary_fields']['direct_access']
        assert not direct_access_list[0]['role']['user_capabilities']['unattach']


@pytest.mark.django_db
def test_team_roles_unattach(mocker, team, team_member, inventory, mock_access_method, get):
@ -322,6 +316,17 @@ def test_prefetch_jt_copy_capability(job_template, project, inventory, rando):
    assert mapping[job_template.id] == {'copy': True}


@pytest.mark.django_db
def test_workflow_orphaned_capabilities(rando):
    wfjt = WorkflowJobTemplate.objects.create(name='test', organization=None)
    wfjt.admin_role.members.add(rando)
    access = WorkflowJobTemplateAccess(rando)
    assert not access.get_user_capabilities(
        wfjt, method_list=['edit', 'copy'],
        capabilities_cache={'copy': True}
    )['copy']


@pytest.mark.django_db
def test_manual_projects_no_update(manual_project, get, admin_user):
    response = get(reverse('api:project_detail', kwargs={'pk': manual_project.pk}), admin_user, expect=200)
@ -2,7 +2,7 @@ import pytest

from awx.api.versioning import reverse
from awx.main.models import UnifiedJob, ProjectUpdate, InventoryUpdate
from awx.main.tests.base import URI
from awx.main.tests.URI import URI
from awx.main.constants import ACTIVE_STATES


@ -16,6 +16,7 @@ from awx.main.models import (

# other AWX
from awx.main.utils import model_to_dict
from awx.main.utils.common import get_allowed_fields
from awx.api.serializers import InventorySourceSerializer

# Django
@ -181,3 +182,20 @@ def test_annon_user_action():
    inv = Inventory.objects.create(name='ainventory')
    entry = inv.activitystream_set.filter(operation='create').first()
    assert not entry.actor


@pytest.mark.django_db
def test_modified_not_allowed_field(somecloud_type):
    '''
    If this test fails, that means that read-only fields are showing
    up in the activity stream serialization of an instance.

    That _probably_ means that you just connected a new model to the
    activity_stream_registrar, but did not add its serializer to
    the model->serializer mapping.
    '''
    from awx.main.signals import model_serializer_mapping
    from awx.main.registrar import activity_stream_registrar

    for Model in activity_stream_registrar.models:
        assert 'modified' not in get_allowed_fields(Model(), model_serializer_mapping), Model

@ -1,6 +1,7 @@
import pytest

from awx.main.models import JobTemplate, Job
from crum import impersonate


@pytest.mark.django_db
@ -49,3 +50,18 @@ def test_awx_custom_virtualenv_without_jt(project):

    job = Job.objects.get(pk=job.id)
    assert job.ansible_virtualenv_path == '/venv/fancy-proj'


@pytest.mark.django_db
def test_update_parent_instance(job_template, alice):
    # Jobs are launched as a particular user, but that user is not saved as
    # modified_by on the parent job template.
    with impersonate(alice):
        assert job_template.current_job is None
        assert job_template.status == 'never updated'
        assert job_template.modified_by is None
        job = job_template.jobs.create(status='new')
        job.status = 'pending'
        job.save()
        assert job_template.current_job == job
        assert job_template.status == 'pending'
        assert job_template.modified_by is None
@ -2,7 +2,7 @@ import pytest
import mock
from datetime import timedelta
from awx.main.scheduler import TaskManager
from awx.main.models import InstanceGroup
from awx.main.models import InstanceGroup, WorkflowJob
from awx.main.tasks import apply_cluster_membership_policies


@ -77,6 +77,18 @@ def test_multi_group_with_shared_dependency(instance_factory, default_instance_g
    assert TaskManager.start_task.call_count == 2


@pytest.mark.django_db
def test_workflow_job_no_instancegroup(workflow_job_template_factory, default_instance_group, mocker):
    wfjt = workflow_job_template_factory('anicedayforawalk').workflow_job_template
    wfj = WorkflowJob.objects.create(workflow_job_template=wfjt)
    wfj.status = "pending"
    wfj.save()
    with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
        TaskManager().schedule()
        TaskManager.start_task.assert_called_once_with(wfj, None, [])
    assert wfj.instance_group is None


@pytest.mark.django_db
def test_overcapacity_blocking_other_groups_unaffected(instance_factory, default_instance_group, mocker,
                                                       instance_group_factory, job_template_factory):
@ -56,98 +56,6 @@ def test_cloud_kind_uniqueness():
    assert CredentialType.defaults['aws']().unique_by_kind is False


@pytest.mark.django_db
@pytest.mark.parametrize('input_, valid', [
    ({}, True),
    ({'fields': []}, True),
    ({'fields': {}}, False),
    ({'fields': 123}, False),
    ({'fields': [{'id': 'username', 'label': 'Username', 'foo': 'bar'}]}, False),
    ({'fields': [{'id': 'username', 'label': 'Username'}]}, True),
    ({'fields': [{'id': 'username', 'label': 'Username', 'type': 'string'}]}, True),
    ({'fields': [{'id': 'username', 'label': 'Username', 'help_text': 1}]}, False),
    ({'fields': [{'id': 'username', 'label': 'Username', 'help_text': 'Help Text'}]}, True),  # noqa
    ({'fields': [{'id': 'username', 'label': 'Username'}, {'id': 'username', 'label': 'Username 2'}]}, False),  # noqa
    ({'fields': [{'id': '$invalid$', 'label': 'Invalid', 'type': 'string'}]}, False),  # noqa
    ({'fields': [{'id': 'password', 'label': 'Password', 'type': 'invalid-type'}]}, False),
    ({'fields': [{'id': 'ssh_key', 'label': 'SSH Key', 'type': 'string', 'format': 'ssh_private_key'}]}, True),  # noqa
    ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean'}]}, True),
    ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean', 'choices': ['a', 'b']}]}, False),
    ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean', 'secret': True}]}, False),
    ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': True}]}, True),
    ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': True, 'type': 'boolean'}]}, False),  # noqa
    ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': 'bad'}]}, False),  # noqa
    ({'fields': [{'id': 'token', 'label': 'Token', 'secret': True}]}, True),
    ({'fields': [{'id': 'token', 'label': 'Token', 'secret': 'bad'}]}, False),
    ({'fields': [{'id': 'token', 'label': 'Token', 'ask_at_runtime': True}]}, True),
    ({'fields': [{'id': 'token', 'label': 'Token', 'ask_at_runtime': 'bad'}]}, False),  # noqa
    ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': 'not-a-list'}]}, False),  # noqa
    ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': []}]}, False),
    ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': ['su', 'sudo']}]}, True),  # noqa
    ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': ['dup', 'dup']}]}, False),  # noqa
    ({'fields': [{'id': 'tower', 'label': 'Reserved!'}]}, False),  # noqa
])
def test_cred_type_input_schema_validity(input_, valid):
    type_ = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed_by_tower=True,
        inputs=input_
    )
    if valid is False:
        with pytest.raises(Exception) as e:
            type_.full_clean()
        assert e.type in (ValidationError, serializers.ValidationError)
    else:
        type_.full_clean()


@pytest.mark.django_db
@pytest.mark.parametrize('injectors, valid', [
    ({}, True),
    ({'invalid-injector': {}}, False),
    ({'file': 123}, False),
    ({'file': {}}, True),
    ({'file': {'template': '{{username}}'}}, True),
    ({'file': {'template.username': '{{username}}'}}, True),
    ({'file': {'template.username': '{{username}}', 'template.password': '{{pass}}'}}, True),
    ({'file': {'template': '{{username}}', 'template.password': '{{pass}}'}}, False),
    ({'file': {'foo': 'bar'}}, False),
    ({'env': 123}, False),
    ({'env': {}}, True),
    ({'env': {'AWX_SECRET': '{{awx_secret}}'}}, True),
    ({'env': {'AWX_SECRET_99': '{{awx_secret}}'}}, True),
    ({'env': {'99': '{{awx_secret}}'}}, False),
    ({'env': {'AWX_SECRET=': '{{awx_secret}}'}}, False),
    ({'extra_vars': 123}, False),
    ({'extra_vars': {}}, True),
    ({'extra_vars': {'hostname': '{{host}}'}}, True),
    ({'extra_vars': {'hostname_99': '{{host}}'}}, True),
    ({'extra_vars': {'99': '{{host}}'}}, False),
    ({'extra_vars': {'99=': '{{host}}'}}, False),
])
def test_cred_type_injectors_schema(injectors, valid):
    type_ = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed_by_tower=True,
        inputs={
            'fields': [
                {'id': 'username', 'type': 'string', 'label': '_'},
                {'id': 'pass', 'type': 'string', 'label': '_'},
                {'id': 'awx_secret', 'type': 'string', 'label': '_'},
                {'id': 'host', 'type': 'string', 'label': '_'},
            ]
        },
        injectors=injectors
    )
    if valid is False:
        with pytest.raises(ValidationError):
            type_.full_clean()
    else:
        type_.full_clean()
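
# Illustrative sketch (not part of this change): a definition that both the
# inputs and injectors schemas above accept -- templates may only reference
# ids declared under inputs['fields'], and env keys must look like
# identifiers (not digits-only, no '=').
#
#     CredentialType(
#         kind='cloud',
#         name='SomeCloud',
#         managed_by_tower=True,
#         inputs={'fields': [{'id': 'username', 'type': 'string', 'label': 'Username'}]},
#         injectors={'env': {'SOMECLOUD_USERNAME': '{{username}}'}},
#     ).full_clean()  # passes for a well-formed definition
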
@pytest.mark.django_db
def test_credential_creation(organization_factory):
    org = organization_factory('test').organization
@ -174,49 +82,6 @@ def test_credential_creation(organization_factory):
    assert cred.inputs['username'] == cred.username == 'bob'


@pytest.mark.django_db
@pytest.mark.parametrize('inputs', [
    ['must-be-a-dict'],
    {'user': 'wrong-key'},
    {'username': 1},
    {'username': 1.5},
    {'username': ['a', 'b', 'c']},
    {'username': {'a': 'b'}},
    {'username': False},
    {'flag': 1},
    {'flag': 1.5},
    {'flag': ['a', 'b', 'c']},
    {'flag': {'a': 'b'}},
    {'flag': 'some-string'},
])
def test_credential_creation_validation_failure(organization_factory, inputs):
    org = organization_factory('test').organization
    type_ = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed_by_tower=True,
        inputs={
            'fields': [{
                'id': 'username',
                'label': 'Username for SomeCloud',
                'type': 'string'
            }, {
                'id': 'flag',
                'label': 'Some Boolean Flag',
                'type': 'boolean'
            }]
        }
    )
    type_.save()

    with pytest.raises(Exception) as e:
        cred = Credential(credential_type=type_, name="Bob's Credential",
                          inputs=inputs, organization=org)
        cred.save()
        cred.full_clean()
    assert e.type in (ValidationError, serializers.ValidationError)


@pytest.mark.django_db
@pytest.mark.parametrize('kind', ['ssh', 'net', 'scm'])
@pytest.mark.parametrize('ssh_key_data, ssh_key_unlock, valid', [

@ -60,6 +60,21 @@ def test_policy_instance_few_instances(mock, instance_factory, instance_group_fa
    assert i2 in ig_4.instances.all()


@pytest.mark.django_db
@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
def test_policy_instance_distribution_round_up(mock, instance_factory, instance_group_factory):
    i1 = instance_factory("i1")
    i2 = instance_factory("i2")
    i3 = instance_factory("i3")
    i4 = instance_factory("i4")
    i5 = instance_factory("i5")
    ig_1 = instance_group_factory("ig1", percentage=79)
    apply_cluster_membership_policies()
    assert len(ig_1.instances.all()) == 4
    assert set([i1, i2, i3, i4]) == set(ig_1.instances.all())
    assert i5 not in ig_1.instances.all()
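
# Aside (not part of this change): the expected count above follows from the
# policy rounding percentages up rather than truncating -- 79% of 5 instances
# is 3.95, so 4 instances (i1..i4) are assigned and only i5 is left out:
# int(math.ceil(5 * 79 / 100.0)) == 4.
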
@pytest.mark.django_db
@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
def test_policy_instance_distribution_uneven(mock, instance_factory, instance_group_factory):

@ -56,7 +56,6 @@ def test_get_roles_list_user(organization, inventory, team, get, user):
    assert Role.singleton(ROLE_SINGLETON_SYSTEM_ADMINISTRATOR).id in role_hash
    assert organization.admin_role.id in role_hash
    assert organization.member_role.id in role_hash
    assert this_user.admin_role.id in role_hash
    assert custom_role.id in role_hash

    assert inventory.admin_role.id not in role_hash
@ -99,12 +98,12 @@ def test_cant_create_role(post, admin):


@pytest.mark.django_db
def test_cant_delete_role(delete, admin):
def test_cant_delete_role(delete, admin, inventory):
    "Ensure we can't delete roles through the api"
    # Some day we might want to allow this, but until that is spec'd out,
    # let's make sure we don't slip up and allow it implicitly through some
    # helper or another.
    response = delete(reverse('api:role_detail', kwargs={'pk': admin.admin_role.id}), admin)
    response = delete(reverse('api:role_detail', kwargs={'pk': inventory.admin_role.id}), admin)
    assert response.status_code == 405
@ -32,25 +32,40 @@ def test_custom_inv_script_access(organization, user):
    assert ou in custom_inv.admin_role


@pytest.mark.django_db
def test_modify_inv_script_foreign_org_admin(org_admin, organization, organization_factory, project):
    custom_inv = CustomInventoryScript.objects.create(name='test', script='test', description='test',
                                                      organization=organization)
@pytest.fixture
def custom_inv(organization):
    return CustomInventoryScript.objects.create(
        name='test', script='test', description='test', organization=organization)


@pytest.mark.django_db
def test_modify_inv_script_foreign_org_admin(
        org_admin, organization, organization_factory, project, custom_inv):
    other_org = organization_factory('not-my-org').organization
    access = CustomInventoryScriptAccess(org_admin)
    assert not access.can_change(custom_inv, {'organization': other_org.pk, 'name': 'new-project'})


@pytest.mark.django_db
def test_org_member_inventory_script_permissions(org_member, organization):
    custom_inv = CustomInventoryScript.objects.create(name='test', script='test', organization=organization)
def test_org_member_inventory_script_permissions(org_member, organization, custom_inv):
    access = CustomInventoryScriptAccess(org_member)
    assert access.can_read(custom_inv)
    assert not access.can_delete(custom_inv)
    assert not access.can_change(custom_inv, {'name': 'ed-test'})


@pytest.mark.django_db
def test_copy_only_admin(org_member, organization, custom_inv):
    custom_inv.admin_role.members.add(org_member)
    access = CustomInventoryScriptAccess(org_member)
    assert not access.can_copy(custom_inv)
    assert access.get_user_capabilities(custom_inv, method_list=['edit', 'delete', 'copy']) == {
        'edit': True,
        'delete': True,
        'copy': False
    }


@pytest.mark.django_db
@pytest.mark.parametrize("role", ["admin_role", "inventory_admin_role"])
def test_access_admin(role, organization, inventory, user):
@ -11,6 +11,7 @@ from awx.main.access import (
    ScheduleAccess
)
from awx.main.models.jobs import JobTemplate
from awx.main.models.organization import Organization
from awx.main.models.schedules import Schedule


@ -296,3 +297,30 @@ class TestJobTemplateSchedules:
        mock_change.return_value = True
        assert access.can_change(schedule, {'inventory': 42})
        mock_change.assert_called_once_with(schedule, {'inventory': 42})


@pytest.mark.django_db
def test_jt_org_ownership_change(user, jt_linked):
    admin1 = user('admin1')
    org1 = jt_linked.project.organization
    org1.admin_role.members.add(admin1)
    a1_access = JobTemplateAccess(admin1)

    assert a1_access.can_read(jt_linked)

    admin2 = user('admin2')
    org2 = Organization.objects.create(name='mrroboto', description='domo')
    org2.admin_role.members.add(admin2)
    a2_access = JobTemplateAccess(admin2)

    assert not a2_access.can_read(jt_linked)

    jt_linked.project.organization = org2
    jt_linked.project.save()
    jt_linked.inventory.organization = org2
    jt_linked.inventory.save()

    assert a2_access.can_read(jt_linked)
    assert not a1_access.can_read(jt_linked)
@ -3,114 +3,250 @@ import pytest
|
||||
from awx.main.access import (
|
||||
OAuth2ApplicationAccess,
|
||||
OAuth2TokenAccess,
|
||||
ActivityStreamAccess,
|
||||
)
|
||||
from awx.main.models.oauth import (
|
||||
OAuth2Application as Application,
|
||||
OAuth2AccessToken as AccessToken,
|
||||
)
|
||||
from awx.main.models import ActivityStream
|
||||
from awx.api.versioning import reverse
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestOAuthApplication:
|
||||
|
||||
@pytest.mark.parametrize("user_for_access, can_access_list", [
|
||||
(0, [True, True, True, True]),
|
||||
(1, [False, True, True, False]),
|
||||
(2, [False, False, True, False]),
|
||||
(3, [False, False, False, True]),
|
||||
])
    def test_can_read_change_delete(
        self, admin, org_admin, org_member, alice, user_for_access, can_access_list
    ):
        user_list = [admin, org_admin, org_member, alice]
        access = OAuth2ApplicationAccess(user_list[user_for_access])
        for user, can_access in zip(user_list, can_access_list):
class TestOAuth2Application:

    @pytest.mark.parametrize("user_for_access, can_access_list", [
        (0, [True, True]),
        (1, [True, True]),
        (2, [True, True]),
        (3, [False, False]),
    ])
    def test_can_read(
        self, admin, org_admin, org_member, alice, user_for_access, can_access_list, organization
    ):
        user_list = [admin, org_admin, org_member, alice]
        access = OAuth2ApplicationAccess(user_list[user_for_access])
        app_creation_user_list = [admin, org_admin]
        for user, can_access in zip(app_creation_user_list, can_access_list):
            app = Application.objects.create(
                name='test app for {}'.format(user.username), user=user,
                client_type='confidential', authorization_grant_type='password', organization=organization
            )
            assert access.can_read(app) is can_access

    def test_app_activity_stream(self, org_admin, alice, organization):
        app = Application.objects.create(
            name='test app for {}'.format(user.username), user=user,
            client_type='confidential', authorization_grant_type='password'
            name='test app for {}'.format(org_admin.username), user=org_admin,
            client_type='confidential', authorization_grant_type='password', organization=organization
        )
        assert access.can_read(app) is can_access
        assert access.can_change(app, {}) is can_access
        assert access.can_delete(app) is can_access
        access = OAuth2ApplicationAccess(org_admin)
        assert access.can_read(app) is True
        access = ActivityStreamAccess(org_admin)
        activity_stream = ActivityStream.objects.filter(o_auth2_application=app).latest('pk')
        assert access.can_read(activity_stream) is True
        access = ActivityStreamAccess(alice)
        assert access.can_read(app) is False
        assert access.can_read(activity_stream) is False

    def test_superuser_can_always_create(self, admin, org_admin, org_member, alice):
        access = OAuth2ApplicationAccess(admin)
        for user in [admin, org_admin, org_member, alice]:
            assert access.can_add({
                'name': 'test app', 'user': user.pk, 'client_type': 'confidential',
                'authorization_grant_type': 'password'
            })

    def test_normal_user_cannot_create(self, admin, org_admin, org_member, alice):
        for access_user in [org_member, alice]:
            access = OAuth2ApplicationAccess(access_user)
            for user in [admin, org_admin, org_member, alice]:
                assert not access.can_add({
                    'name': 'test app', 'user': user.pk, 'client_type': 'confidential',
                    'authorization_grant_type': 'password'
                })

    def test_org_admin_can_create_in_org(self, admin, org_admin, org_member, alice):
        access = OAuth2ApplicationAccess(org_admin)
        for user in [admin, alice]:
            assert not access.can_add({
                'name': 'test app', 'user': user.pk, 'client_type': 'confidential',
                'authorization_grant_type': 'password'
            })
        for user in [org_admin, org_member]:
            assert access.can_add({
                'name': 'test app', 'user': user.pk, 'client_type': 'confidential',
                'authorization_grant_type': 'password'
            })
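        # The rule exercised above: an org admin may create applications only
        # for users inside their own organization (org_admin, org_member), not
        # for the superuser or unaffiliated users (admin, alice).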

@pytest.mark.skip(reason="Needs Update - CA")
@pytest.mark.django_db
class TestOAuthToken:

    @pytest.mark.parametrize("user_for_access, can_access_list", [
        (0, [True, True, True, True]),
        (1, [False, True, True, False]),
        (2, [False, False, True, False]),
        (3, [False, False, False, True]),
    ])
    def test_can_read_change_delete(
        self, post, admin, org_admin, org_member, alice, user_for_access, can_access_list
    ):
        user_list = [admin, org_admin, org_member, alice]
        access = OAuth2TokenAccess(user_list[user_for_access])
        for user, can_access in zip(user_list, can_access_list):
    def test_token_activity_stream(self, org_admin, alice, organization, post):
        app = Application.objects.create(
            name='test app for {}'.format(user.username), user=user,
            client_type='confidential', authorization_grant_type='password'
            name='test app for {}'.format(org_admin.username), user=org_admin,
            client_type='confidential', authorization_grant_type='password', organization=organization
        )
        response = post(
            reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
            {'scope': 'read'}, admin, expect=201
            {'scope': 'read'}, org_admin, expect=201
        )
        token = AccessToken.objects.get(token=response.data['token'])
        access = OAuth2ApplicationAccess(org_admin)
        assert access.can_read(app) is True
        access = ActivityStreamAccess(org_admin)
        activity_stream = ActivityStream.objects.filter(o_auth2_access_token=token).latest('pk')
        assert access.can_read(activity_stream) is True
        access = ActivityStreamAccess(alice)
        assert access.can_read(token) is False
        assert access.can_read(activity_stream) is False

        assert access.can_read(token) is can_access  # TODO: fix this test

    def test_can_edit_delete_app_org_admin(
        self, admin, org_admin, org_member, alice, organization
    ):
        user_list = [admin, org_admin, org_member, alice]
        can_access_list = [True, True, False, False]
        for user, can_access in zip(user_list, can_access_list):
            app = Application.objects.create(
                name='test app for {}'.format(org_admin.username), user=org_admin,
                client_type='confidential', authorization_grant_type='password', organization=organization
            )
            access = OAuth2ApplicationAccess(user)
            assert access.can_change(app, {}) is can_access
            assert access.can_delete(app) is can_access

    def test_can_edit_delete_app_admin(
        self, admin, org_admin, org_member, alice, organization
    ):
        user_list = [admin, org_admin, org_member, alice]
        can_access_list = [True, True, False, False]
        for user, can_access in zip(user_list, can_access_list):
            app = Application.objects.create(
                name='test app for {}'.format(admin.username), user=admin,
                client_type='confidential', authorization_grant_type='password', organization=organization
            )
            access = OAuth2ApplicationAccess(user)
            assert access.can_change(app, {}) is can_access
            assert access.can_delete(app) is can_access

    def test_superuser_can_always_create(self, admin, org_admin, org_member, alice):
        access = OAuth2ApplicationAccess(admin)
        for user in [admin, org_admin, org_member, alice]:
            assert access.can_add({
                'name': 'test app', 'user': user.pk, 'client_type': 'confidential',
                'authorization_grant_type': 'password', 'organization': 1
            })

    def test_normal_user_cannot_create(self, admin, org_admin, org_member, alice):
        for access_user in [org_member, alice]:
            access = OAuth2ApplicationAccess(access_user)
            for user in [admin, org_admin, org_member, alice]:
                assert not access.can_add({
                    'name': 'test app', 'user': user.pk, 'client_type': 'confidential',
                    'authorization_grant_type': 'password', 'organization': 1
                })


@pytest.mark.django_db
class TestOAuth2Token:

    def test_can_read_change_delete_app_token(
        self, post, admin, org_admin, org_member, alice, organization
    ):
        user_list = [admin, org_admin, org_member, alice]
        can_access_list = [True, True, False, False]
        app = Application.objects.create(
            name='test app for {}'.format(admin.username), user=admin,
            client_type='confidential', authorization_grant_type='password',
            organization=organization
        )
        response = post(
            reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
            {'scope': 'read'}, admin, expect=201
        )
        for user, can_access in zip(user_list, can_access_list):
            token = AccessToken.objects.get(token=response.data['token'])
            access = OAuth2TokenAccess(user)
            assert access.can_read(token) is can_access
            assert access.can_change(token, {}) is can_access
            assert access.can_delete(token) is can_access

    def test_auditor_can_read(
        self, post, admin, org_admin, org_member, alice, system_auditor, organization
    ):
        user_list = [admin, org_admin, org_member]
        can_access_list = [True, True, True]
        cannot_access_list = [False, False, False]
        app = Application.objects.create(
            name='test app for {}'.format(admin.username), user=admin,
            client_type='confidential', authorization_grant_type='password',
            organization=organization
        )
        for user, can_access, cannot_access in zip(user_list, can_access_list, cannot_access_list):
            response = post(
                reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
                {'scope': 'read'}, user, expect=201
            )
            token = AccessToken.objects.get(token=response.data['token'])
            access = OAuth2TokenAccess(system_auditor)
            assert access.can_read(token) is can_access
            assert access.can_change(token, {}) is cannot_access
            assert access.can_delete(token) is cannot_access

    def test_user_auditor_can_change(
        self, post, org_member, org_admin, system_auditor, organization
    ):
        app = Application.objects.create(
            name='test app for {}'.format(org_admin.username), user=org_admin,
            client_type='confidential', authorization_grant_type='password',
            organization=organization
        )
        response = post(
            reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
            {'scope': 'read'}, org_member, expect=201
        )
        token = AccessToken.objects.get(token=response.data['token'])
        access = OAuth2TokenAccess(system_auditor)
        assert access.can_read(token) is True
        assert access.can_change(token, {}) is False
        assert access.can_delete(token) is False
        dual_user = system_auditor
        organization.admin_role.members.add(dual_user)
        access = OAuth2TokenAccess(dual_user)
        assert access.can_read(token) is True
        assert access.can_change(token, {}) is True
        assert access.can_delete(token) is True
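        # A system auditor alone gets read-only access; once the same account
        # is also an org admin for the token's organization, write access follows.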

    def test_can_read_change_delete_personal_token_org_member(
        self, post, admin, org_admin, org_member, alice
    ):
        # Tests who can read a token created by an org member
        user_list = [admin, org_admin, org_member, alice]
        can_access_list = [True, False, True, False]
        response = post(
            reverse('api:o_auth2_personal_token_list', kwargs={'pk': org_member.pk}),
            {'scope': 'read'}, org_member, expect=201
        )
        token = AccessToken.objects.get(token=response.data['token'])
        for user, can_access in zip(user_list, can_access_list):
            access = OAuth2TokenAccess(user)
            assert access.can_read(token) is can_access
            assert access.can_change(token, {}) is can_access
            assert access.can_delete(token) is can_access
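        # Note the expected pattern: only the superuser and the token's owner
        # can see a personal access token; an org admin of the owner's
        # organization cannot.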

    def test_can_read_personal_token_creator(
        self, post, admin, org_admin, org_member, alice
    ):
        # Tests that the token's creator can read their own tokens
        user_list = [admin, org_admin, org_member, alice]
        can_access_list = [True, True, True, True]

        for user, can_access in zip(user_list, can_access_list):
            response = post(
                reverse('api:o_auth2_personal_token_list', kwargs={'pk': user.pk}),
                {'scope': 'read', 'application': None}, user, expect=201
            )
            token = AccessToken.objects.get(token=response.data['token'])
            access = OAuth2TokenAccess(user)
            assert access.can_read(token) is can_access
            assert access.can_change(token, {}) is can_access
            assert access.can_delete(token) is can_access

    @pytest.mark.parametrize("user_for_access, can_access_list", [
        (0, [True, True, True, True]),
        (1, [False, True, True, False]),
        (2, [False, False, True, False]),
        (3, [False, False, False, True]),
        (0, [True, True]),
        (1, [True, True]),
        (2, [True, True]),
        (3, [False, False]),
    ])
    def test_can_create(
        self, post, admin, org_admin, org_member, alice, user_for_access, can_access_list
        self, post, admin, org_admin, org_member, alice, user_for_access, can_access_list, organization
    ):
        user_list = [admin, org_admin, org_member, alice]
        for user, can_access in zip(user_list, can_access_list):
            app = Application.objects.create(
                name='test app for {}'.format(user.username), user=user,
                client_type='confidential', authorization_grant_type='password'
                client_type='confidential', authorization_grant_type='password', organization=organization
            )
            post(
                reverse('api:o_auth2_application_token_list', kwargs={'pk': app.pk}),
                {'scope': 'read'}, user_list[user_for_access], expect=201 if can_access else 403
            )

@ -4,7 +4,7 @@ from awx.main.access import (
    RoleAccess,
    UserAccess,
    TeamAccess)
from awx.main.models import Role
from awx.main.models import Role, Organization


@pytest.mark.django_db
@ -50,3 +50,98 @@ def test_visible_roles(admin_user, system_auditor, rando, organization, project)
    assert rando not in project.admin_role
    assert access.can_read(project.admin_role)
    assert project.admin_role in Role.visible_roles(rando)

# Permissions when adding users to org member/admin roles
@pytest.mark.django_db
def test_org_user_role_attach(user, organization, inventory):
    '''
    Org admins must not be able to add arbitrary users to their
    organization, because that would give them admin permissions over that user.
    '''
    admin = user('admin')
    nonmember = user('nonmember')
    inventory.admin_role.members.add(nonmember)

    organization.admin_role.members.add(admin)

    role_access = RoleAccess(admin)
    assert not role_access.can_attach(organization.member_role, nonmember, 'members', None)
    assert not role_access.can_attach(organization.admin_role, nonmember, 'members', None)


# Singleton user editing restrictions
@pytest.mark.django_db
def test_org_superuser_role_attach(admin_user, org_admin, organization):
    '''
    Ideally, you would not add superusers to roles (particularly member_role),
    but it has historically been possible.
    This checks that the situation does not grant unexpected permissions.
    '''
    organization.member_role.members.add(admin_user)

    role_access = RoleAccess(org_admin)
    assert not role_access.can_attach(organization.member_role, admin_user, 'members', None)
    assert not role_access.can_attach(organization.admin_role, admin_user, 'members', None)
    user_access = UserAccess(org_admin)
    assert not user_access.can_change(admin_user, {'last_name': 'Witzel'})

# Sanity check user editing permissions combined with new org roles
@pytest.mark.django_db
def test_org_object_role_not_sufficient(user, organization):
    member = user('amember')
    obj_admin = user('icontrolallworkflows')

    organization.member_role.members.add(member)
    organization.workflow_admin_role.members.add(obj_admin)

    user_access = UserAccess(obj_admin)
    assert not user_access.can_change(member, {'last_name': 'Witzel'})


# Org admin user editing permission: ANY-to-ALL change
@pytest.mark.django_db
def test_need_all_orgs_to_admin_user(user):
    '''
    Old behavior - org admin to ANY organization that a user is a member of
    grants permission to admin that user.
    New behavior enforced here - org admin to ALL organizations that a
    user is a member of grants permission to admin that user.
    '''
    org1 = Organization.objects.create(name='org1')
    org2 = Organization.objects.create(name='org2')

    org1_admin = user('org1-admin')
    org1.admin_role.members.add(org1_admin)

    org12_member = user('org12-member')
    org1.member_role.members.add(org12_member)
    org2.member_role.members.add(org12_member)

    user_access = UserAccess(org1_admin)
    assert not user_access.can_change(org12_member, {'last_name': 'Witzel'})

    role_access = RoleAccess(org1_admin)
    assert not role_access.can_attach(org1.admin_role, org12_member, 'members', None)
    assert not role_access.can_attach(org1.member_role, org12_member, 'members', None)

    org2.admin_role.members.add(org1_admin)
    assert role_access.can_attach(org1.admin_role, org12_member, 'members', None)
    assert role_access.can_attach(org1.member_role, org12_member, 'members', None)
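
# A minimal sketch of the ALL-organizations rule exercised above; illustrative
# only (the real check lives in awx.main.access and is more involved), and the
# helper name and ORM lookup below are hypothetical:
def _can_admin_user_sketch(acting_user, target_user):
    target_orgs = Organization.objects.filter(member_role__members=target_user)
    # acting_user must administer every organization the target belongs to
    return bool(target_orgs) and all(
        acting_user in org.admin_role for org in target_orgs
    )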

# Orphaned users can be added to a member role, but only in special cases
@pytest.mark.django_db
def test_orphaned_user_allowed(org_admin, rando, organization):
    '''
    We still allow adoption of orphaned* users by assigning them to the
    organization member role, but only in the situation where the
    org admin already possesses indirect access to all of the user's roles.
    *orphaned means the user is not a member of any organization
    '''
    role_access = RoleAccess(org_admin)
    assert role_access.can_attach(organization.member_role, rando, 'members', None)
    # Cannot edit the user directly without adding them to the org first
    user_access = UserAccess(org_admin)
    assert not user_access.can_change(rando, {'last_name': 'Witzel'})

@ -61,45 +61,21 @@ def test_user_queryset(user):

@pytest.mark.django_db
def test_user_accessible_objects(user, organization):
    '''
    We cannot directly use accessible_objects for the User model because
    both editing and read permissions are subject to complex business logic.
    '''
    admin = user('admin', False)
    u = user('john', False)
    assert User.accessible_objects(admin, 'admin_role').count() == 1
    access = UserAccess(admin)
    assert access.get_queryset().count() == 1  # can only see himself

    organization.member_role.members.add(u)
    organization.admin_role.members.add(admin)
    assert User.accessible_objects(admin, 'admin_role').count() == 2
    organization.member_role.members.add(admin)
    assert access.get_queryset().count() == 2

    organization.member_role.members.remove(u)
    assert User.accessible_objects(admin, 'admin_role').count() == 1


@pytest.mark.django_db
def test_org_user_admin(user, organization):
    admin = user('orgadmin')
    member = user('orgmember')

    organization.member_role.members.add(member)
    assert admin not in member.admin_role

    organization.admin_role.members.add(admin)
    assert admin in member.admin_role

    organization.admin_role.members.remove(admin)
    assert admin not in member.admin_role


@pytest.mark.django_db
def test_org_user_removed(user, organization):
    admin = user('orgadmin')
    member = user('orgmember')

    organization.admin_role.members.add(admin)
    organization.member_role.members.add(member)

    assert admin in member.admin_role

    organization.member_role.members.remove(member)
    assert admin not in member.admin_role
    assert access.get_queryset().count() == 1


@pytest.mark.django_db
@ -1,541 +0,0 @@
# Python
import uuid

# AWX
from awx.main.models import *  # noqa
from awx.main.tests.base import BaseTestMixin

TEST_PLAYBOOK = '''- hosts: all
  gather_facts: false
  tasks:
  - name: woohoo
    command: test 1 = 1
'''


class BaseJobTestMixin(BaseTestMixin):

    def _create_inventory(self, name, organization, created_by,
                          groups_hosts_dict):
        '''Helper method for creating inventory with groups and hosts.'''
        inventory = organization.inventories.create(
            name=name,
            created_by=created_by,
        )
        for group_name, host_names in groups_hosts_dict.items():
            group = inventory.groups.create(
                name=group_name,
                created_by=created_by,
            )
            for host_name in host_names:
                host = inventory.hosts.create(
                    name=host_name,
                    created_by=created_by,
                )
                group.hosts.add(host)
        return inventory

    def populate(self):
        # Here's a little story about the AWX Bread Company, or ABC. They
        # make machines that make bread - bakers, slicers, and packagers - and
        # these machines are each controlled by a Linux box, which is in turn
        # managed by AWX.

        # Sue is the super user. You don't mess with Sue or you're toast. Ha.
        self.user_sue = self.make_user('sue', super_user=True)

        # There are three organizations in ABC using Ansible, since it's the
        # best thing for dev ops automation since, well, sliced bread.

        # Engineering - They design and build the machines.
        self.org_eng = Organization.objects.create(
            name='engineering',
            created_by=self.user_sue,
        )
        # Support - They fix it when it's not working.
        self.org_sup = Organization.objects.create(
            name='support',
            created_by=self.user_sue,
        )
        # Operations - They implement the production lines using the machines.
        self.org_ops = Organization.objects.create(
            name='operations',
            created_by=self.user_sue,
        )

        # Alex is Sue's IT assistant who can also administer all of the
        # organizations.
        self.user_alex = self.make_user('alex')
        self.org_eng.admin_role.members.add(self.user_alex)
        self.org_sup.admin_role.members.add(self.user_alex)
        self.org_ops.admin_role.members.add(self.user_alex)

        # Bob is the head of engineering. He's an admin for engineering, but
        # also a user within the operations organization (so he can see the
        # results if things go wrong in production).
        self.user_bob = self.make_user('bob')
        self.org_eng.admin_role.members.add(self.user_bob)
        self.org_ops.member_role.members.add(self.user_bob)

        # Chuck is the lead engineer. He has free rein over engineering, but
        # no other organizations.
        self.user_chuck = self.make_user('chuck')
        self.org_eng.admin_role.members.add(self.user_chuck)

        # Doug is the other engineer working under Chuck. He can write
        # playbooks and check them, but Chuck doesn't quite think he's ready to
        # run them yet. Poor Doug.
        self.user_doug = self.make_user('doug')
        self.org_eng.member_role.members.add(self.user_doug)

        # Juan is another engineer working under Chuck. He has a little more
        # freedom to run playbooks but can't create job templates.
        self.user_juan = self.make_user('juan')
        self.org_eng.member_role.members.add(self.user_juan)

        # Hannibal is Chuck's right-hand man. Chuck usually has him create the
        # job templates that the rest of the team will use.
        self.user_hannibal = self.make_user('hannibal')
        self.org_eng.member_role.members.add(self.user_hannibal)

        # Eve is the head of support. She can also see what goes on in
        # operations to help them troubleshoot problems.
        self.user_eve = self.make_user('eve')
        self.org_sup.admin_role.members.add(self.user_eve)
        self.org_ops.member_role.members.add(self.user_eve)

        # Frank is the other support guy.
        self.user_frank = self.make_user('frank')
        self.org_sup.member_role.members.add(self.user_frank)

        # Greg is the head of operations.
        self.user_greg = self.make_user('greg')
        self.org_ops.admin_role.members.add(self.user_greg)

        # Holly is an operations engineer.
        self.user_holly = self.make_user('holly')
        self.org_ops.member_role.members.add(self.user_holly)

        # Iris is another operations engineer.
        self.user_iris = self.make_user('iris')
        self.org_ops.member_role.members.add(self.user_iris)

        # Randall and Billybob are new ops interns that ops uses to test
        # their playbooks and inventory.
        self.user_randall = self.make_user('randall')
        self.org_ops.member_role.members.add(self.user_randall)

        # Billybob works with Randall.
        self.user_billybob = self.make_user('billybob')
        self.org_ops.member_role.members.add(self.user_billybob)

        # Jim is the newest intern. He can log in, but can't do anything quite
        # yet except make everyone else fresh coffee.
        self.user_jim = self.make_user('jim')

        # There are three main projects, one each for the development, test and
        # production branches of the playbook repository. All three orgs can
        # use the production branch, support can use the production and testing
        # branches, and operations can only use the production branch.
        self.proj_dev = self.make_project('dev', 'development branch',
                                          self.user_sue, TEST_PLAYBOOK)
        self.org_eng.projects.add(self.proj_dev)
        self.proj_test = self.make_project('test', 'testing branch',
                                           self.user_sue, TEST_PLAYBOOK)
        #self.org_eng.projects.add(self.proj_test) # No more multi-org projects
        self.org_sup.projects.add(self.proj_test)
        self.proj_prod = self.make_project('prod', 'production branch',
                                           self.user_sue, TEST_PLAYBOOK)
        #self.org_eng.projects.add(self.proj_prod) # No more multi-org projects
        #self.org_sup.projects.add(self.proj_prod) # No more multi-org projects
        self.org_ops.projects.add(self.proj_prod)

        # Operations also has 2 additional projects specific to the east/west
        # production environments.
        self.proj_prod_east = self.make_project('prod-east',
                                                'east production branch',
                                                self.user_sue, TEST_PLAYBOOK)
        self.org_ops.projects.add(self.proj_prod_east)
        self.proj_prod_west = self.make_project('prod-west',
                                                'west production branch',
                                                self.user_sue, TEST_PLAYBOOK)
        self.org_ops.projects.add(self.proj_prod_west)

        # The engineering organization has a set of servers to use for
        # development and testing (2 bakers, 1 slicer, 1 packager).
        self.inv_eng = self._create_inventory(
            name='engineering environment',
            organization=self.org_eng,
            created_by=self.user_sue,
            groups_hosts_dict={
                'bakers': ['eng-baker1', 'eng-baker2'],
                'slicers': ['eng-slicer1'],
                'packagers': ['eng-packager1'],
            },
        )

        # The support organization has a set of servers to use for
        # testing and reproducing problems from operations (1 baker, 1 slicer,
        # 1 packager).
        self.inv_sup = self._create_inventory(
            name='support environment',
            organization=self.org_sup,
            created_by=self.user_sue,
            groups_hosts_dict={
                'bakers': ['sup-baker1'],
                'slicers': ['sup-slicer1'],
                'packagers': ['sup-packager1'],
            },
        )

        # The operations organization manages multiple sets of servers for the
        # east and west production facilities.
        self.inv_ops_east = self._create_inventory(
            name='east production environment',
            organization=self.org_ops,
            created_by=self.user_sue,
            groups_hosts_dict={
                'bakers': ['east-baker%d' % n for n in range(1, 4)],
                'slicers': ['east-slicer%d' % n for n in range(1, 3)],
                'packagers': ['east-packager%d' % n for n in range(1, 3)],
            },
        )
        self.inv_ops_west = self._create_inventory(
            name='west production environment',
            organization=self.org_ops,
            created_by=self.user_sue,
            groups_hosts_dict={
                'bakers': ['west-baker%d' % n for n in range(1, 6)],
                'slicers': ['west-slicer%d' % n for n in range(1, 4)],
                'packagers': ['west-packager%d' % n for n in range(1, 3)],
            },
        )

        # Operations is divided into teams to work on the east/west servers.
        # Greg and Holly work on east, Greg and Iris work on west.
        self.team_ops_east = self.org_ops.teams.create(
            name='easterners',
            created_by=self.user_sue)
        self.team_ops_east.member_role.children.add(self.proj_prod.admin_role)
        self.team_ops_east.member_role.children.add(self.proj_prod_east.admin_role)
        self.team_ops_east.member_role.members.add(self.user_greg)
        self.team_ops_east.member_role.members.add(self.user_holly)
        self.team_ops_west = self.org_ops.teams.create(
            name='westerners',
            created_by=self.user_sue)
        self.team_ops_west.member_role.children.add(self.proj_prod.admin_role)
        self.team_ops_west.member_role.children.add(self.proj_prod_west.admin_role)
        self.team_ops_west.member_role.members.add(self.user_greg)
        self.team_ops_west.member_role.members.add(self.user_iris)

        # The south team is no longer active, having been folded into the east team.
        # FIXME: This code can be removed (probably)
        # - this case has been removed as we've gotten rid of the active flag, keeping
        #   code around in case this has ramifications on some test failures.. if
        #   you find this message and all tests are passing, then feel free to remove this
        # - anoek 2016-03-10
        #self.team_ops_south = self.org_ops.teams.create(
        #    name='southerners',
        #    created_by=self.user_sue,
        #    active=False,
        #)
        #self.team_ops_south.member_role.children.add(self.proj_prod.admin_role)
        #self.team_ops_south.member_role.members.add(self.user_greg)

        # The north team is going to be deleted.
        self.team_ops_north = self.org_ops.teams.create(
            name='northerners',
            created_by=self.user_sue,
        )
        self.team_ops_north.member_role.children.add(self.proj_prod.admin_role)
        self.team_ops_north.member_role.members.add(self.user_greg)

        # The testers team are interns that can only check playbooks but can't
        # run them.
        self.team_ops_testers = self.org_ops.teams.create(
            name='testers',
            created_by=self.user_sue,
        )
        self.team_ops_testers.member_role.children.add(self.proj_prod.admin_role)
        self.team_ops_testers.member_role.members.add(self.user_randall)
        self.team_ops_testers.member_role.members.add(self.user_billybob)

        # Each user has his/her own set of credentials.
        from awx.main.tests.data.ssh import (TEST_SSH_KEY_DATA,
                                             TEST_SSH_KEY_DATA_LOCKED,
                                             TEST_SSH_KEY_DATA_UNLOCK)
        self.cred_sue = Credential.objects.create(
            username='sue',
            password=TEST_SSH_KEY_DATA,
            created_by=self.user_sue,
        )
        self.cred_sue.admin_role.members.add(self.user_sue)

        self.cred_sue_ask = Credential.objects.create(
            username='sue',
            password='ASK',
            created_by=self.user_sue,
        )
        self.cred_sue_ask.admin_role.members.add(self.user_sue)

        self.cred_sue_ask_many = Credential.objects.create(
            username='sue',
            password='ASK',
            become_method='sudo',
            become_username='root',
            become_password='ASK',
            ssh_key_data=TEST_SSH_KEY_DATA_LOCKED,
            ssh_key_unlock='ASK',
            created_by=self.user_sue,
        )
        self.cred_sue_ask_many.admin_role.members.add(self.user_sue)

        self.cred_bob = Credential.objects.create(
            username='bob',
            password='ASK',
            created_by=self.user_sue,
        )
        self.cred_bob.use_role.members.add(self.user_bob)

        self.cred_chuck = Credential.objects.create(
            username='chuck',
            ssh_key_data=TEST_SSH_KEY_DATA,
            created_by=self.user_sue,
        )
        self.cred_chuck.use_role.members.add(self.user_chuck)

        self.cred_doug = Credential.objects.create(
            username='doug',
            password='doug doesn\'t mind his password being saved. this '
                     'is why we don\'t let doug actually run jobs.',
            created_by=self.user_sue,
        )
        self.cred_doug.use_role.members.add(self.user_doug)

        self.cred_eve = Credential.objects.create(
            username='eve',
            password='ASK',
            become_method='sudo',
            become_username='root',
            become_password='ASK',
            created_by=self.user_sue,
        )
        self.cred_eve.use_role.members.add(self.user_eve)

        self.cred_frank = Credential.objects.create(
            username='frank',
            password='fr@nk the t@nk',
            created_by=self.user_sue,
        )
        self.cred_frank.use_role.members.add(self.user_frank)

        self.cred_greg = Credential.objects.create(
            username='greg',
            ssh_key_data=TEST_SSH_KEY_DATA_LOCKED,
            ssh_key_unlock='ASK',
            created_by=self.user_sue,
        )
        self.cred_greg.use_role.members.add(self.user_greg)

        self.cred_holly = Credential.objects.create(
            username='holly',
            password='holly rocks',
            created_by=self.user_sue,
        )
        self.cred_holly.use_role.members.add(self.user_holly)

        self.cred_iris = Credential.objects.create(
            username='iris',
            password='ASK',
            created_by=self.user_sue,
        )
        self.cred_iris.use_role.members.add(self.user_iris)

        # Each operations team also has shared credentials they can use.
        self.cred_ops_east = Credential.objects.create(
            username='east',
            ssh_key_data=TEST_SSH_KEY_DATA_LOCKED,
            ssh_key_unlock=TEST_SSH_KEY_DATA_UNLOCK,
            created_by=self.user_sue,
        )
        self.team_ops_east.member_role.children.add(self.cred_ops_east.use_role)

        self.cred_ops_west = Credential.objects.create(
            username='west',
            password='Heading270',
            created_by=self.user_sue,
        )
        self.team_ops_west.member_role.children.add(self.cred_ops_west.use_role)

        # FIXME: This code can be removed (probably)
        # - this case has been removed as we've gotten rid of the active flag, keeping
        #   code around in case this has ramifications on some test failures.. if
        #   you find this message and all tests are passing, then feel free to remove this
        # - anoek 2016-03-10
        #self.cred_ops_south = self.team_ops_south.credentials.create(
        #    username='south',
        #    password='Heading180',
        #    created_by=self.user_sue,
        #)

        self.cred_ops_north = Credential.objects.create(
            username='north',
            password='Heading0',
            created_by=self.user_sue,
        )
        self.team_ops_north.member_role.children.add(self.cred_ops_north.admin_role)

        self.cred_ops_test = Credential.objects.create(
            username='testers',
            password='HeadingNone',
            created_by=self.user_sue,
        )
        self.team_ops_testers.member_role.children.add(self.cred_ops_test.use_role)

        # Engineering has job templates to check/run the dev project onto
        # their own inventory.
        self.jt_eng_check = JobTemplate.objects.create(
            name='eng-dev-check',
            job_type='check',
            inventory=self.inv_eng,
            project=self.proj_dev,
            playbook=self.proj_dev.playbooks[0],
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
        )
        # self.job_eng_check = self.jt_eng_check.create_job(
        #     created_by=self.user_sue,
        #     credential=self.cred_doug,
        # )
        self.jt_eng_run = JobTemplate.objects.create(
            name='eng-dev-run',
            job_type='run',
            inventory=self.inv_eng,
            project=self.proj_dev,
            playbook=self.proj_dev.playbooks[0],
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
            ask_credential_on_launch=True,
        )
        # self.job_eng_run = self.jt_eng_run.create_job(
        #     created_by=self.user_sue,
        #     credential=self.cred_chuck,
        # )

        # Support has job templates to check/run the test project onto
        # their own inventory.
        self.jt_sup_check = JobTemplate.objects.create(
            name='sup-test-check',
            job_type='check',
            inventory=self.inv_sup,
            project=self.proj_test,
            playbook=self.proj_test.playbooks[0],
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
        )
        # self.job_sup_check = self.jt_sup_check.create_job(
        #     created_by=self.user_sue,
        #     credential=self.cred_frank,
        # )
        self.jt_sup_run = JobTemplate.objects.create(
            name='sup-test-run',
            job_type='run',
            inventory=self.inv_sup,
            project=self.proj_test,
            playbook=self.proj_test.playbooks[0],
            host_config_key=uuid.uuid4().hex,
            credential=self.cred_eve,
            created_by=self.user_sue,
        )
        # self.job_sup_run = self.jt_sup_run.create_job(
        #     created_by=self.user_sue,
        # )

        # Operations has job templates to check/run the prod project onto
        # both east and west inventories, by default using the team credential.
        self.jt_ops_east_check = JobTemplate.objects.create(
            name='ops-east-prod-check',
            job_type='check',
            inventory=self.inv_ops_east,
            project=self.proj_prod,
            playbook=self.proj_prod.playbooks[0],
            credential=self.cred_ops_east,
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
        )
        # self.job_ops_east_check = self.jt_ops_east_check.create_job(
        #     created_by=self.user_sue,
        # )
        self.jt_ops_east_run = JobTemplate.objects.create(
            name='ops-east-prod-run',
            job_type='run',
            inventory=self.inv_ops_east,
            project=self.proj_prod,
            playbook=self.proj_prod.playbooks[0],
            credential=self.cred_ops_east,
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
        )
        self.jt_ops_east_run_prod_east = JobTemplate.objects.create(
            name='ops-east-prod-run-on-prod-east',
            job_type='run',
            inventory=self.inv_ops_east,
            project=self.proj_prod_east,
            playbook=self.proj_prod_east.playbooks[0],
            credential=self.cred_ops_east,
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
        )
        # self.job_ops_east_run = self.jt_ops_east_run.create_job(
        #     created_by=self.user_sue,
        # )
        self.jt_ops_west_check = JobTemplate.objects.create(
            name='ops-west-prod-check',
            job_type='check',
            inventory=self.inv_ops_west,
            project=self.proj_prod,
            playbook=self.proj_prod.playbooks[0],
            credential=self.cred_ops_west,
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
        )
        self.jt_ops_west_check_test_team = JobTemplate.objects.create(
            name='ops-west-prod-check-testers',
            job_type='check',
            inventory=self.inv_ops_west,
            project=self.proj_prod,
            playbook=self.proj_prod.playbooks[0],
            credential=self.cred_ops_test,
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
        )
        # self.job_ops_west_check = self.jt_ops_west_check.create_job(
        #     created_by=self.user_sue,
        # )
        self.jt_ops_west_run = JobTemplate.objects.create(
            name='ops-west-prod-run',
            job_type='run',
            inventory=self.inv_ops_west,
            project=self.proj_prod,
            playbook=self.proj_prod.playbooks[0],
            credential=self.cred_ops_west,
            host_config_key=uuid.uuid4().hex,
            created_by=self.user_sue,
        )
        # self.job_ops_west_run = self.jt_ops_west_run.create_job(
        #     created_by=self.user_sue,
        # )

    def setUp(self):
        super(BaseJobTestMixin, self).setUp()
        self.start_rabbit()
        self.setup_instances()
        self.populate()
        self.start_queue()

    def tearDown(self):
        super(BaseJobTestMixin, self).tearDown()
        self.stop_rabbit()
        self.terminate_queue()
@ -0,0 +1,31 @@
from awx.api.serializers import ActivityStreamSerializer
from awx.main.registrar import activity_stream_registrar
from awx.main.models import ActivityStream

from awx.conf.models import Setting


def test_activity_stream_related():
    '''
    If this test fails with content in `missing_models`, it means that a
    model has been connected to the activity stream, but the model has not
    been added to the activity stream serializer.

    How to fix this:
    Ideally, all models should be in awx.api.serializers.SUMMARIZABLE_FK_FIELDS.

    If, for whatever reason, the missing model should not generally be
    summarized from related resources, then a special case can be carved out in
    ActivityStreamSerializer._local_summarizable_fk_fields.
    '''
    serializer_related = set(
        ActivityStream._meta.get_field(field_name).related_model for field_name, stuff in
        ActivityStreamSerializer()._local_summarizable_fk_fields
        if hasattr(ActivityStream, field_name)
    )

    models = set(activity_stream_registrar.models)
    models.remove(Setting)

    missing_models = models - serializer_related
    assert not missing_models

awx/main/tests/unit/api/serializers/test_token_serializer.py (new file)
@ -0,0 +1,14 @@
import pytest

from awx.api.serializers import OAuth2TokenSerializer


@pytest.mark.parametrize('scope, expect', [
    ('', False),
    ('read', True),
    ('read read', False),
    ('write read', True),
    ('read rainbow', False)
])
def test_invalid_scopes(scope, expect):
    assert OAuth2TokenSerializer()._is_valid_scope(scope) is expect
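# Inferred from the table above: a valid scope string is a space-separated,
# duplicate-free combination drawn from 'read' and 'write'; anything else
# (empty string, repeats, unknown words) is rejected.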
@ -1,7 +1,4 @@
import mock
from mock import PropertyMock

import pytest

from rest_framework.test import APIRequestFactory
from rest_framework.test import force_authenticate
@ -9,8 +6,6 @@ from rest_framework.test import force_authenticate
from django.contrib.contenttypes.models import ContentType

from awx.api.views import (
    RoleUsersList,
    UserRolesList,
    TeamRolesList,
)
@ -20,69 +15,6 @@ from awx.main.models import (
)


@pytest.mark.parametrize("pk, err", [
    (111, "not change the membership"),
    (1, "may not perform"),
])
def test_user_roles_list_user_admin_role(pk, err):
    with mock.patch('awx.api.views.get_object_or_400') as role_get, \
            mock.patch('awx.api.views.ContentType.objects.get_for_model') as ct_get:

        role_mock = mock.MagicMock(spec=Role, id=1, pk=1)
        content_type_mock = mock.MagicMock(spec=ContentType)
        role_mock.content_type = content_type_mock
        role_get.return_value = role_mock
        ct_get.return_value = content_type_mock

        with mock.patch('awx.api.views.User.admin_role', new_callable=PropertyMock, return_value=role_mock):
            factory = APIRequestFactory()
            view = UserRolesList.as_view()

            user = User(username="root", is_superuser=True, pk=1, id=1)

            request = factory.post("/user/1/roles", {'id': pk}, format="json")
            force_authenticate(request, user)

            response = view(request, pk=user.pk)
            response.render()

            assert response.status_code == 403
            assert err in response.content


@pytest.mark.parametrize("admin_role, err", [
    (True, "may not perform"),
    (False, "not change the membership"),
])
def test_role_users_list_other_user_admin_role(admin_role, err):
    with mock.patch('awx.api.views.RoleUsersList.get_parent_object') as role_get, \
            mock.patch('awx.api.views.ContentType.objects.get_for_model') as ct_get:

        role_mock = mock.MagicMock(spec=Role, id=1)
        content_type_mock = mock.MagicMock(spec=ContentType)
        role_mock.content_type = content_type_mock
        role_get.return_value = role_mock
        ct_get.return_value = content_type_mock

        user_admin_role = role_mock if admin_role else None
        with mock.patch('awx.api.views.User.admin_role', new_callable=PropertyMock, return_value=user_admin_role):
            factory = APIRequestFactory()
            view = RoleUsersList.as_view()

            user = User(username="root", is_superuser=True, pk=1, id=1)
            queried_user = User(username="maynard")

            request = factory.post("/role/1/users", {'id': 1}, format="json")
            force_authenticate(request, user)

            with mock.patch('awx.api.views.get_object_or_400', return_value=queried_user):
                response = view(request)
                response.render()

            assert response.status_code == 403
            assert err in response.content


def test_team_roles_list_post_org_roles():
    with mock.patch('awx.api.views.get_object_or_400') as role_get, \
            mock.patch('awx.api.views.ContentType.objects.get_for_model') as ct_get:

awx/main/tests/unit/models/test_credential.py (new file)
@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-

from awx.main.models import Credential, CredentialType


def test_unique_hash_with_unicode():
    ct = CredentialType(name=u'Väult', kind='vault')
    cred = Credential(
        id=4,
        name=u'Iñtërnâtiônàlizætiøn',
        credential_type=ct,
        inputs={
            u'vault_id': u'🐉🐉🐉'
        },
        credential_type_id=42
    )
    assert cred.unique_hash(display=True) == u'Väult (id=🐉🐉🐉)'
@ -1,5 +1,6 @@
import tempfile
import json
import yaml

import pytest
from awx.main.utils.encryption import encrypt_value
@ -10,6 +11,7 @@ from awx.main.models import (
    JobLaunchConfig,
    WorkflowJobTemplate
)
from awx.main.utils.safe_yaml import SafeLoader

ENCRYPTED_SECRET = encrypt_value('secret')

@ -122,7 +124,7 @@ def test_job_safe_args_redacted_passwords(job):
    safe_args = run_job.build_safe_args(job, **kwargs)
    ev_index = safe_args.index('-e') + 1
    extra_var_file = open(safe_args[ev_index][1:], 'r')
    extra_vars = json.load(extra_var_file)
    extra_vars = yaml.load(extra_var_file, SafeLoader)
    extra_var_file.close()
    assert extra_vars['secret_key'] == '$encrypted$'

@ -133,7 +135,7 @@ def test_job_args_unredacted_passwords(job, tmpdir_factory):
    args = run_job.build_args(job, **kwargs)
    ev_index = args.index('-e') + 1
    extra_var_file = open(args[ev_index][1:], 'r')
    extra_vars = json.load(extra_var_file)
    extra_vars = yaml.load(extra_var_file, SafeLoader)
    extra_var_file.close()
    assert extra_vars['secret_key'] == 'my_password'

awx/main/tests/unit/test_fields.py (new file)
@ -0,0 +1,176 @@
import pytest

from django.core.exceptions import ValidationError
from rest_framework.serializers import ValidationError as DRFValidationError

from awx.main.models import Credential, CredentialType, BaseModel
from awx.main.fields import JSONSchemaField


@pytest.mark.parametrize('schema, given, message', [
    (
        {  # imitates what the CredentialType injectors field is
            "additionalProperties": False,
            "type": "object",
            "properties": {
                "extra_vars": {
                    "additionalProperties": False,
                    "type": "object"
                }
            }
        },
        {'extra_vars': ['duck', 'horse']},
        "list provided in relative path ['extra_vars'], expected dict"
    ),
    (
        {  # imitates what the CredentialType injectors field is
            "additionalProperties": False,
            "type": "object",
        },
        ['duck', 'horse'],
        "list provided, expected dict"
    ),
])
def test_custom_error_messages(schema, given, message):
    instance = BaseModel()

    class MockFieldSubclass(JSONSchemaField):
        def schema(self, model_instance):
            return schema

    field = MockFieldSubclass()

    with pytest.raises(ValidationError) as exc:
        field.validate(given, instance)

    assert message == exc.value.error_list[0].message


@pytest.mark.parametrize('input_, valid', [
    ({}, True),
    ({'fields': []}, True),
    ({'fields': {}}, False),
    ({'fields': 123}, False),
    ({'fields': [{'id': 'username', 'label': 'Username', 'foo': 'bar'}]}, False),
    ({'fields': [{'id': 'username', 'label': 'Username'}]}, True),
    ({'fields': [{'id': 'username', 'label': 'Username', 'type': 'string'}]}, True),
    ({'fields': [{'id': 'username', 'label': 'Username', 'help_text': 1}]}, False),
    ({'fields': [{'id': 'username', 'label': 'Username', 'help_text': 'Help Text'}]}, True),  # noqa
    ({'fields': [{'id': 'username', 'label': 'Username'}, {'id': 'username', 'label': 'Username 2'}]}, False),  # noqa
    ({'fields': [{'id': '$invalid$', 'label': 'Invalid', 'type': 'string'}]}, False),  # noqa
    ({'fields': [{'id': 'password', 'label': 'Password', 'type': 'invalid-type'}]}, False),
    ({'fields': [{'id': 'ssh_key', 'label': 'SSH Key', 'type': 'string', 'format': 'ssh_private_key'}]}, True),  # noqa
    ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean'}]}, True),
    ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean', 'choices': ['a', 'b']}]}, False),
    ({'fields': [{'id': 'flag', 'label': 'Some Flag', 'type': 'boolean', 'secret': True}]}, False),
    ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': True}]}, True),
    ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': True, 'type': 'boolean'}]}, False),  # noqa
    ({'fields': [{'id': 'certificate', 'label': 'Cert', 'multiline': 'bad'}]}, False),  # noqa
    ({'fields': [{'id': 'token', 'label': 'Token', 'secret': True}]}, True),
    ({'fields': [{'id': 'token', 'label': 'Token', 'secret': 'bad'}]}, False),
    ({'fields': [{'id': 'token', 'label': 'Token', 'ask_at_runtime': True}]}, True),
    ({'fields': [{'id': 'token', 'label': 'Token', 'ask_at_runtime': 'bad'}]}, False),  # noqa
    ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': 'not-a-list'}]}, False),  # noqa
    ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': []}]}, False),
    ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': ['su', 'sudo']}]}, True),  # noqa
    ({'fields': [{'id': 'become_method', 'label': 'Become', 'choices': ['dup', 'dup']}]}, False),  # noqa
    ({'fields': [{'id': 'tower', 'label': 'Reserved!'}]}, False),  # noqa
])
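# Reading the table above: every field needs an 'id' (a valid, non-reserved
# identifier, unique within the list) and a 'label'; 'type' is limited to
# 'string' and 'boolean'; boolean fields cannot be 'secret' or carry 'choices';
# 'choices' must be a non-empty, duplicate-free list; and flags like 'secret',
# 'multiline', and 'ask_at_runtime' must be actual booleans.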
def test_cred_type_input_schema_validity(input_, valid):
    type_ = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed_by_tower=True,
        inputs=input_
    )
    field = CredentialType._meta.get_field('inputs')
    if valid is False:
        with pytest.raises(ValidationError):
            field.clean(input_, type_)
    else:
        field.clean(input_, type_)


@pytest.mark.parametrize('injectors, valid', [
    ({}, True),
    ({'invalid-injector': {}}, False),
    ({'file': 123}, False),
    ({'file': {}}, True),
    ({'file': {'template': '{{username}}'}}, True),
    ({'file': {'template.username': '{{username}}'}}, True),
    ({'file': {'template.username': '{{username}}', 'template.password': '{{pass}}'}}, True),
    ({'file': {'template': '{{username}}', 'template.password': '{{pass}}'}}, False),
    ({'file': {'foo': 'bar'}}, False),
    ({'env': 123}, False),
    ({'env': {}}, True),
    ({'env': {'AWX_SECRET': '{{awx_secret}}'}}, True),
    ({'env': {'AWX_SECRET_99': '{{awx_secret}}'}}, True),
    ({'env': {'99': '{{awx_secret}}'}}, False),
    ({'env': {'AWX_SECRET=': '{{awx_secret}}'}}, False),
    ({'extra_vars': 123}, False),
    ({'extra_vars': {}}, True),
    ({'extra_vars': {'hostname': '{{host}}'}}, True),
    ({'extra_vars': {'hostname_99': '{{host}}'}}, True),
    ({'extra_vars': {'99': '{{host}}'}}, False),
    ({'extra_vars': {'99=': '{{host}}'}}, False),
])
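# The pattern in this table: only the 'file', 'env', and 'extra_vars' injector
# kinds are accepted, each mapping to a dict; env and extra_vars keys must be
# valid identifiers (no leading digits, no '='); and a 'file' injector may use
# either a single 'template' key or namespaced 'template.NAME' keys, not both.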
def test_cred_type_injectors_schema(injectors, valid):
    type_ = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed_by_tower=True,
        inputs={
            'fields': [
                {'id': 'username', 'type': 'string', 'label': '_'},
                {'id': 'pass', 'type': 'string', 'label': '_'},
                {'id': 'awx_secret', 'type': 'string', 'label': '_'},
                {'id': 'host', 'type': 'string', 'label': '_'},
            ]
        },
        injectors=injectors
    )
    field = CredentialType._meta.get_field('injectors')
    if valid is False:
        with pytest.raises(ValidationError):
            field.clean(injectors, type_)
    else:
        field.clean(injectors, type_)


@pytest.mark.parametrize('inputs', [
    ['must-be-a-dict'],
    {'user': 'wrong-key'},
    {'username': 1},
    {'username': 1.5},
    {'username': ['a', 'b', 'c']},
    {'username': {'a': 'b'}},
    {'flag': 1},
    {'flag': 1.5},
    {'flag': ['a', 'b', 'c']},
    {'flag': {'a': 'b'}},
    {'flag': 'some-string'},
])
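# Every case above should be rejected: inputs must be a dict whose keys match
# the defined field ids, string fields only accept strings, and boolean fields
# only accept real booleans.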
def test_credential_creation_validation_failure(inputs):
    type_ = CredentialType(
        kind='cloud',
        name='SomeCloud',
        managed_by_tower=True,
        inputs={
            'fields': [{
                'id': 'username',
                'label': 'Username for SomeCloud',
                'type': 'string'
            }, {
                'id': 'flag',
                'label': 'Some Boolean Flag',
                'type': 'boolean'
            }]
        }
    )
    cred = Credential(credential_type=type_, name="Bob's Credential",
                      inputs=inputs)
    field = cred._meta.get_field('inputs')

    with pytest.raises(Exception) as e:
        field.validate(inputs, cred)
    assert e.type in (ValidationError, DRFValidationError)
@ -1,4 +1,5 @@
import textwrap
import pytest

# AWX
from awx.main.redact import UriCleaner
@ -78,60 +79,76 @@ TEST_CLEARTEXT.append({
})


@pytest.mark.parametrize('username, password, not_uri, expected', [
    ('', '', 'www.famfamfam.com](http://www.famfamfam.com/fijdlfd', 'www.famfamfam.com](http://www.famfamfam.com/fijdlfd'),
    ('', '', 'https://www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
    ('root', 'gigity', 'https://root@gigity@www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
    ('root', 'gigity@', 'https://root:gigity@@@www.famfamfam.com](http://www.famfamfam.com/fijdlfd', '$encrypted$'),
])
# should redact sensitive usernames and passwords
def test_uri_scm_simple_redacted():
    for uri in TEST_URIS:
        redacted_str = UriCleaner.remove_sensitive(str(uri))
        if uri.username:
            assert uri.username not in redacted_str
        if uri.password:
            assert uri.username not in redacted_str
def test_non_uri_redact(username, password, not_uri, expected):
    redacted_str = UriCleaner.remove_sensitive(not_uri)
    if username:
        assert username not in redacted_str
    if password:
        assert password not in redacted_str

    assert redacted_str == expected


def test_multiple_non_uri_redact():
    non_uri = 'https://www.famfamfam.com](http://www.famfamfam.com/fijdlfd hi '
    non_uri += 'https://www.famfamfam.com](http://www.famfamfam.com/fijdlfd world '
    non_uri += 'https://www.famfamfam.com](http://www.famfamfam.com/fijdlfd foo '
    non_uri += 'https://foo:bar@giggity.com bar'
    redacted_str = UriCleaner.remove_sensitive(non_uri)
    assert redacted_str == '$encrypted$ hi $encrypted$ world $encrypted$ foo https://$encrypted$:$encrypted$@giggity.com bar'
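
# Usage note (inferred from these tests): UriCleaner.remove_sensitive() takes
# an arbitrary block of text and returns it with credentials in any embedded
# URLs replaced by UriCleaner.REPLACE_STR ('$encrypted$' here); hosts are
# preserved.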
|
||||
|
||||
|
||||
# should replace secret data with safe string, UriCleaner.REPLACE_STR
|
||||
def test_uri_scm_simple_replaced():
|
||||
for uri in TEST_URIS:
|
||||
redacted_str = UriCleaner.remove_sensitive(str(uri))
|
||||
assert redacted_str.count(UriCleaner.REPLACE_STR) == uri.get_secret_count()
|
||||
@pytest.mark.parametrize('uri', TEST_URIS)
|
||||
def test_uri_scm_simple_replaced(uri):
|
||||
redacted_str = UriCleaner.remove_sensitive(str(uri))
|
||||
assert redacted_str.count(UriCleaner.REPLACE_STR) == uri.get_secret_count()
|
||||
|
||||
|
||||
# should redact multiple uris in text
|
||||
def test_uri_scm_multiple():
|
||||
@pytest.mark.parametrize('uri', TEST_URIS)
|
||||
def test_uri_scm_multiple(uri):
|
||||
cleartext = ''
|
||||
for uri in TEST_URIS:
|
||||
cleartext += str(uri) + ' '
|
||||
for uri in TEST_URIS:
|
||||
cleartext += str(uri) + '\n'
|
||||
cleartext += str(uri) + ' '
|
||||
cleartext += str(uri) + '\n'
|
||||
|
||||
redacted_str = UriCleaner.remove_sensitive(str(uri))
|
||||
if uri.username:
|
||||
assert uri.username not in redacted_str
|
||||
if uri.password:
|
||||
assert uri.username not in redacted_str
|
||||
assert uri.password not in redacted_str
|
||||
|
||||
|
||||
# should replace multiple secret data with safe string
def test_uri_scm_multiple_replaced():
@pytest.mark.parametrize('uri', TEST_URIS)
def test_uri_scm_multiple_replaced(uri):
    cleartext = ''
    find_count = 0
    for uri in TEST_URIS:
        cleartext += str(uri) + ' '
        find_count += uri.get_secret_count()

    for uri in TEST_URIS:
        cleartext += str(uri) + '\n'
        find_count += uri.get_secret_count()
    cleartext += str(uri) + ' '
    find_count += uri.get_secret_count()

    cleartext += str(uri) + '\n'
    find_count += uri.get_secret_count()

    redacted_str = UriCleaner.remove_sensitive(cleartext)
    assert redacted_str.count(UriCleaner.REPLACE_STR) == find_count

# should redact and replace multiple secret data within a complex cleartext blob
def test_uri_scm_cleartext_redact_and_replace():
    for test_data in TEST_CLEARTEXT:
        uri = test_data['uri']
        redacted_str = UriCleaner.remove_sensitive(test_data['text'])
        assert uri.username not in redacted_str
        assert uri.password not in redacted_str
        # Ensure the host didn't get redacted
        assert redacted_str.count(uri.host) == test_data['host_occurrences']
@pytest.mark.parametrize('test_data', TEST_CLEARTEXT)
def test_uri_scm_cleartext_redact_and_replace(test_data):
    uri = test_data['uri']
    redacted_str = UriCleaner.remove_sensitive(test_data['text'])
    assert uri.username not in redacted_str
    assert uri.password not in redacted_str
    # Ensure the host didn't get redacted
    assert redacted_str.count(uri.host) == test_data['host_occurrences']
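
For orientation, a minimal sketch of the behavior these tests pin down (the URL is an illustrative value; per the assertions above, UriCleaner.REPLACE_STR is the string '$encrypted$'):

    from awx.main.redact import UriCleaner

    # credentials embedded in a URL are replaced, the host survives
    redacted = UriCleaner.remove_sensitive('https://alice:hunter2@example.com')
    # expected, by analogy with the giggity.com case above:
    # 'https://$encrypted$:$encrypted$@example.com'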

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-

from contextlib import contextmanager
from datetime import datetime
from functools import partial
@ -12,7 +14,9 @@ from backports.tempfile import TemporaryDirectory
import fcntl
import mock
import pytest
import six
import yaml

from django.conf import settings

@ -24,6 +28,7 @@ from awx.main.models import (
    InventorySource,
    InventoryUpdate,
    Job,
    JobTemplate,
    Notification,
    Project,
    ProjectUpdate,
@ -36,7 +41,7 @@ from awx.main.models import (
from awx.main import tasks
from awx.main.queue import CallbackQueueDispatcher
from awx.main.utils import encrypt_field, encrypt_value, OutputEventFilter
from awx.main.utils.safe_yaml import SafeLoader


@contextmanager
@ -187,7 +192,7 @@ def parse_extra_vars(args):
    for chunk in args:
        if chunk.startswith('@/tmp/'):
            with open(chunk.strip('@'), 'r') as f:
                extra_vars.update(json.load(f))
                extra_vars.update(yaml.load(f, SafeLoader))
    return extra_vars
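
(Context for the json.load to yaml.load change above: generated extra-vars files can now carry `!unsafe` tags, which SafeLoader understands but JSON cannot parse. A hedged sketch with a made-up variable name:)

    import yaml
    from awx.main.utils.safe_yaml import SafeLoader

    content = "!unsafe 'msg': !unsafe 'hello'\n"
    # json.loads(content) would raise a ValueError; SafeLoader parses it:
    extra_vars = yaml.load(content, SafeLoader)
    assert extra_vars == {'msg': 'hello'}
    assert getattr(extra_vars['msg'], '__UNSAFE__', False)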

@ -218,7 +223,7 @@ class TestJobExecution:
        self.run_pexpect.return_value = ['successful', 0]

        self.patches = [
            mock.patch.object(CallbackQueueDispatcher, 'dispatch', lambda obj: None),
            mock.patch.object(CallbackQueueDispatcher, 'dispatch', lambda self, obj: None),
            mock.patch.object(Project, 'get_project_path', lambda *a, **kw: self.project_path),
            # don't emit websocket statuses; they use the DB and complicate testing
            mock.patch.object(UnifiedJob, 'websocket_emit_status', mock.Mock()),
@ -267,7 +272,8 @@ class TestJobExecution:
            cancel_flag=False,
            project=Project(),
            playbook='helloworld.yml',
            verbosity=3
            verbosity=3,
            job_template=JobTemplate(extra_vars='')
        )

        # mock the job.credentials M2M relation so we can avoid DB access
@ -293,8 +299,142 @@ class TestJobExecution:
        return self.instance.pk

class TestExtraVarSanitation(TestJobExecution):
    # By default, extra vars are marked as `!unsafe` in the generated yaml
    # _unless_ they've been specified on the JobTemplate's extra_vars (which
    # are deemed trustable, because they can only be added by users w/ enough
    # privilege to add/modify a Job Template)

    UNSAFE = '{{ lookup(''pipe'',''ls -la'') }}'

    def test_vars_unsafe_by_default(self):
        self.instance.created_by = User(pk=123, username='angry-spud')

        def run_pexpect_side_effect(*args, **kwargs):
            args, cwd, env, stdout = args
            extra_vars = parse_extra_vars(args)

            # ensure that strings are marked as unsafe
            for unsafe in ['awx_job_template_name', 'tower_job_template_name',
                           'awx_user_name', 'tower_job_launch_type',
                           'awx_project_revision',
                           'tower_project_revision', 'tower_user_name',
                           'awx_job_launch_type']:
                assert hasattr(extra_vars[unsafe], '__UNSAFE__')

            # ensure that non-strings are marked as safe
            for safe in ['awx_job_template_id', 'awx_job_id', 'awx_user_id',
                         'tower_user_id', 'tower_job_template_id',
                         'tower_job_id']:
                assert not hasattr(extra_vars[safe], '__UNSAFE__')
            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

    def test_launchtime_vars_unsafe(self):
        self.instance.extra_vars = json.dumps({'msg': self.UNSAFE})

        def run_pexpect_side_effect(*args, **kwargs):
            args, cwd, env, stdout = args
            extra_vars = parse_extra_vars(args)
            assert extra_vars['msg'] == self.UNSAFE
            assert hasattr(extra_vars['msg'], '__UNSAFE__')
            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

    def test_nested_launchtime_vars_unsafe(self):
        self.instance.extra_vars = json.dumps({'msg': {'a': [self.UNSAFE]}})

        def run_pexpect_side_effect(*args, **kwargs):
            args, cwd, env, stdout = args
            extra_vars = parse_extra_vars(args)
            assert extra_vars['msg'] == {'a': [self.UNSAFE]}
            assert hasattr(extra_vars['msg']['a'][0], '__UNSAFE__')
            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

    def test_whitelisted_jt_extra_vars(self):
        self.instance.job_template.extra_vars = self.instance.extra_vars = json.dumps({'msg': self.UNSAFE})

        def run_pexpect_side_effect(*args, **kwargs):
            args, cwd, env, stdout = args
            extra_vars = parse_extra_vars(args)
            assert extra_vars['msg'] == self.UNSAFE
            assert not hasattr(extra_vars['msg'], '__UNSAFE__')
            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

    def test_nested_whitelisted_vars(self):
        self.instance.extra_vars = json.dumps({'msg': {'a': {'b': [self.UNSAFE]}}})
        self.instance.job_template.extra_vars = self.instance.extra_vars

        def run_pexpect_side_effect(*args, **kwargs):
            args, cwd, env, stdout = args
            extra_vars = parse_extra_vars(args)
            assert extra_vars['msg'] == {'a': {'b': [self.UNSAFE]}}
            assert not hasattr(extra_vars['msg']['a']['b'][0], '__UNSAFE__')
            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

    def test_sensitive_values_dont_leak(self):
        # JT defines `msg=SENSITIVE`, the job *should not* be able to do
        # `other_var=SENSITIVE`
        self.instance.job_template.extra_vars = json.dumps({'msg': self.UNSAFE})
        self.instance.extra_vars = json.dumps({
            'msg': 'other-value',
            'other_var': self.UNSAFE
        })

        def run_pexpect_side_effect(*args, **kwargs):
            args, cwd, env, stdout = args
            extra_vars = parse_extra_vars(args)

            assert extra_vars['msg'] == 'other-value'
            assert hasattr(extra_vars['msg'], '__UNSAFE__')

            assert extra_vars['other_var'] == self.UNSAFE
            assert hasattr(extra_vars['other_var'], '__UNSAFE__')

            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

    def test_overwritten_jt_extra_vars(self):
        self.instance.job_template.extra_vars = json.dumps({'msg': 'SAFE'})
        self.instance.extra_vars = json.dumps({'msg': self.UNSAFE})

        def run_pexpect_side_effect(*args, **kwargs):
            args, cwd, env, stdout = args
            extra_vars = parse_extra_vars(args)
            assert extra_vars['msg'] == self.UNSAFE
            assert hasattr(extra_vars['msg'], '__UNSAFE__')
            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

class TestGenericRun(TestJobExecution):

    def test_generic_failure(self):
        self.task.build_private_data_files = mock.Mock(side_effect=IOError())
        with pytest.raises(Exception):
            self.task.run(self.pk)
        update_model_call = self.task.update_model.call_args[1]
        assert 'IOError' in update_model_call['result_traceback']
        assert update_model_call['status'] == 'error'
        assert update_model_call['emitted_events'] == 0

    def test_cancel_flag(self):
        self.instance.cancel_flag = True
        with pytest.raises(Exception):
@ -460,6 +600,13 @@ class TestAdhocRun(TestJobExecution):
            extra_vars={'awx_foo': 'awx-bar'}
        )

    def test_options_jinja_usage(self):
        self.instance.module_args = '{{ ansible_ssh_pass }}'
        with pytest.raises(Exception):
            self.task.run(self.pk)
        update_model_call = self.task.update_model.call_args[1]
        assert 'Jinja variables are not allowed' in update_model_call['result_traceback']

    def test_created_by_extra_vars(self):
        self.instance.created_by = User(pk=123, username='angry-spud')

@ -571,6 +718,33 @@ class TestJobCredentials(TestJobExecution):
        ]
    }

    def test_username_jinja_usage(self):
        ssh = CredentialType.defaults['ssh']()
        credential = Credential(
            pk=1,
            credential_type=ssh,
            inputs={'username': '{{ ansible_ssh_pass }}'}
        )
        self.instance.credentials.add(credential)
        with pytest.raises(Exception):
            self.task.run(self.pk)
        update_model_call = self.task.update_model.call_args[1]
        assert 'Jinja variables are not allowed' in update_model_call['result_traceback']

    @pytest.mark.parametrize("flag", ['become_username', 'become_method'])
    def test_become_jinja_usage(self, flag):
        ssh = CredentialType.defaults['ssh']()
        credential = Credential(
            pk=1,
            credential_type=ssh,
            inputs={'username': 'joe', flag: '{{ ansible_ssh_pass }}'}
        )
        self.instance.credentials.add(credential)
        with pytest.raises(Exception):
            self.task.run(self.pk)
        update_model_call = self.task.update_model.call_args[1]
        assert 'Jinja variables are not allowed' in update_model_call['result_traceback']

    def test_ssh_passwords(self, field, password_name, expected_flag):
        ssh = CredentialType.defaults['ssh']()
        credential = Credential(
@ -1158,6 +1332,7 @@ class TestJobCredentials(TestJobExecution):
            args, cwd, env, stdout = args
            extra_vars = parse_extra_vars(args)
            assert extra_vars["api_token"] == "ABC123"
            assert hasattr(extra_vars["api_token"], '__UNSAFE__')
            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
@ -1309,6 +1484,33 @@ class TestJobCredentials(TestJobExecution):
        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

    def test_custom_environment_injectors_with_unicode_content(self):
        value = six.u('Iñtërnâtiônàlizætiøn')
        some_cloud = CredentialType(
            kind='cloud',
            name='SomeCloud',
            managed_by_tower=False,
            inputs={'fields': []},
            injectors={
                'file': {'template': value},
                'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}
            }
        )
        credential = Credential(
            pk=1,
            credential_type=some_cloud,
        )
        self.instance.credentials.add(credential)
        self.task.run(self.pk)

        def run_pexpect_side_effect(*args, **kwargs):
            args, cwd, env, stdout = args
            assert open(env['MY_CLOUD_INI_FILE'], 'rb').read() == value.encode('utf-8')
            return ['successful', 0]

        self.run_pexpect.side_effect = run_pexpect_side_effect
        self.task.run(self.pk)

    def test_custom_environment_injectors_with_files(self):
        some_cloud = CredentialType(
            kind='cloud',
@ -44,6 +44,16 @@ def test_parse_yaml_or_json(input_, output):
    assert common.parse_yaml_or_json(input_) == output


def test_recursive_vars_not_allowed():
    rdict = {}
    rdict['a'] = rdict
    # YAML dumper will use a tag to give recursive data
    data = yaml.dump(rdict, default_flow_style=False)
    with pytest.raises(ParseError) as exc:
        common.parse_yaml_or_json(data, silent_failure=False)
    assert 'Circular reference detected' in str(exc)


class TestParserExceptions:

    @staticmethod
@ -5,7 +5,7 @@ from StringIO import StringIO

from six.moves import xrange

from awx.main.utils import OutputEventFilter
from awx.main.utils import OutputEventFilter, OutputVerboseFilter

MAX_WIDTH = 78
EXAMPLE_UUID = '890773f5-fe6d-4091-8faf-bdc8021d65dd'
@ -145,3 +145,55 @@ def test_large_stdout_blob():
    f = OutputEventFilter(_callback)
    for x in range(1024 * 10):
        f.write('x' * 1024)


def test_verbose_line_buffering():
    events = []

    def _callback(event_data):
        events.append(event_data)

    f = OutputVerboseFilter(_callback)
    f.write('one two\r\n\r\n')

    assert len(events) == 2
    assert events[0]['start_line'] == 0
    assert events[0]['end_line'] == 1
    assert events[0]['stdout'] == 'one two'

    assert events[1]['start_line'] == 1
    assert events[1]['end_line'] == 2
    assert events[1]['stdout'] == ''

    f.write('three')
    assert len(events) == 2
    f.write('\r\nfou')

    # 'three' is not emitted until its line completes
    assert len(events) == 3
    assert events[2]['start_line'] == 2
    assert events[2]['end_line'] == 3
    assert events[2]['stdout'] == 'three'

    f.write('r\r')
    f.write('\nfi')

    assert events[3]['start_line'] == 3
    assert events[3]['end_line'] == 4
    assert events[3]['stdout'] == 'four'

    f.write('ve')
    f.write('\r\n')

    assert len(events) == 5
    assert events[4]['start_line'] == 4
    assert events[4]['end_line'] == 5
    assert events[4]['stdout'] == 'five'

    f.close()

    assert len(events) == 6

    assert events[5]['event'] == 'EOF'
@ -39,6 +39,7 @@ class TestSmartFilterQueryFromString():
        ('a__b__c=3.14', Q(**{u"a__b__c": 3.14})),
        ('a__b__c=true', Q(**{u"a__b__c": True})),
        ('a__b__c=false', Q(**{u"a__b__c": False})),
        ('a__b__c=null', Q(**{u"a__b__c": None})),
        ('ansible_facts__a="true"', Q(**{u"ansible_facts__contains": {u"a": u"true"}})),
        #('"a__b\"__c"="true"', Q(**{u"a__b\"__c": "true"})),
        #('a__b\"__c="true"', Q(**{u"a__b\"__c": "true"})),
@ -114,7 +115,7 @@ class TestSmartFilterQueryFromString():
        assert six.text_type(q) == six.text_type(q_expected)

    @pytest.mark.parametrize("filter_string,q_expected", [
        ('ansible_facts__a=null', Q(**{u"ansible_facts__contains": {u"a": u"null"}})),
        ('ansible_facts__a=null', Q(**{u"ansible_facts__contains": {u"a": None}})),
        ('ansible_facts__c="null"', Q(**{u"ansible_facts__contains": {u"c": u"\"null\""}})),
    ])
    def test_contains_query_generated_null(self, mock_get_host_model, filter_string, q_expected):
@ -130,7 +131,10 @@ class TestSmartFilterQueryFromString():
         Q(**{u"group__name__contains": u"foo"}) | Q(**{u"group__description__contains": u"foo"}))),
        ('search=foo or ansible_facts__a=null',
         Q(Q(**{u"name__contains": u"foo"}) | Q(**{u"description__contains": u"foo"})) |
         Q(**{u"ansible_facts__contains": {u"a": u"null"}})),
         Q(**{u"ansible_facts__contains": {u"a": None}})),
        ('search=foo or ansible_facts__a="null"',
         Q(Q(**{u"name__contains": u"foo"}) | Q(**{u"description__contains": u"foo"})) |
         Q(**{u"ansible_facts__contains": {u"a": u"\"null\""}})),
    ])
    def test_search_related_fields(self, mock_get_host_model, filter_string, q_expected):
        q = SmartFilter.query_from_string(filter_string)
@ -60,7 +60,6 @@ class TestAddRemoveCeleryWorkerQueues():
                                               static_queues, _worker_queues,
                                               groups, hostname,
                                               added_expected, removed_expected):
        added_expected.append('tower_instance_router')
        instance = instance_generator(groups=groups, hostname=hostname)
        worker_queues = worker_queues_generator(_worker_queues)
        with mock.patch('awx.main.utils.ha.settings.AWX_CELERY_QUEUES_STATIC', static_queues):

97  awx/main/tests/unit/utils/test_safe_yaml.py  Normal file
@ -0,0 +1,97 @@
# -*- coding: utf-8 -*-

from copy import deepcopy
import pytest
import yaml
from awx.main.utils.safe_yaml import safe_dump


@pytest.mark.parametrize('value', [None, 1, 1.5, []])
def test_native_types(value):
    # Native non-string types should dump the same way that `yaml.safe_dump` does
    assert safe_dump(value) == yaml.safe_dump(value)


def test_empty():
    assert safe_dump({}) == ''


def test_raw_string():
    assert safe_dump('foo') == "!unsafe 'foo'\n"


def test_kv_null():
    assert safe_dump({'a': None}) == "!unsafe 'a': null\n"


def test_kv_null_safe():
    assert safe_dump({'a': None}, {'a': None}) == "a: null\n"


def test_kv_null_unsafe():
    assert safe_dump({'a': ''}, {'a': None}) == "!unsafe 'a': !unsafe ''\n"


def test_kv_int():
    assert safe_dump({'a': 1}) == "!unsafe 'a': 1\n"


def test_kv_float():
    assert safe_dump({'a': 1.5}) == "!unsafe 'a': 1.5\n"


def test_kv_unsafe():
    assert safe_dump({'a': 'b'}) == "!unsafe 'a': !unsafe 'b'\n"


def test_kv_unsafe_unicode():
    assert safe_dump({'a': u'🐉'}) == '!unsafe \'a\': !unsafe "\\U0001F409"\n'


def test_kv_unsafe_in_list():
    assert safe_dump({'a': ['b']}) == "!unsafe 'a':\n- !unsafe 'b'\n"


def test_kv_unsafe_in_mixed_list():
    assert safe_dump({'a': [1, 'b']}) == "!unsafe 'a':\n- 1\n- !unsafe 'b'\n"


def test_kv_unsafe_deep_nesting():
    yaml = safe_dump({'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]})
    for x in ('a', 'b', 'c', 'd', 'e'):
        assert "!unsafe '{}'".format(x) in yaml


def test_kv_unsafe_multiple():
    assert safe_dump({'a': 'b', 'c': 'd'}) == '\n'.join([
        "!unsafe 'a': !unsafe 'b'",
        "!unsafe 'c': !unsafe 'd'",
        ""
    ])


def test_safe_marking():
    assert safe_dump({'a': 'b'}, safe_dict={'a': 'b'}) == "a: b\n"


def test_safe_marking_mixed():
    assert safe_dump({'a': 'b', 'c': 'd'}, safe_dict={'a': 'b'}) == '\n'.join([
        "a: b",
        "!unsafe 'c': !unsafe 'd'",
        ""
    ])


def test_safe_marking_deep_nesting():
    deep = {'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]}
    yaml = safe_dump(deep, deepcopy(deep))
    for x in ('a', 'b', 'c', 'd', 'e'):
        assert "!unsafe '{}'".format(x) not in yaml


def test_deep_diff_unsafe_marking():
    deep = {'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]}
    jt_vars = deepcopy(deep)
    deep['a'][1][0]['b']['z'] = 'not safe'
    yaml = safe_dump(deep, jt_vars)
    assert "!unsafe 'z'" in yaml
@ -48,7 +48,7 @@ __all__ = ['get_object_or_400', 'get_object_or_403', 'camelcase_to_underscore',
           'copy_m2m_relationships', 'prefetch_page_capabilities', 'to_python_boolean',
           'ignore_inventory_computed_fields', 'ignore_inventory_group_removal',
           '_inventory_updates', 'get_pk_from_dict', 'getattrd', 'NoDefaultProvided',
           'get_current_apps', 'set_current_apps', 'OutputEventFilter',
           'get_current_apps', 'set_current_apps', 'OutputEventFilter', 'OutputVerboseFilter',
           'extract_ansible_vars', 'get_search_fields', 'get_system_task_capacity', 'get_cpu_capacity', 'get_mem_capacity',
           'wrap_args_with_proot', 'build_proot_temp_dir', 'check_proot_installed', 'model_to_dict',
           'model_instance_diff', 'timestamp_apiformat', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
@ -350,11 +350,14 @@ def get_allowed_fields(obj, serializer_mapping):
        allowed_fields = [x for x in serializer_actual.fields if not serializer_actual.fields[x].read_only] + ['id']
    else:
        allowed_fields = [x.name for x in obj._meta.fields]
    if obj._meta.model_name == 'user':
        field_blacklist = ['last_login']
        allowed_fields = [f for f in allowed_fields if f not in field_blacklist]
    if obj._meta.model_name == 'oauth2application':
        field_blacklist = ['client_secret']

    ACTIVITY_STREAM_FIELD_EXCLUSIONS = {
        'user': ['last_login'],
        'oauth2accesstoken': ['last_used'],
        'oauth2application': ['client_secret']
    }
    field_blacklist = ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(obj._meta.model_name, [])
    if field_blacklist:
        allowed_fields = [f for f in allowed_fields if f not in field_blacklist]
    return allowed_fields
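
(A quick illustration of the lookup above; model names not listed in the exclusion map simply fall through to an empty blacklist:)

    ACTIVITY_STREAM_FIELD_EXCLUSIONS.get('user', [])        # ['last_login']
    ACTIVITY_STREAM_FIELD_EXCLUSIONS.get('credential', [])  # []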

@ -380,7 +383,7 @@ def _convert_model_field_for_display(obj, field_name, password_fields=None):
            field_val = json.dumps(field_val, ensure_ascii=False)
        except Exception:
            pass
    if type(field_val) not in (bool, int, type(None)):
    if type(field_val) not in (bool, int, type(None), long):
        field_val = smart_str(field_val)
    return field_val

@ -413,10 +416,8 @@ def model_instance_diff(old, new, serializer_mapping=None):
                _convert_model_field_for_display(old, field, password_fields=old_password_fields),
                _convert_model_field_for_display(new, field, password_fields=new_password_fields),
            )

    if len(diff) == 0:
        diff = None

    return diff

@ -435,7 +436,6 @@ def model_to_dict(obj, serializer_mapping=None):
        if field.name not in allowed_fields:
            continue
        attr_d[field.name] = _convert_model_field_for_display(obj, field.name, password_fields=password_fields)

    return attr_d

@ -630,8 +630,16 @@ def parse_yaml_or_json(vars_str, silent_failure=True):
        vars_dict = yaml.safe_load(vars_str)
        # Can be None if '---'
        if vars_dict is None:
            return {}
            vars_dict = {}
        validate_vars_type(vars_dict)
        if not silent_failure:
            # is valid YAML, check that it is compatible with JSON
            try:
                json.dumps(vars_dict)
            except (ValueError, TypeError, AssertionError) as json_err2:
                raise ParseError(_(
                    'Variables not compatible with JSON standard (error: {json_error})').format(
                        json_error=str(json_err2)))
    except (yaml.YAMLError, TypeError, AttributeError, AssertionError) as yaml_err:
        if silent_failure:
            return {}
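
(A hedged usage sketch of parse_yaml_or_json based only on the code shown here; the module path awx.main.utils.common is assumed from the tests above:)

    from awx.main.utils.common import parse_yaml_or_json

    parse_yaml_or_json('a: 1')   # -> {'a': 1}
    parse_yaml_or_json('---')    # a bare document separator now yields {}
    # with silent_failure=False, YAML that is valid but not representable as
    # JSON (e.g. the circular data in the tests above) raises ParseError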

@ -1009,6 +1017,32 @@ class OutputEventFilter(object):
        self._current_event_data = None


class OutputVerboseFilter(OutputEventFilter):
    '''
    File-like object that dispatches stdout data.
    Does not search for encoded job event data.
    Use for unified job types that do not encode job event data.
    '''
    def write(self, data):
        self._buffer.write(data)

        # if the current chunk contains a line break
        if data and '\n' in data:
            # emit events for all complete lines we know about
            lines = self._buffer.getvalue().splitlines(True)  # keep ends
            remainder = None
            # if the last line is not a complete line, then exclude it
            if '\n' not in lines[-1]:
                remainder = lines.pop()
            # emit all complete lines
            for line in lines:
                self._emit_event(line)
            self._buffer = StringIO()
            # put the final partial line back on the buffer
            if remainder:
                self._buffer.write(remainder)
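
(For context, a minimal sketch of driving this filter, mirroring test_verbose_line_buffering above:)

    events = []
    f = OutputVerboseFilter(events.append)
    f.write('partial')       # buffered; no event dispatched yet
    f.write(' line\nrest')   # the completed line is dispatched to the callback
    assert events[0]['stdout'] == 'partial line'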


def is_ansible_variable(key):
    return key.startswith('ansible_')

@ -19,6 +19,8 @@ __all__ = ['SmartFilter']


def string_to_type(t):
    if t == u'null':
        return None
    if t == u'true':
        return True
    elif t == u'false':
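
(The new u'null' branch is what lets the ansible_facts__a=null smart-filter cases above coerce to None; a quick illustration:)

    assert string_to_type(u'null') is None
    assert string_to_type(u'true') is True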

@ -3,9 +3,6 @@
# Copyright (c) 2017 Ansible Tower by Red Hat
# All Rights Reserved.

# Python
import six

# Django
from django.conf import settings

@ -16,24 +13,26 @@ from awx.main.models import Instance
def _add_remove_celery_worker_queues(app, controlled_instances, worker_queues, worker_name):
    removed_queues = []
    added_queues = []
    ig_names = set([six.text_type('tower_instance_router')])
    ig_names = set()
    hostnames = set([instance.hostname for instance in controlled_instances])
    for instance in controlled_instances:
        ig_names.update(instance.rampart_groups.values_list('name', flat=True))
    worker_queue_names = set([q['name'] for q in worker_queues])

    all_queue_names = ig_names | hostnames | set(settings.AWX_CELERY_QUEUES_STATIC)

    # Remove queues that aren't in the instance group
    for queue in worker_queues:
        if queue['name'] in settings.AWX_CELERY_QUEUES_STATIC or \
                queue['alias'] in settings.AWX_CELERY_QUEUES_STATIC:
                queue['alias'] in settings.AWX_CELERY_BCAST_QUEUES_STATIC:
            continue

        if queue['name'] not in ig_names | hostnames or not instance.enabled:
        if queue['name'] not in all_queue_names or not instance.enabled:
            app.control.cancel_consumer(queue['name'].encode("utf8"), reply=True, destination=[worker_name])
            removed_queues.append(queue['name'].encode("utf8"))

    # Add queues for instance and instance groups
    for queue_name in ig_names | hostnames:
    for queue_name in all_queue_names:
        if queue_name not in worker_queue_names:
            app.control.add_consumer(queue_name.encode("utf8"), reply=True, destination=[worker_name])
            added_queues.append(queue_name.encode("utf8"))
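
(A small sketch of the set arithmetic this bookkeeping relies on; all names here are hypothetical:)

    ig_names = {'tower'}              # instance groups the node belongs to
    hostnames = {'awx-host-1'}        # the node's own per-host queue
    static = ['tower_scheduler']      # stand-in for settings.AWX_CELERY_QUEUES_STATIC
    all_queue_names = ig_names | hostnames | set(static)
    # consumers are cancelled for queues outside this union and added for
    # members of it that the worker is not yet consuming from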

@ -76,6 +75,5 @@ def register_celery_worker_queues(app, celery_worker_name):
    celery_worker_queues = celery_host_queues[celery_worker_name] if celery_host_queues else []
    (added_queues, removed_queues) = _add_remove_celery_worker_queues(app, controlled_instances,
                                                                      celery_worker_queues, celery_worker_name)

    return (controlled_instances, removed_queues, added_queues)

@ -1,5 +1,6 @@

from django.contrib.contenttypes.models import ContentType
from django.db import models


def build_polymorphic_ctypes_map(cls):
@ -10,3 +11,7 @@ def build_polymorphic_ctypes_map(cls):
        if ct_model_class and issubclass(ct_model_class, cls):
            mapping[ct.id] = ct_model_class._camel_to_underscore(ct_model_class.__name__)
    return mapping


def SET_NULL(collector, field, sub_objs, using):
    return models.SET_NULL(collector, field, sub_objs.non_polymorphic(), using)

87  awx/main/utils/safe_yaml.py  Normal file
@ -0,0 +1,87 @@
import re
import six
import yaml


__all__ = ['safe_dump', 'SafeLoader']


class SafeStringDumper(yaml.SafeDumper):

    def represent_data(self, value):
        if isinstance(value, six.string_types):
            return self.represent_scalar('!unsafe', value)
        return super(SafeStringDumper, self).represent_data(value)


class SafeLoader(yaml.Loader):

    def construct_yaml_unsafe(self, node):
        class UnsafeText(six.text_type):
            __UNSAFE__ = True
        node = UnsafeText(self.construct_scalar(node))
        return node


SafeLoader.add_constructor(
    u'!unsafe',
    SafeLoader.construct_yaml_unsafe
)


def safe_dump(x, safe_dict=None):
    """
    Used to serialize an extra_vars dict to YAML

    By default, extra vars are marked as `!unsafe` in the generated yaml
    _unless_ they've been deemed "trusted" (meaning, they likely were set/added
    by a user with a high level of privilege).

    This function allows you to pass in a trusted `safe_dict` to whitelist
    certain extra vars so that they are _not_ marked as `!unsafe` in the
    resulting YAML. Anything _not_ in this dict will automatically be
    `!unsafe`.

    safe_dump({'a': 'b', 'c': 'd'}) ->
    !unsafe 'a': !unsafe 'b'
    !unsafe 'c': !unsafe 'd'

    safe_dump({'a': 'b', 'c': 'd'}, safe_dict={'a': 'b'}) ->
    a: b
    !unsafe 'c': !unsafe 'd'
    """
    if isinstance(x, dict):
        yamls = []
        safe_dict = safe_dict or {}

        # Compare the top level keys so that we can find values that have
        # equality matches (and consider those branches safe)
        for k, v in x.items():
            dumper = yaml.SafeDumper
            if k not in safe_dict or safe_dict.get(k) != v:
                dumper = SafeStringDumper
            yamls.append(yaml.dump_all(
                [{k: v}],
                None,
                Dumper=dumper,
                default_flow_style=False,
            ))
        return ''.join(yamls)
    else:
        return yaml.dump_all([x], None, Dumper=SafeStringDumper, default_flow_style=False)
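
(A round-trip sketch tying safe_dump to SafeLoader; this mirrors the unit tests above rather than documenting a public API:)

    import yaml

    text = safe_dump({'a': 'b'})        # "!unsafe 'a': !unsafe 'b'\n"
    data = yaml.load(text, SafeLoader)
    assert data == {'a': 'b'}
    assert getattr(data['a'], '__UNSAFE__', False)  # strings come back tainted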


def sanitize_jinja(arg):
    """
    For some string, prevent usage of Jinja-like flags
    """
    if isinstance(arg, six.string_types):
        # If the argument looks like it contains Jinja expressions
        # {{ x }} ...
        if re.search(r'\{\{[^}]+}}', arg) is not None:
            raise ValueError('Inline Jinja variables are not allowed.')
        # If the argument looks like it contains Jinja statements/control flow...
        # {% if x.foo() %} ...
        if re.search(r'\{%[^%]+%}', arg) is not None:
            raise ValueError('Inline Jinja variables are not allowed.')
    return arg
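
(A quick illustration of the guard above, matching the "Jinja variables are not allowed" assertions in the credential tests:)

    sanitize_jinja('joe')                     # -> 'joe'
    sanitize_jinja('{{ ansible_ssh_pass }}')  # raises ValueError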

@ -80,7 +80,7 @@ class NetworkingEvents(object):
                                       type='device_type',
                                       id='cid',
                                       host_id='host_id'), device)
        logger.info("Device %s", device)
        logger.info("Device created %s", device)
        d, _ = Device.objects.get_or_create(topology_id=topology_id, cid=device['cid'], defaults=device)
        d.x = device['x']
        d.y = device['y']
@ -92,6 +92,7 @@ class NetworkingEvents(object):
                .update(device_id_seq=device['cid']))

    def onDeviceDestroy(self, device, topology_id, client_id):
        logger.info("Device removed %s", device)
        Device.objects.filter(topology_id=topology_id, cid=device['id']).delete()

    def onDeviceMove(self, device, topology_id, client_id):
@ -101,6 +102,7 @@ class NetworkingEvents(object):
        Device.objects.filter(topology_id=topology_id, cid=device['id']).update(host_id=device['host_id'])

    def onDeviceLabelEdit(self, device, topology_id, client_id):
        logger.debug("Device label edited %s", device)
        Device.objects.filter(topology_id=topology_id, cid=device['id']).update(name=device['name'])

    def onInterfaceLabelEdit(self, interface, topology_id, client_id):
@ -111,6 +113,7 @@ class NetworkingEvents(object):
                .update(name=interface['name']))

    def onLinkLabelEdit(self, link, topology_id, client_id):
        logger.debug("Link label edited %s", link)
        Link.objects.filter(from_device__topology_id=topology_id, cid=link['id']).update(name=link['name'])

    def onInterfaceCreate(self, interface, topology_id, client_id):
@ -125,6 +128,7 @@ class NetworkingEvents(object):
                .update(interface_id_seq=interface['id']))

    def onLinkCreate(self, link, topology_id, client_id):
        logger.debug("Link created %s", link)
        device_map = dict(Device.objects
                          .filter(topology_id=topology_id, cid__in=[link['from_device_id'], link['to_device_id']])
                          .values_list('cid', 'pk'))
@ -141,6 +145,7 @@ class NetworkingEvents(object):
                .update(link_id_seq=link['id']))

    def onLinkDestroy(self, link, topology_id, client_id):
        logger.debug("Link deleted %s", link)
        device_map = dict(Device.objects
                          .filter(topology_id=topology_id, cid__in=[link['from_device_id'], link['to_device_id']])
                          .values_list('cid', 'pk'))
@ -11,7 +11,7 @@ class Migration(migrations.Migration):
    initial = True

    dependencies = [
        ('main', '0026_v330_emitted_events'),
        ('main', '0027_v330_emitted_events'),
    ]

    operations = [
@ -40,7 +40,7 @@

- name: break if already checked out
  meta: end_play
  when: scm_full_checkout|default('') and repo_check|succeeded and repo_check.before == scm_branch
  when: scm_full_checkout|default('') and repo_check is succeeded and repo_check.before == scm_branch

- name: update project using git
  git:
@ -139,7 +139,7 @@
  register: doesRequirementsExist

- name: fetch galaxy roles from requirements.yml
  command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ --force
  command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ {{ scm_result is defined|ternary('--force',omit) }}
  args:
    chdir: "{{project_path|quote}}/roles"
  when: doesRequirementsExist.stat.exists
@ -138,7 +138,7 @@ class CloudFormsInventory(object):
            warnings.warn("No username specified, you need to specify a CloudForms username.")

        if config.has_option('cloudforms', 'password'):
            self.cloudforms_pw = config.get('cloudforms', 'password')
            self.cloudforms_pw = config.get('cloudforms', 'password', raw=True)
        else:
            self.cloudforms_pw = None

@ -84,7 +84,7 @@ class ForemanInventory(object):
        try:
            self.foreman_url = config.get('foreman', 'url')
            self.foreman_user = config.get('foreman', 'user')
            self.foreman_pw = config.get('foreman', 'password')
            self.foreman_pw = config.get('foreman', 'password', raw=True)
            self.foreman_ssl_verify = config.getboolean('foreman', 'ssl_verify')
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError) as e:
            print("Error parsing configuration: %s" % e, file=sys.stderr)

Some files were not shown because too many files have changed in this diff.