move code linting to a stricter pep8-esque auto-formatting tool, black

This commit is contained in:
Ryan Petrello
2021-03-19 12:44:51 -04:00
parent 9b702e46fe
commit c2ef0a6500
671 changed files with 20538 additions and 21924 deletions

View File

@@ -127,7 +127,7 @@ Fixes and Features for AWX will go through the Github pull request process. Subm
Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted: Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:
* No issues when running linters/code checkers * No issues when running linters/code checkers
* Python: flake8: `(container)/awx_devel$ make flake8` * Python: black: `(container)/awx_devel$ make black`
* Javascript: JsHint: `(container)/awx_devel$ make jshint` * Javascript: JsHint: `(container)/awx_devel$ make jshint`
* No issues from unit tests * No issues from unit tests
* Python: py.test: `(container)/awx_devel$ make test` * Python: py.test: `(container)/awx_devel$ make test`

View File

@@ -271,20 +271,8 @@ jupyter:
reports: reports:
mkdir -p $@ mkdir -p $@
pep8: reports black: reports
@(set -o pipefail && $@ | tee reports/$@.report) (set -o pipefail && $@ $(BLACK_ARGS) --skip-string-normalization --fast --line-length 160 awx awxkit awx_collection | tee reports/$@.report)
flake8: reports
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
(set -o pipefail && $@ | tee reports/$@.report)
pyflakes: reports
@(set -o pipefail && $@ | tee reports/$@.report)
pylint: reports
@(set -o pipefail && $@ | reports/$@.report)
genschema: reports genschema: reports
$(MAKE) swagger PYTEST_ARGS="--genschema --create-db " $(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
@@ -296,7 +284,7 @@ swagger: reports
fi; \ fi; \
(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report) (set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
check: flake8 pep8 # pyflakes pylint check: black
awx-link: awx-link:
[ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev [ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev
@@ -332,10 +320,7 @@ test_collection:
# Second we will load any libraries out of the virtualenv (if it's unspecified that should be ok because python should not load out of an empty directory) # Second we will load any libraries out of the virtualenv (if it's unspecified that should be ok because python should not load out of an empty directory)
# Finally we will add the system path so that the tests can find the ansible libraries # Finally we will add the system path so that the tests can find the ansible libraries
flake8_collection: test_collection_all: test_collection
flake8 awx_collection/ # Different settings, in main exclude list
test_collection_all: test_collection flake8_collection
# WARNING: symlinking a collection is fundamentally unstable # WARNING: symlinking a collection is fundamentally unstable
# this is for rapid development iteration with playbooks, do not use with other test targets # this is for rapid development iteration with playbooks, do not use with other test targets

View File

@@ -15,9 +15,10 @@ __all__ = ['__version__']
# Check for the presence/absence of "devonly" module to determine if running # Check for the presence/absence of "devonly" module to determine if running
# from a source code checkout or release package. # from a source code checkout or release package.
try: try:
import awx.devonly # noqa import awx.devonly # noqa
MODE = 'development' MODE = 'development'
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
MODE = 'production' MODE = 'production'
@@ -25,6 +26,7 @@ import hashlib
try: try:
import django # noqa: F401 import django # noqa: F401
HAS_DJANGO = True HAS_DJANGO = True
except ImportError: except ImportError:
HAS_DJANGO = False HAS_DJANGO = False
@@ -40,6 +42,7 @@ if HAS_DJANGO is True:
try: try:
names_digest('foo', 'bar', 'baz', length=8) names_digest('foo', 'bar', 'baz', length=8)
except ValueError: except ValueError:
def names_digest(*args, length): def names_digest(*args, length):
""" """
Generate a 32-bit digest of a set of arguments that can be used to shorten Generate a 32-bit digest of a set of arguments that can be used to shorten
@@ -64,7 +67,7 @@ def find_commands(management_dir):
continue continue
elif f.endswith('.py') and f[:-3] not in commands: elif f.endswith('.py') and f[:-3] not in commands:
commands.append(f[:-3]) commands.append(f[:-3])
elif f.endswith('.pyc') and f[:-4] not in commands: # pragma: no cover elif f.endswith('.pyc') and f[:-4] not in commands: # pragma: no cover
commands.append(f[:-4]) commands.append(f[:-4])
except OSError: except OSError:
pass pass
@@ -75,6 +78,7 @@ def oauth2_getattribute(self, attr):
# Custom method to override # Custom method to override
# oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ # oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
from django.conf import settings from django.conf import settings
val = None val = None
if 'migrate' not in sys.argv: if 'migrate' not in sys.argv:
# certain Django OAuth Toolkit migrations actually reference # certain Django OAuth Toolkit migrations actually reference
@@ -94,33 +98,38 @@ def prepare_env():
# Hide DeprecationWarnings when running in production. Need to first load # Hide DeprecationWarnings when running in production. Need to first load
# settings to apply our filter after Django's own warnings filter. # settings to apply our filter after Django's own warnings filter.
from django.conf import settings from django.conf import settings
if not settings.DEBUG: # pragma: no cover
if not settings.DEBUG: # pragma: no cover
warnings.simplefilter('ignore', DeprecationWarning) warnings.simplefilter('ignore', DeprecationWarning)
# Monkeypatch Django find_commands to also work with .pyc files. # Monkeypatch Django find_commands to also work with .pyc files.
import django.core.management import django.core.management
django.core.management.find_commands = find_commands django.core.management.find_commands = find_commands
# Monkeypatch Oauth2 toolkit settings class to check for settings # Monkeypatch Oauth2 toolkit settings class to check for settings
# in django.conf settings each time, not just once during import # in django.conf settings each time, not just once during import
import oauth2_provider.settings import oauth2_provider.settings
oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute
# Use the AWX_TEST_DATABASE_* environment variables to specify the test # Use the AWX_TEST_DATABASE_* environment variables to specify the test
# database settings to use when management command is run as an external # database settings to use when management command is run as an external
# program via unit tests. # program via unit tests.
for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): # pragma: no cover for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): # pragma: no cover
if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None): if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt] settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
# Disable capturing all SQL queries in memory when in DEBUG mode. # Disable capturing all SQL queries in memory when in DEBUG mode.
if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True): if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
from django.db.backends.base.base import BaseDatabaseWrapper from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorWrapper from django.db.backends.utils import CursorWrapper
BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self) BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)
# Use the default devserver addr/port defined in settings for runserver. # Use the default devserver addr/port defined in settings for runserver.
default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1') default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000) default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
from django.core.management.commands import runserver as core_runserver from django.core.management.commands import runserver as core_runserver
original_handle = core_runserver.Command.handle original_handle = core_runserver.Command.handle
def handle(self, *args, **options): def handle(self, *args, **options):
@@ -139,7 +148,8 @@ def manage():
# Now run the command (or display the version). # Now run the command (or display the version).
from django.conf import settings from django.conf import settings
from django.core.management import execute_from_command_line from django.core.management import execute_from_command_line
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover
if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'): # pragma: no cover
sys.stdout.write('%s\n' % __version__) sys.stdout.write('%s\n' % __version__)
# If running as a user without permission to read settings, display an # If running as a user without permission to read settings, display an
# error message. Allow --help to still work. # error message. Allow --help to still work.

View File

@@ -18,7 +18,6 @@ logger = logging.getLogger('awx.api.authentication')
class LoggedBasicAuthentication(authentication.BasicAuthentication): class LoggedBasicAuthentication(authentication.BasicAuthentication):
def authenticate(self, request): def authenticate(self, request):
if not settings.AUTH_BASIC_ENABLED: if not settings.AUTH_BASIC_ENABLED:
return return
@@ -35,22 +34,18 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):
class SessionAuthentication(authentication.SessionAuthentication): class SessionAuthentication(authentication.SessionAuthentication):
def authenticate_header(self, request): def authenticate_header(self, request):
return 'Session' return 'Session'
class LoggedOAuth2Authentication(OAuth2Authentication): class LoggedOAuth2Authentication(OAuth2Authentication):
def authenticate(self, request): def authenticate(self, request):
ret = super(LoggedOAuth2Authentication, self).authenticate(request) ret = super(LoggedOAuth2Authentication, self).authenticate(request)
if ret: if ret:
user, token = ret user, token = ret
username = user.username if user else '<none>' username = user.username if user else '<none>'
logger.info(smart_text( logger.info(
u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format( smart_text(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk))
username, request.method, request.path, token.pk )
)
))
setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x]) setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x])
return ret return ret

View File

@@ -38,16 +38,20 @@ register(
register( register(
'OAUTH2_PROVIDER', 'OAUTH2_PROVIDER',
field_class=OAuth2ProviderField, field_class=OAuth2ProviderField,
default={'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS, default={
'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS, 'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS}, 'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS,
},
label=_('OAuth 2 Timeout Settings'), label=_('OAuth 2 Timeout Settings'),
help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are ' help_text=_(
'`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number ' 'Dictionary for customizing OAuth 2 timeouts, available items are '
'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of ' '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, ' 'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
'the duration of refresh tokens, after expired access tokens, ' 'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
'in the number of seconds.'), 'the duration of refresh tokens, after expired access tokens, '
'in the number of seconds.'
),
category=_('Authentication'), category=_('Authentication'),
category_slug='authentication', category_slug='authentication',
unit=_('seconds'), unit=_('seconds'),
@@ -57,10 +61,12 @@ register(
field_class=fields.BooleanField, field_class=fields.BooleanField,
default=False, default=False,
label=_('Allow External Users to Create OAuth2 Tokens'), label=_('Allow External Users to Create OAuth2 Tokens'),
help_text=_('For security reasons, users from external auth providers (LDAP, SAML, ' help_text=_(
'SSO, Radius, and others) are not allowed to create OAuth2 tokens. ' 'For security reasons, users from external auth providers (LDAP, SAML, '
'To change this behavior, enable this setting. Existing tokens will ' 'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
'not be deleted when this setting is toggled off.'), 'To change this behavior, enable this setting. Existing tokens will '
'not be deleted when this setting is toggled off.'
),
category=_('Authentication'), category=_('Authentication'),
category_slug='authentication', category_slug='authentication',
) )
@@ -71,8 +77,7 @@ register(
required=False, required=False,
default='', default='',
label=_('Login redirect override URL'), label=_('Login redirect override URL'),
help_text=_('URL to which unauthorized users will be redirected to log in. ' help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the Tower login page.'),
'If blank, users will be sent to the Tower login page.'),
category=_('Authentication'), category=_('Authentication'),
category_slug='authentication', category_slug='authentication',
) )

View File

@@ -16,7 +16,4 @@ class ActiveJobConflict(ValidationError):
# turn everything in self.detail into string by using force_text. # turn everything in self.detail into string by using force_text.
# Declare detail afterwards to circumvent this behavior. # Declare detail afterwards to circumvent this behavior.
super(ActiveJobConflict, self).__init__() super(ActiveJobConflict, self).__init__()
self.detail = { self.detail = {"error": _("Resource is being used by running jobs."), "active_jobs": active_jobs}
"error": _("Resource is being used by running jobs."),
"active_jobs": active_jobs
}

View File

@@ -16,10 +16,10 @@ __all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'VerbatimFiel
class NullFieldMixin(object): class NullFieldMixin(object):
''' """
Mixin to prevent shortcutting validation when we want to allow null input, Mixin to prevent shortcutting validation when we want to allow null input,
but coerce the resulting value to another type. but coerce the resulting value to another type.
''' """
def validate_empty_values(self, data): def validate_empty_values(self, data):
(is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data) (is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)
@@ -29,18 +29,18 @@ class NullFieldMixin(object):
class BooleanNullField(NullFieldMixin, serializers.NullBooleanField): class BooleanNullField(NullFieldMixin, serializers.NullBooleanField):
''' """
Custom boolean field that allows null and empty string as False values. Custom boolean field that allows null and empty string as False values.
''' """
def to_internal_value(self, data): def to_internal_value(self, data):
return bool(super(BooleanNullField, self).to_internal_value(data)) return bool(super(BooleanNullField, self).to_internal_value(data))
class CharNullField(NullFieldMixin, serializers.CharField): class CharNullField(NullFieldMixin, serializers.CharField):
''' """
Custom char field that allows null as input and coerces to an empty string. Custom char field that allows null as input and coerces to an empty string.
''' """
def __init__(self, **kwargs): def __init__(self, **kwargs):
kwargs['allow_null'] = True kwargs['allow_null'] = True
@@ -51,9 +51,9 @@ class CharNullField(NullFieldMixin, serializers.CharField):
class ChoiceNullField(NullFieldMixin, serializers.ChoiceField): class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):
''' """
Custom choice field that allows null as input and coerces to an empty string. Custom choice field that allows null as input and coerces to an empty string.
''' """
def __init__(self, **kwargs): def __init__(self, **kwargs):
kwargs['allow_null'] = True kwargs['allow_null'] = True
@@ -64,9 +64,9 @@ class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):
class VerbatimField(serializers.Field): class VerbatimField(serializers.Field):
''' """
Custom field that passes the value through without changes. Custom field that passes the value through without changes.
''' """
def to_internal_value(self, data): def to_internal_value(self, data):
return data return data
@@ -77,22 +77,19 @@ class VerbatimField(serializers.Field):
class OAuth2ProviderField(fields.DictField): class OAuth2ProviderField(fields.DictField):
default_error_messages = { default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
'invalid_key_names': _('Invalid key names: {invalid_key_names}'),
}
valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'} valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
child = fields.IntegerField(min_value=1) child = fields.IntegerField(min_value=1)
def to_internal_value(self, data): def to_internal_value(self, data):
data = super(OAuth2ProviderField, self).to_internal_value(data) data = super(OAuth2ProviderField, self).to_internal_value(data)
invalid_flags = (set(data.keys()) - self.valid_key_names) invalid_flags = set(data.keys()) - self.valid_key_names
if invalid_flags: if invalid_flags:
self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags))) self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags)))
return data return data
class DeprecatedCredentialField(serializers.IntegerField): class DeprecatedCredentialField(serializers.IntegerField):
def __init__(self, **kwargs): def __init__(self, **kwargs):
kwargs['allow_null'] = True kwargs['allow_null'] = True
kwargs['default'] = None kwargs['default'] = None

View File

@@ -27,9 +27,9 @@ from awx.main.utils.db import get_all_field_names
class TypeFilterBackend(BaseFilterBackend): class TypeFilterBackend(BaseFilterBackend):
''' """
Filter on type field now returned with all objects. Filter on type field now returned with all objects.
''' """
def filter_queryset(self, request, queryset, view): def filter_queryset(self, request, queryset, view):
try: try:
@@ -64,7 +64,7 @@ class TypeFilterBackend(BaseFilterBackend):
def get_fields_from_path(model, path): def get_fields_from_path(model, path):
''' """
Given a Django ORM lookup path (possibly over multiple models) Given a Django ORM lookup path (possibly over multiple models)
Returns the fields in the line, and also the revised lookup path Returns the fields in the line, and also the revised lookup path
ex., given ex., given
@@ -73,7 +73,7 @@ def get_fields_from_path(model, path):
returns tuple of fields traversed as well and a corrected path, returns tuple of fields traversed as well and a corrected path,
for special cases we do substitutions for special cases we do substitutions
([<IntegerField for timeout>], 'project__timeout') ([<IntegerField for timeout>], 'project__timeout')
''' """
# Store of all the fields used to detect repeats # Store of all the fields used to detect repeats
field_list = [] field_list = []
new_parts = [] new_parts = []
@@ -82,12 +82,9 @@ def get_fields_from_path(model, path):
raise ParseError(_('No related model for field {}.').format(name)) raise ParseError(_('No related model for field {}.').format(name))
# HACK: Make project and inventory source filtering by old field names work for backwards compatibility. # HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
if model._meta.object_name in ('Project', 'InventorySource'): if model._meta.object_name in ('Project', 'InventorySource'):
name = { name = {'current_update': 'current_job', 'last_update': 'last_job', 'last_update_failed': 'last_job_failed', 'last_updated': 'last_job_run'}.get(
'current_update': 'current_job', name, name
'last_update': 'last_job', )
'last_update_failed': 'last_job_failed',
'last_updated': 'last_job_run',
}.get(name, name)
if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model): if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
name = 'polymorphic_ctype' name = 'polymorphic_ctype'
@@ -121,28 +118,42 @@ def get_fields_from_path(model, path):
def get_field_from_path(model, path): def get_field_from_path(model, path):
''' """
Given a Django ORM lookup path (possibly over multiple models) Given a Django ORM lookup path (possibly over multiple models)
Returns the last field in the line, and the revised lookup path Returns the last field in the line, and the revised lookup path
ex. ex.
(<IntegerField for timeout>, 'project__timeout') (<IntegerField for timeout>, 'project__timeout')
''' """
field_list, new_path = get_fields_from_path(model, path) field_list, new_path = get_fields_from_path(model, path)
return (field_list[-1], new_path) return (field_list[-1], new_path)
class FieldLookupBackend(BaseFilterBackend): class FieldLookupBackend(BaseFilterBackend):
''' """
Filter using field lookups provided via query string parameters. Filter using field lookups provided via query string parameters.
''' """
RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
SUPPORTED_LOOKUPS = ('exact', 'iexact', 'contains', 'icontains', SUPPORTED_LOOKUPS = (
'startswith', 'istartswith', 'endswith', 'iendswith', 'exact',
'regex', 'iregex', 'gt', 'gte', 'lt', 'lte', 'in', 'iexact',
'isnull', 'search') 'contains',
'icontains',
'startswith',
'istartswith',
'endswith',
'iendswith',
'regex',
'iregex',
'gt',
'gte',
'lt',
'lte',
'in',
'isnull',
'search',
)
# A list of fields that we know can be filtered on without the possibility # A list of fields that we know can be filtered on without the possibility
# of introducing duplicates # of introducing duplicates
@@ -189,10 +200,7 @@ class FieldLookupBackend(BaseFilterBackend):
try: try:
return self.to_python_related(value) return self.to_python_related(value)
except ValueError: except ValueError:
raise ParseError(_('Invalid {field_name} id: {field_id}').format( raise ParseError(_('Invalid {field_name} id: {field_id}').format(field_name=getattr(field, 'name', 'related field'), field_id=value))
field_name=getattr(field, 'name', 'related field'),
field_id=value)
)
else: else:
return field.to_python(value) return field.to_python(value)
@@ -205,13 +213,13 @@ class FieldLookupBackend(BaseFilterBackend):
field_list, new_lookup = self.get_fields_from_lookup(model, lookup) field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
field = field_list[-1] field = field_list[-1]
needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)) needs_distinct = not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)
# Type names are stored without underscores internally, but are presented and # Type names are stored without underscores internally, but are presented and
# and serialized over the API containing underscores so we remove `_` # and serialized over the API containing underscores so we remove `_`
# for polymorphic_ctype__model lookups. # for polymorphic_ctype__model lookups.
if new_lookup.startswith('polymorphic_ctype__model'): if new_lookup.startswith('polymorphic_ctype__model'):
value = value.replace('_','') value = value.replace('_', '')
elif new_lookup.endswith('__isnull'): elif new_lookup.endswith('__isnull'):
value = to_python_boolean(value) value = to_python_boolean(value)
elif new_lookup.endswith('__in'): elif new_lookup.endswith('__in'):
@@ -329,24 +337,20 @@ class FieldLookupBackend(BaseFilterBackend):
args = [] args = []
for n, k, v in and_filters: for n, k, v in and_filters:
if n: if n:
args.append(~Q(**{k:v})) args.append(~Q(**{k: v}))
else: else:
args.append(Q(**{k:v})) args.append(Q(**{k: v}))
for role_name in role_filters: for role_name in role_filters:
if not hasattr(queryset.model, 'accessible_pk_qs'): if not hasattr(queryset.model, 'accessible_pk_qs'):
raise ParseError(_( raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
'Cannot apply role_level filter to this list because its model ' args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
'does not use roles for access control.'))
args.append(
Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name))
)
if or_filters: if or_filters:
q = Q() q = Q()
for n,k,v in or_filters: for n, k, v in or_filters:
if n: if n:
q |= ~Q(**{k:v}) q |= ~Q(**{k: v})
else: else:
q |= Q(**{k:v}) q |= Q(**{k: v})
args.append(q) args.append(q)
if search_filters and search_filter_relation == 'OR': if search_filters and search_filter_relation == 'OR':
q = Q() q = Q()
@@ -360,11 +364,11 @@ class FieldLookupBackend(BaseFilterBackend):
for constrain in constrains: for constrain in constrains:
q_chain |= Q(**{constrain: term}) q_chain |= Q(**{constrain: term})
queryset = queryset.filter(q_chain) queryset = queryset.filter(q_chain)
for n,k,v in chain_filters: for n, k, v in chain_filters:
if n: if n:
q = ~Q(**{k:v}) q = ~Q(**{k: v})
else: else:
q = Q(**{k:v}) q = Q(**{k: v})
queryset = queryset.filter(q) queryset = queryset.filter(q)
queryset = queryset.filter(*args) queryset = queryset.filter(*args)
if needs_distinct: if needs_distinct:
@@ -377,9 +381,9 @@ class FieldLookupBackend(BaseFilterBackend):
class OrderByBackend(BaseFilterBackend): class OrderByBackend(BaseFilterBackend):
''' """
Filter to apply ordering based on query string parameters. Filter to apply ordering based on query string parameters.
''' """
def filter_queryset(self, request, queryset, view): def filter_queryset(self, request, queryset, view):
try: try:

View File

@@ -35,55 +35,50 @@ from rest_framework.negotiation import DefaultContentNegotiation
# AWX # AWX
from awx.api.filters import FieldLookupBackend from awx.api.filters import FieldLookupBackend
from awx.main.models import ( from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
UnifiedJob, UnifiedJobTemplate, User, Role, Credential,
WorkflowJobTemplateNode, WorkflowApprovalTemplate
)
from awx.main.access import access_registry from awx.main.access import access_registry
from awx.main.utils import ( from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
camelcase_to_underscore,
get_search_fields,
getattrd,
get_object_or_400,
decrypt_field,
get_awx_version,
)
from awx.main.utils.db import get_all_field_names from awx.main.utils.db import get_all_field_names
from awx.main.views import ApiErrorView from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
from awx.api.versioning import URLPathVersioning from awx.api.versioning import URLPathVersioning
from awx.api.metadata import SublistAttachDetatchMetadata, Metadata from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView', __all__ = [
'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView', 'APIView',
'SubListDestroyAPIView', 'GenericAPIView',
'SubListCreateAttachDetachAPIView', 'RetrieveAPIView', 'ListAPIView',
'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView', 'SimpleListAPIView',
'RetrieveUpdateDestroyAPIView', 'ListCreateAPIView',
'SubDetailAPIView', 'SubListAPIView',
'ResourceAccessList', 'SubListCreateAPIView',
'ParentMixin', 'SubListDestroyAPIView',
'DeleteLastUnattachLabelMixin', 'SubListCreateAttachDetachAPIView',
'SubListAttachDetachAPIView', 'RetrieveAPIView',
'CopyAPIView', 'BaseUsersList',] 'RetrieveUpdateAPIView',
'RetrieveDestroyAPIView',
'RetrieveUpdateDestroyAPIView',
'SubDetailAPIView',
'ResourceAccessList',
'ParentMixin',
'DeleteLastUnattachLabelMixin',
'SubListAttachDetachAPIView',
'CopyAPIView',
'BaseUsersList',
]
logger = logging.getLogger('awx.api.generics') logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance') analytics_logger = logging.getLogger('awx.analytics.performance')
class LoggedLoginView(auth_views.LoginView): class LoggedLoginView(auth_views.LoginView):
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
# The django.auth.contrib login form doesn't perform the content # The django.auth.contrib login form doesn't perform the content
# negotiation we've come to expect from DRF; add in code to catch # negotiation we've come to expect from DRF; add in code to catch
# situations where Accept != text/html (or */*) and reply with # situations where Accept != text/html (or */*) and reply with
# an HTTP 406 # an HTTP 406
try: try:
DefaultContentNegotiation().select_renderer( DefaultContentNegotiation().select_renderer(request, [StaticHTMLRenderer], 'html')
request,
[StaticHTMLRenderer],
'html'
)
except NotAcceptable: except NotAcceptable:
resp = Response(status=status.HTTP_406_NOT_ACCEPTABLE) resp = Response(status=status.HTTP_406_NOT_ACCEPTABLE)
resp.accepted_renderer = StaticHTMLRenderer() resp.accepted_renderer = StaticHTMLRenderer()
@@ -96,7 +91,7 @@ class LoggedLoginView(auth_views.LoginView):
ret = super(LoggedLoginView, self).post(request, *args, **kwargs) ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
current_user = getattr(request, 'user', None) current_user = getattr(request, 'user', None)
if request.user.is_authenticated: if request.user.is_authenticated:
logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username,request.META.get('REMOTE_ADDR', None)))) logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
ret.set_cookie('userLoggedIn', 'true') ret.set_cookie('userLoggedIn', 'true')
current_user = UserSerializer(self.request.user) current_user = UserSerializer(self.request.user)
current_user = smart_text(JSONRenderer().render(current_user.data)) current_user = smart_text(JSONRenderer().render(current_user.data))
@@ -106,29 +101,27 @@ class LoggedLoginView(auth_views.LoginView):
return ret return ret
else: else:
if 'username' in self.request.POST: if 'username' in self.request.POST:
logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'),request.META.get('REMOTE_ADDR', None)))) logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
ret.status_code = 401 ret.status_code = 401
return ret return ret
class LoggedLogoutView(auth_views.LogoutView): class LoggedLogoutView(auth_views.LogoutView):
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
original_user = getattr(request, 'user', None) original_user = getattr(request, 'user', None)
ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs) ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
current_user = getattr(request, 'user', None) current_user = getattr(request, 'user', None)
ret.set_cookie('userLoggedIn', 'false') ret.set_cookie('userLoggedIn', 'false')
if (not current_user or not getattr(current_user, 'pk', True)) \ if (not current_user or not getattr(current_user, 'pk', True)) and current_user != original_user:
and current_user != original_user:
logger.info("User {} logged out.".format(original_user.username)) logger.info("User {} logged out.".format(original_user.username))
return ret return ret
def get_view_description(view, html=False): def get_view_description(view, html=False):
'''Wrapper around REST framework get_view_description() to continue """Wrapper around REST framework get_view_description() to continue
to support our historical div. to support our historical div.
''' """
desc = views.get_view_description(view, html=html) desc = views.get_view_description(view, html=html)
if html: if html:
desc = '<div class="description">%s</div>' % desc desc = '<div class="description">%s</div>' % desc
@@ -138,6 +131,7 @@ def get_view_description(view, html=False):
def get_default_schema(): def get_default_schema():
if settings.SETTINGS_MODULE == 'awx.settings.development': if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import AutoSchema from awx.api.swagger import AutoSchema
return AutoSchema() return AutoSchema()
else: else:
return views.APIView.schema return views.APIView.schema
@@ -149,21 +143,23 @@ class APIView(views.APIView):
versioning_class = URLPathVersioning versioning_class = URLPathVersioning
def initialize_request(self, request, *args, **kwargs): def initialize_request(self, request, *args, **kwargs):
''' """
Store the Django REST Framework Request object as an attribute on the Store the Django REST Framework Request object as an attribute on the
normal Django request, store time the request started. normal Django request, store time the request started.
''' """
self.time_started = time.time() self.time_started = time.time()
if getattr(settings, 'SQL_DEBUG', False): if getattr(settings, 'SQL_DEBUG', False):
self.queries_before = len(connection.queries) self.queries_before = len(connection.queries)
# If there are any custom headers in REMOTE_HOST_HEADERS, make sure # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
# they respect the allowed proxy list # they respect the allowed proxy list
if all([ if all(
settings.PROXY_IP_ALLOWED_LIST, [
request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST, settings.PROXY_IP_ALLOWED_LIST,
request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
]): request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
]
):
for custom_header in settings.REMOTE_HOST_HEADERS: for custom_header in settings.REMOTE_HOST_HEADERS:
if custom_header.startswith('HTTP_'): if custom_header.startswith('HTTP_'):
request.environ.pop(custom_header, None) request.environ.pop(custom_header, None)
@@ -178,17 +174,19 @@ class APIView(views.APIView):
request.drf_request_user = None request.drf_request_user = None
self.__init_request_error__ = exc self.__init_request_error__ = exc
except UnsupportedMediaType as exc: except UnsupportedMediaType as exc:
exc.detail = _('You did not use correct Content-Type in your HTTP request. ' exc.detail = _(
'If you are using our REST API, the Content-Type must be application/json') 'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
)
self.__init_request_error__ = exc self.__init_request_error__ = exc
return drf_request return drf_request
def finalize_response(self, request, response, *args, **kwargs): def finalize_response(self, request, response, *args, **kwargs):
''' """
Log warning for 400 requests. Add header with elapsed time. Log warning for 400 requests. Add header with elapsed time.
''' """
from awx.main.utils import get_licenser from awx.main.utils import get_licenser
from awx.main.utils.licensing import OpenLicense from awx.main.utils.licensing import OpenLicense
# #
# If the URL was rewritten, and we get a 404, we should entirely # If the URL was rewritten, and we get a 404, we should entirely
# replace the view in the request context with an ApiErrorView() # replace the view in the request context with an ApiErrorView()
@@ -212,8 +210,12 @@ class APIView(views.APIView):
return response return response
if response.status_code >= 400: if response.status_code >= 400:
status_msg = "status %s received by user %s attempting to access %s from %s" % \ status_msg = "status %s received by user %s attempting to access %s from %s" % (
(response.status_code, request.user, request.path, request.META.get('REMOTE_ADDR', None)) response.status_code,
request.user,
request.path,
request.META.get('REMOTE_ADDR', None),
)
if hasattr(self, '__init_request_error__'): if hasattr(self, '__init_request_error__'):
response = self.handle_exception(self.__init_request_error__) response = self.handle_exception(self.__init_request_error__)
if response.status_code == 401: if response.status_code == 401:
@@ -225,7 +227,7 @@ class APIView(views.APIView):
time_started = getattr(self, 'time_started', None) time_started = getattr(self, 'time_started', None)
response['X-API-Product-Version'] = get_awx_version() response['X-API-Product-Version'] = get_awx_version()
response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower' response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower'
response['X-API-Node'] = settings.CLUSTER_HOST_ID response['X-API-Node'] = settings.CLUSTER_HOST_ID
if time_started: if time_started:
time_elapsed = time.time() - self.time_started time_elapsed = time.time() - self.time_started
@@ -311,18 +313,12 @@ class APIView(views.APIView):
return data return data
def determine_version(self, request, *args, **kwargs): def determine_version(self, request, *args, **kwargs):
return ( return (getattr(request, 'version', None), getattr(request, 'versioning_scheme', None))
getattr(request, 'version', None),
getattr(request, 'versioning_scheme', None),
)
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
if self.versioning_class is not None: if self.versioning_class is not None:
scheme = self.versioning_class() scheme = self.versioning_class()
request.version, request.versioning_scheme = ( request.version, request.versioning_scheme = (scheme.determine_version(request, *args, **kwargs), scheme)
scheme.determine_version(request, *args, **kwargs),
scheme
)
if 'version' in kwargs: if 'version' in kwargs:
kwargs.pop('version') kwargs.pop('version')
return super(APIView, self).dispatch(request, *args, **kwargs) return super(APIView, self).dispatch(request, *args, **kwargs)
@@ -378,25 +374,22 @@ class GenericAPIView(generics.GenericAPIView, APIView):
d = super(GenericAPIView, self).get_description_context() d = super(GenericAPIView, self).get_description_context()
if hasattr(self.model, "_meta"): if hasattr(self.model, "_meta"):
if hasattr(self.model._meta, "verbose_name"): if hasattr(self.model._meta, "verbose_name"):
d.update({ d.update(
'model_verbose_name': smart_text(self.model._meta.verbose_name), {
'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural), 'model_verbose_name': smart_text(self.model._meta.verbose_name),
}) 'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
}
)
serializer = self.get_serializer() serializer = self.get_serializer()
metadata = self.metadata_class() metadata = self.metadata_class()
metadata.request = self.request metadata.request = self.request
for method, key in [ for method, key in [('GET', 'serializer_fields'), ('POST', 'serializer_create_fields'), ('PUT', 'serializer_update_fields')]:
('GET', 'serializer_fields'),
('POST', 'serializer_create_fields'),
('PUT', 'serializer_update_fields')
]:
d[key] = metadata.get_serializer_info(serializer, method=method) d[key] = metadata.get_serializer_info(serializer, method=method)
d['settings'] = settings d['settings'] = settings
return d return d
class SimpleListAPIView(generics.ListAPIView, GenericAPIView): class SimpleListAPIView(generics.ListAPIView, GenericAPIView):
def get_queryset(self): def get_queryset(self):
return self.request.user.get_queryset(self.model) return self.request.user.get_queryset(self.model)
@@ -413,9 +406,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
else: else:
order_field = 'name' order_field = 'name'
d = super(ListAPIView, self).get_description_context() d = super(ListAPIView, self).get_description_context()
d.update({ d.update({'order_field': order_field})
'order_field': order_field,
})
return d return d
@property @property
@@ -426,9 +417,13 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
def related_search_fields(self): def related_search_fields(self):
def skip_related_name(name): def skip_related_name(name):
return ( return (
name is None or name.endswith('_role') or name.startswith('_') or name is None
name.startswith('deprecated_') or name.endswith('_set') or or name.endswith('_role')
name == 'polymorphic_ctype') or name.startswith('_')
or name.startswith('deprecated_')
or name.endswith('_set')
or name == 'polymorphic_ctype'
)
fields = set([]) fields = set([])
for field in self.model._meta.fields: for field in self.model._meta.fields:
@@ -482,9 +477,7 @@ class ParentMixin(object):
def get_parent_object(self): def get_parent_object(self):
if self.parent_object is not None: if self.parent_object is not None:
return self.parent_object return self.parent_object
parent_filter = { parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
self.lookup_field: self.kwargs.get(self.lookup_field, None),
}
self.parent_object = get_object_or_404(self.parent_model, **parent_filter) self.parent_object = get_object_or_404(self.parent_model, **parent_filter)
return self.parent_object return self.parent_object
@@ -513,10 +506,12 @@ class SubListAPIView(ParentMixin, ListAPIView):
def get_description_context(self): def get_description_context(self):
d = super(SubListAPIView, self).get_description_context() d = super(SubListAPIView, self).get_description_context()
d.update({ d.update(
'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name), {
'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural), 'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
}) 'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
}
)
return d return d
def get_queryset(self): def get_queryset(self):
@@ -531,7 +526,6 @@ class SubListAPIView(ParentMixin, ListAPIView):
class DestroyAPIView(generics.DestroyAPIView): class DestroyAPIView(generics.DestroyAPIView):
def has_delete_permission(self, obj): def has_delete_permission(self, obj):
return self.request.user.can_access(self.model, 'delete', obj) return self.request.user.can_access(self.model, 'delete', obj)
@@ -545,12 +539,12 @@ class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):
""" """
Concrete view for deleting everything related by `relationship`. Concrete view for deleting everything related by `relationship`.
""" """
check_sub_obj_permission = True check_sub_obj_permission = True
def destroy(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs):
instance_list = self.get_queryset() instance_list = self.get_queryset()
if (not self.check_sub_obj_permission and if not self.check_sub_obj_permission and not request.user.can_access(self.parent_model, 'delete', self.get_parent_object()):
not request.user.can_access(self.parent_model, 'delete', self.get_parent_object())):
raise PermissionDenied() raise PermissionDenied()
self.perform_list_destroy(instance_list) self.perform_list_destroy(instance_list)
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@@ -574,9 +568,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
def get_description_context(self): def get_description_context(self):
d = super(SubListCreateAPIView, self).get_description_context() d = super(SubListCreateAPIView, self).get_description_context()
d.update({ d.update({'parent_key': getattr(self, 'parent_key', None)})
'parent_key': getattr(self, 'parent_key', None),
})
return d return d
def get_queryset(self): def get_queryset(self):
@@ -610,8 +602,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
# attempt to deserialize the object # attempt to deserialize the object
serializer = self.get_serializer(data=data) serializer = self.get_serializer(data=data)
if not serializer.is_valid(): if not serializer.is_valid():
return Response(serializer.errors, return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
status=status.HTTP_400_BAD_REQUEST)
# Verify we have permission to add the object as given. # Verify we have permission to add the object as given.
if not request.user.can_access(self.model, 'add', serializer.validated_data): if not request.user.can_access(self.model, 'add', serializer.validated_data):
@@ -635,9 +626,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
def get_description_context(self): def get_description_context(self):
d = super(SubListCreateAttachDetachAPIView, self).get_description_context() d = super(SubListCreateAttachDetachAPIView, self).get_description_context()
d.update({ d.update({"has_attach": True})
"has_attach": True,
})
return d return d
def attach_validate(self, request): def attach_validate(self, request):
@@ -675,9 +664,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
sub = get_object_or_400(self.model, pk=sub_id) sub = get_object_or_400(self.model, pk=sub_id)
# Verify we have permission to attach. # Verify we have permission to attach.
if not request.user.can_access(self.parent_model, 'attach', parent, sub, if not request.user.can_access(self.parent_model, 'attach', parent, sub, self.relationship, data, skip_sub_obj_read_check=created):
self.relationship, data,
skip_sub_obj_read_check=created):
raise PermissionDenied() raise PermissionDenied()
# Verify that the relationship to be added is valid. # Verify that the relationship to be added is valid.
@@ -716,8 +703,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
relationship = getattrd(parent, self.relationship) relationship = getattrd(parent, self.relationship)
sub = get_object_or_400(self.model, pk=sub_id) sub = get_object_or_400(self.model, pk=sub_id)
if not request.user.can_access(self.parent_model, 'unattach', parent, if not request.user.can_access(self.parent_model, 'unattach', parent, sub, self.relationship, request.data):
sub, self.relationship, request.data):
raise PermissionDenied() raise PermissionDenied()
if parent_key: if parent_key:
@@ -735,28 +721,24 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
def post(self, request, *args, **kwargs): def post(self, request, *args, **kwargs):
if not isinstance(request.data, dict): if not isinstance(request.data, dict):
return Response('invalid type for post data', return Response('invalid type for post data', status=status.HTTP_400_BAD_REQUEST)
status=status.HTTP_400_BAD_REQUEST)
if 'disassociate' in request.data: if 'disassociate' in request.data:
return self.unattach(request, *args, **kwargs) return self.unattach(request, *args, **kwargs)
else: else:
return self.attach(request, *args, **kwargs) return self.attach(request, *args, **kwargs)
class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView): class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
''' """
Derived version of SubListCreateAttachDetachAPIView that prohibits creation Derived version of SubListCreateAttachDetachAPIView that prohibits creation
''' """
metadata_class = SublistAttachDetatchMetadata metadata_class = SublistAttachDetatchMetadata
def post(self, request, *args, **kwargs): def post(self, request, *args, **kwargs):
sub_id = request.data.get('id', None) sub_id = request.data.get('id', None)
if not sub_id: if not sub_id:
return Response( return Response(dict(msg=_("{} 'id' field is missing.".format(self.model._meta.verbose_name.title()))), status=status.HTTP_400_BAD_REQUEST)
dict(msg=_("{} 'id' field is missing.".format(
self.model._meta.verbose_name.title()))),
status=status.HTTP_400_BAD_REQUEST)
return super(SubListAttachDetachAPIView, self).post(request, *args, **kwargs) return super(SubListAttachDetachAPIView, self).post(request, *args, **kwargs)
def update_raw_data(self, data): def update_raw_data(self, data):
@@ -768,11 +750,11 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
class DeleteLastUnattachLabelMixin(object): class DeleteLastUnattachLabelMixin(object):
''' """
Models for which you want the last instance to be deleted from the database Models for which you want the last instance to be deleted from the database
when the last disassociate is called should inherit from this class. Further, when the last disassociate is called should inherit from this class. Further,
the model should implement is_detached() the model should implement is_detached()
''' """
def unattach(self, request, *args, **kwargs): def unattach(self, request, *args, **kwargs):
(sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request) (sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)
@@ -798,7 +780,6 @@ class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):
class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView): class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):
def update(self, request, *args, **kwargs): def update(self, request, *args, **kwargs):
self.update_filter(request, *args, **kwargs) self.update_filter(request, *args, **kwargs)
return super(RetrieveUpdateAPIView, self).update(request, *args, **kwargs) return super(RetrieveUpdateAPIView, self).update(request, *args, **kwargs)
@@ -839,6 +820,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):
def trigger_delayed_deep_copy(*args, **kwargs): def trigger_delayed_deep_copy(*args, **kwargs):
from awx.main.tasks import deep_copy_model_obj from awx.main.tasks import deep_copy_model_obj
connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs)) connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))
@@ -869,8 +851,7 @@ class CopyAPIView(GenericAPIView):
field_val[secret] = decrypt_field(obj, secret) field_val[secret] = decrypt_field(obj, secret)
elif isinstance(field_val, dict): elif isinstance(field_val, dict):
for sub_field in field_val: for sub_field in field_val:
if isinstance(sub_field, str) \ if isinstance(sub_field, str) and isinstance(field_val[sub_field], str):
and isinstance(field_val[sub_field], str):
field_val[sub_field] = decrypt_field(obj, field_name, sub_field) field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
elif isinstance(field_val, str): elif isinstance(field_val, str):
try: try:
@@ -882,15 +863,11 @@ class CopyAPIView(GenericAPIView):
def _build_create_dict(self, obj): def _build_create_dict(self, obj):
ret = {} ret = {}
if self.copy_return_serializer_class: if self.copy_return_serializer_class:
all_fields = Metadata().get_serializer_info( all_fields = Metadata().get_serializer_info(self._get_copy_return_serializer(), method='POST')
self._get_copy_return_serializer(), method='POST'
)
for field_name, field_info in all_fields.items(): for field_name, field_info in all_fields.items():
if not hasattr(obj, field_name) or field_info.get('read_only', True): if not hasattr(obj, field_name) or field_info.get('read_only', True):
continue continue
ret[field_name] = CopyAPIView._decrypt_model_field_if_needed( ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(obj, field_name, getattr(obj, field_name))
obj, field_name, getattr(obj, field_name)
)
return ret return ret
@staticmethod @staticmethod
@@ -908,9 +885,11 @@ class CopyAPIView(GenericAPIView):
except AttributeError: except AttributeError:
continue continue
# Adjust copy blocked fields here. # Adjust copy blocked fields here.
if field.name in fields_to_discard or field.name in [ if (
'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by' field.name in fields_to_discard
] or field.name.endswith('_role'): or field.name in ['id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by']
or field.name.endswith('_role')
):
create_kwargs.pop(field.name, None) create_kwargs.pop(field.name, None)
continue continue
if field.one_to_many: if field.one_to_many:
@@ -926,33 +905,24 @@ class CopyAPIView(GenericAPIView):
elif field.name == 'name' and not old_parent: elif field.name == 'name' and not old_parent:
create_kwargs[field.name] = copy_name or field_val + ' copy' create_kwargs[field.name] = copy_name or field_val + ' copy'
elif field.name in fields_to_preserve: elif field.name in fields_to_preserve:
create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed( create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(obj, field.name, field_val)
obj, field.name, field_val
)
# WorkflowJobTemplateNodes that represent an approval are *special*; # WorkflowJobTemplateNodes that represent an approval are *special*;
# when we copy them, we actually want to *copy* the UJT they point at # when we copy them, we actually want to *copy* the UJT they point at
# rather than share the template reference between nodes in disparate # rather than share the template reference between nodes in disparate
# workflows # workflows
if ( if isinstance(obj, WorkflowJobTemplateNode) and isinstance(getattr(obj, 'unified_job_template'), WorkflowApprovalTemplate):
isinstance(obj, WorkflowJobTemplateNode) and new_approval_template, sub_objs = CopyAPIView.copy_model_obj(None, None, WorkflowApprovalTemplate, obj.unified_job_template, creater)
isinstance(getattr(obj, 'unified_job_template'), WorkflowApprovalTemplate)
):
new_approval_template, sub_objs = CopyAPIView.copy_model_obj(
None, None, WorkflowApprovalTemplate,
obj.unified_job_template, creater
)
create_kwargs['unified_job_template'] = new_approval_template create_kwargs['unified_job_template'] = new_approval_template
new_obj = model.objects.create(**create_kwargs) new_obj = model.objects.create(**create_kwargs)
logger.debug('Deep copy: Created new object {}({})'.format( logger.debug('Deep copy: Created new object {}({})'.format(new_obj, model))
new_obj, model
))
# Need to save separatedly because Djang-crum get_current_user would # Need to save separatedly because Djang-crum get_current_user would
# not work properly in non-request-response-cycle context. # not work properly in non-request-response-cycle context.
new_obj.created_by = creater new_obj.created_by = creater
new_obj.save() new_obj.save()
from awx.main.signals import disable_activity_stream from awx.main.signals import disable_activity_stream
with disable_activity_stream(): with disable_activity_stream():
for m2m in m2m_to_preserve: for m2m in m2m_to_preserve:
for related_obj in m2m_to_preserve[m2m].all(): for related_obj in m2m_to_preserve[m2m].all():
@@ -978,8 +948,7 @@ class CopyAPIView(GenericAPIView):
for key in create_kwargs: for key in create_kwargs:
create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key] create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
try: try:
can_copy = request.user.can_access(self.model, 'add', create_kwargs) and \ can_copy = request.user.can_access(self.model, 'add', create_kwargs) and request.user.can_access(self.model, 'copy_related', obj)
request.user.can_access(self.model, 'copy_related', obj)
except PermissionDenied: except PermissionDenied:
return Response({'can_copy': False}) return Response({'can_copy': False})
return Response({'can_copy': can_copy}) return Response({'can_copy': can_copy})
@@ -998,8 +967,7 @@ class CopyAPIView(GenericAPIView):
if not serializer.is_valid(): if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
new_obj, sub_objs = CopyAPIView.copy_model_obj( new_obj, sub_objs = CopyAPIView.copy_model_obj(
None, None, self.model, obj, request.user, create_kwargs=create_kwargs, None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
copy_name=serializer.validated_data.get('name', '')
) )
if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all(): if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
new_obj.admin_role.members.add(request.user) new_obj.admin_role.members.add(request.user)
@@ -1011,13 +979,9 @@ class CopyAPIView(GenericAPIView):
cache.set(key, sub_objs, timeout=3600) cache.set(key, sub_objs, timeout=3600)
permission_check_func = None permission_check_func = None
if hasattr(type(self), 'deep_copy_permission_check_func'): if hasattr(type(self), 'deep_copy_permission_check_func'):
permission_check_func = ( permission_check_func = (type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func')
type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func'
)
trigger_delayed_deep_copy( trigger_delayed_deep_copy(
self.model.__module__, self.model.__name__, self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, key, permission_check_func=permission_check_func
obj.pk, new_obj.pk, request.user.pk, key,
permission_check_func=permission_check_func
) )
serializer = self._get_copy_return_serializer(new_obj) serializer = self._get_copy_return_serializer(new_obj)
headers = {'Location': new_obj.get_absolute_url(request=request)} headers = {'Location': new_obj.get_absolute_url(request=request)}
@@ -1026,7 +990,7 @@ class CopyAPIView(GenericAPIView):
class BaseUsersList(SubListCreateAttachDetachAPIView): class BaseUsersList(SubListCreateAttachDetachAPIView):
def post(self, request, *args, **kwargs): def post(self, request, *args, **kwargs):
ret = super(BaseUsersList, self).post( request, *args, **kwargs) ret = super(BaseUsersList, self).post(request, *args, **kwargs)
if ret.status_code != 201: if ret.status_code != 201:
return ret return ret
try: try:

View File

@@ -28,18 +28,23 @@ from awx.main.tasks import AWXReceptorJob
class Metadata(metadata.SimpleMetadata): class Metadata(metadata.SimpleMetadata):
def get_field_info(self, field): def get_field_info(self, field):
field_info = OrderedDict() field_info = OrderedDict()
field_info['type'] = self.label_lookup[field] field_info['type'] = self.label_lookup[field]
field_info['required'] = getattr(field, 'required', False) field_info['required'] = getattr(field, 'required', False)
text_attrs = [ text_attrs = [
'read_only', 'label', 'help_text', 'read_only',
'min_length', 'max_length', 'label',
'min_value', 'max_value', 'help_text',
'category', 'category_slug', 'min_length',
'defined_in_file', 'unit', 'max_length',
'min_value',
'max_value',
'category',
'category_slug',
'defined_in_file',
'unit',
] ]
for attr in text_attrs: for attr in text_attrs:
@@ -61,8 +66,9 @@ class Metadata(metadata.SimpleMetadata):
'type': _('Data type for this {}.'), 'type': _('Data type for this {}.'),
'url': _('URL for this {}.'), 'url': _('URL for this {}.'),
'related': _('Data structure with URLs of related resources.'), 'related': _('Data structure with URLs of related resources.'),
'summary_fields': _('Data structure with name/description for related resources. ' 'summary_fields': _(
'The output for some objects may be limited for performance reasons.'), 'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
),
'created': _('Timestamp when this {} was created.'), 'created': _('Timestamp when this {} was created.'),
'modified': _('Timestamp when this {} was last modified.'), 'modified': _('Timestamp when this {} was last modified.'),
} }
@@ -101,9 +107,7 @@ class Metadata(metadata.SimpleMetadata):
field_info['children'] = self.get_serializer_info(field) field_info['children'] = self.get_serializer_info(field)
if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'): if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
choices = [ choices = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]
(choice_value, choice_name) for choice_value, choice_name in field.choices.items()
]
if not any(choice in ('', None) for choice, _ in choices): if not any(choice in ('', None) for choice, _ in choices):
if field.allow_blank: if field.allow_blank:
choices = [("", "---------")] + choices choices = [("", "---------")] + choices
@@ -131,7 +135,6 @@ class Metadata(metadata.SimpleMetadata):
for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES: for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
field_info[notification_type_name] = notification_type_class.default_messages field_info[notification_type_name] = notification_type_class.default_messages
# Update type of fields returned... # Update type of fields returned...
model_field = None model_field = None
if serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'): if serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
@@ -149,22 +152,19 @@ class Metadata(metadata.SimpleMetadata):
field_info['type'] = 'integer' field_info['type'] = 'integer'
elif field.field_name in ('created', 'modified'): elif field.field_name in ('created', 'modified'):
field_info['type'] = 'datetime' field_info['type'] = 'datetime'
elif ( elif RelatedField in field.__class__.__bases__ or isinstance(model_field, ForeignKey):
RelatedField in field.__class__.__bases__ or
isinstance(model_field, ForeignKey)
):
field_info['type'] = 'id' field_info['type'] = 'id'
elif ( elif (
isinstance(field, JSONField) or isinstance(field, JSONField)
isinstance(model_field, JSONField) or or isinstance(model_field, JSONField)
isinstance(field, DRFJSONField) or or isinstance(field, DRFJSONField)
isinstance(getattr(field, 'model_field', None), JSONField) or or isinstance(getattr(field, 'model_field', None), JSONField)
field.field_name == 'credential_passwords' or field.field_name == 'credential_passwords'
): ):
field_info['type'] = 'json' field_info['type'] = 'json'
elif ( elif (
isinstance(field, ManyRelatedField) and isinstance(field, ManyRelatedField)
field.field_name == 'credentials' and field.field_name == 'credentials'
# launch-time credentials # launch-time credentials
): ):
field_info['type'] = 'list_of_ids' field_info['type'] = 'list_of_ids'
@@ -175,10 +175,7 @@ class Metadata(metadata.SimpleMetadata):
def get_serializer_info(self, serializer, method=None): def get_serializer_info(self, serializer, method=None):
filterer = getattr(serializer, 'filter_field_metadata', lambda fields, method: fields) filterer = getattr(serializer, 'filter_field_metadata', lambda fields, method: fields)
return filterer( return filterer(super(Metadata, self).get_serializer_info(serializer), method)
super(Metadata, self).get_serializer_info(serializer),
method
)
def determine_actions(self, request, view): def determine_actions(self, request, view):
# Add field information for GET requests (so field names/labels are # Add field information for GET requests (so field names/labels are
@@ -274,6 +271,7 @@ class Metadata(metadata.SimpleMetadata):
metadata['object_roles'] = roles metadata['object_roles'] = roles
from rest_framework import generics from rest_framework import generics
if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'): if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'):
metadata['max_page_size'] = view.paginator.max_page_size metadata['max_page_size'] = view.paginator.max_page_size
@@ -293,7 +291,6 @@ class RoleMetadata(Metadata):
class SublistAttachDetatchMetadata(Metadata): class SublistAttachDetatchMetadata(Metadata):
def determine_actions(self, request, view): def determine_actions(self, request, view):
actions = super(SublistAttachDetatchMetadata, self).determine_actions(request, view) actions = super(SublistAttachDetatchMetadata, self).determine_actions(request, view)
method = 'POST' method = 'POST'

View File

@@ -3,13 +3,9 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import MetricsView
MetricsView
)
urls = [ urls = [url(r'^$', MetricsView.as_view(), name='metrics_view')]
url(r'^$', MetricsView.as_view(), name='metrics_view'),
]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -10,7 +10,6 @@ from rest_framework.utils.urls import replace_query_param
class DisabledPaginator(DjangoPaginator): class DisabledPaginator(DjangoPaginator):
@property @property
def num_pages(self): def num_pages(self):
return 1 return 1
@@ -49,8 +48,7 @@ class Pagination(pagination.PageNumberPagination):
def get_html_context(self): def get_html_context(self):
context = super().get_html_context() context = super().get_html_context()
context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url)) context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url)) for pl in context['page_links']]
for pl in context['page_links']]
return context return context

View File

@@ -15,16 +15,25 @@ from awx.main.utils import get_object_or_400
logger = logging.getLogger('awx.api.permissions') logger = logging.getLogger('awx.api.permissions')
__all__ = ['ModelAccessPermission', 'JobTemplateCallbackPermission', 'VariableDataPermission', __all__ = [
'TaskPermission', 'ProjectUpdatePermission', 'InventoryInventorySourcesUpdatePermission', 'ModelAccessPermission',
'UserPermission', 'IsSuperUser', 'InstanceGroupTowerPermission', 'WorkflowApprovalPermission'] 'JobTemplateCallbackPermission',
'VariableDataPermission',
'TaskPermission',
'ProjectUpdatePermission',
'InventoryInventorySourcesUpdatePermission',
'UserPermission',
'IsSuperUser',
'InstanceGroupTowerPermission',
'WorkflowApprovalPermission',
]
class ModelAccessPermission(permissions.BasePermission): class ModelAccessPermission(permissions.BasePermission):
''' """
Default permissions class to check user access based on the model and Default permissions class to check user access based on the model and
request method, optionally verifying the request data. request method, optionally verifying the request data.
''' """
def check_options_permissions(self, request, view, obj=None): def check_options_permissions(self, request, view, obj=None):
return self.check_get_permissions(request, view, obj) return self.check_get_permissions(request, view, obj)
@@ -35,8 +44,7 @@ class ModelAccessPermission(permissions.BasePermission):
def check_get_permissions(self, request, view, obj=None): def check_get_permissions(self, request, view, obj=None):
if hasattr(view, 'parent_model'): if hasattr(view, 'parent_model'):
parent_obj = view.get_parent_object() parent_obj = view.get_parent_object()
if not check_user_access(request.user, view.parent_model, 'read', if not check_user_access(request.user, view.parent_model, 'read', parent_obj):
parent_obj):
return False return False
if not obj: if not obj:
return True return True
@@ -45,8 +53,7 @@ class ModelAccessPermission(permissions.BasePermission):
def check_post_permissions(self, request, view, obj=None): def check_post_permissions(self, request, view, obj=None):
if hasattr(view, 'parent_model'): if hasattr(view, 'parent_model'):
parent_obj = view.get_parent_object() parent_obj = view.get_parent_object()
if not check_user_access(request.user, view.parent_model, 'read', if not check_user_access(request.user, view.parent_model, 'read', parent_obj):
parent_obj):
return False return False
if hasattr(view, 'parent_key'): if hasattr(view, 'parent_key'):
if not check_user_access(request.user, view.model, 'add', {view.parent_key: parent_obj}): if not check_user_access(request.user, view.model, 'add', {view.parent_key: parent_obj}):
@@ -60,10 +67,7 @@ class ModelAccessPermission(permissions.BasePermission):
extra_kwargs = {} extra_kwargs = {}
if view.obj_permission_type == 'admin': if view.obj_permission_type == 'admin':
extra_kwargs['data'] = {} extra_kwargs['data'] = {}
return check_user_access( return check_user_access(request.user, view.model, view.obj_permission_type, obj, **extra_kwargs)
request.user, view.model, view.obj_permission_type, obj,
**extra_kwargs
)
else: else:
if obj: if obj:
return True return True
@@ -74,8 +78,7 @@ class ModelAccessPermission(permissions.BasePermission):
# FIXME: For some reason this needs to return True # FIXME: For some reason this needs to return True
# because it is first called with obj=None? # because it is first called with obj=None?
return True return True
return check_user_access(request.user, view.model, 'change', obj, return check_user_access(request.user, view.model, 'change', obj, request.data)
request.data)
def check_patch_permissions(self, request, view, obj=None): def check_patch_permissions(self, request, view, obj=None):
return self.check_put_permissions(request, view, obj) return self.check_put_permissions(request, view, obj)
@@ -89,10 +92,10 @@ class ModelAccessPermission(permissions.BasePermission):
return check_user_access(request.user, view.model, 'delete', obj) return check_user_access(request.user, view.model, 'delete', obj)
def check_permissions(self, request, view, obj=None): def check_permissions(self, request, view, obj=None):
''' """
Perform basic permissions checking before delegating to the appropriate Perform basic permissions checking before delegating to the appropriate
method based on the request method. method based on the request method.
''' """
# Don't allow anonymous users. 401, not 403, hence no raised exception. # Don't allow anonymous users. 401, not 403, hence no raised exception.
if not request.user or request.user.is_anonymous: if not request.user or request.user.is_anonymous:
@@ -117,9 +120,7 @@ class ModelAccessPermission(permissions.BasePermission):
return result return result
def has_permission(self, request, view, obj=None): def has_permission(self, request, view, obj=None):
logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)', logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)', request.user, request.method, request.data, view.__class__.__name__, obj)
request.user, request.method, request.data,
view.__class__.__name__, obj)
try: try:
response = self.check_permissions(request, view, obj) response = self.check_permissions(request, view, obj)
except Exception as e: except Exception as e:
@@ -134,10 +135,10 @@ class ModelAccessPermission(permissions.BasePermission):
class JobTemplateCallbackPermission(ModelAccessPermission): class JobTemplateCallbackPermission(ModelAccessPermission):
''' """
Permission check used by job template callback view for requests from Permission check used by job template callback view for requests from
empheral hosts. empheral hosts.
''' """
def has_permission(self, request, view, obj=None): def has_permission(self, request, view, obj=None):
# If another authentication method was used and it's not a POST, return # If another authentication method was used and it's not a POST, return
@@ -160,18 +161,16 @@ class JobTemplateCallbackPermission(ModelAccessPermission):
class VariableDataPermission(ModelAccessPermission): class VariableDataPermission(ModelAccessPermission):
def check_put_permissions(self, request, view, obj=None): def check_put_permissions(self, request, view, obj=None):
if not obj: if not obj:
return True return True
return check_user_access(request.user, view.model, 'change', obj, return check_user_access(request.user, view.model, 'change', obj, dict(variables=request.data))
dict(variables=request.data))
class TaskPermission(ModelAccessPermission): class TaskPermission(ModelAccessPermission):
''' """
Permission checks used for API callbacks from running a task. Permission checks used for API callbacks from running a task.
''' """
def has_permission(self, request, view, obj=None): def has_permission(self, request, view, obj=None):
# If another authentication method was used other than the one for # If another authentication method was used other than the one for
@@ -182,8 +181,7 @@ class TaskPermission(ModelAccessPermission):
# Verify that the ID present in the auth token is for a valid, active # Verify that the ID present in the auth token is for a valid, active
# unified job. # unified job.
try: try:
unified_job = UnifiedJob.objects.get(status='running', unified_job = UnifiedJob.objects.get(status='running', pk=int(request.auth.split('-')[0]))
pk=int(request.auth.split('-')[0]))
except (UnifiedJob.DoesNotExist, TypeError): except (UnifiedJob.DoesNotExist, TypeError):
return False return False
@@ -197,10 +195,10 @@ class TaskPermission(ModelAccessPermission):
class WorkflowApprovalPermission(ModelAccessPermission): class WorkflowApprovalPermission(ModelAccessPermission):
''' """
Permission check used by workflow `approval` and `deny` views to determine Permission check used by workflow `approval` and `deny` views to determine
who has access to approve and deny paused workflow nodes who has access to approve and deny paused workflow nodes
''' """
def check_post_permissions(self, request, view, obj=None): def check_post_permissions(self, request, view, obj=None):
approval = get_object_or_400(view.model, pk=view.kwargs['pk']) approval = get_object_or_400(view.model, pk=view.kwargs['pk'])
@@ -208,9 +206,10 @@ class WorkflowApprovalPermission(ModelAccessPermission):
class ProjectUpdatePermission(ModelAccessPermission): class ProjectUpdatePermission(ModelAccessPermission):
''' """
Permission check used by ProjectUpdateView to determine who can update projects Permission check used by ProjectUpdateView to determine who can update projects
''' """
def check_get_permissions(self, request, view, obj=None): def check_get_permissions(self, request, view, obj=None):
project = get_object_or_400(view.model, pk=view.kwargs['pk']) project = get_object_or_400(view.model, pk=view.kwargs['pk'])
return check_user_access(request.user, view.model, 'read', project) return check_user_access(request.user, view.model, 'read', project)

View File

@@ -11,7 +11,6 @@ from rest_framework.utils import encoders
class SurrogateEncoder(encoders.JSONEncoder): class SurrogateEncoder(encoders.JSONEncoder):
def encode(self, obj): def encode(self, obj):
ret = super(SurrogateEncoder, self).encode(obj) ret = super(SurrogateEncoder, self).encode(obj)
try: try:
@@ -28,9 +27,9 @@ class DefaultJSONRenderer(renderers.JSONRenderer):
class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer): class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
''' """
Customizations to the default browsable API renderer. Customizations to the default browsable API renderer.
''' """
def get_default_renderer(self, view): def get_default_renderer(self, view):
renderer = super(BrowsableAPIRenderer, self).get_default_renderer(view) renderer = super(BrowsableAPIRenderer, self).get_default_renderer(view)
@@ -48,9 +47,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
# see: https://github.com/ansible/awx/issues/3108 # see: https://github.com/ansible/awx/issues/3108
# https://code.djangoproject.com/ticket/28121 # https://code.djangoproject.com/ticket/28121
return data return data
return super(BrowsableAPIRenderer, self).get_content(renderer, data, return super(BrowsableAPIRenderer, self).get_content(renderer, data, accepted_media_type, renderer_context)
accepted_media_type,
renderer_context)
def get_context(self, data, accepted_media_type, renderer_context): def get_context(self, data, accepted_media_type, renderer_context):
# Store the associated response status to know how to populate the raw # Store the associated response status to know how to populate the raw
@@ -125,18 +122,13 @@ class AnsiDownloadRenderer(PlainTextRenderer):
class PrometheusJSONRenderer(renderers.JSONRenderer): class PrometheusJSONRenderer(renderers.JSONRenderer):
def render(self, data, accepted_media_type=None, renderer_context=None): def render(self, data, accepted_media_type=None, renderer_context=None):
if isinstance(data, dict): if isinstance(data, dict):
# HTTP errors are {'detail': ErrorDetail(string='...', code=...)} # HTTP errors are {'detail': ErrorDetail(string='...', code=...)}
return super(PrometheusJSONRenderer, self).render( return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)
data, accepted_media_type, renderer_context
)
parsed_metrics = text_string_to_metric_families(data) parsed_metrics = text_string_to_metric_families(data)
data = {} data = {}
for family in parsed_metrics: for family in parsed_metrics:
for sample in family.samples: for sample in family.samples:
data[sample[0]] = {"labels": sample[1], "value": sample[2]} data[sample[0]] = {"labels": sample[1], "value": sample[2]}
return super(PrometheusJSONRenderer, self).render( return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)
data, accepted_media_type, renderer_context
)

File diff suppressed because it is too large Load Diff

View File

@@ -14,7 +14,6 @@ from rest_framework_swagger import renderers
class SuperUserSchemaGenerator(SchemaGenerator): class SuperUserSchemaGenerator(SchemaGenerator):
def has_view_permissions(self, path, method, view): def has_view_permissions(self, path, method, view):
# #
# Generate the Swagger schema as if you were a superuser and # Generate the Swagger schema as if you were a superuser and
@@ -25,17 +24,17 @@ class SuperUserSchemaGenerator(SchemaGenerator):
class AutoSchema(DRFAuthSchema): class AutoSchema(DRFAuthSchema):
def get_link(self, path, method, base_url): def get_link(self, path, method, base_url):
link = super(AutoSchema, self).get_link(path, method, base_url) link = super(AutoSchema, self).get_link(path, method, base_url)
try: try:
serializer = self.view.get_serializer() serializer = self.view.get_serializer()
except Exception: except Exception:
serializer = None serializer = None
warnings.warn('{}.get_serializer() raised an exception during ' warnings.warn(
'schema generation. Serializer fields will not be ' '{}.get_serializer() raised an exception during '
'generated for {} {}.' 'schema generation. Serializer fields will not be '
.format(self.view.__class__.__name__, method, path)) 'generated for {} {}.'.format(self.view.__class__.__name__, method, path)
)
link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False) link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)
@@ -43,9 +42,7 @@ class AutoSchema(DRFAuthSchema):
if hasattr(self.view, 'swagger_topic'): if hasattr(self.view, 'swagger_topic'):
link.__dict__['topic'] = str(self.view.swagger_topic).title() link.__dict__['topic'] = str(self.view.swagger_topic).title()
elif serializer and hasattr(serializer, 'Meta'): elif serializer and hasattr(serializer, 'Meta'):
link.__dict__['topic'] = str( link.__dict__['topic'] = str(serializer.Meta.model._meta.verbose_name_plural).title()
serializer.Meta.model._meta.verbose_name_plural
).title()
elif hasattr(self.view, 'model'): elif hasattr(self.view, 'model'):
link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title() link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
else: else:
@@ -62,18 +59,10 @@ class SwaggerSchemaView(APIView):
_ignore_model_permissions = True _ignore_model_permissions = True
exclude_from_schema = True exclude_from_schema = True
permission_classes = [AllowAny] permission_classes = [AllowAny]
renderer_classes = [ renderer_classes = [CoreJSONRenderer, renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer]
CoreJSONRenderer,
renderers.OpenAPIRenderer,
renderers.SwaggerUIRenderer
]
def get(self, request): def get(self, request):
generator = SuperUserSchemaGenerator( generator = SuperUserSchemaGenerator(title='Ansible Tower API', patterns=None, urlconf=None)
title='Ansible Tower API',
patterns=None,
urlconf=None
)
schema = generator.get_schema(request=request) schema = generator.get_schema(request=request)
# python core-api doesn't support the deprecation yet, so track it # python core-api doesn't support the deprecation yet, so track it
# ourselves and return it in a response header # ourselves and return it in a response header
@@ -103,11 +92,6 @@ class SwaggerSchemaView(APIView):
schema._data[topic]._data[path] = node schema._data[topic]._data[path] = node
if not schema: if not schema:
raise exceptions.ValidationError( raise exceptions.ValidationError('The schema generator did not return a schema Document')
'The schema generator did not return a schema Document'
)
return Response( return Response(schema, headers={'X-Deprecated-Paths': json.dumps(_deprecated)})
schema,
headers={'X-Deprecated-Paths': json.dumps(_deprecated)}
)

View File

@@ -3,10 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import ActivityStreamList, ActivityStreamDetail
ActivityStreamList,
ActivityStreamDetail,
)
urls = [ urls = [

View File

@@ -3,10 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import AdHocCommandEventList, AdHocCommandEventDetail
AdHocCommandEventList,
AdHocCommandEventDetail,
)
urls = [ urls = [

View File

@@ -3,10 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList
CredentialInputSourceDetail,
CredentialInputSourceList,
)
urls = [ urls = [

View File

@@ -3,13 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest
CredentialTypeList,
CredentialTypeDetail,
CredentialTypeCredentialList,
CredentialTypeActivityStreamList,
CredentialTypeExternalTest,
)
urls = [ urls = [

View File

@@ -3,20 +3,14 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList
InstanceList,
InstanceDetail,
InstanceUnifiedJobsList,
InstanceInstanceGroupsList,
)
urls = [ urls = [
url(r'^$', InstanceList.as_view(), name='instance_list'), url(r'^$', InstanceList.as_view(), name='instance_list'),
url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'), url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'), url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
name='instance_instance_groups_list'),
] ]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -3,12 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList
InstanceGroupList,
InstanceGroupDetail,
InstanceGroupUnifiedJobsList,
InstanceGroupInstanceList,
)
urls = [ urls = [

View File

@@ -3,12 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import InventoryScriptList, InventoryScriptDetail, InventoryScriptObjectRolesList, InventoryScriptCopy
InventoryScriptList,
InventoryScriptDetail,
InventoryScriptObjectRolesList,
InventoryScriptCopy,
)
urls = [ urls = [

View File

@@ -29,12 +29,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'), url(r'^(?P<pk>[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'),
url(r'^(?P<pk>[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'), url(r'^(?P<pk>[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'),
url(r'^(?P<pk>[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'), url(r'^(?P<pk>[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', InventorySourceNotificationTemplatesStartedList.as_view(), url(
name='inventory_source_notification_templates_started_list'), r'^(?P<pk>[0-9]+)/notification_templates_started/$',
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(), InventorySourceNotificationTemplatesStartedList.as_view(),
name='inventory_source_notification_templates_error_list'), name='inventory_source_notification_templates_started_list',
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(), ),
name='inventory_source_notification_templates_success_list'), url(
r'^(?P<pk>[0-9]+)/notification_templates_error/$',
InventorySourceNotificationTemplatesErrorList.as_view(),
name='inventory_source_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
InventorySourceNotificationTemplatesSuccessList.as_view(),
name='inventory_source_notification_templates_success_list',
),
] ]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -3,12 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import JobEventList, JobEventDetail, JobEventChildrenList, JobEventHostsList
JobEventList,
JobEventDetail,
JobEventChildrenList,
JobEventHostsList,
)
urls = [ urls = [

View File

@@ -3,13 +3,9 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import JobHostSummaryDetail
JobHostSummaryDetail,
)
urls = [ urls = [url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')]
url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'),
]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -34,12 +34,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'), url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', JobTemplateNotificationTemplatesStartedList.as_view(), url(
name='job_template_notification_templates_started_list'), r'^(?P<pk>[0-9]+)/notification_templates_started/$',
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(), JobTemplateNotificationTemplatesStartedList.as_view(),
name='job_template_notification_templates_error_list'), name='job_template_notification_templates_started_list',
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(), ),
name='job_template_notification_templates_success_list'), url(
r'^(?P<pk>[0-9]+)/notification_templates_error/$',
JobTemplateNotificationTemplatesErrorList.as_view(),
name='job_template_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
JobTemplateNotificationTemplatesSuccessList.as_view(),
name='job_template_notification_templates_success_list',
),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'), url(r'^(?P<pk>[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'), url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'), url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),

View File

@@ -3,15 +3,9 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import LabelList, LabelDetail
LabelList,
LabelDetail,
)
urls = [ urls = [url(r'^$', LabelList.as_view(), name='label_list'), url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]
url(r'^$', LabelList.as_view(), name='label_list'),
url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail'),
]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -3,15 +3,9 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import NotificationList, NotificationDetail
NotificationList,
NotificationDetail,
)
urls = [ urls = [url(r'^$', NotificationList.as_view(), name='notification_list'), url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail')]
url(r'^$', NotificationList.as_view(), name='notification_list'),
url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),
]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -16,32 +16,12 @@ from awx.api.views import (
urls = [ urls = [
url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'), url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
url( url(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
r'^applications/(?P<pk>[0-9]+)/$', url(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'),
OAuth2ApplicationDetail.as_view(), url(r'^applications/(?P<pk>[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'),
name='o_auth2_application_detail'
),
url(
r'^applications/(?P<pk>[0-9]+)/tokens/$',
ApplicationOAuth2TokenList.as_view(),
name='o_auth2_application_token_list'
),
url(
r'^applications/(?P<pk>[0-9]+)/activity_stream/$',
OAuth2ApplicationActivityStreamList.as_view(),
name='o_auth2_application_activity_stream_list'
),
url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'), url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
url( url(r'^tokens/(?P<pk>[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'),
r'^tokens/(?P<pk>[0-9]+)/$', url(r'^tokens/(?P<pk>[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'),
OAuth2TokenDetail.as_view(),
name='o_auth2_token_detail'
),
url(
r'^tokens/(?P<pk>[0-9]+)/activity_stream/$',
OAuth2TokenActivityStreamList.as_view(),
name='o_auth2_token_activity_stream_list'
),
] ]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -10,13 +10,10 @@ from oauthlib import oauth2
from oauth2_provider import views from oauth2_provider import views
from awx.main.models import RefreshToken from awx.main.models import RefreshToken
from awx.api.views import ( from awx.api.views import ApiOAuthAuthorizationRootView
ApiOAuthAuthorizationRootView,
)
class TokenView(views.TokenView): class TokenView(views.TokenView):
def create_token_response(self, request): def create_token_response(self, request):
# Django OAuth2 Toolkit has a bug whereby refresh tokens are *never* # Django OAuth2 Toolkit has a bug whereby refresh tokens are *never*
# properly expired (ugh): # properly expired (ugh):
@@ -26,9 +23,7 @@ class TokenView(views.TokenView):
# This code detects and auto-expires them on refresh grant # This code detects and auto-expires them on refresh grant
# requests. # requests.
if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST: if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST:
refresh_token = RefreshToken.objects.filter( refresh_token = RefreshToken.objects.filter(token=request.POST['refresh_token']).first()
token=request.POST['refresh_token']
).first()
if refresh_token: if refresh_token:
expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0) expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0)
if refresh_token.created + timedelta(seconds=expire_seconds) < now(): if refresh_token.created + timedelta(seconds=expire_seconds) < now():

View File

@@ -43,14 +43,26 @@ urls = [
url(r'^(?P<pk>[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'), url(r'^(?P<pk>[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'), url(r'^(?P<pk>[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'), url(r'^(?P<pk>[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', OrganizationNotificationTemplatesStartedList.as_view(), url(
name='organization_notification_templates_started_list'), r'^(?P<pk>[0-9]+)/notification_templates_started/$',
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(), OrganizationNotificationTemplatesStartedList.as_view(),
name='organization_notification_templates_error_list'), name='organization_notification_templates_started_list',
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(), ),
name='organization_notification_templates_success_list'), url(
url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(), r'^(?P<pk>[0-9]+)/notification_templates_error/$',
name='organization_notification_templates_approvals_list'), OrganizationNotificationTemplatesErrorList.as_view(),
name='organization_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
OrganizationNotificationTemplatesSuccessList.as_view(),
name='organization_notification_templates_success_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_approvals/$',
OrganizationNotificationTemplatesApprovalList.as_view(),
name='organization_notification_templates_approvals_list',
),
url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'), url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
url(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'), url(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'), url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),

View File

@@ -35,10 +35,16 @@ urls = [
url(r'^(?P<pk>[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'), url(r'^(?P<pk>[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'), url(r'^(?P<pk>[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'), url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(), url(
name='project_notification_templates_success_list'), r'^(?P<pk>[0-9]+)/notification_templates_success/$',
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', ProjectNotificationTemplatesStartedList.as_view(), ProjectNotificationTemplatesSuccessList.as_view(),
name='project_notification_templates_started_list'), name='project_notification_templates_success_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_started/$',
ProjectNotificationTemplatesStartedList.as_view(),
name='project_notification_templates_started_list',
),
url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'), url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'), url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'), url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),

View File

@@ -3,14 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList
RoleList,
RoleDetail,
RoleUsersList,
RoleTeamsList,
RoleParentsList,
RoleChildrenList,
)
urls = [ urls = [

View File

@@ -3,12 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList
ScheduleList,
ScheduleDetail,
ScheduleUnifiedJobsList,
ScheduleCredentialsList,
)
urls = [ urls = [

View File

@@ -3,13 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList
SystemJobList,
SystemJobDetail,
SystemJobCancel,
SystemJobNotificationsList,
SystemJobEventsList
)
urls = [ urls = [

View File

@@ -21,12 +21,21 @@ urls = [
url(r'^(?P<pk>[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'), url(r'^(?P<pk>[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'),
url(r'^(?P<pk>[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'), url(r'^(?P<pk>[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'),
url(r'^(?P<pk>[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'), url(r'^(?P<pk>[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', SystemJobTemplateNotificationTemplatesStartedList.as_view(), url(
name='system_job_template_notification_templates_started_list'), r'^(?P<pk>[0-9]+)/notification_templates_started/$',
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(), SystemJobTemplateNotificationTemplatesStartedList.as_view(),
name='system_job_template_notification_templates_error_list'), name='system_job_template_notification_templates_started_list',
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(), ),
name='system_job_template_notification_templates_success_list'), url(
r'^(?P<pk>[0-9]+)/notification_templates_error/$',
SystemJobTemplateNotificationTemplatesErrorList.as_view(),
name='system_job_template_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
name='system_job_template_notification_templates_success_list',
),
] ]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -5,10 +5,7 @@ from __future__ import absolute_import, unicode_literals
from django.conf import settings from django.conf import settings
from django.conf.urls import include, url from django.conf.urls import include, url
from awx.api.generics import ( from awx.api.generics import LoggedLoginView, LoggedLogoutView
LoggedLoginView,
LoggedLogoutView,
)
from awx.api.views import ( from awx.api.views import (
ApiRootView, ApiRootView,
ApiV2RootView, ApiV2RootView,
@@ -33,9 +30,7 @@ from awx.api.views import (
OAuth2ApplicationDetail, OAuth2ApplicationDetail,
) )
from awx.api.views.metrics import ( from awx.api.views.metrics import MetricsView
MetricsView,
)
from .organization import urls as organization_urls from .organization import urls as organization_urls
from .user import urls as user_urls from .user import urls as user_urls
@@ -146,17 +141,11 @@ app_name = 'api'
urlpatterns = [ urlpatterns = [
url(r'^$', ApiRootView.as_view(), name='api_root_view'), url(r'^$', ApiRootView.as_view(), name='api_root_view'),
url(r'^(?P<version>(v2))/', include(v2_urls)), url(r'^(?P<version>(v2))/', include(v2_urls)),
url(r'^login/$', LoggedLoginView.as_view( url(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
template_name='rest_framework/login.html', url(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
extra_context={'inside_login_context': True}
), name='login'),
url(r'^logout/$', LoggedLogoutView.as_view(
next_page='/api/', redirect_field_name='next'
), name='logout'),
url(r'^o/', include(oauth2_root_urls)), url(r'^o/', include(oauth2_root_urls)),
] ]
if settings.SETTINGS_MODULE == 'awx.settings.development': if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import SwaggerSchemaView from awx.api.swagger import SwaggerSchemaView
urlpatterns += [
url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'), urlpatterns += [url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]
]

View File

@@ -20,7 +20,7 @@ from awx.api.views import (
UserAuthorizedTokenList, UserAuthorizedTokenList,
) )
urls = [ urls = [
url(r'^$', UserList.as_view(), name='user_list'), url(r'^$', UserList.as_view(), name='user_list'),
url(r'^(?P<pk>[0-9]+)/$', UserDetail.as_view(), name='user_detail'), url(r'^(?P<pk>[0-9]+)/$', UserDetail.as_view(), name='user_detail'),
url(r'^(?P<pk>[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'), url(r'^(?P<pk>[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'),
@@ -35,7 +35,6 @@ urls = [
url(r'^(?P<pk>[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'), url(r'^(?P<pk>[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'),
url(r'^(?P<pk>[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'), url(r'^(?P<pk>[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'),
url(r'^(?P<pk>[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'), url(r'^(?P<pk>[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'),
]
]
__all__ = ['urls'] __all__ = ['urls']

View File

@@ -1,10 +1,6 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
WebhookKeyView,
GithubWebhookReceiver,
GitlabWebhookReceiver,
)
urlpatterns = [ urlpatterns = [

View File

@@ -3,12 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny
WorkflowApprovalList,
WorkflowApprovalDetail,
WorkflowApprovalApprove,
WorkflowApprovalDeny,
)
urls = [ urls = [

View File

@@ -3,10 +3,7 @@
from django.conf.urls import url from django.conf.urls import url
from awx.api.views import ( from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList
WorkflowApprovalTemplateDetail,
WorkflowApprovalTemplateJobsList,
)
urls = [ urls = [

View File

@@ -33,14 +33,26 @@ urls = [
url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'), url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'),
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'), url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'), url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', WorkflowJobTemplateNotificationTemplatesStartedList.as_view(), url(
name='workflow_job_template_notification_templates_started_list'), r'^(?P<pk>[0-9]+)/notification_templates_started/$',
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(), WorkflowJobTemplateNotificationTemplatesStartedList.as_view(),
name='workflow_job_template_notification_templates_error_list'), name='workflow_job_template_notification_templates_started_list',
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(), ),
name='workflow_job_template_notification_templates_success_list'), url(
url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(), r'^(?P<pk>[0-9]+)/notification_templates_error/$',
name='workflow_job_template_notification_templates_approvals_list'), WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
name='workflow_job_template_notification_templates_error_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_success/$',
WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
name='workflow_job_template_notification_templates_success_list',
),
url(
r'^(?P<pk>[0-9]+)/notification_templates_approvals/$',
WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
name='workflow_job_template_notification_templates_approvals_list',
),
url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'), url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'), url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'), url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'),

View File

@@ -40,13 +40,10 @@ def reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra
class URLPathVersioning(BaseVersioning): class URLPathVersioning(BaseVersioning):
def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra): def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra):
if request.version is not None: if request.version is not None:
kwargs = {} if (kwargs is None) else kwargs kwargs = {} if (kwargs is None) else kwargs
kwargs[self.version_param] = request.version kwargs[self.version_param] = request.version
request = None request = None
return super(BaseVersioning, self).reverse( return super(BaseVersioning, self).reverse(viewname, args, kwargs, request, format, **extra)
viewname, args, kwargs, request, format, **extra
)

File diff suppressed because it is too large Load Diff

View File

@@ -28,14 +28,7 @@ from awx.main.models import (
InventorySource, InventorySource,
CustomInventoryScript, CustomInventoryScript,
) )
from awx.api.generics import ( from awx.api.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView, SubListAPIView, SubListAttachDetachAPIView, ResourceAccessList, CopyAPIView
ListCreateAPIView,
RetrieveUpdateDestroyAPIView,
SubListAPIView,
SubListAttachDetachAPIView,
ResourceAccessList,
CopyAPIView,
)
from awx.api.serializers import ( from awx.api.serializers import (
InventorySerializer, InventorySerializer,
@@ -46,10 +39,7 @@ from awx.api.serializers import (
CustomInventoryScriptSerializer, CustomInventoryScriptSerializer,
JobTemplateSerializer, JobTemplateSerializer,
) )
from awx.api.views.mixin import ( from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, ControlledByScmMixin
RelatedJobsPreventDeleteMixin,
ControlledByScmMixin,
)
logger = logging.getLogger('awx.api.views.organization') logger = logging.getLogger('awx.api.views.organization')
@@ -101,7 +91,7 @@ class InventoryScriptObjectRolesList(SubListAPIView):
model = Role model = Role
serializer_class = RoleSerializer serializer_class = RoleSerializer
parent_model = CustomInventoryScript parent_model = CustomInventoryScript
search_fields = ('role_field', 'content_type__model',) search_fields = ('role_field', 'content_type__model')
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()
@@ -134,8 +124,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, Retri
# Do not allow changes to an Inventory kind. # Do not allow changes to an Inventory kind.
if kind is not None and obj.kind != kind: if kind is not None and obj.kind != kind:
return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED)
status=status.HTTP_405_METHOD_NOT_ALLOWED)
return super(InventoryDetail, self).update(request, *args, **kwargs) return super(InventoryDetail, self).update(request, *args, **kwargs)
def destroy(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs):
@@ -175,7 +164,7 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
class InventoryAccessList(ResourceAccessList): class InventoryAccessList(ResourceAccessList):
model = User # needs to be User for AccessLists's model = User # needs to be User for AccessLists's
parent_model = Inventory parent_model = Inventory
@@ -184,7 +173,7 @@ class InventoryObjectRolesList(SubListAPIView):
model = Role model = Role
serializer_class = RoleSerializer serializer_class = RoleSerializer
parent_model = Inventory parent_model = Inventory
search_fields = ('role_field', 'content_type__model',) search_fields = ('role_field', 'content_type__model')
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()

View File

@@ -17,9 +17,7 @@ from rest_framework.exceptions import PermissionDenied
from awx.main.analytics.metrics import metrics from awx.main.analytics.metrics import metrics
from awx.api import renderers from awx.api import renderers
from awx.api.generics import ( from awx.api.generics import APIView
APIView,
)
logger = logging.getLogger('awx.analytics') logger = logging.getLogger('awx.analytics')
@@ -30,13 +28,10 @@ class MetricsView(APIView):
name = _('Metrics') name = _('Metrics')
swagger_topic = 'Metrics' swagger_topic = 'Metrics'
renderer_classes = [renderers.PlainTextRenderer, renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]
renderers.PrometheusJSONRenderer,
renderers.BrowsableAPIRenderer,]
def get(self, request): def get(self, request):
''' Show Metrics Details ''' ''' Show Metrics Details '''
if (request.user.is_superuser or request.user.is_system_auditor): if request.user.is_superuser or request.user.is_system_auditor:
return Response(metrics().decode('UTF-8')) return Response(metrics().decode('UTF-8'))
raise PermissionDenied() raise PermissionDenied()

View File

@@ -16,14 +16,8 @@ from rest_framework.response import Response
from rest_framework import status from rest_framework import status
from awx.main.constants import ACTIVE_STATES from awx.main.constants import ACTIVE_STATES
from awx.main.utils import ( from awx.main.utils import get_object_or_400, parse_yaml_or_json
get_object_or_400, from awx.main.models.ha import Instance, InstanceGroup
parse_yaml_or_json,
)
from awx.main.models.ha import (
Instance,
InstanceGroup,
)
from awx.main.models.organization import Team from awx.main.models.organization import Team
from awx.main.models.projects import Project from awx.main.models.projects import Project
from awx.main.models.inventory import Inventory from awx.main.models.inventory import Inventory
@@ -34,9 +28,10 @@ logger = logging.getLogger('awx.api.views.mixin')
class UnifiedJobDeletionMixin(object): class UnifiedJobDeletionMixin(object):
''' """
Special handling when deleting a running unified job object. Special handling when deleting a running unified job object.
''' """
def destroy(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs):
obj = self.get_object() obj = self.get_object()
if not request.user.can_access(self.model, 'delete', obj): if not request.user.can_access(self.model, 'delete', obj):
@@ -53,22 +48,21 @@ class UnifiedJobDeletionMixin(object):
# Prohibit deletion if job events are still coming in # Prohibit deletion if job events are still coming in
if obj.finished and now() < obj.finished + dateutil.relativedelta.relativedelta(minutes=1): if obj.finished and now() < obj.finished + dateutil.relativedelta.relativedelta(minutes=1):
# less than 1 minute has passed since job finished and events are not in # less than 1 minute has passed since job finished and events are not in
return Response({"error": _("Job has not finished processing events.")}, return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
status=status.HTTP_400_BAD_REQUEST)
else: else:
# if it has been > 1 minute, events are probably lost # if it has been > 1 minute, events are probably lost
logger.warning('Allowing deletion of {} through the API without all events ' logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
'processed.'.format(obj.log_format))
obj.delete() obj.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
class InstanceGroupMembershipMixin(object): class InstanceGroupMembershipMixin(object):
''' """
This mixin overloads attach/detach so that it calls InstanceGroup.save(), This mixin overloads attach/detach so that it calls InstanceGroup.save(),
triggering a background recalculation of policy-based instance group triggering a background recalculation of policy-based instance group
membership. membership.
''' """
def attach(self, request, *args, **kwargs): def attach(self, request, *args, **kwargs):
response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs) response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs)
sub_id, res = self.attach_validate(request) sub_id, res = self.attach_validate(request)
@@ -84,9 +78,7 @@ class InstanceGroupMembershipMixin(object):
ig_obj = get_object_or_400(ig_qs, pk=sub_id) ig_obj = get_object_or_400(ig_qs, pk=sub_id)
else: else:
# similar to get_parent_object, but selected for update # similar to get_parent_object, but selected for update
parent_filter = { parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
self.lookup_field: self.kwargs.get(self.lookup_field, None),
}
ig_obj = get_object_or_404(ig_qs, **parent_filter) ig_obj = get_object_or_404(ig_qs, **parent_filter)
if inst_name not in ig_obj.policy_instance_list: if inst_name not in ig_obj.policy_instance_list:
ig_obj.policy_instance_list.append(inst_name) ig_obj.policy_instance_list.append(inst_name)
@@ -126,9 +118,7 @@ class InstanceGroupMembershipMixin(object):
ig_obj = get_object_or_400(ig_qs, pk=sub_id) ig_obj = get_object_or_400(ig_qs, pk=sub_id)
else: else:
# similar to get_parent_object, but selected for update # similar to get_parent_object, but selected for update
parent_filter = { parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
self.lookup_field: self.kwargs.get(self.lookup_field, None),
}
ig_obj = get_object_or_404(ig_qs, **parent_filter) ig_obj = get_object_or_404(ig_qs, **parent_filter)
if inst_name in ig_obj.policy_instance_list: if inst_name in ig_obj.policy_instance_list:
ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name)) ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
@@ -146,16 +136,13 @@ class RelatedJobsPreventDeleteMixin(object):
if len(active_jobs) > 0: if len(active_jobs) > 0:
raise ActiveJobConflict(active_jobs) raise ActiveJobConflict(active_jobs)
time_cutoff = now() - dateutil.relativedelta.relativedelta(minutes=1) time_cutoff = now() - dateutil.relativedelta.relativedelta(minutes=1)
recent_jobs = obj._get_related_jobs().filter(finished__gte = time_cutoff) recent_jobs = obj._get_related_jobs().filter(finished__gte=time_cutoff)
for unified_job in recent_jobs.get_real_instances(): for unified_job in recent_jobs.get_real_instances():
if not unified_job.event_processing_finished: if not unified_job.event_processing_finished:
raise PermissionDenied(_( raise PermissionDenied(_('Related job {} is still processing events.').format(unified_job.log_format))
'Related job {} is still processing events.'
).format(unified_job.log_format))
class OrganizationCountsMixin(object): class OrganizationCountsMixin(object):
def get_serializer_context(self, *args, **kwargs): def get_serializer_context(self, *args, **kwargs):
full_context = super(OrganizationCountsMixin, self).get_serializer_context(*args, **kwargs) full_context = super(OrganizationCountsMixin, self).get_serializer_context(*args, **kwargs)
@@ -177,26 +164,23 @@ class OrganizationCountsMixin(object):
# Produce counts of Foreign Key relationships # Produce counts of Foreign Key relationships
db_results['inventories'] = inv_qs.values('organization').annotate(Count('organization')).order_by('organization') db_results['inventories'] = inv_qs.values('organization').annotate(Count('organization')).order_by('organization')
db_results['teams'] = Team.accessible_objects( db_results['teams'] = (
self.request.user, 'read_role').values('organization').annotate( Team.accessible_objects(self.request.user, 'read_role').values('organization').annotate(Count('organization')).order_by('organization')
Count('organization')).order_by('organization') )
db_results['job_templates'] = jt_qs.values('organization').annotate(Count('organization')).order_by('organization') db_results['job_templates'] = jt_qs.values('organization').annotate(Count('organization')).order_by('organization')
db_results['projects'] = project_qs.values('organization').annotate(Count('organization')).order_by('organization') db_results['projects'] = project_qs.values('organization').annotate(Count('organization')).order_by('organization')
# Other members and admins of organization are always viewable # Other members and admins of organization are always viewable
db_results['users'] = org_qs.annotate( db_results['users'] = org_qs.annotate(users=Count('member_role__members', distinct=True), admins=Count('admin_role__members', distinct=True)).values(
users=Count('member_role__members', distinct=True), 'id', 'users', 'admins'
admins=Count('admin_role__members', distinct=True) )
).values('id', 'users', 'admins')
count_context = {} count_context = {}
for org in org_id_list: for org in org_id_list:
org_id = org['id'] org_id = org['id']
count_context[org_id] = { count_context[org_id] = {'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0, 'admins': 0, 'projects': 0}
'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0,
'admins': 0, 'projects': 0}
for res, count_qs in db_results.items(): for res, count_qs in db_results.items():
if res == 'users': if res == 'users':
@@ -218,21 +202,20 @@ class OrganizationCountsMixin(object):
class ControlledByScmMixin(object): class ControlledByScmMixin(object):
''' """
Special method to reset SCM inventory commit hash Special method to reset SCM inventory commit hash
if anything that it manages changes. if anything that it manages changes.
''' """
def _reset_inv_src_rev(self, obj): def _reset_inv_src_rev(self, obj):
if self.request.method in SAFE_METHODS or not obj: if self.request.method in SAFE_METHODS or not obj:
return return
project_following_sources = obj.inventory_sources.filter( project_following_sources = obj.inventory_sources.filter(update_on_project_update=True, source='scm')
update_on_project_update=True, source='scm')
if project_following_sources: if project_following_sources:
# Allow inventory changes unrelated to variables # Allow inventory changes unrelated to variables
if self.model == Inventory and ( if self.model == Inventory and (
not self.request or not self.request.data or not self.request or not self.request.data or parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)
parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)): ):
return return
project_following_sources.update(scm_last_revision='') project_following_sources.update(scm_last_revision='')

View File

@@ -24,7 +24,7 @@ from awx.main.models import (
User, User,
Team, Team,
InstanceGroup, InstanceGroup,
Credential Credential,
) )
from awx.api.generics import ( from awx.api.generics import (
ListCreateAPIView, ListCreateAPIView,
@@ -47,13 +47,12 @@ from awx.api.serializers import (
NotificationTemplateSerializer, NotificationTemplateSerializer,
InstanceGroupSerializer, InstanceGroupSerializer,
ExecutionEnvironmentSerializer, ExecutionEnvironmentSerializer,
ProjectSerializer, JobTemplateSerializer, WorkflowJobTemplateSerializer, ProjectSerializer,
CredentialSerializer JobTemplateSerializer,
) WorkflowJobTemplateSerializer,
from awx.api.views.mixin import ( CredentialSerializer,
RelatedJobsPreventDeleteMixin,
OrganizationCountsMixin,
) )
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin
logger = logging.getLogger('awx.api.views.organization') logger = logging.getLogger('awx.api.views.organization')
@@ -84,23 +83,20 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI
org_counts = {} org_counts = {}
access_kwargs = {'accessor': self.request.user, 'role_field': 'read_role'} access_kwargs = {'accessor': self.request.user, 'role_field': 'read_role'}
direct_counts = Organization.objects.filter(id=org_id).annotate( direct_counts = (
users=Count('member_role__members', distinct=True), Organization.objects.filter(id=org_id)
admins=Count('admin_role__members', distinct=True) .annotate(users=Count('member_role__members', distinct=True), admins=Count('admin_role__members', distinct=True))
).values('users', 'admins') .values('users', 'admins')
)
if not direct_counts: if not direct_counts:
return full_context return full_context
org_counts = direct_counts[0] org_counts = direct_counts[0]
org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter( org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
organization__id=org_id).count() org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter( org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
organization__id=org_id).count() org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
organization__id=org_id).count()
org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
organization__id=org_id).count()
org_counts['hosts'] = Host.objects.org_active_count(org_id) org_counts['hosts'] = Host.objects.org_active_count(org_id)
full_context['related_field_counts'] = {} full_context['related_field_counts'] = {}
@@ -240,14 +236,12 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
def is_valid_relation(self, parent, sub, created=False): def is_valid_relation(self, parent, sub, created=False):
if sub.kind != 'galaxy_api_token': if sub.kind != 'galaxy_api_token':
return {'msg': _( return {'msg': _(f"Credential must be a Galaxy credential, not {sub.credential_type.name}.")}
f"Credential must be a Galaxy credential, not {sub.credential_type.name}."
)}
class OrganizationAccessList(ResourceAccessList): class OrganizationAccessList(ResourceAccessList):
model = User # needs to be User for AccessLists's model = User # needs to be User for AccessLists's
parent_model = Organization parent_model = Organization
@@ -256,7 +250,7 @@ class OrganizationObjectRolesList(SubListAPIView):
model = Role model = Role
serializer_class = RoleSerializer serializer_class = RoleSerializer
parent_model = Organization parent_model = Organization
search_fields = ('role_field', 'content_type__model',) search_fields = ('role_field', 'content_type__model')
def get_queryset(self): def get_queryset(self):
po = self.get_parent_object() po = self.get_parent_object()

View File

@@ -24,22 +24,11 @@ from awx.api.generics import APIView
from awx.conf.registry import settings_registry from awx.conf.registry import settings_registry
from awx.main.analytics import all_collectors from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment from awx.main.ha import is_ha_environment
from awx.main.utils import ( from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, to_python_boolean
get_awx_version,
get_ansible_version,
get_custom_venv_choices,
to_python_boolean,
)
from awx.main.utils.licensing import validate_entitlement_manifest from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import reverse, drf_reverse from awx.api.versioning import reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import ( from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
Project,
Organization,
Instance,
InstanceGroup,
JobTemplate,
)
from awx.main.utils import set_environ from awx.main.utils import set_environ
logger = logging.getLogger('awx.api.views.root') logger = logging.getLogger('awx.api.views.root')
@@ -60,7 +49,7 @@ class ApiRootView(APIView):
data = OrderedDict() data = OrderedDict()
data['description'] = _('AWX REST API') data['description'] = _('AWX REST API')
data['current_version'] = v2 data['current_version'] = v2
data['available_versions'] = dict(v2 = v2) data['available_versions'] = dict(v2=v2)
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view') data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
data['custom_logo'] = settings.CUSTOM_LOGO data['custom_logo'] = settings.CUSTOM_LOGO
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
@@ -146,6 +135,7 @@ class ApiV2PingView(APIView):
"""A simple view that reports very basic information about this """A simple view that reports very basic information about this
instance, which is acceptable to be public information. instance, which is acceptable to be public information.
""" """
permission_classes = (AllowAny,) permission_classes = (AllowAny,)
authentication_classes = () authentication_classes = ()
name = _('Ping') name = _('Ping')
@@ -157,23 +147,19 @@ class ApiV2PingView(APIView):
Everything returned here should be considered public / insecure, as Everything returned here should be considered public / insecure, as
this requires no auth and is intended for use by the installer process. this requires no auth and is intended for use by the installer process.
""" """
response = { response = {'ha': is_ha_environment(), 'version': get_awx_version(), 'active_node': settings.CLUSTER_HOST_ID, 'install_uuid': settings.INSTALL_UUID}
'ha': is_ha_environment(),
'version': get_awx_version(),
'active_node': settings.CLUSTER_HOST_ID,
'install_uuid': settings.INSTALL_UUID,
}
response['instances'] = [] response['instances'] = []
for instance in Instance.objects.all(): for instance in Instance.objects.all():
response['instances'].append(dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified, response['instances'].append(
capacity=instance.capacity, version=instance.version)) dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified, capacity=instance.capacity, version=instance.version)
)
sorted(response['instances'], key=operator.itemgetter('node')) sorted(response['instances'], key=operator.itemgetter('node'))
response['instance_groups'] = [] response['instance_groups'] = []
for instance_group in InstanceGroup.objects.prefetch_related('instances'): for instance_group in InstanceGroup.objects.prefetch_related('instances'):
response['instance_groups'].append(dict(name=instance_group.name, response['instance_groups'].append(
capacity=instance_group.capacity, dict(name=instance_group.name, capacity=instance_group.capacity, instances=[x.hostname for x in instance_group.instances.all()])
instances=[x.hostname for x in instance_group.instances.all()])) )
return Response(response) return Response(response)
@@ -190,6 +176,7 @@ class ApiV2SubscriptionView(APIView):
def post(self, request): def post(self, request):
from awx.main.utils.common import get_licenser from awx.main.utils.common import get_licenser
data = request.data.copy() data = request.data.copy()
if data.get('subscriptions_password') == '$encrypted$': if data.get('subscriptions_password') == '$encrypted$':
data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
@@ -203,10 +190,7 @@ class ApiV2SubscriptionView(APIView):
settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password'] settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
except Exception as exc: except Exception as exc:
msg = _("Invalid Subscription") msg = _("Invalid Subscription")
if ( if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
isinstance(exc, requests.exceptions.HTTPError) and
getattr(getattr(exc, 'response', None), 'status_code', None) == 401
):
msg = _("The provided credentials are invalid (HTTP 401).") msg = _("The provided credentials are invalid (HTTP 401).")
elif isinstance(exc, requests.exceptions.ProxyError): elif isinstance(exc, requests.exceptions.ProxyError):
msg = _("Unable to connect to proxy server.") msg = _("Unable to connect to proxy server.")
@@ -215,8 +199,7 @@ class ApiV2SubscriptionView(APIView):
elif isinstance(exc, (ValueError, OSError)) and exc.args: elif isinstance(exc, (ValueError, OSError)) and exc.args:
msg = exc.args[0] msg = exc.args[0]
else: else:
logger.exception(smart_text(u"Invalid subscription submitted."), logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
extra=dict(actor=request.user.username))
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
return Response(validated) return Response(validated)
@@ -242,16 +225,14 @@ class ApiV2AttachView(APIView):
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None) pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
if pool_id and user and pw: if pool_id and user and pw:
from awx.main.utils.common import get_licenser from awx.main.utils.common import get_licenser
data = request.data.copy() data = request.data.copy()
try: try:
with set_environ(**settings.AWX_TASK_ENV): with set_environ(**settings.AWX_TASK_ENV):
validated = get_licenser().validate_rh(user, pw) validated = get_licenser().validate_rh(user, pw)
except Exception as exc: except Exception as exc:
msg = _("Invalid Subscription") msg = _("Invalid Subscription")
if ( if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
isinstance(exc, requests.exceptions.HTTPError) and
getattr(getattr(exc, 'response', None), 'status_code', None) == 401
):
msg = _("The provided credentials are invalid (HTTP 401).") msg = _("The provided credentials are invalid (HTTP 401).")
elif isinstance(exc, requests.exceptions.ProxyError): elif isinstance(exc, requests.exceptions.ProxyError):
msg = _("Unable to connect to proxy server.") msg = _("Unable to connect to proxy server.")
@@ -260,8 +241,7 @@ class ApiV2AttachView(APIView):
elif isinstance(exc, (ValueError, OSError)) and exc.args: elif isinstance(exc, (ValueError, OSError)) and exc.args:
msg = exc.args[0] msg = exc.args[0]
else: else:
logger.exception(smart_text(u"Invalid subscription submitted."), logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
extra=dict(actor=request.user.username))
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
for sub in validated: for sub in validated:
if sub['pool_id'] == pool_id: if sub['pool_id'] == pool_id:
@@ -287,6 +267,7 @@ class ApiV2ConfigView(APIView):
'''Return various sitewide configuration settings''' '''Return various sitewide configuration settings'''
from awx.main.utils.common import get_licenser from awx.main.utils.common import get_licenser
license_data = get_licenser().validate() license_data = get_licenser().validate()
if not license_data.get('valid_key', False): if not license_data.get('valid_key', False):
@@ -314,22 +295,23 @@ class ApiV2ConfigView(APIView):
user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys()) user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
data['user_ldap_fields'] = user_ldap_fields data['user_ldap_fields'] = user_ldap_fields
if request.user.is_superuser \ if (
or request.user.is_system_auditor \ request.user.is_superuser
or Organization.accessible_objects(request.user, 'admin_role').exists() \ or request.user.is_system_auditor
or Organization.accessible_objects(request.user, 'auditor_role').exists() \ or Organization.accessible_objects(request.user, 'admin_role').exists()
or Organization.accessible_objects(request.user, 'project_admin_role').exists(): or Organization.accessible_objects(request.user, 'auditor_role').exists()
data.update(dict( or Organization.accessible_objects(request.user, 'project_admin_role').exists()
project_base_dir = settings.PROJECTS_ROOT, ):
project_local_paths = Project.get_local_path_choices(), data.update(
custom_virtualenvs = get_custom_venv_choices() dict(
)) project_base_dir=settings.PROJECTS_ROOT, project_local_paths=Project.get_local_path_choices(), custom_virtualenvs=get_custom_venv_choices()
)
)
elif JobTemplate.accessible_objects(request.user, 'admin_role').exists(): elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
data['custom_virtualenvs'] = get_custom_venv_choices() data['custom_virtualenvs'] = get_custom_venv_choices()
return Response(data) return Response(data)
def post(self, request): def post(self, request):
if not isinstance(request.data, dict): if not isinstance(request.data, dict):
return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)
@@ -346,11 +328,11 @@ class ApiV2ConfigView(APIView):
try: try:
data_actual = json.dumps(request.data) data_actual = json.dumps(request.data)
except Exception: except Exception:
logger.info(smart_text(u"Invalid JSON submitted for license."), logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username))
extra=dict(actor=request.user.username))
return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
from awx.main.utils.common import get_licenser from awx.main.utils.common import get_licenser
license_data = json.loads(data_actual) license_data = json.loads(data_actual)
if 'license_key' in license_data: if 'license_key' in license_data:
return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
@@ -358,10 +340,7 @@ class ApiV2ConfigView(APIView):
try: try:
json_actual = json.loads(base64.b64decode(license_data['manifest'])) json_actual = json.loads(base64.b64decode(license_data['manifest']))
if 'license_key' in json_actual: if 'license_key' in json_actual:
return Response( return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
{"error": _('Legacy license submitted. A subscription manifest is now required.')},
status=status.HTTP_400_BAD_REQUEST
)
except Exception: except Exception:
pass pass
try: try:
@@ -375,8 +354,7 @@ class ApiV2ConfigView(APIView):
try: try:
license_data_validated = get_licenser().license_from_manifest(license_data) license_data_validated = get_licenser().license_from_manifest(license_data)
except Exception: except Exception:
logger.warning(smart_text(u"Invalid subscription submitted."), logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
extra=dict(actor=request.user.username))
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
else: else:
license_data_validated = get_licenser().validate() license_data_validated = get_licenser().validate()
@@ -387,8 +365,7 @@ class ApiV2ConfigView(APIView):
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
return Response(license_data_validated) return Response(license_data_validated)
logger.warning(smart_text(u"Invalid subscription submitted."), logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
extra=dict(actor=request.user.username))
return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST) return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request): def delete(self, request):

View File

@@ -26,10 +26,7 @@ class WebhookKeyView(GenericAPIView):
permission_classes = (WebhookKeyPermission,) permission_classes = (WebhookKeyPermission,)
def get_queryset(self): def get_queryset(self):
qs_models = { qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
'job_templates': JobTemplate,
'workflow_job_templates': WorkflowJobTemplate,
}
self.model = qs_models.get(self.kwargs['model_kwarg']) self.model = qs_models.get(self.kwargs['model_kwarg'])
return super().get_queryset() return super().get_queryset()
@@ -57,10 +54,7 @@ class WebhookReceiverBase(APIView):
ref_keys = {} ref_keys = {}
def get_queryset(self): def get_queryset(self):
qs_models = { qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
'job_templates': JobTemplate,
'workflow_job_templates': WorkflowJobTemplate,
}
model = qs_models.get(self.kwargs['model_kwarg']) model = qs_models.get(self.kwargs['model_kwarg'])
if model is None: if model is None:
raise PermissionDenied raise PermissionDenied
@@ -120,10 +114,7 @@ class WebhookReceiverBase(APIView):
# Ensure that the full contents of the request are captured for multiple uses. # Ensure that the full contents of the request are captured for multiple uses.
request.body request.body
logger.debug( logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))
"headers: {}\n"
"data: {}\n".format(request.headers, request.data)
)
obj = self.get_object() obj = self.get_object()
self.check_signature(obj) self.check_signature(obj)
@@ -132,16 +123,11 @@ class WebhookReceiverBase(APIView):
event_ref = self.get_event_ref() event_ref = self.get_event_ref()
status_api = self.get_event_status_api() status_api = self.get_event_status_api()
kwargs = { kwargs = {'unified_job_template_id': obj.id, 'webhook_service': obj.webhook_service, 'webhook_guid': event_guid}
'unified_job_template_id': obj.id,
'webhook_service': obj.webhook_service,
'webhook_guid': event_guid,
}
if WorkflowJob.objects.filter(**kwargs).exists() or Job.objects.filter(**kwargs).exists(): if WorkflowJob.objects.filter(**kwargs).exists() or Job.objects.filter(**kwargs).exists():
# Short circuit if this webhook has already been received and acted upon. # Short circuit if this webhook has already been received and acted upon.
logger.debug("Webhook previously received, returning without action.") logger.debug("Webhook previously received, returning without action.")
return Response({'message': _("Webhook previously received, aborting.")}, return Response({'message': _("Webhook previously received, aborting.")}, status=status.HTTP_202_ACCEPTED)
status=status.HTTP_202_ACCEPTED)
kwargs = { kwargs = {
'_eager_fields': { '_eager_fields': {
@@ -156,7 +142,7 @@ class WebhookReceiverBase(APIView):
'tower_webhook_event_ref': event_ref, 'tower_webhook_event_ref': event_ref,
'tower_webhook_status_api': status_api, 'tower_webhook_status_api': status_api,
'tower_webhook_payload': request.data, 'tower_webhook_payload': request.data,
} },
} }
new_job = obj.create_unified_job(**kwargs) new_job = obj.create_unified_job(**kwargs)
@@ -205,11 +191,7 @@ class GithubWebhookReceiver(WebhookReceiverBase):
class GitlabWebhookReceiver(WebhookReceiverBase): class GitlabWebhookReceiver(WebhookReceiverBase):
service = 'gitlab' service = 'gitlab'
ref_keys = { ref_keys = {'Push Hook': 'checkout_sha', 'Tag Push Hook': 'checkout_sha', 'Merge Request Hook': 'object_attributes.last_commit.id'}
'Push Hook': 'checkout_sha',
'Tag Push Hook': 'checkout_sha',
'Merge Request Hook': 'object_attributes.last_commit.id',
}
def get_event_type(self): def get_event_type(self):
return self.request.META.get('HTTP_X_GITLAB_EVENT') return self.request.META.get('HTTP_X_GITLAB_EVENT')
@@ -229,8 +211,7 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
return return
parsed = urllib.parse.urlparse(repo_url) parsed = urllib.parse.urlparse(repo_url)
return "{}://{}/api/v4/projects/{}/statuses/{}".format( return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
def get_signature(self): def get_signature(self):
return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '') return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')

View File

@@ -4,11 +4,12 @@ import os
import logging import logging
import django import django
from awx import __version__ as tower_version from awx import __version__ as tower_version
# Prepare the AWX environment. # Prepare the AWX environment.
from awx import prepare_env, MODE from awx import prepare_env, MODE
from channels.routing import get_default_application # noqa from channels.routing import get_default_application # noqa
prepare_env() # NOQA
prepare_env() # NOQA
""" """

View File

@@ -10,12 +10,12 @@ from awx.conf.models import Setting
class SettingAccess(BaseAccess): class SettingAccess(BaseAccess):
''' """
- I can see settings when I am a super user or system auditor. - I can see settings when I am a super user or system auditor.
- I can edit settings when I am a super user. - I can edit settings when I am a super user.
- I can clear settings when I am a super user. - I can clear settings when I am a super user.
- I can always see/edit/clear my own user settings. - I can always see/edit/clear my own user settings.
''' """
model = Setting model = Setting

View File

@@ -1,5 +1,6 @@
# Django # Django
from django.apps import AppConfig from django.apps import AppConfig
# from django.core import checks # from django.core import checks
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
@@ -12,4 +13,5 @@ class ConfConfig(AppConfig):
def ready(self): def ready(self):
self.module.autodiscover() self.module.autodiscover()
from .settings import SettingsWrapper from .settings import SettingsWrapper
SettingsWrapper.initialize() SettingsWrapper.initialize()

View File

@@ -10,10 +10,7 @@ from django.core.validators import URLValidator, _lazy_re_compile
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
# Django REST Framework # Django REST Framework
from rest_framework.fields import ( # noqa from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField # noqa
BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField,
IntegerField, ListField, NullBooleanField
)
from rest_framework.serializers import PrimaryKeyRelatedField # noqa from rest_framework.serializers import PrimaryKeyRelatedField # noqa
logger = logging.getLogger('awx.conf.fields') logger = logging.getLogger('awx.conf.fields')
@@ -27,7 +24,6 @@ logger = logging.getLogger('awx.conf.fields')
class CharField(CharField): class CharField(CharField):
def to_representation(self, value): def to_representation(self, value):
# django_rest_frameworks' default CharField implementation casts `None` # django_rest_frameworks' default CharField implementation casts `None`
# to a string `"None"`: # to a string `"None"`:
@@ -39,7 +35,6 @@ class CharField(CharField):
class IntegerField(IntegerField): class IntegerField(IntegerField):
def get_value(self, dictionary): def get_value(self, dictionary):
ret = super(IntegerField, self).get_value(dictionary) ret = super(IntegerField, self).get_value(dictionary)
# Handle UI corner case # Handle UI corner case
@@ -60,9 +55,7 @@ class StringListField(ListField):
class StringListBooleanField(ListField): class StringListBooleanField(ListField):
default_error_messages = { default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.'),
}
child = CharField() child = CharField()
def to_representation(self, value): def to_representation(self, value):
@@ -101,10 +94,7 @@ class StringListBooleanField(ListField):
class StringListPathField(StringListField): class StringListPathField(StringListField):
default_error_messages = { default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}
'type_error': _('Expected list of strings but got {input_type} instead.'),
'path_error': _('{path} is not a valid path choice.'),
}
def to_internal_value(self, paths): def to_internal_value(self, paths):
if isinstance(paths, (list, tuple)): if isinstance(paths, (list, tuple)):
@@ -123,12 +113,12 @@ class URLField(CharField):
# these lines set up a custom regex that allow numbers in the # these lines set up a custom regex that allow numbers in the
# top-level domain # top-level domain
tld_re = ( tld_re = (
r'\.' # dot r'\.' # dot
r'(?!-)' # can't start with a dash r'(?!-)' # can't start with a dash
r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
r'|xn--[a-z0-9]{1,59})' # or punycode label r'|xn--[a-z0-9]{1,59})' # or punycode label
r'(?<!-)' # can't end with a dash r'(?<!-)' # can't end with a dash
r'\.?' # may have a trailing dot r'\.?' # may have a trailing dot
) )
host_re = '(' + URLValidator.hostname_re + URLValidator.domain_re + tld_re + '|localhost)' host_re = '(' + URLValidator.hostname_re + URLValidator.domain_re + tld_re + '|localhost)'
@@ -139,7 +129,9 @@ class URLField(CharField):
r'(?:' + URLValidator.ipv4_re + '|' + URLValidator.ipv6_re + '|' + host_re + ')' r'(?:' + URLValidator.ipv4_re + '|' + URLValidator.ipv6_re + '|' + host_re + ')'
r'(?::\d{2,5})?' # port r'(?::\d{2,5})?' # port
r'(?:[/?#][^\s]*)?' # resource path r'(?:[/?#][^\s]*)?' # resource path
r'\Z', re.IGNORECASE) r'\Z',
re.IGNORECASE,
)
def __init__(self, **kwargs): def __init__(self, **kwargs):
schemes = kwargs.pop('schemes', None) schemes = kwargs.pop('schemes', None)
@@ -184,9 +176,7 @@ class URLField(CharField):
class KeyValueField(DictField): class KeyValueField(DictField):
child = CharField() child = CharField()
default_error_messages = { default_error_messages = {'invalid_child': _('"{input}" is not a valid string.')}
'invalid_child': _('"{input}" is not a valid string.')
}
def to_internal_value(self, data): def to_internal_value(self, data):
ret = super(KeyValueField, self).to_internal_value(data) ret = super(KeyValueField, self).to_internal_value(data)
@@ -199,9 +189,7 @@ class KeyValueField(DictField):
class ListTuplesField(ListField): class ListTuplesField(ListField):
default_error_messages = { default_error_messages = {'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.')}
'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.'),
}
def to_representation(self, value): def to_representation(self, value):
if isinstance(value, (list, tuple)): if isinstance(value, (list, tuple)):

View File

@@ -6,6 +6,7 @@ __all__ = ['get_license']
def _get_validated_license_data(): def _get_validated_license_data():
from awx.main.utils import get_licenser from awx.main.utils import get_licenser
return get_licenser().validate() return get_licenser().validate()

View File

@@ -8,9 +8,7 @@ from django.conf import settings
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [ operations = [
migrations.CreateModel( migrations.CreateModel(
@@ -21,11 +19,11 @@ class Migration(migrations.Migration):
('modified', models.DateTimeField(default=None, editable=False)), ('modified', models.DateTimeField(default=None, editable=False)),
('key', models.CharField(max_length=255)), ('key', models.CharField(max_length=255)),
('value', jsonfield.fields.JSONField(null=True)), ('value', jsonfield.fields.JSONField(null=True)),
('user', models.ForeignKey(related_name='settings', default=None, editable=False, (
to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)), 'user',
models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True),
),
], ],
options={ options={'abstract': False},
'abstract': False, )
},
),
] ]

View File

@@ -15,11 +15,7 @@ def copy_tower_settings(apps, schema_editor):
if tower_setting.key == 'LICENSE': if tower_setting.key == 'LICENSE':
value = json.loads(value) value = json.loads(value)
setting, created = Setting.objects.get_or_create( setting, created = Setting.objects.get_or_create(
key=tower_setting.key, key=tower_setting.key, user=tower_setting.user, created=tower_setting.created, modified=tower_setting.modified, defaults=dict(value=value)
user=tower_setting.user,
created=tower_setting.created,
modified=tower_setting.modified,
defaults=dict(value=value),
) )
if not created and setting.value != value: if not created and setting.value != value:
setting.value = value setting.value = value
@@ -36,18 +32,9 @@ def revert_tower_settings(apps, schema_editor):
# LICENSE is stored as a JSON object; convert it back to a string. # LICENSE is stored as a JSON object; convert it back to a string.
if setting.key == 'LICENSE': if setting.key == 'LICENSE':
value = json.dumps(value) value = json.dumps(value)
defaults = dict( defaults = dict(value=value, value_type='string', description='', category='')
value=value,
value_type='string',
description='',
category='',
)
try: try:
tower_setting, created = TowerSettings.objects.get_or_create( tower_setting, created = TowerSettings.objects.get_or_create(key=setting.key, user=setting.user, defaults=defaults)
key=setting.key,
user=setting.user,
defaults=defaults,
)
if not created: if not created:
update_fields = [] update_fields = []
for k, v in defaults.items(): for k, v in defaults.items():
@@ -62,15 +49,8 @@ def revert_tower_settings(apps, schema_editor):
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]
('conf', '0001_initial'),
('main', '0004_squashed_v310_release'),
]
run_before = [ run_before = [('main', '0005_squashed_v310_v313_updates')]
('main', '0005_squashed_v310_v313_updates'),
]
operations = [ operations = [migrations.RunPython(copy_tower_settings, revert_tower_settings)]
migrations.RunPython(copy_tower_settings, revert_tower_settings),
]

View File

@@ -7,14 +7,6 @@ import awx.main.fields
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [('conf', '0002_v310_copy_tower_settings')]
('conf', '0002_v310_copy_tower_settings'),
]
operations = [ operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONField(null=True))]
migrations.AlterField(
model_name='setting',
name='value',
field=awx.main.fields.JSONField(null=True),
),
]

View File

@@ -6,9 +6,7 @@ from django.db import migrations
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [('conf', '0003_v310_JSONField_changes')]
('conf', '0003_v310_JSONField_changes'),
]
operations = [ operations = [
# This list is intentionally empty. # This list is intentionally empty.

View File

@@ -2,8 +2,8 @@
from __future__ import unicode_literals from __future__ import unicode_literals
from django.db import migrations from django.db import migrations
from awx.conf.migrations import _rename_setting from awx.conf.migrations import _rename_setting
def copy_session_settings(apps, schema_editor): def copy_session_settings(apps, schema_editor):
_rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_PER_USER', new_key='SESSIONS_PER_USER') _rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_PER_USER', new_key='SESSIONS_PER_USER')
_rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_EXPIRATION', new_key='SESSION_COOKIE_AGE') _rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_EXPIRATION', new_key='SESSION_COOKIE_AGE')
@@ -16,11 +16,6 @@ def reverse_copy_session_settings(apps, schema_editor):
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [('conf', '0004_v320_reencrypt')]
('conf', '0004_v320_reencrypt'),
]
operations = [
migrations.RunPython(copy_session_settings, reverse_copy_session_settings),
]
operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]

View File

@@ -9,10 +9,6 @@ from django.db import migrations
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [('conf', '0005_v330_rename_two_session_settings')]
('conf', '0005_v330_rename_two_session_settings'),
]
operations = [ operations = [migrations.RunPython(fill_ldap_group_type_params)]
migrations.RunPython(fill_ldap_group_type_params),
]

View File

@@ -10,10 +10,6 @@ def copy_allowed_ips(apps, schema_editor):
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [('conf', '0006_v331_ldap_group_type')]
('conf', '0006_v331_ldap_group_type'),
]
operations = [ operations = [migrations.RunPython(copy_allowed_ips)]
migrations.RunPython(copy_allowed_ips),
]

View File

@@ -15,12 +15,6 @@ def _noop(apps, schema_editor):
class Migration(migrations.Migration): class Migration(migrations.Migration):
dependencies = [ dependencies = [('conf', '0007_v380_rename_more_settings')]
('conf', '0007_v380_rename_more_settings'),
]
operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]
operations = [
migrations.RunPython(clear_old_license, _noop),
migrations.RunPython(prefill_rh_credentials, _noop)
]

View File

@@ -1,4 +1,3 @@
import inspect import inspect
from django.conf import settings from django.conf import settings
@@ -16,10 +15,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
entry = qs[0] entry = qs[0]
group_type_params = entry.value group_type_params = entry.value
else: else:
entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())
value=group_type_params,
created=now(),
modified=now())
init_attrs = set(inspect.getargspec(group_type.__init__).args[1:]) init_attrs = set(inspect.getargspec(group_type.__init__).args[1:])
for k in list(group_type_params.keys()): for k in list(group_type_params.keys()):

View File

@@ -11,15 +11,16 @@ __all__ = ['get_encryption_key', 'decrypt_field']
def get_encryption_key(field_name, pk=None): def get_encryption_key(field_name, pk=None):
''' """
Generate key for encrypted password based on field name, Generate key for encrypted password based on field name,
``settings.SECRET_KEY``, and instance pk (if available). ``settings.SECRET_KEY``, and instance pk (if available).
:param pk: (optional) the primary key of the ``awx.conf.model.Setting``; :param pk: (optional) the primary key of the ``awx.conf.model.Setting``;
can be omitted in situations where you're encrypting a setting can be omitted in situations where you're encrypting a setting
that is not database-persistent (like a read-only setting) that is not database-persistent (like a read-only setting)
''' """
from django.conf import settings from django.conf import settings
h = hashlib.sha1() h = hashlib.sha1()
h.update(settings.SECRET_KEY) h.update(settings.SECRET_KEY)
if pk is not None: if pk is not None:
@@ -29,11 +30,11 @@ def get_encryption_key(field_name, pk=None):
def decrypt_value(encryption_key, value): def decrypt_value(encryption_key, value):
raw_data = value[len('$encrypted$'):] raw_data = value[len('$encrypted$') :]
# If the encrypted string contains a UTF8 marker, discard it # If the encrypted string contains a UTF8 marker, discard it
utf8 = raw_data.startswith('UTF8$') utf8 = raw_data.startswith('UTF8$')
if utf8: if utf8:
raw_data = raw_data[len('UTF8$'):] raw_data = raw_data[len('UTF8$') :]
algo, b64data = raw_data.split('$', 1) algo, b64data = raw_data.split('$', 1)
if algo != 'AES': if algo != 'AES':
raise ValueError('unsupported algorithm: %s' % algo) raise ValueError('unsupported algorithm: %s' % algo)
@@ -48,9 +49,9 @@ def decrypt_value(encryption_key, value):
def decrypt_field(instance, field_name, subfield=None): def decrypt_field(instance, field_name, subfield=None):
''' """
Return content of the given instance and field name decrypted. Return content of the given instance and field name decrypted.
''' """
value = getattr(instance, field_name) value = getattr(instance, field_name)
if isinstance(value, dict) and subfield is not None: if isinstance(value, dict) and subfield is not None:
value = value[subfield] value = value[subfield]

View File

@@ -6,11 +6,11 @@ from django.conf import settings
logger = logging.getLogger('awx.conf.settings') logger = logging.getLogger('awx.conf.settings')
__all__ = ['rename_setting'] __all__ = ['rename_setting']
def rename_setting(apps, schema_editor, old_key, new_key): def rename_setting(apps, schema_editor, old_key, new_key):
old_setting = None old_setting = None
Setting = apps.get_model('conf', 'Setting') Setting = apps.get_model('conf', 'Setting')
if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key): if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):
@@ -24,9 +24,4 @@ def rename_setting(apps, schema_editor, old_key, new_key):
if hasattr(settings, old_key): if hasattr(settings, old_key):
old_setting = getattr(settings, old_key) old_setting = getattr(settings, old_key)
if old_setting is not None: if old_setting is not None:
Setting.objects.create(key=new_key, Setting.objects.create(key=new_key, value=old_setting, created=now(), modified=now())
value=old_setting,
created=now(),
modified=now()
)

View File

@@ -6,7 +6,7 @@ from awx.main.utils.encryption import decrypt_field, encrypt_field
logger = logging.getLogger('awx.conf.settings') logger = logging.getLogger('awx.conf.settings')
__all__ = ['clear_old_license', 'prefill_rh_credentials'] __all__ = ['clear_old_license', 'prefill_rh_credentials']
def clear_old_license(apps, schema_editor): def clear_old_license(apps, schema_editor):
Setting = apps.get_model('conf', 'Setting') Setting = apps.get_model('conf', 'Setting')
@@ -17,10 +17,7 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):
Setting = apps.get_model('conf', 'Setting') Setting = apps.get_model('conf', 'Setting')
if not Setting.objects.filter(key=old_key).exists(): if not Setting.objects.filter(key=old_key).exists():
return return
new_setting = Setting.objects.create(key=new_key, new_setting = Setting.objects.create(key=new_key, created=now(), modified=now())
created=now(),
modified=now()
)
if encrypted: if encrypted:
new_setting.value = decrypt_field(Setting.objects.filter(key=old_key).first(), 'value') new_setting.value = decrypt_field(Setting.objects.filter(key=old_key).first(), 'value')
new_setting.value = encrypt_field(new_setting, 'value') new_setting.value = encrypt_field(new_setting, 'value')

View File

@@ -18,20 +18,9 @@ __all__ = ['Setting']
class Setting(CreatedModifiedModel): class Setting(CreatedModifiedModel):
key = models.CharField( key = models.CharField(max_length=255)
max_length=255, value = JSONField(null=True)
) user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))
value = JSONField(
null=True,
)
user = prevent_search(models.ForeignKey(
'auth.User',
related_name='settings',
default=None,
null=True,
editable=False,
on_delete=models.CASCADE,
))
def __str__(self): def __str__(self):
try: try:
@@ -66,6 +55,7 @@ class Setting(CreatedModifiedModel):
# field and save again. # field and save again.
if encrypted and new_instance: if encrypted and new_instance:
from awx.main.signals import disable_activity_stream from awx.main.signals import disable_activity_stream
with disable_activity_stream(): with disable_activity_stream():
self.value = self._saved_value self.value = self._saved_value
self.save(update_fields=['value']) self.save(update_fields=['value'])
@@ -82,6 +72,7 @@ class Setting(CreatedModifiedModel):
import awx.conf.signals # noqa import awx.conf.signals # noqa
from awx.main.registrar import activity_stream_registrar # noqa from awx.main.registrar import activity_stream_registrar # noqa
activity_stream_registrar.connect(Setting) activity_stream_registrar.connect(Setting)
import awx.conf.access # noqa import awx.conf.access # noqa

View File

@@ -69,10 +69,7 @@ class SettingsRegistry(object):
return self._dependent_settings.get(setting, set()) return self._dependent_settings.get(setting, set())
def get_registered_categories(self): def get_registered_categories(self):
categories = { categories = {'all': _('All'), 'changed': _('Changed')}
'all': _('All'),
'changed': _('Changed'),
}
for setting, kwargs in self._registry.items(): for setting, kwargs in self._registry.items():
category_slug = kwargs.get('category_slug', None) category_slug = kwargs.get('category_slug', None)
if category_slug is None or category_slug in categories: if category_slug is None or category_slug in categories:
@@ -95,8 +92,11 @@ class SettingsRegistry(object):
continue continue
if kwargs.get('category_slug', None) in slugs_to_ignore: if kwargs.get('category_slug', None) in slugs_to_ignore:
continue continue
if (read_only in {True, False} and kwargs.get('read_only', False) != read_only and if (
setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')): read_only in {True, False}
and kwargs.get('read_only', False) != read_only
and setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')
):
# Note: Doesn't catch fields that set read_only via __init__; # Note: Doesn't catch fields that set read_only via __init__;
# read-only field kwargs should always include read_only=True. # read-only field kwargs should always include read_only=True.
continue continue
@@ -117,6 +117,7 @@ class SettingsRegistry(object):
def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs): def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
from rest_framework.fields import empty from rest_framework.fields import empty
field_kwargs = {} field_kwargs = {}
field_kwargs.update(self._registry[setting]) field_kwargs.update(self._registry[setting])
field_kwargs.update(kwargs) field_kwargs.update(kwargs)
@@ -141,11 +142,7 @@ class SettingsRegistry(object):
field_instance.placeholder = placeholder field_instance.placeholder = placeholder
field_instance.defined_in_file = defined_in_file field_instance.defined_in_file = defined_in_file
if field_instance.defined_in_file: if field_instance.defined_in_file:
field_instance.help_text = ( field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
str(_('This value has been set manually in a settings file.')) +
'\n\n' +
str(field_instance.help_text)
)
field_instance.encrypted = encrypted field_instance.encrypted = encrypted
original_field_instance = field_instance original_field_instance = field_instance
if field_class != original_field_class: if field_class != original_field_class:

View File

@@ -30,15 +30,9 @@ class SettingSerializer(BaseSerializer):
class SettingCategorySerializer(serializers.Serializer): class SettingCategorySerializer(serializers.Serializer):
"""Serialize setting category """ """Serialize setting category """
url = serializers.CharField( url = serializers.CharField(read_only=True)
read_only=True, slug = serializers.CharField(read_only=True)
) name = serializers.CharField(read_only=True)
slug = serializers.CharField(
read_only=True,
)
name = serializers.CharField(
read_only=True,
)
class SettingFieldMixin(object): class SettingFieldMixin(object):

View File

@@ -62,12 +62,12 @@ __all__ = ['SettingsWrapper', 'get_settings_to_cache', 'SETTING_CACHE_NOTSET']
@contextlib.contextmanager @contextlib.contextmanager
def _ctit_db_wrapper(trans_safe=False): def _ctit_db_wrapper(trans_safe=False):
''' """
Wrapper to avoid undesired actions by Django ORM when managing settings Wrapper to avoid undesired actions by Django ORM when managing settings
if only getting a setting, can use trans_safe=True, which will avoid if only getting a setting, can use trans_safe=True, which will avoid
throwing errors if the prior context was a broken transaction. throwing errors if the prior context was a broken transaction.
Any database errors will be logged, but exception will be suppressed. Any database errors will be logged, but exception will be suppressed.
''' """
rollback_set = None rollback_set = None
is_atomic = None is_atomic = None
try: try:
@@ -115,7 +115,6 @@ class TransientSetting(object):
class EncryptedCacheProxy(object): class EncryptedCacheProxy(object):
def __init__(self, cache, registry, encrypter=None, decrypter=None): def __init__(self, cache, registry, encrypter=None, decrypter=None):
""" """
This proxy wraps a Django cache backend and overwrites the This proxy wraps a Django cache backend and overwrites the
@@ -145,19 +144,11 @@ class EncryptedCacheProxy(object):
def set(self, key, value, log=True, **kwargs): def set(self, key, value, log=True, **kwargs):
if log is True: if log is True:
logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value), logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value), SETTING_CACHE_TIMEOUT)
SETTING_CACHE_TIMEOUT) self.cache.set(key, self._handle_encryption(self.encrypter, key, value), **kwargs)
self.cache.set(
key,
self._handle_encryption(self.encrypter, key, value),
**kwargs
)
def set_many(self, data, **kwargs): def set_many(self, data, **kwargs):
filtered_data = dict( filtered_data = dict((key, filter_sensitive(self.registry, key, value)) for key, value in data.items())
(key, filter_sensitive(self.registry, key, value))
for key, value in data.items()
)
logger.debug('cache set_many(%r, %r)', filtered_data, SETTING_CACHE_TIMEOUT) logger.debug('cache set_many(%r, %r)', filtered_data, SETTING_CACHE_TIMEOUT)
for key, value in data.items(): for key, value in data.items():
self.set(key, value, log=False, **kwargs) self.set(key, value, log=False, **kwargs)
@@ -168,18 +159,11 @@ class EncryptedCacheProxy(object):
# as part of the AES key when encrypting/decrypting # as part of the AES key when encrypting/decrypting
obj_id = self.cache.get(Setting.get_cache_id_key(key), default=empty) obj_id = self.cache.get(Setting.get_cache_id_key(key), default=empty)
if obj_id is empty: if obj_id is empty:
logger.info('Efficiency notice: Corresponding id not stored in cache %s', logger.info('Efficiency notice: Corresponding id not stored in cache %s', Setting.get_cache_id_key(key))
Setting.get_cache_id_key(key))
obj_id = getattr(self._get_setting_from_db(key), 'pk', None) obj_id = getattr(self._get_setting_from_db(key), 'pk', None)
elif obj_id == SETTING_CACHE_NONE: elif obj_id == SETTING_CACHE_NONE:
obj_id = None obj_id = None
return method( return method(TransientSetting(pk=obj_id, value=value), 'value')
TransientSetting(
pk=obj_id,
value=value
),
'value'
)
# If the field in question isn't an "encrypted" field, this function is # If the field in question isn't an "encrypted" field, this function is
# a no-op; it just returns the provided value # a no-op; it just returns the provided value
@@ -206,9 +190,9 @@ def get_settings_to_cache(registry):
def get_cache_value(value): def get_cache_value(value):
'''Returns the proper special cache setting for a value """Returns the proper special cache setting for a value
based on instance type. based on instance type.
''' """
if value is None: if value is None:
value = SETTING_CACHE_NONE value = SETTING_CACHE_NONE
elif isinstance(value, (list, tuple)) and len(value) == 0: elif isinstance(value, (list, tuple)) and len(value) == 0:
@@ -219,7 +203,6 @@ def get_cache_value(value):
class SettingsWrapper(UserSettingsHolder): class SettingsWrapper(UserSettingsHolder):
@classmethod @classmethod
def initialize(cls, cache=None, registry=None): def initialize(cls, cache=None, registry=None):
""" """
@@ -231,11 +214,7 @@ class SettingsWrapper(UserSettingsHolder):
``awx.conf.settings_registry`` is used by default. ``awx.conf.settings_registry`` is used by default.
""" """
if not getattr(settings, '_awx_conf_settings', False): if not getattr(settings, '_awx_conf_settings', False):
settings_wrapper = cls( settings_wrapper = cls(settings._wrapped, cache=cache or django_cache, registry=registry or settings_registry)
settings._wrapped,
cache=cache or django_cache,
registry=registry or settings_registry
)
settings._wrapped = settings_wrapper settings._wrapped = settings_wrapper
def __init__(self, default_settings, cache, registry): def __init__(self, default_settings, cache, registry):
@@ -322,7 +301,7 @@ class SettingsWrapper(UserSettingsHolder):
try: try:
value = decrypt_field(setting, 'value') value = decrypt_field(setting, 'value')
except ValueError as e: except ValueError as e:
#TODO: Remove in Tower 3.3 # TODO: Remove in Tower 3.3
logger.debug('encountered error decrypting field: %s - attempting fallback to old', e) logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
value = old_decrypt_field(setting, 'value') value = old_decrypt_field(setting, 'value')
@@ -345,8 +324,7 @@ class SettingsWrapper(UserSettingsHolder):
# Generate a cache key for each setting and store them all at once. # Generate a cache key for each setting and store them all at once.
settings_to_cache = dict([(Setting.get_cache_key(k), v) for k, v in settings_to_cache.items()]) settings_to_cache = dict([(Setting.get_cache_key(k), v) for k, v in settings_to_cache.items()])
for k, id_val in setting_ids.items(): for k, id_val in setting_ids.items():
logger.debug('Saving id in cache for encrypted setting %s, %s', logger.debug('Saving id in cache for encrypted setting %s, %s', Setting.get_cache_id_key(k), id_val)
Setting.get_cache_id_key(k), id_val)
self.cache.cache.set(Setting.get_cache_id_key(k), id_val) self.cache.cache.set(Setting.get_cache_id_key(k), id_val)
settings_to_cache['_awx_conf_preload_expires'] = self._awx_conf_preload_expires settings_to_cache['_awx_conf_preload_expires'] = self._awx_conf_preload_expires
self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT) self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT)
@@ -420,9 +398,7 @@ class SettingsWrapper(UserSettingsHolder):
else: else:
return value return value
except Exception: except Exception:
logger.warning( logger.warning('The current value "%r" for setting "%s" is invalid.', value, name, exc_info=True)
'The current value "%r" for setting "%s" is invalid.',
value, name, exc_info=True)
return empty return empty
def _get_default(self, name): def _get_default(self, name):
@@ -453,8 +429,7 @@ class SettingsWrapper(UserSettingsHolder):
setting_value = field.run_validation(data) setting_value = field.run_validation(data)
db_value = field.to_representation(setting_value) db_value = field.to_representation(setting_value)
except Exception as e: except Exception as e:
logger.exception('Unable to assign value "%r" to setting "%s".', logger.exception('Unable to assign value "%r" to setting "%s".', value, name, exc_info=True)
value, name, exc_info=True)
raise e raise e
setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first() setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
@@ -492,8 +467,7 @@ class SettingsWrapper(UserSettingsHolder):
def __dir__(self): def __dir__(self):
keys = [] keys = []
with _ctit_db_wrapper(trans_safe=True): with _ctit_db_wrapper(trans_safe=True):
for setting in Setting.objects.filter( for setting in Setting.objects.filter(key__in=self.all_supported_settings, user__isnull=True):
key__in=self.all_supported_settings, user__isnull=True):
# Skip returning settings that have been overridden but are # Skip returning settings that have been overridden but are
# considered to be "not set". # considered to be "not set".
if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE: if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
@@ -511,7 +485,7 @@ class SettingsWrapper(UserSettingsHolder):
with _ctit_db_wrapper(trans_safe=True): with _ctit_db_wrapper(trans_safe=True):
set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists() set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists()
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting) set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
return (set_locally or set_on_default) return set_locally or set_on_default
def __getattr_without_cache__(self, name): def __getattr_without_cache__(self, name):

View File

@@ -30,12 +30,7 @@ def handle_setting_change(key, for_delete=False):
# Send setting_changed signal with new value for each setting. # Send setting_changed signal with new value for each setting.
for setting_key in setting_keys: for setting_key in setting_keys:
setting_changed.send( setting_changed.send(sender=Setting, setting=setting_key, value=getattr(settings, setting_key, None), enter=not bool(for_delete))
sender=Setting,
setting=setting_key,
value=getattr(settings, setting_key, None),
enter=not bool(for_delete),
)
@receiver(post_save, sender=Setting) @receiver(post_save, sender=Setting)

View File

@@ -5,10 +5,7 @@ import pytest
from django.urls import resolve from django.urls import resolve
from django.contrib.auth.models import User from django.contrib.auth.models import User
from rest_framework.test import ( from rest_framework.test import APIRequestFactory, force_authenticate
APIRequestFactory,
force_authenticate,
)
@pytest.fixture @pytest.fixture
@@ -41,4 +38,5 @@ def api_request(admin):
response = view(request, *view_args, **view_kwargs) response = view(request, *view_args, **view_kwargs)
response.render() response.render()
return response return response
return rf return rf

View File

@@ -45,44 +45,19 @@ def dummy_validate():
@pytest.mark.django_db @pytest.mark.django_db
def test_non_admin_user_does_not_see_categories(api_request, dummy_setting, normal_user): def test_non_admin_user_does_not_see_categories(api_request, dummy_setting, normal_user):
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'):
'FOO_BAR', response = api_request('get', reverse('api:setting_category_list', kwargs={'version': 'v2'}))
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
):
response = api_request(
'get',
reverse('api:setting_category_list',
kwargs={'version': 'v2'})
)
assert response.data['results'] assert response.data['results']
response = api_request( response = api_request('get', reverse('api:setting_category_list', kwargs={'version': 'v2'}), user=normal_user)
'get',
reverse('api:setting_category_list',
kwargs={'version': 'v2'}),
user=normal_user
)
assert not response.data['results'] assert not response.data['results']
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_detail_retrieve(api_request, dummy_setting): def test_setting_singleton_detail_retrieve(api_request, dummy_setting):
with dummy_setting( with dummy_setting('FOO_BAR_1', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_setting(
'FOO_BAR_1', 'FOO_BAR_2', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
), dummy_setting(
'FOO_BAR_2',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
): ):
response = api_request( response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.status_code == 200 assert response.status_code == 200
assert 'FOO_BAR_1' in response.data and response.data['FOO_BAR_1'] is None assert 'FOO_BAR_1' in response.data and response.data['FOO_BAR_1'] is None
assert 'FOO_BAR_2' in response.data and response.data['FOO_BAR_2'] is None assert 'FOO_BAR_2' in response.data and response.data['FOO_BAR_2'] is None
@@ -90,97 +65,43 @@ def test_setting_singleton_detail_retrieve(api_request, dummy_setting):
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_detail_invalid_retrieve(api_request, dummy_setting, normal_user): def test_setting_singleton_detail_invalid_retrieve(api_request, dummy_setting, normal_user):
with dummy_setting( with dummy_setting('FOO_BAR_1', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_setting(
'FOO_BAR_1', 'FOO_BAR_2', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
), dummy_setting(
'FOO_BAR_2',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
): ):
response = api_request( response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'}))
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'})
)
assert response.status_code == 404 assert response.status_code == 404
response = api_request( response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), user=normal_user)
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
user = normal_user
)
assert response.status_code == 403 assert response.status_code == 403
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_signleton_retrieve_hierachy(api_request, dummy_setting): def test_setting_signleton_retrieve_hierachy(api_request, dummy_setting):
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.IntegerField, default=0, category='FooBar', category_slug='foobar'):
'FOO_BAR', response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
field_class=fields.IntegerField,
default=0,
category='FooBar',
category_slug='foobar'
):
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.data['FOO_BAR'] == 0 assert response.data['FOO_BAR'] == 0
s = Setting(key='FOO_BAR', value=1) s = Setting(key='FOO_BAR', value=1)
s.save() s.save()
response = api_request( response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.data['FOO_BAR'] == 1 assert response.data['FOO_BAR'] == 1
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_retrieve_readonly(api_request, dummy_setting): def test_setting_singleton_retrieve_readonly(api_request, dummy_setting):
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=2, category='FooBar', category_slug='foobar'):
'FOO_BAR', response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
field_class=fields.IntegerField,
read_only=True,
default=2,
category='FooBar',
category_slug='foobar'
):
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.data['FOO_BAR'] == 2 assert response.data['FOO_BAR'] == 2
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_update(api_request, dummy_setting): def test_setting_singleton_update(api_request, dummy_setting):
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
'FOO_BAR', 'awx.conf.views.handle_setting_changes'
field_class=fields.IntegerField, ):
category='FooBar', api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 3})
category_slug='foobar' response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 3}
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.data['FOO_BAR'] == 3 assert response.data['FOO_BAR'] == 3
api_request( api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 4})
'patch', response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 4}
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.data['FOO_BAR'] == 4 assert response.data['FOO_BAR'] == 4
@@ -190,138 +111,70 @@ def test_setting_singleton_update_hybriddictfield_with_forbidden(api_request, du
# indicating that only the defined fields can be filled in. Make # indicating that only the defined fields can be filled in. Make
# sure that the _Forbidden validator doesn't get used for the # sure that the _Forbidden validator doesn't get used for the
# fields. See also https://github.com/ansible/awx/issues/4099. # fields. See also https://github.com/ansible/awx/issues/4099.
with dummy_setting( with dummy_setting('FOO_BAR', field_class=sso_fields.SAMLOrgAttrField, category='FooBar', category_slug='foobar'), mock.patch(
'FOO_BAR', 'awx.conf.views.handle_setting_changes'
field_class=sso_fields.SAMLOrgAttrField, ):
category='FooBar',
category_slug='foobar',
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request( api_request(
'patch', 'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}} data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}},
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
) )
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'} assert response.data['FOO_BAR'] == {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy_setting): def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy_setting):
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=4, category='FooBar', category_slug='foobar'), mock.patch(
'FOO_BAR', 'awx.conf.views.handle_setting_changes'
field_class=fields.IntegerField, ):
read_only=True, api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 5})
default=4, response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
category='FooBar',
category_slug='foobar'
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 5}
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.data['FOO_BAR'] == 4 assert response.data['FOO_BAR'] == 4
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_update_dont_change_encrypted_mark(api_request, dummy_setting): def test_setting_singleton_update_dont_change_encrypted_mark(api_request, dummy_setting):
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.CharField, encrypted=True, category='FooBar', category_slug='foobar'), mock.patch(
'FOO_BAR', 'awx.conf.views.handle_setting_changes'
field_class=fields.CharField, ):
encrypted=True, api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'password'})
category='FooBar',
category_slug='foobar'
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 'password'}
)
assert Setting.objects.get(key='FOO_BAR').value.startswith('$encrypted$') assert Setting.objects.get(key='FOO_BAR').value.startswith('$encrypted$')
response = api_request( response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.data['FOO_BAR'] == '$encrypted$' assert response.data['FOO_BAR'] == '$encrypted$'
api_request( api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': '$encrypted$'})
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': '$encrypted$'}
)
assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'password' assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'password'
api_request( api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'new_pw'})
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 'new_pw'}
)
assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'new_pw' assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'new_pw'
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_update_runs_custom_validate(api_request, dummy_setting, dummy_validate): def test_setting_singleton_update_runs_custom_validate(api_request, dummy_setting, dummy_validate):
def func_raising_exception(serializer, attrs): def func_raising_exception(serializer, attrs):
raise serializers.ValidationError('Error') raise serializers.ValidationError('Error')
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_validate(
'FOO_BAR',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
), dummy_validate(
'foobar', func_raising_exception 'foobar', func_raising_exception
), mock.patch('awx.conf.views.handle_setting_changes'): ), mock.patch('awx.conf.views.handle_setting_changes'):
response = api_request( response = api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 23})
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 23}
)
assert response.status_code == 400 assert response.status_code == 400
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_delete(api_request, dummy_setting): def test_setting_singleton_delete(api_request, dummy_setting):
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
'FOO_BAR', 'awx.conf.views.handle_setting_changes'
field_class=fields.IntegerField, ):
category='FooBar', api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
category_slug='foobar' response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'delete',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert not response.data['FOO_BAR'] assert not response.data['FOO_BAR']
@pytest.mark.django_db @pytest.mark.django_db
def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting): def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting):
with dummy_setting( with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=23, category='FooBar', category_slug='foobar'), mock.patch(
'FOO_BAR', 'awx.conf.views.handle_setting_changes'
field_class=fields.IntegerField, ):
read_only=True, api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
default=23, response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
category='FooBar',
category_slug='foobar'
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'delete',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
assert response.data['FOO_BAR'] == 23 assert response.data['FOO_BAR'] == 23

View File

@@ -1,5 +1,3 @@
# Ensure that our autouse overwrites are working # Ensure that our autouse overwrites are working
def test_cache(settings): def test_cache(settings):
assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache' assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache'

View File

@@ -4,7 +4,7 @@ from rest_framework.fields import ValidationError
from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField, URLField from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField, URLField
class TestStringListBooleanField(): class TestStringListBooleanField:
FIELD_VALUES = [ FIELD_VALUES = [
("hello", "hello"), ("hello", "hello"),
@@ -23,10 +23,7 @@ class TestStringListBooleanField():
("NULL", None), ("NULL", None),
] ]
FIELD_VALUES_INVALID = [ FIELD_VALUES_INVALID = [1.245, {"a": "b"}]
1.245,
{"a": "b"},
]
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES) @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_internal_value_valid(self, value_in, value_known): def test_to_internal_value_valid(self, value_in, value_known):
@@ -39,8 +36,7 @@ class TestStringListBooleanField():
field = StringListBooleanField() field = StringListBooleanField()
with pytest.raises(ValidationError) as e: with pytest.raises(ValidationError) as e:
field.to_internal_value(value) field.to_internal_value(value)
assert e.value.detail[0] == "Expected None, True, False, a string or list " \ assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
"of strings but got {} instead.".format(type(value))
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES) @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_representation_valid(self, value_in, value_known): def test_to_representation_valid(self, value_in, value_known):
@@ -53,22 +49,14 @@ class TestStringListBooleanField():
field = StringListBooleanField() field = StringListBooleanField()
with pytest.raises(ValidationError) as e: with pytest.raises(ValidationError) as e:
field.to_representation(value) field.to_representation(value)
assert e.value.detail[0] == "Expected None, True, False, a string or list " \ assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
"of strings but got {} instead.".format(type(value))
class TestListTuplesField(): class TestListTuplesField:
FIELD_VALUES = [ FIELD_VALUES = [([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")])]
([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")]),
]
FIELD_VALUES_INVALID = [ FIELD_VALUES_INVALID = [("abc", type("abc")), ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))), (['a', 'b'], type('a')), (123, type(123))]
("abc", type("abc")),
([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))),
(['a', 'b'], type('a')),
(123, type(123)),
]
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES) @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_internal_value_valid(self, value_in, value_known): def test_to_internal_value_valid(self, value_in, value_known):
@@ -81,11 +69,10 @@ class TestListTuplesField():
field = ListTuplesField() field = ListTuplesField()
with pytest.raises(ValidationError) as e: with pytest.raises(ValidationError) as e:
field.to_internal_value(value) field.to_internal_value(value)
assert e.value.detail[0] == "Expected a list of tuples of max length 2 " \ assert e.value.detail[0] == "Expected a list of tuples of max length 2 " "but got {} instead.".format(t)
"but got {} instead.".format(t)
class TestStringListPathField(): class TestStringListPathField:
FIELD_VALUES = [ FIELD_VALUES = [
((".", "..", "/"), [".", "..", "/"]), ((".", "..", "/"), [".", "..", "/"]),
@@ -93,22 +80,12 @@ class TestStringListPathField():
(("///home///",), ["/home"]), (("///home///",), ["/home"]),
(("/home/././././",), ["/home"]), (("/home/././././",), ["/home"]),
(("/home", "/home", "/home/"), ["/home"]), (("/home", "/home", "/home/"), ["/home"]),
(["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"]) (["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"]),
] ]
FIELD_VALUES_INVALID_TYPE = [ FIELD_VALUES_INVALID_TYPE = [1.245, {"a": "b"}, ("/home")]
1.245,
{"a": "b"},
("/home"),
]
FIELD_VALUES_INVALID_PATH = [ FIELD_VALUES_INVALID_PATH = ["", "~/", "home", "/invalid_path", "/home/invalid_path"]
"",
"~/",
"home",
"/invalid_path",
"/home/invalid_path",
]
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES) @pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
def test_to_internal_value_valid(self, value_in, value_known): def test_to_internal_value_valid(self, value_in, value_known):
@@ -131,16 +108,19 @@ class TestStringListPathField():
assert e.value.detail[0] == "{} is not a valid path choice.".format(value) assert e.value.detail[0] == "{} is not a valid path choice.".format(value)
class TestURLField(): class TestURLField:
regex = "^https://www.example.org$" regex = "^https://www.example.org$"
@pytest.mark.parametrize("url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",[ @pytest.mark.parametrize(
("ldap://www.example.org42", "ldap", None, True, True), "url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",
("https://www.example.org42", "https", None, False, False), [
("https://www.example.org", None, regex, None, True), ("ldap://www.example.org42", "ldap", None, True, True),
("https://www.example3.org", None, regex, None, False), ("https://www.example.org42", "https", None, False, False),
("ftp://www.example.org", "https", None, None, False) ("https://www.example.org", None, regex, None, True),
]) ("https://www.example3.org", None, regex, None, False),
("ftp://www.example.org", "https", None, None, False),
],
)
def test_urls(self, url, schemes, regex, allow_numbers_in_top_level_domain, expect_no_error): def test_urls(self, url, schemes, regex, allow_numbers_in_top_level_domain, expect_no_error):
kwargs = {} kwargs = {}
kwargs.setdefault("allow_numbers_in_top_level_domain", allow_numbers_in_top_level_domain) kwargs.setdefault("allow_numbers_in_top_level_domain", allow_numbers_in_top_level_domain)

View File

@@ -33,30 +33,18 @@ def reg(request):
if marker.name == 'defined_in_file': if marker.name == 'defined_in_file':
settings.configure(**marker.kwargs) settings.configure(**marker.kwargs)
settings._wrapped = SettingsWrapper(settings._wrapped, settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry)
cache,
registry)
return registry return registry
def test_simple_setting_registration(reg): def test_simple_setting_registration(reg):
assert reg.get_registered_settings() == [] assert reg.get_registered_settings() == []
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
)
assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED'] assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED']
def test_simple_setting_unregistration(reg): def test_simple_setting_unregistration(reg):
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
)
assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED'] assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED']
reg.unregister('AWX_SOME_SETTING_ENABLED') reg.unregister('AWX_SOME_SETTING_ENABLED')
@@ -67,12 +55,7 @@ def test_duplicate_setting_registration(reg):
"ensure that settings cannot be registered twice." "ensure that settings cannot be registered twice."
with pytest.raises(ImproperlyConfigured): with pytest.raises(ImproperlyConfigured):
for i in range(2): for i in range(2):
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
)
def test_field_class_required_for_registration(reg): def test_field_class_required_for_registration(reg):
@@ -82,110 +65,42 @@ def test_field_class_required_for_registration(reg):
def test_get_registered_settings_by_slug(reg): def test_get_registered_settings_by_slug(reg):
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING_ENABLED', assert reg.get_registered_settings(category_slug='system') == ['AWX_SOME_SETTING_ENABLED']
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
)
assert reg.get_registered_settings(category_slug='system') == [
'AWX_SOME_SETTING_ENABLED'
]
assert reg.get_registered_settings(category_slug='other') == [] assert reg.get_registered_settings(category_slug='other') == []
def test_get_registered_read_only_settings(reg): def test_get_registered_read_only_settings(reg):
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING_ENABLED', reg.register('AWX_SOME_READ_ONLY', field_class=fields.BooleanField, category=_('System'), category_slug='system', read_only=True)
field_class=fields.BooleanField, assert reg.get_registered_settings(read_only=True) == ['AWX_SOME_READ_ONLY']
category=_('System'), assert reg.get_registered_settings(read_only=False) == ['AWX_SOME_SETTING_ENABLED']
category_slug='system' assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED', 'AWX_SOME_READ_ONLY']
)
reg.register(
'AWX_SOME_READ_ONLY',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
read_only=True
)
assert reg.get_registered_settings(read_only=True) ==[
'AWX_SOME_READ_ONLY'
]
assert reg.get_registered_settings(read_only=False) == [
'AWX_SOME_SETTING_ENABLED'
]
assert reg.get_registered_settings() == [
'AWX_SOME_SETTING_ENABLED',
'AWX_SOME_READ_ONLY'
]
def test_get_dependent_settings(reg): def test_get_dependent_settings(reg):
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
reg.register( reg.register(
'AWX_SOME_SETTING_ENABLED', 'AWX_SOME_DEPENDENT_SETTING', field_class=fields.BooleanField, category=_('System'), category_slug='system', depends_on=['AWX_SOME_SETTING_ENABLED']
field_class=fields.BooleanField,
category=_('System'),
category_slug='system'
) )
reg.register( assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == set(['AWX_SOME_DEPENDENT_SETTING'])
'AWX_SOME_DEPENDENT_SETTING',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
depends_on=['AWX_SOME_SETTING_ENABLED']
)
assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == set([
'AWX_SOME_DEPENDENT_SETTING'
])
def test_get_registered_categories(reg): def test_get_registered_categories(reg):
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING_ENABLED', reg.register('AWX_SOME_OTHER_SETTING_ENABLED', field_class=fields.BooleanField, category=_('OtherSystem'), category_slug='other-system')
field_class=fields.BooleanField, assert reg.get_registered_categories() == {'all': _('All'), 'changed': _('Changed'), 'system': _('System'), 'other-system': _('OtherSystem')}
category=_('System'),
category_slug='system'
)
reg.register(
'AWX_SOME_OTHER_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('OtherSystem'),
category_slug='other-system'
)
assert reg.get_registered_categories() == {
'all': _('All'),
'changed': _('Changed'),
'system': _('System'),
'other-system': _('OtherSystem'),
}
def test_is_setting_encrypted(reg): def test_is_setting_encrypted(reg):
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING_ENABLED', reg.register('AWX_SOME_ENCRYPTED_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
reg.register(
'AWX_SOME_ENCRYPTED_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
encrypted=True
)
assert reg.is_setting_encrypted('AWX_SOME_SETTING_ENABLED') is False assert reg.is_setting_encrypted('AWX_SOME_SETTING_ENABLED') is False
assert reg.is_setting_encrypted('AWX_SOME_ENCRYPTED_SETTING') is True assert reg.is_setting_encrypted('AWX_SOME_ENCRYPTED_SETTING') is True
def test_simple_field(reg): def test_simple_field(reg):
reg.register( reg.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', placeholder='Example Value')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
placeholder='Example Value',
)
field = reg.get_setting_field('AWX_SOME_SETTING') field = reg.get_setting_field('AWX_SOME_SETTING')
assert isinstance(field, fields.CharField) assert isinstance(field, fields.CharField)
@@ -196,31 +111,20 @@ def test_simple_field(reg):
def test_field_with_custom_attribute(reg): def test_field_with_custom_attribute(reg):
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category_slug='system')
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category_slug='system',
)
field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', category_slug='other-system')
category_slug='other-system')
assert field.category_slug == 'other-system' assert field.category_slug == 'other-system'
def test_field_with_custom_mixin(reg): def test_field_with_custom_mixin(reg):
class GreatMixin(object): class GreatMixin(object):
def is_great(self): def is_great(self):
return True return True
reg.register( reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category_slug='system')
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category_slug='system',
)
field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', mixin_class=GreatMixin)
mixin_class=GreatMixin)
assert isinstance(field, fields.BooleanField) assert isinstance(field, fields.BooleanField)
assert isinstance(field, GreatMixin) assert isinstance(field, GreatMixin)
assert field.is_great() is True assert field.is_great() is True
@@ -228,12 +132,7 @@ def test_field_with_custom_mixin(reg):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings(reg): def test_default_value_from_settings(reg):
reg.register( reg.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
)
field = reg.get_setting_field('AWX_SOME_SETTING') field = reg.get_setting_field('AWX_SOME_SETTING')
assert field.default == 'DEFAULT' assert field.default == 'DEFAULT'
@@ -242,16 +141,10 @@ def test_default_value_from_settings(reg):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings_with_custom_representation(reg): def test_default_value_from_settings_with_custom_representation(reg):
class LowercaseCharField(fields.CharField): class LowercaseCharField(fields.CharField):
def to_representation(self, value): def to_representation(self, value):
return value.lower() return value.lower()
reg.register( reg.register('AWX_SOME_SETTING', field_class=LowercaseCharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=LowercaseCharField,
category=_('System'),
category_slug='system',
)
field = reg.get_setting_field('AWX_SOME_SETTING') field = reg.get_setting_field('AWX_SOME_SETTING')
assert field.default == 'default' assert field.default == 'default'

View File

@@ -53,9 +53,7 @@ def settings(request):
defaults['DEFAULTS_SNAPSHOT'] = {} defaults['DEFAULTS_SNAPSHOT'] = {}
settings.configure(**defaults) settings.configure(**defaults)
settings._wrapped = SettingsWrapper(settings._wrapped, settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry)
cache,
registry)
return settings return settings
@@ -67,14 +65,7 @@ def test_unregistered_setting(settings):
def test_read_only_setting(settings): def test_read_only_setting(settings):
settings.registry.register( settings.registry.register('AWX_READ_ONLY', field_class=fields.CharField, category=_('System'), category_slug='system', default='NO-EDITS', read_only=True)
'AWX_READ_ONLY',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='NO-EDITS',
read_only=True
)
assert settings.AWX_READ_ONLY == 'NO-EDITS' assert settings.AWX_READ_ONLY == 'NO-EDITS'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0 assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True) settings = settings.registry.get_registered_settings(read_only=True)
@@ -85,13 +76,7 @@ def test_read_only_setting(settings):
@pytest.mark.parametrize('read_only', [True, False]) @pytest.mark.parametrize('read_only', [True, False])
def test_setting_defined_in_file(settings, read_only): def test_setting_defined_in_file(settings, read_only):
kwargs = {'read_only': True} if read_only else {} kwargs = {'read_only': True} if read_only else {}
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', **kwargs)
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
**kwargs
)
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0 assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True) settings = settings.registry.get_registered_settings(read_only=True)
@@ -100,13 +85,7 @@ def test_setting_defined_in_file(settings, read_only):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_empty_default(settings): def test_setting_defined_in_file_with_empty_default(settings):
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='',
)
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0 assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True) settings = settings.registry.get_registered_settings(read_only=True)
@@ -115,13 +94,7 @@ def test_setting_defined_in_file_with_empty_default(settings):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_specific_default(settings): def test_setting_defined_in_file_with_specific_default(settings):
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default=123)
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default=123
)
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0 assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True) settings = settings.registry.get_registered_settings(read_only=True)
@@ -131,12 +104,7 @@ def test_setting_defined_in_file_with_specific_default(settings):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_defaults_are_cached(settings): def test_read_only_defaults_are_cached(settings):
"read-only settings are stored in the cache" "read-only settings are stored in the cache"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT' assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
@@ -144,12 +112,7 @@ def test_read_only_defaults_are_cached(settings):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_cache_respects_timeout(settings): def test_cache_respects_timeout(settings):
"only preload the cache every SETTING_CACHE_TIMEOUT settings" "only preload the cache every SETTING_CACHE_TIMEOUT settings"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
cache_expiration = settings.cache.get('_awx_conf_preload_expires') cache_expiration = settings.cache.get('_awx_conf_preload_expires')
@@ -161,13 +124,7 @@ def test_cache_respects_timeout(settings):
def test_default_setting(settings, mocker): def test_default_setting(settings, mocker):
"settings that specify a default are inserted into the cache" "settings that specify a default are inserted into the cache"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='DEFAULT'
)
settings_to_cache = mocker.Mock(**{'order_by.return_value': []}) settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache): with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
@@ -177,24 +134,13 @@ def test_default_setting(settings, mocker):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_is_from_setting_file(settings, mocker): def test_setting_is_from_setting_file(settings, mocker):
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is True assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is True
def test_setting_is_not_from_setting_file(settings, mocker): def test_setting_is_not_from_setting_file(settings, mocker):
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='DEFAULT'
)
settings_to_cache = mocker.Mock(**{'order_by.return_value': []}) settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache): with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
@@ -204,19 +150,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):
def test_empty_setting(settings, mocker): def test_empty_setting(settings, mocker):
"settings with no default and no defined value are not valid" "settings with no default and no defined value are not valid"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
mocks = mocker.Mock(**{ mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
'order_by.return_value': mocker.Mock(**{
'__iter__': lambda self: iter([]),
'first.return_value': None
}),
})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks): with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
with pytest.raises(AttributeError): with pytest.raises(AttributeError):
settings.AWX_SOME_SETTING settings.AWX_SOME_SETTING
@@ -225,21 +161,10 @@ def test_empty_setting(settings, mocker):
def test_setting_from_db(settings, mocker): def test_setting_from_db(settings, mocker):
"settings can be loaded from the database" "settings can be loaded from the database"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='DEFAULT'
)
setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB') setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
mocks = mocker.Mock(**{ mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
'order_by.return_value': mocker.Mock(**{
'__iter__': lambda self: iter([setting_from_db]),
'first.return_value': setting_from_db
}),
})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks): with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
assert settings.AWX_SOME_SETTING == 'FROM_DB' assert settings.AWX_SOME_SETTING == 'FROM_DB'
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB' assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
@@ -248,12 +173,7 @@ def test_setting_from_db(settings, mocker):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_assignment(settings): def test_read_only_setting_assignment(settings):
"read-only settings cannot be overwritten" "read-only settings cannot be overwritten"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
with pytest.raises(ImproperlyConfigured): with pytest.raises(ImproperlyConfigured):
settings.AWX_SOME_SETTING = 'CHANGED' settings.AWX_SOME_SETTING = 'CHANGED'
@@ -262,41 +182,26 @@ def test_read_only_setting_assignment(settings):
def test_db_setting_create(settings, mocker): def test_db_setting_create(settings, mocker):
"settings are stored in the database when set for the first time" "settings are stored in the database when set for the first time"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None}) setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
with apply_patches([ with apply_patches(
mocker.patch('awx.conf.models.Setting.objects.filter', [
return_value=setting_list), mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list),
mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()) mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()),
]): ]
):
settings.AWX_SOME_SETTING = 'NEW-VALUE' settings.AWX_SOME_SETTING = 'NEW-VALUE'
models.Setting.objects.create.assert_called_with( models.Setting.objects.create.assert_called_with(key='AWX_SOME_SETTING', user=None, value='NEW-VALUE')
key='AWX_SOME_SETTING',
user=None,
value='NEW-VALUE'
)
def test_db_setting_update(settings, mocker): def test_db_setting_update(settings, mocker):
"settings are updated in the database when their value changes" "settings are updated in the database when their value changes"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB') existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
setting_list = mocker.Mock(**{ setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
'order_by.return_value.first.return_value': existing_setting
})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list): with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
settings.AWX_SOME_SETTING = 'NEW-VALUE' settings.AWX_SOME_SETTING = 'NEW-VALUE'
@@ -306,12 +211,7 @@ def test_db_setting_update(settings, mocker):
def test_db_setting_deletion(settings, mocker): def test_db_setting_deletion(settings, mocker):
"settings are auto-deleted from the database" "settings are auto-deleted from the database"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB') existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]): with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
@@ -323,12 +223,7 @@ def test_db_setting_deletion(settings, mocker):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT') @pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_deletion(settings): def test_read_only_setting_deletion(settings):
"read-only settings cannot be deleted" "read-only settings cannot be deleted"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
assert settings.AWX_SOME_SETTING == 'DEFAULT' assert settings.AWX_SOME_SETTING == 'DEFAULT'
with pytest.raises(ImproperlyConfigured): with pytest.raises(ImproperlyConfigured):
del settings.AWX_SOME_SETTING del settings.AWX_SOME_SETTING
@@ -337,36 +232,22 @@ def test_read_only_setting_deletion(settings):
def test_charfield_properly_sets_none(settings, mocker): def test_charfield_properly_sets_none(settings, mocker):
"see: https://github.com/ansible/ansible-tower/issues/5322" "see: https://github.com/ansible/ansible-tower/issues/5322"
settings.registry.register( settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', allow_null=True)
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
allow_null=True
)
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None}) setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
with apply_patches([ with apply_patches(
mocker.patch('awx.conf.models.Setting.objects.filter', [
return_value=setting_list), mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list),
mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()) mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()),
]): ]
):
settings.AWX_SOME_SETTING = None settings.AWX_SOME_SETTING = None
models.Setting.objects.create.assert_called_with( models.Setting.objects.create.assert_called_with(key='AWX_SOME_SETTING', user=None, value=None)
key='AWX_SOME_SETTING',
user=None,
value=None
)
def test_settings_use_cache(settings, mocker): def test_settings_use_cache(settings, mocker):
settings.registry.register( settings.registry.register('AWX_VAR', field_class=fields.CharField, category=_('System'), category_slug='system')
'AWX_VAR',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.cache.set('AWX_VAR', 'foobar') settings.cache.set('AWX_VAR', 'foobar')
settings.cache.set('_awx_conf_preload_expires', 100) settings.cache.set('_awx_conf_preload_expires', 100)
# Will fail test if database is used # Will fail test if database is used
@@ -374,13 +255,7 @@ def test_settings_use_cache(settings, mocker):
def test_settings_use_an_encrypted_cache(settings, mocker): def test_settings_use_an_encrypted_cache(settings, mocker):
settings.registry.register( settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
'AWX_ENCRYPTED',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
encrypted=True
)
assert isinstance(settings.cache, EncryptedCacheProxy) assert isinstance(settings.cache, EncryptedCacheProxy)
assert settings.cache.__dict__['encrypter'] == encrypt_field assert settings.cache.__dict__['encrypter'] == encrypt_field
assert settings.cache.__dict__['decrypter'] == decrypt_field assert settings.cache.__dict__['decrypter'] == decrypt_field
@@ -393,34 +268,18 @@ def test_settings_use_an_encrypted_cache(settings, mocker):
def test_sensitive_cache_data_is_encrypted(settings, mocker): def test_sensitive_cache_data_is_encrypted(settings, mocker):
"fields marked as `encrypted` are stored in the cache with encryption" "fields marked as `encrypted` are stored in the cache with encryption"
settings.registry.register( settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
'AWX_ENCRYPTED',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
encrypted=True
)
def rot13(obj, attribute): def rot13(obj, attribute):
assert obj.pk == 123 assert obj.pk == 123
return codecs.encode(getattr(obj, attribute), 'rot_13') return codecs.encode(getattr(obj, attribute), 'rot_13')
native_cache = LocMemCache(str(uuid4()), {}) native_cache = LocMemCache(str(uuid4()), {})
cache = EncryptedCacheProxy( cache = EncryptedCacheProxy(native_cache, settings.registry, encrypter=rot13, decrypter=rot13)
native_cache,
settings.registry,
encrypter=rot13,
decrypter=rot13
)
# Insert the setting value into the database; the encryption process will # Insert the setting value into the database; the encryption process will
# use its primary key as part of the encryption key # use its primary key as part of the encryption key
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!') setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
mocks = mocker.Mock(**{ mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
'order_by.return_value': mocker.Mock(**{
'__iter__': lambda self: iter([setting_from_db]),
'first.return_value': setting_from_db
}),
})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks): with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
cache.set('AWX_ENCRYPTED', 'SECRET!') cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!' assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
@@ -429,26 +288,14 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):
def test_readonly_sensitive_cache_data_is_encrypted(settings): def test_readonly_sensitive_cache_data_is_encrypted(settings):
"readonly fields marked as `encrypted` are stored in the cache with encryption" "readonly fields marked as `encrypted` are stored in the cache with encryption"
settings.registry.register( settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', read_only=True, encrypted=True)
'AWX_ENCRYPTED',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
read_only=True,
encrypted=True
)
def rot13(obj, attribute): def rot13(obj, attribute):
assert obj.pk is None assert obj.pk is None
return codecs.encode(getattr(obj, attribute), 'rot_13') return codecs.encode(getattr(obj, attribute), 'rot_13')
native_cache = LocMemCache(str(uuid4()), {}) native_cache = LocMemCache(str(uuid4()), {})
cache = EncryptedCacheProxy( cache = EncryptedCacheProxy(native_cache, settings.registry, encrypter=rot13, decrypter=rot13)
native_cache,
settings.registry,
encrypter=rot13,
decrypter=rot13
)
cache.set('AWX_ENCRYPTED', 'SECRET!') cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!' assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!' assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'

View File

@@ -3,14 +3,10 @@
from django.conf.urls import url from django.conf.urls import url
from awx.conf.views import ( from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest
SettingCategoryList,
SettingSingletonDetail,
SettingLoggingTest,
)
urlpatterns = [ urlpatterns = [
url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'), url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'),
url(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'), url(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'),
url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'), url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'),

View File

@@ -7,7 +7,4 @@ __all__ = ['conf_to_dict']
def conf_to_dict(obj): def conf_to_dict(obj):
return { return {'category': settings_registry.get_setting_category(obj.key), 'name': obj.key}
'category': settings_registry.get_setting_category(obj.key),
'name': obj.key,
}

View File

@@ -22,12 +22,7 @@ from rest_framework import serializers
from rest_framework import status from rest_framework import status
# Tower # Tower
from awx.api.generics import ( from awx.api.generics import APIView, GenericAPIView, ListAPIView, RetrieveUpdateDestroyAPIView
APIView,
GenericAPIView,
ListAPIView,
RetrieveUpdateDestroyAPIView,
)
from awx.api.permissions import IsSuperUser from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse from awx.api.versioning import reverse
from awx.main.utils import camelcase_to_underscore from awx.main.utils import camelcase_to_underscore
@@ -81,9 +76,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if self.category_slug not in category_slugs: if self.category_slug not in category_slugs:
raise PermissionDenied() raise PermissionDenied()
registered_settings = settings_registry.get_registered_settings( registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug, read_only=False)
category_slug=self.category_slug, read_only=False,
)
if self.category_slug == 'user': if self.category_slug == 'user':
return Setting.objects.filter(key__in=registered_settings, user=self.request.user) return Setting.objects.filter(key__in=registered_settings, user=self.request.user)
else: else:
@@ -91,9 +84,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
def get_object(self): def get_object(self):
settings_qs = self.get_queryset() settings_qs = self.get_queryset()
registered_settings = settings_registry.get_registered_settings( registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug)
category_slug=self.category_slug,
)
all_settings = {} all_settings = {}
for setting in settings_qs: for setting in settings_qs:
all_settings[setting.key] = setting.value all_settings[setting.key] = setting.value
@@ -117,9 +108,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
for key, value in serializer.validated_data.items(): for key, value in serializer.validated_data.items():
if key == 'LICENSE' or settings_registry.is_setting_read_only(key): if key == 'LICENSE' or settings_registry.is_setting_read_only(key):
continue continue
if settings_registry.is_setting_encrypted(key) and \ if settings_registry.is_setting_encrypted(key) and isinstance(value, str) and value.startswith('$encrypted$'):
isinstance(value, str) and \
value.startswith('$encrypted$'):
continue continue
setattr(serializer.instance, key, value) setattr(serializer.instance, key, value)
setting = settings_qs.filter(key=key).order_by('pk').first() setting = settings_qs.filter(key=key).order_by('pk').first()
@@ -133,7 +122,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if settings_change_list: if settings_change_list:
connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list)) connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list))
def destroy(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs):
instance = self.get_object() instance = self.get_object()
self.perform_destroy(instance) self.perform_destroy(instance)
@@ -170,7 +158,7 @@ class SettingLoggingTest(GenericAPIView):
enabled = getattr(settings, 'LOG_AGGREGATOR_ENABLED', False) enabled = getattr(settings, 'LOG_AGGREGATOR_ENABLED', False)
if not enabled: if not enabled:
return Response({'error': 'Logging not enabled'}, status=status.HTTP_409_CONFLICT) return Response({'error': 'Logging not enabled'}, status=status.HTTP_409_CONFLICT)
# Send test message to configured logger based on db settings # Send test message to configured logger based on db settings
try: try:
default_logger = settings.LOG_AGGREGATOR_LOGGERS[0] default_logger = settings.LOG_AGGREGATOR_LOGGERS[0]
@@ -179,18 +167,15 @@ class SettingLoggingTest(GenericAPIView):
except IndexError: except IndexError:
default_logger = 'awx' default_logger = 'awx'
logging.getLogger(default_logger).error('AWX Connection Test Message') logging.getLogger(default_logger).error('AWX Connection Test Message')
hostname = getattr(settings, 'LOG_AGGREGATOR_HOST', None) hostname = getattr(settings, 'LOG_AGGREGATOR_HOST', None)
protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', None) protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', None)
try: try:
subprocess.check_output( subprocess.check_output(['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'], stderr=subprocess.STDOUT)
['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'],
stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError as exc: except subprocess.CalledProcessError as exc:
return Response({'error': exc.output}, status=status.HTTP_400_BAD_REQUEST) return Response({'error': exc.output}, status=status.HTTP_400_BAD_REQUEST)
# Check to ensure port is open at host # Check to ensure port is open at host
if protocol in ['udp', 'tcp']: if protocol in ['udp', 'tcp']:
port = getattr(settings, 'LOG_AGGREGATOR_PORT', None) port = getattr(settings, 'LOG_AGGREGATOR_PORT', None)
@@ -206,7 +191,7 @@ class SettingLoggingTest(GenericAPIView):
else: else:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try: try:
s.settimeout(.5) s.settimeout(0.5)
s.connect((hostname, int(port))) s.connect((hostname, int(port)))
s.shutdown(SHUT_RDWR) s.shutdown(SHUT_RDWR)
s.close() s.close()

File diff suppressed because it is too large Load Diff

View File

@@ -24,7 +24,7 @@ logger = logging.getLogger('awx.analytics.broadcast_websocket')
def dt_to_seconds(dt): def dt_to_seconds(dt):
return int((dt - datetime.datetime(1970,1,1)).total_seconds()) return int((dt - datetime.datetime(1970, 1, 1)).total_seconds())
def now_seconds(): def now_seconds():
@@ -37,7 +37,7 @@ def safe_name(s):
# Second granularity; Per-minute # Second granularity; Per-minute
class FixedSlidingWindow(): class FixedSlidingWindow:
def __init__(self, start_time=None): def __init__(self, start_time=None):
self.buckets = dict() self.buckets = dict()
self.start_time = start_time or now_seconds() self.start_time = start_time or now_seconds()
@@ -65,7 +65,7 @@ class FixedSlidingWindow():
return sum(self.buckets.values()) or 0 return sum(self.buckets.values()) or 0
class BroadcastWebsocketStatsManager(): class BroadcastWebsocketStatsManager:
def __init__(self, event_loop, local_hostname): def __init__(self, event_loop, local_hostname):
self._local_hostname = local_hostname self._local_hostname = local_hostname
@@ -74,8 +74,7 @@ class BroadcastWebsocketStatsManager():
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
def new_remote_host_stats(self, remote_hostname): def new_remote_host_stats(self, remote_hostname):
self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname, self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname, remote_hostname)
remote_hostname)
return self._stats[remote_hostname] return self._stats[remote_hostname]
def delete_remote_host_stats(self, remote_hostname): def delete_remote_host_stats(self, remote_hostname):
@@ -100,15 +99,15 @@ class BroadcastWebsocketStatsManager():
@classmethod @classmethod
def get_stats_sync(cls): def get_stats_sync(cls):
''' """
Stringified verion of all the stats Stringified verion of all the stats
''' """
redis_conn = redis.Redis.from_url(settings.BROKER_URL) redis_conn = redis.Redis.from_url(settings.BROKER_URL)
stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b'' stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
return parser.text_string_to_metric_families(stats_str.decode('UTF-8')) return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))
class BroadcastWebsocketStats(): class BroadcastWebsocketStats:
def __init__(self, local_hostname, remote_hostname): def __init__(self, local_hostname, remote_hostname):
self._local_hostname = local_hostname self._local_hostname = local_hostname
self._remote_hostname = remote_hostname self._remote_hostname = remote_hostname
@@ -118,24 +117,25 @@ class BroadcastWebsocketStats():
self.name = safe_name(self._local_hostname) self.name = safe_name(self._local_hostname)
self.remote_name = safe_name(self._remote_hostname) self.remote_name = safe_name(self._remote_hostname)
self._messages_received_total = Counter(f'awx_{self.remote_name}_messages_received_total', self._messages_received_total = Counter(
'Number of messages received, to be forwarded, by the broadcast websocket system', f'awx_{self.remote_name}_messages_received_total',
registry=self._registry) 'Number of messages received, to be forwarded, by the broadcast websocket system',
self._messages_received = Gauge(f'awx_{self.remote_name}_messages_received', registry=self._registry,
'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection', )
registry=self._registry) self._messages_received = Gauge(
self._connection = Enum(f'awx_{self.remote_name}_connection', f'awx_{self.remote_name}_messages_received',
'Websocket broadcast connection', 'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
states=['disconnected', 'connected'], registry=self._registry,
registry=self._registry) )
self._connection = Enum(
f'awx_{self.remote_name}_connection', 'Websocket broadcast connection', states=['disconnected', 'connected'], registry=self._registry
)
self._connection.state('disconnected') self._connection.state('disconnected')
self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start', self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start', 'Time the connection was established', registry=self._registry)
'Time the connection was established',
registry=self._registry)
self._messages_received_per_minute = Gauge(f'awx_{self.remote_name}_messages_received_per_minute', self._messages_received_per_minute = Gauge(
'Messages received per minute', f'awx_{self.remote_name}_messages_received_per_minute', 'Messages received per minute', registry=self._registry
registry=self._registry) )
self._internal_messages_received_per_minute = FixedSlidingWindow() self._internal_messages_received_per_minute = FixedSlidingWindow()
def unregister(self): def unregister(self):

View File

@@ -10,8 +10,7 @@ from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
from awx.conf.license import get_license from awx.conf.license import get_license
from awx.main.utils import (get_awx_version, get_ansible_version, from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, camelcase_to_underscore
get_custom_venv_choices, camelcase_to_underscore)
from awx.main import models from awx.main import models
from django.contrib.sessions.models import Session from django.contrib.sessions.models import Session
from awx.main.analytics import register from awx.main.analytics import register
@@ -68,96 +67,99 @@ def config(since, **kwargs):
@register('counts', '1.0', description=_('Counts of objects such as organizations, inventories, and projects')) @register('counts', '1.0', description=_('Counts of objects such as organizations, inventories, and projects'))
def counts(since, **kwargs): def counts(since, **kwargs):
counts = {} counts = {}
for cls in (models.Organization, models.Team, models.User, for cls in (
models.Inventory, models.Credential, models.Project, models.Organization,
models.JobTemplate, models.WorkflowJobTemplate, models.Team,
models.Host, models.Schedule, models.CustomInventoryScript, models.User,
models.NotificationTemplate): models.Inventory,
models.Credential,
models.Project,
models.JobTemplate,
models.WorkflowJobTemplate,
models.Host,
models.Schedule,
models.CustomInventoryScript,
models.NotificationTemplate,
):
counts[camelcase_to_underscore(cls.__name__)] = cls.objects.count() counts[camelcase_to_underscore(cls.__name__)] = cls.objects.count()
venvs = get_custom_venv_choices() venvs = get_custom_venv_choices()
counts['custom_virtualenvs'] = len([ counts['custom_virtualenvs'] = len([v for v in venvs if os.path.basename(v.rstrip('/')) != 'ansible'])
v for v in venvs
if os.path.basename(v.rstrip('/')) != 'ansible'
])
inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind'))) inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind')))
inv_counts['normal'] = inv_counts.get('', 0) inv_counts['normal'] = inv_counts.get('', 0)
inv_counts.pop('', None) inv_counts.pop('', None)
inv_counts['smart'] = inv_counts.get('smart', 0) inv_counts['smart'] = inv_counts.get('smart', 0)
counts['inventories'] = inv_counts counts['inventories'] = inv_counts
counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count() # excludes implicit project_updates counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count() # excludes implicit project_updates
counts['active_host_count'] = models.Host.objects.active_count() counts['active_host_count'] = models.Host.objects.active_count()
active_sessions = Session.objects.filter(expire_date__gte=now()).count() active_sessions = Session.objects.filter(expire_date__gte=now()).count()
active_user_sessions = models.UserSessionMembership.objects.select_related('session').filter(session__expire_date__gte=now()).count() active_user_sessions = models.UserSessionMembership.objects.select_related('session').filter(session__expire_date__gte=now()).count()
active_anonymous_sessions = active_sessions - active_user_sessions active_anonymous_sessions = active_sessions - active_user_sessions
counts['active_sessions'] = active_sessions counts['active_sessions'] = active_sessions
counts['active_user_sessions'] = active_user_sessions counts['active_user_sessions'] = active_user_sessions
counts['active_anonymous_sessions'] = active_anonymous_sessions counts['active_anonymous_sessions'] = active_anonymous_sessions
counts['running_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('running', 'waiting',)).count() counts['running_jobs'] = (
models.UnifiedJob.objects.exclude(launch_type='sync')
.filter(
status__in=(
'running',
'waiting',
)
)
.count()
)
counts['pending_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('pending',)).count() counts['pending_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('pending',)).count()
return counts return counts
@register('org_counts', '1.0', description=_('Counts of users and teams by organization')) @register('org_counts', '1.0', description=_('Counts of users and teams by organization'))
def org_counts(since, **kwargs): def org_counts(since, **kwargs):
counts = {} counts = {}
for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True), for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True), num_teams=Count('teams', distinct=True)).values(
num_teams=Count('teams', distinct=True)).values('name', 'id', 'num_users', 'num_teams'): 'name', 'id', 'num_users', 'num_teams'
counts[org['id']] = {'name': org['name'], ):
'users': org['num_users'], counts[org['id']] = {'name': org['name'], 'users': org['num_users'], 'teams': org['num_teams']}
'teams': org['num_teams']
}
return counts return counts
@register('cred_type_counts', '1.0', description=_('Counts of credentials by credential type')) @register('cred_type_counts', '1.0', description=_('Counts of credentials by credential type'))
def cred_type_counts(since, **kwargs): def cred_type_counts(since, **kwargs):
counts = {} counts = {}
for cred_type in models.CredentialType.objects.annotate(num_credentials=Count( for cred_type in models.CredentialType.objects.annotate(num_credentials=Count('credentials', distinct=True)).values(
'credentials', distinct=True)).values('name', 'id', 'managed_by_tower', 'num_credentials'): 'name', 'id', 'managed_by_tower', 'num_credentials'
counts[cred_type['id']] = {'name': cred_type['name'], ):
'credential_count': cred_type['num_credentials'], counts[cred_type['id']] = {
'managed_by_tower': cred_type['managed_by_tower'] 'name': cred_type['name'],
} 'credential_count': cred_type['num_credentials'],
'managed_by_tower': cred_type['managed_by_tower'],
}
return counts return counts
@register('inventory_counts', '1.2', description=_('Inventories, their inventory sources, and host counts')) @register('inventory_counts', '1.2', description=_('Inventories, their inventory sources, and host counts'))
def inventory_counts(since, **kwargs): def inventory_counts(since, **kwargs):
counts = {} counts = {}
for inv in models.Inventory.objects.filter(kind='').annotate(num_sources=Count('inventory_sources', distinct=True), for inv in (
num_hosts=Count('hosts', distinct=True)).only('id', 'name', 'kind'): models.Inventory.objects.filter(kind='')
.annotate(num_sources=Count('inventory_sources', distinct=True), num_hosts=Count('hosts', distinct=True))
.only('id', 'name', 'kind')
):
source_list = [] source_list = []
for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name','source', 'num_hosts'): for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name', 'source', 'num_hosts'):
source_list.append(source) source_list.append(source)
counts[inv.id] = {'name': inv.name, counts[inv.id] = {'name': inv.name, 'kind': inv.kind, 'hosts': inv.num_hosts, 'sources': inv.num_sources, 'source_list': source_list}
'kind': inv.kind,
'hosts': inv.num_hosts,
'sources': inv.num_sources,
'source_list': source_list
}
for smart_inv in models.Inventory.objects.filter(kind='smart'): for smart_inv in models.Inventory.objects.filter(kind='smart'):
counts[smart_inv.id] = {'name': smart_inv.name, counts[smart_inv.id] = {'name': smart_inv.name, 'kind': smart_inv.kind, 'hosts': smart_inv.hosts.count(), 'sources': 0, 'source_list': []}
'kind': smart_inv.kind,
'hosts': smart_inv.hosts.count(),
'sources': 0,
'source_list': []
}
return counts return counts
@register('projects_by_scm_type', '1.0', description=_('Counts of projects by source control type')) @register('projects_by_scm_type', '1.0', description=_('Counts of projects by source control type'))
def projects_by_scm_type(since, **kwargs): def projects_by_scm_type(since, **kwargs):
counts = dict( counts = dict((t[0] or 'manual', 0) for t in models.Project.SCM_TYPE_CHOICES)
(t[0] or 'manual', 0) for result in models.Project.objects.values('scm_type').annotate(count=Count('scm_type')).order_by('scm_type'):
for t in models.Project.SCM_TYPE_CHOICES
)
for result in models.Project.objects.values('scm_type').annotate(
count=Count('scm_type')
).order_by('scm_type'):
counts[result['scm_type'] or 'manual'] = result['count'] counts[result['scm_type'] or 'manual'] = result['count']
return counts return counts
@@ -172,10 +174,10 @@ def _get_isolated_datetime(last_check):
def instance_info(since, include_hostnames=False, **kwargs): def instance_info(since, include_hostnames=False, **kwargs):
info = {} info = {}
instances = models.Instance.objects.values_list('hostname').values( instances = models.Instance.objects.values_list('hostname').values(
'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled') 'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled'
)
for instance in instances: for instance in instances:
consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'], consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'], status__in=('running', 'waiting')))
status__in=('running', 'waiting')))
instance_info = { instance_info = {
'uuid': instance['uuid'], 'uuid': instance['uuid'],
'version': instance['version'], 'version': instance['version'],
@@ -186,7 +188,7 @@ def instance_info(since, include_hostnames=False, **kwargs):
'last_isolated_check': _get_isolated_datetime(instance['last_isolated_check']), 'last_isolated_check': _get_isolated_datetime(instance['last_isolated_check']),
'enabled': instance['enabled'], 'enabled': instance['enabled'],
'consumed_capacity': consumed_capacity, 'consumed_capacity': consumed_capacity,
'remaining_capacity': instance['capacity'] - consumed_capacity 'remaining_capacity': instance['capacity'] - consumed_capacity,
} }
if include_hostnames is True: if include_hostnames is True:
instance_info['hostname'] = instance['hostname'] instance_info['hostname'] = instance['hostname']
@@ -198,20 +200,22 @@ def job_counts(since, **kwargs):
counts = {} counts = {}
counts['total_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').count() counts['total_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()
counts['status'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('status').annotate(Count('status')).order_by()) counts['status'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('status').annotate(Count('status')).order_by())
counts['launch_type'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list( counts['launch_type'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('launch_type').annotate(Count('launch_type')).order_by())
'launch_type').annotate(Count('launch_type')).order_by())
return counts return counts
def job_instance_counts(since, **kwargs): def job_instance_counts(since, **kwargs):
counts = {} counts = {}
job_types = models.UnifiedJob.objects.exclude(launch_type='sync').values_list( job_types = (
'execution_node', 'launch_type').annotate(job_launch_type=Count('launch_type')).order_by() models.UnifiedJob.objects.exclude(launch_type='sync')
.values_list('execution_node', 'launch_type')
.annotate(job_launch_type=Count('launch_type'))
.order_by()
)
for job in job_types: for job in job_types:
counts.setdefault(job[0], {}).setdefault('launch_type', {})[job[1]] = job[2] counts.setdefault(job[0], {}).setdefault('launch_type', {})[job[1]] = job[2]
job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list( job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list('execution_node', 'status').annotate(job_status=Count('status')).order_by()
'execution_node', 'status').annotate(job_status=Count('status')).order_by()
for job in job_statuses: for job in job_statuses:
counts.setdefault(job[0], {}).setdefault('status', {})[job[1]] = job[2] counts.setdefault(job[0], {}).setdefault('status', {})[job[1]] = job[2]
return counts return counts
@@ -261,12 +265,12 @@ class FileSplitter(io.StringIO):
self.files = self.files[:-1] self.files = self.files[:-1]
# If we only have one file, remove the suffix # If we only have one file, remove the suffix
if len(self.files) == 1: if len(self.files) == 1:
os.rename(self.files[0],self.files[0].replace('_split0','')) os.rename(self.files[0], self.files[0].replace('_split0', ''))
return self.files return self.files
def write(self, s): def write(self, s):
if not self.header: if not self.header:
self.header = s[0:s.index('\n')] self.header = s[0 : s.index('\n')]
self.counter += self.currentfile.write(s) self.counter += self.currentfile.write(s)
if self.counter >= MAX_TABLE_SIZE: if self.counter >= MAX_TABLE_SIZE:
self.cycle_file() self.cycle_file()
@@ -307,7 +311,9 @@ def events_table(since, full_path, until, **kwargs):
FROM main_jobevent FROM main_jobevent
WHERE (main_jobevent.created > '{}' AND main_jobevent.created <= '{}') WHERE (main_jobevent.created > '{}' AND main_jobevent.created <= '{}')
ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER
'''.format(since.isoformat(),until.isoformat()) '''.format(
since.isoformat(), until.isoformat()
)
return _copy_table(table='events', query=events_query, path=full_path) return _copy_table(table='events', query=events_query, path=full_path)
@@ -346,7 +352,9 @@ def unified_jobs_table(since, full_path, until, **kwargs):
OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}')) OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}'))
AND main_unifiedjob.launch_type != 'sync' AND main_unifiedjob.launch_type != 'sync'
ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER
'''.format(since.isoformat(),until.isoformat()) '''.format(
since.isoformat(), until.isoformat()
)
return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path) return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)
@@ -369,7 +377,7 @@ def unified_job_template_table(since, full_path, **kwargs):
main_unifiedjobtemplate.status main_unifiedjobtemplate.status
FROM main_unifiedjobtemplate, django_content_type FROM main_unifiedjobtemplate, django_content_type
WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id
ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER''' ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''
return _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path) return _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)
@@ -405,7 +413,9 @@ def workflow_job_node_table(since, full_path, until, **kwargs):
) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id ) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id
WHERE (main_workflowjobnode.modified > '{}' AND main_workflowjobnode.modified <= '{}') WHERE (main_workflowjobnode.modified > '{}' AND main_workflowjobnode.modified <= '{}')
ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER
'''.format(since.isoformat(),until.isoformat()) '''.format(
since.isoformat(), until.isoformat()
)
return _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path) return _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path)
@@ -437,5 +447,5 @@ def workflow_job_template_node_table(since, full_path, **kwargs):
FROM main_workflowjobtemplatenode_always_nodes FROM main_workflowjobtemplatenode_always_nodes
GROUP BY from_workflowjobtemplatenode_id GROUP BY from_workflowjobtemplatenode_id
) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id ) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id
ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER''' ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
return _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path) return _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path)

View File

@@ -43,7 +43,7 @@ def all_collectors():
key = func.__awx_analytics_key__ key = func.__awx_analytics_key__
desc = func.__awx_analytics_description__ or '' desc = func.__awx_analytics_description__ or ''
version = func.__awx_analytics_version__ version = func.__awx_analytics_version__
collector_dict[key] = { 'name': key, 'version': version, 'description': desc} collector_dict[key] = {'name': key, 'version': version, 'description': desc}
return collector_dict return collector_dict
@@ -82,7 +82,7 @@ def register(key, version, description=None, format='json', expensive=False):
return decorate return decorate
def gather(dest=None, module=None, subset = None, since = None, until = now(), collection_type='scheduled'): def gather(dest=None, module=None, subset=None, since=None, until=now(), collection_type='scheduled'):
""" """
Gather all defined metrics and write them as JSON files in a .tgz Gather all defined metrics and write them as JSON files in a .tgz
@@ -90,6 +90,7 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
:param module: the module to search for registered analytic collector :param module: the module to search for registered analytic collector
functions; defaults to awx.main.analytics.collectors functions; defaults to awx.main.analytics.collectors
""" """
def _write_manifest(destdir, manifest): def _write_manifest(destdir, manifest):
path = os.path.join(destdir, 'manifest.json') path = os.path.join(destdir, 'manifest.json')
with open(path, 'w', encoding='utf-8') as f: with open(path, 'w', encoding='utf-8') as f:
@@ -116,13 +117,10 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
collector_module = module collector_module = module
else: else:
from awx.main.analytics import collectors from awx.main.analytics import collectors
collector_module = collectors collector_module = collectors
for name, func in inspect.getmembers(collector_module): for name, func in inspect.getmembers(collector_module):
if ( if inspect.isfunction(func) and hasattr(func, '__awx_analytics_key__') and (not subset or name in subset):
inspect.isfunction(func) and
hasattr(func, '__awx_analytics_key__') and
(not subset or name in subset)
):
collector_list.append((name, func)) collector_list.append((name, func))
manifest = dict() manifest = dict()
@@ -162,6 +160,7 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
# Always include config.json if we're using our collectors # Always include config.json if we're using our collectors
if 'config.json' not in manifest.keys() and not module: if 'config.json' not in manifest.keys() and not module:
from awx.main.analytics import collectors from awx.main.analytics import collectors
config = collectors.config config = collectors.config
path = '{}.json'.format(os.path.join(gather_dir, config.__awx_analytics_key__)) path = '{}.json'.format(os.path.join(gather_dir, config.__awx_analytics_key__))
with open(path, 'w', encoding='utf-8') as f: with open(path, 'w', encoding='utf-8') as f:
@@ -204,22 +203,14 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
for i in range(0, len(stage_dirs)): for i in range(0, len(stage_dirs)):
stage_dir = stage_dirs[i] stage_dir = stage_dirs[i]
# can't use isoformat() since it has colons, which GNU tar doesn't like # can't use isoformat() since it has colons, which GNU tar doesn't like
tarname = '_'.join([ tarname = '_'.join([settings.SYSTEM_UUID, until.strftime('%Y-%m-%d-%H%M%S%z'), str(i)])
settings.SYSTEM_UUID, tgz = shutil.make_archive(os.path.join(os.path.dirname(dest), tarname), 'gztar', stage_dir)
until.strftime('%Y-%m-%d-%H%M%S%z'),
str(i)
])
tgz = shutil.make_archive(
os.path.join(os.path.dirname(dest), tarname),
'gztar',
stage_dir
)
tarfiles.append(tgz) tarfiles.append(tgz)
except Exception: except Exception:
shutil.rmtree(stage_dir, ignore_errors = True) shutil.rmtree(stage_dir, ignore_errors=True)
logger.exception("Failed to write analytics archive file") logger.exception("Failed to write analytics archive file")
finally: finally:
shutil.rmtree(dest, ignore_errors = True) shutil.rmtree(dest, ignore_errors=True)
return tarfiles return tarfiles
@@ -253,16 +244,17 @@ def ship(path):
s.headers = get_awx_http_client_headers() s.headers = get_awx_http_client_headers()
s.headers.pop('Content-Type') s.headers.pop('Content-Type')
with set_environ(**settings.AWX_TASK_ENV): with set_environ(**settings.AWX_TASK_ENV):
response = s.post(url, response = s.post(
files=files, url,
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem", files=files,
auth=(rh_user, rh_password), verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
headers=s.headers, auth=(rh_user, rh_password),
timeout=(31, 31)) headers=s.headers,
timeout=(31, 31),
)
# Accept 2XX status_codes # Accept 2XX status_codes
if response.status_code >= 300: if response.status_code >= 300:
return logger.exception('Upload failed with status {}, {}'.format(response.status_code, return logger.exception('Upload failed with status {}, {}'.format(response.status_code, response.text))
response.text))
finally: finally:
# cleanup tar.gz # cleanup tar.gz
if os.path.exists(path): if os.path.exists(path):

View File

@@ -1,16 +1,8 @@
from django.conf import settings from django.conf import settings
from prometheus_client import ( from prometheus_client import REGISTRY, PROCESS_COLLECTOR, PLATFORM_COLLECTOR, GC_COLLECTOR, Gauge, Info, generate_latest
REGISTRY,
PROCESS_COLLECTOR,
PLATFORM_COLLECTOR,
GC_COLLECTOR,
Gauge,
Info,
generate_latest
)
from awx.conf.license import get_license from awx.conf.license import get_license
from awx.main.utils import (get_awx_version, get_ansible_version) from awx.main.utils import get_awx_version, get_ansible_version
from awx.main.analytics.collectors import ( from awx.main.analytics.collectors import (
counts, counts,
instance_info, instance_info,
@@ -31,23 +23,97 @@ INV_COUNT = Gauge('awx_inventories_total', 'Number of inventories')
PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects') PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects')
JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates') JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates')
WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates') WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates')
HOST_COUNT = Gauge('awx_hosts_total', 'Number of hosts', ['type',]) HOST_COUNT = Gauge(
'awx_hosts_total',
'Number of hosts',
[
'type',
],
)
SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules') SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules')
INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of invetory scripts') INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of invetory scripts')
USER_SESSIONS = Gauge('awx_sessions_total', 'Number of sessions', ['type',]) USER_SESSIONS = Gauge(
'awx_sessions_total',
'Number of sessions',
[
'type',
],
)
CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs') CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs')
RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system') RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system')
PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the Tower system') PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the Tower system')
STATUS = Gauge('awx_status_total', 'Status of Job launched', ['status',]) STATUS = Gauge(
'awx_status_total',
'Status of Job launched',
[
'status',
],
)
INSTANCE_CAPACITY = Gauge('awx_instance_capacity', 'Capacity of each node in a Tower system', ['hostname', 'instance_uuid',]) INSTANCE_CAPACITY = Gauge(
INSTANCE_CPU = Gauge('awx_instance_cpu', 'CPU cores on each node in a Tower system', ['hostname', 'instance_uuid',]) 'awx_instance_capacity',
INSTANCE_MEMORY = Gauge('awx_instance_memory', 'RAM (Kb) on each node in a Tower system', ['hostname', 'instance_uuid',]) 'Capacity of each node in a Tower system',
INSTANCE_INFO = Info('awx_instance', 'Info about each node in a Tower system', ['hostname', 'instance_uuid',]) [
INSTANCE_LAUNCH_TYPE = Gauge('awx_instance_launch_type_total', 'Type of Job launched', ['node', 'launch_type',]) 'hostname',
INSTANCE_STATUS = Gauge('awx_instance_status_total', 'Status of Job launched', ['node', 'status',]) 'instance_uuid',
INSTANCE_CONSUMED_CAPACITY = Gauge('awx_instance_consumed_capacity', 'Consumed capacity of each node in a Tower system', ['hostname', 'instance_uuid',]) ],
INSTANCE_REMAINING_CAPACITY = Gauge('awx_instance_remaining_capacity', 'Remaining capacity of each node in a Tower system', ['hostname', 'instance_uuid',]) )
INSTANCE_CPU = Gauge(
'awx_instance_cpu',
'CPU cores on each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_MEMORY = Gauge(
'awx_instance_memory',
'RAM (Kb) on each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_INFO = Info(
'awx_instance',
'Info about each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_LAUNCH_TYPE = Gauge(
'awx_instance_launch_type_total',
'Type of Job launched',
[
'node',
'launch_type',
],
)
INSTANCE_STATUS = Gauge(
'awx_instance_status_total',
'Status of Job launched',
[
'node',
'status',
],
)
INSTANCE_CONSUMED_CAPACITY = Gauge(
'awx_instance_consumed_capacity',
'Consumed capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_REMAINING_CAPACITY = Gauge(
'awx_instance_remaining_capacity',
'Remaining capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license') LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license')
LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license') LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license')
@@ -55,18 +121,20 @@ LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining
def metrics(): def metrics():
license_info = get_license() license_info = get_license()
SYSTEM_INFO.info({ SYSTEM_INFO.info(
'install_uuid': settings.INSTALL_UUID, {
'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE), 'install_uuid': settings.INSTALL_UUID,
'tower_url_base': settings.TOWER_URL_BASE, 'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
'tower_version': get_awx_version(), 'tower_url_base': settings.TOWER_URL_BASE,
'ansible_version': get_ansible_version(), 'tower_version': get_awx_version(),
'license_type': license_info.get('license_type', 'UNLICENSED'), 'ansible_version': get_ansible_version(),
'license_expiry': str(license_info.get('time_remaining', 0)), 'license_type': license_info.get('license_type', 'UNLICENSED'),
'pendo_tracking': settings.PENDO_TRACKING_STATE, 'license_expiry': str(license_info.get('time_remaining', 0)),
'external_logger_enabled': str(settings.LOG_AGGREGATOR_ENABLED), 'pendo_tracking': settings.PENDO_TRACKING_STATE,
'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None') 'external_logger_enabled': str(settings.LOG_AGGREGATOR_ENABLED),
}) 'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None'),
}
)
LICENSE_INSTANCE_TOTAL.set(str(license_info.get('instance_count', 0))) LICENSE_INSTANCE_TOTAL.set(str(license_info.get('instance_count', 0)))
LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0))) LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0)))
@@ -108,16 +176,18 @@ def metrics():
INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory']) INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity']) INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity']) INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info({ INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info(
'enabled': str(instance_data[uuid]['enabled']), {
'last_isolated_check': getattr(instance_data[uuid], 'last_isolated_check', 'None'), 'enabled': str(instance_data[uuid]['enabled']),
'managed_by_policy': str(instance_data[uuid]['managed_by_policy']), 'last_isolated_check': getattr(instance_data[uuid], 'last_isolated_check', 'None'),
'version': instance_data[uuid]['version'] 'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
}) 'version': instance_data[uuid]['version'],
}
)
instance_data = job_instance_counts(None) instance_data = job_instance_counts(None)
for node in instance_data: for node in instance_data:
# skipping internal execution node (for system jobs) # skipping internal execution node (for system jobs)
if node == '': if node == '':
continue continue
types = instance_data[node].get('launch_type', {}) types = instance_data[node].get('launch_type', {})
@@ -127,7 +197,6 @@ def metrics():
for status, value in statuses.items(): for status, value in statuses.items():
INSTANCE_STATUS.labels(node=node, status=status).set(value) INSTANCE_STATUS.labels(node=node, status=status).set(value)
return generate_latest() return generate_latest()

View File

@@ -37,8 +37,7 @@ register(
'ORG_ADMINS_CAN_SEE_ALL_USERS', 'ORG_ADMINS_CAN_SEE_ALL_USERS',
field_class=fields.BooleanField, field_class=fields.BooleanField,
label=_('All Users Visible to Organization Admins'), label=_('All Users Visible to Organization Admins'),
help_text=_('Controls whether any Organization Admin can view all users and teams, ' help_text=_('Controls whether any Organization Admin can view all users and teams, ' 'even those not associated with their Organization.'),
'even those not associated with their Organization.'),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
) )
@@ -47,8 +46,10 @@ register(
'MANAGE_ORGANIZATION_AUTH', 'MANAGE_ORGANIZATION_AUTH',
field_class=fields.BooleanField, field_class=fields.BooleanField,
label=_('Organization Admins Can Manage Users and Teams'), label=_('Organization Admins Can Manage Users and Teams'),
help_text=_('Controls whether any Organization Admin has the privileges to create and manage users and teams. ' help_text=_(
'You may want to disable this ability if you are using an LDAP or SAML integration.'), 'Controls whether any Organization Admin has the privileges to create and manage users and teams. '
'You may want to disable this ability if you are using an LDAP or SAML integration.'
),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
) )
@@ -59,8 +60,7 @@ register(
schemes=('http', 'https'), schemes=('http', 'https'),
allow_plain_hostname=True, # Allow hostname only without TLD. allow_plain_hostname=True, # Allow hostname only without TLD.
label=_('Base URL of the Tower host'), label=_('Base URL of the Tower host'),
help_text=_('This setting is used by services like notifications to render ' help_text=_('This setting is used by services like notifications to render ' 'a valid url to the Tower host.'),
'a valid url to the Tower host.'),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
) )
@@ -69,11 +69,13 @@ register(
'REMOTE_HOST_HEADERS', 'REMOTE_HOST_HEADERS',
field_class=fields.StringListField, field_class=fields.StringListField,
label=_('Remote Host Headers'), label=_('Remote Host Headers'),
help_text=_('HTTP headers and meta keys to search to determine remote host ' help_text=_(
'name or IP. Add additional items to this list, such as ' 'HTTP headers and meta keys to search to determine remote host '
'"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. ' 'name or IP. Add additional items to this list, such as '
'See the "Proxy Support" section of the Adminstrator guide for ' '"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. '
'more details.'), 'See the "Proxy Support" section of the Adminstrator guide for '
'more details.'
),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
) )
@@ -82,11 +84,13 @@ register(
'PROXY_IP_ALLOWED_LIST', 'PROXY_IP_ALLOWED_LIST',
field_class=fields.StringListField, field_class=fields.StringListField,
label=_('Proxy IP Allowed List'), label=_('Proxy IP Allowed List'),
help_text=_("If Tower is behind a reverse proxy/load balancer, use this setting " help_text=_(
"to configure the proxy IP addresses from which Tower should trust " "If Tower is behind a reverse proxy/load balancer, use this setting "
"custom REMOTE_HOST_HEADERS header values. " "to configure the proxy IP addresses from which Tower should trust "
"If this setting is an empty list (the default), the headers specified by " "custom REMOTE_HOST_HEADERS header values. "
"REMOTE_HOST_HEADERS will be trusted unconditionally')"), "If this setting is an empty list (the default), the headers specified by "
"REMOTE_HOST_HEADERS will be trusted unconditionally')"
),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
) )
@@ -97,9 +101,7 @@ register(
field_class=fields.DictField, field_class=fields.DictField,
default=lambda: {}, default=lambda: {},
label=_('License'), label=_('License'),
help_text=_('The license controls which features and functionality are ' help_text=_('The license controls which features and functionality are ' 'enabled. Use /api/v2/config/ to update or change ' 'the license.'),
'enabled. Use /api/v2/config/ to update or change '
'the license.'),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
) )
@@ -193,8 +195,7 @@ register(
'CUSTOM_VENV_PATHS', 'CUSTOM_VENV_PATHS',
field_class=fields.StringListPathField, field_class=fields.StringListPathField,
label=_('Custom virtual environment paths'), label=_('Custom virtual environment paths'),
help_text=_('Paths where Tower will look for custom virtual environments ' help_text=_('Paths where Tower will look for custom virtual environments ' '(in addition to /var/lib/awx/venv/). Enter one path per line.'),
'(in addition to /var/lib/awx/venv/). Enter one path per line.'),
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
default=[], default=[],
@@ -244,9 +245,11 @@ register(
'AWX_PROOT_BASE_PATH', 'AWX_PROOT_BASE_PATH',
field_class=fields.CharField, field_class=fields.CharField,
label=_('Job execution path'), label=_('Job execution path'),
help_text=_('The directory in which Tower will create new temporary ' help_text=_(
'directories for job execution and isolation ' 'The directory in which Tower will create new temporary '
'(such as credential files and custom inventory scripts).'), 'directories for job execution and isolation '
'(such as credential files and custom inventory scripts).'
),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
) )
@@ -287,8 +290,10 @@ register(
field_class=fields.IntegerField, field_class=fields.IntegerField,
min_value=0, min_value=0,
label=_('Isolated launch timeout'), label=_('Isolated launch timeout'),
help_text=_('The timeout (in seconds) for launching jobs on isolated instances. ' help_text=_(
'This includes the time needed to copy source control files (playbooks) to the isolated instance.'), 'The timeout (in seconds) for launching jobs on isolated instances. '
'This includes the time needed to copy source control files (playbooks) to the isolated instance.'
),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
unit=_('seconds'), unit=_('seconds'),
@@ -300,8 +305,10 @@ register(
min_value=0, min_value=0,
default=10, default=10,
label=_('Isolated connection timeout'), label=_('Isolated connection timeout'),
help_text=_('Ansible SSH connection timeout (in seconds) to use when communicating with isolated instances. ' help_text=_(
'Value should be substantially greater than expected network latency.'), 'Ansible SSH connection timeout (in seconds) to use when communicating with isolated instances. '
'Value should be substantially greater than expected network latency.'
),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
unit=_('seconds'), unit=_('seconds'),
@@ -314,7 +321,7 @@ register(
help_text=_('When set to True, AWX will enforce strict host key checking for communication with isolated nodes.'), help_text=_('When set to True, AWX will enforce strict host key checking for communication with isolated nodes.'),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
default=False default=False,
) )
register( register(
@@ -322,9 +329,11 @@ register(
field_class=fields.BooleanField, field_class=fields.BooleanField,
default=True, default=True,
label=_('Generate RSA keys for isolated instances'), label=_('Generate RSA keys for isolated instances'),
help_text=_('If set, a random RSA key will be generated and distributed to ' help_text=_(
'isolated instances. To disable this behavior and manage authentication ' 'If set, a random RSA key will be generated and distributed to '
'for isolated instances outside of Tower, disable this setting.'), # noqa 'isolated instances. To disable this behavior and manage authentication '
'for isolated instances outside of Tower, disable this setting.'
), # noqa
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
) )
@@ -359,8 +368,7 @@ register(
field_class=fields.BooleanField, field_class=fields.BooleanField,
default=False, default=False,
label=_('Enable detailed resource profiling on all playbook runs'), label=_('Enable detailed resource profiling on all playbook runs'),
help_text=_('If set, detailed resource profiling data will be collected on all jobs. ' help_text=_('If set, detailed resource profiling data will be collected on all jobs. ' 'This data can be gathered with `sosreport`.'), # noqa
'This data can be gathered with `sosreport`.'), # noqa
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
) )
@@ -370,8 +378,7 @@ register(
field_class=FloatField, field_class=FloatField,
default='0.25', default='0.25',
label=_('Interval (in seconds) between polls for cpu usage.'), label=_('Interval (in seconds) between polls for cpu usage.'),
help_text=_('Interval (in seconds) between polls for cpu usage. ' help_text=_('Interval (in seconds) between polls for cpu usage. ' 'Setting this lower than the default will affect playbook performance.'),
'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
required=False, required=False,
@@ -382,8 +389,7 @@ register(
field_class=FloatField, field_class=FloatField,
default='0.25', default='0.25',
label=_('Interval (in seconds) between polls for memory usage.'), label=_('Interval (in seconds) between polls for memory usage.'),
help_text=_('Interval (in seconds) between polls for memory usage. ' help_text=_('Interval (in seconds) between polls for memory usage. ' 'Setting this lower than the default will affect playbook performance.'),
'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
required=False, required=False,
@@ -394,8 +400,7 @@ register(
field_class=FloatField, field_class=FloatField,
default='0.25', default='0.25',
label=_('Interval (in seconds) between polls for PID count.'), label=_('Interval (in seconds) between polls for PID count.'),
help_text=_('Interval (in seconds) between polls for PID count. ' help_text=_('Interval (in seconds) between polls for PID count. ' 'Setting this lower than the default will affect playbook performance.'),
'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
required=False, required=False,
@@ -469,10 +474,9 @@ register(
field_class=fields.BooleanField, field_class=fields.BooleanField,
default=False, default=False,
label=_('Ignore Ansible Galaxy SSL Certificate Verification'), label=_('Ignore Ansible Galaxy SSL Certificate Verification'),
help_text=_('If set to true, certificate validation will not be done when ' help_text=_('If set to true, certificate validation will not be done when ' 'installing content from any Galaxy server.'),
'installing content from any Galaxy server.'),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs' category_slug='jobs',
) )
register( register(
@@ -491,7 +495,8 @@ register(
min_value=0, min_value=0,
label=_('Job Event Standard Output Maximum Display Size'), label=_('Job Event Standard Output Maximum Display Size'),
help_text=_( help_text=_(
u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'), u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'
),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
) )
@@ -522,8 +527,10 @@ register(
min_value=0, min_value=0,
default=0, default=0,
label=_('Default Job Timeout'), label=_('Default Job Timeout'),
help_text=_('Maximum time in seconds to allow jobs to run. Use value of 0 to indicate that no ' help_text=_(
'timeout should be imposed. A timeout set on an individual job template will override this.'), 'Maximum time in seconds to allow jobs to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual job template will override this.'
),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
unit=_('seconds'), unit=_('seconds'),
@@ -535,8 +542,10 @@ register(
min_value=0, min_value=0,
default=0, default=0,
label=_('Default Inventory Update Timeout'), label=_('Default Inventory Update Timeout'),
help_text=_('Maximum time in seconds to allow inventory updates to run. Use value of 0 to indicate that no ' help_text=_(
'timeout should be imposed. A timeout set on an individual inventory source will override this.'), 'Maximum time in seconds to allow inventory updates to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual inventory source will override this.'
),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
unit=_('seconds'), unit=_('seconds'),
@@ -548,8 +557,10 @@ register(
min_value=0, min_value=0,
default=0, default=0,
label=_('Default Project Update Timeout'), label=_('Default Project Update Timeout'),
help_text=_('Maximum time in seconds to allow project updates to run. Use value of 0 to indicate that no ' help_text=_(
'timeout should be imposed. A timeout set on an individual project will override this.'), 'Maximum time in seconds to allow project updates to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual project will override this.'
),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
unit=_('seconds'), unit=_('seconds'),
@@ -561,10 +572,12 @@ register(
min_value=0, min_value=0,
default=0, default=0,
label=_('Per-Host Ansible Fact Cache Timeout'), label=_('Per-Host Ansible Fact Cache Timeout'),
help_text=_('Maximum time, in seconds, that stored Ansible facts are considered valid since ' help_text=_(
'the last time they were modified. Only valid, non-stale, facts will be accessible by ' 'Maximum time, in seconds, that stored Ansible facts are considered valid since '
'a playbook. Note, this does not influence the deletion of ansible_facts from the database. ' 'the last time they were modified. Only valid, non-stale, facts will be accessible by '
'Use a value of 0 to indicate that no timeout should be imposed.'), 'a playbook. Note, this does not influence the deletion of ansible_facts from the database. '
'Use a value of 0 to indicate that no timeout should be imposed.'
),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
unit=_('seconds'), unit=_('seconds'),
@@ -576,8 +589,7 @@ register(
allow_null=False, allow_null=False,
default=200, default=200,
label=_('Maximum number of forks per job'), label=_('Maximum number of forks per job'),
help_text=_('Saving a Job Template with more than this number of forks will result in an error. ' help_text=_('Saving a Job Template with more than this number of forks will result in an error. ' 'When set to 0, no limit is applied.'),
'When set to 0, no limit is applied.'),
category=_('Jobs'), category=_('Jobs'),
category_slug='jobs', category_slug='jobs',
) )
@@ -598,11 +610,10 @@ register(
allow_null=True, allow_null=True,
default=None, default=None,
label=_('Logging Aggregator Port'), label=_('Logging Aggregator Port'),
help_text=_('Port on Logging Aggregator to send logs to (if required and not' help_text=_('Port on Logging Aggregator to send logs to (if required and not' ' provided in Logging Aggregator).'),
' provided in Logging Aggregator).'),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
required=False required=False,
) )
register( register(
'LOG_AGGREGATOR_TYPE', 'LOG_AGGREGATOR_TYPE',
@@ -643,12 +654,14 @@ register(
field_class=fields.StringListField, field_class=fields.StringListField,
default=['awx', 'activity_stream', 'job_events', 'system_tracking'], default=['awx', 'activity_stream', 'job_events', 'system_tracking'],
label=_('Loggers Sending Data to Log Aggregator Form'), label=_('Loggers Sending Data to Log Aggregator Form'),
help_text=_('List of loggers that will send HTTP logs to the collector, these can ' help_text=_(
'include any or all of: \n' 'List of loggers that will send HTTP logs to the collector, these can '
'awx - service logs\n' 'include any or all of: \n'
'activity_stream - activity stream records\n' 'awx - service logs\n'
'job_events - callback data from Ansible job events\n' 'activity_stream - activity stream records\n'
'system_tracking - facts gathered from scan jobs.'), 'job_events - callback data from Ansible job events\n'
'system_tracking - facts gathered from scan jobs.'
),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
) )
@@ -657,10 +670,12 @@ register(
field_class=fields.BooleanField, field_class=fields.BooleanField,
default=False, default=False,
label=_('Log System Tracking Facts Individually'), label=_('Log System Tracking Facts Individually'),
help_text=_('If set, system tracking facts will be sent for each package, service, or ' help_text=_(
'other item found in a scan, allowing for greater search query granularity. ' 'If set, system tracking facts will be sent for each package, service, or '
'If unset, facts will be sent as a single dictionary, allowing for greater ' 'other item found in a scan, allowing for greater search query granularity. '
'efficiency in fact processing.'), 'If unset, facts will be sent as a single dictionary, allowing for greater '
'efficiency in fact processing.'
),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
) )
@@ -689,9 +704,11 @@ register(
choices=[('https', 'HTTPS/HTTP'), ('tcp', 'TCP'), ('udp', 'UDP')], choices=[('https', 'HTTPS/HTTP'), ('tcp', 'TCP'), ('udp', 'UDP')],
default='https', default='https',
label=_('Logging Aggregator Protocol'), label=_('Logging Aggregator Protocol'),
help_text=_('Protocol used to communicate with log aggregator. ' help_text=_(
'HTTPS/HTTP assumes HTTPS unless http:// is explicitly used in ' 'Protocol used to communicate with log aggregator. '
'the Logging Aggregator hostname.'), 'HTTPS/HTTP assumes HTTPS unless http:// is explicitly used in '
'the Logging Aggregator hostname.'
),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
) )
@@ -700,9 +717,7 @@ register(
field_class=fields.IntegerField, field_class=fields.IntegerField,
default=5, default=5,
label=_('TCP Connection Timeout'), label=_('TCP Connection Timeout'),
help_text=_('Number of seconds for a TCP connection to external log ' help_text=_('Number of seconds for a TCP connection to external log ' 'aggregator to timeout. Applies to HTTPS and TCP log ' 'aggregator protocols.'),
'aggregator to timeout. Applies to HTTPS and TCP log '
'aggregator protocols.'),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
unit=_('seconds'), unit=_('seconds'),
@@ -712,10 +727,12 @@ register(
field_class=fields.BooleanField, field_class=fields.BooleanField,
default=True, default=True,
label=_('Enable/disable HTTPS certificate verification'), label=_('Enable/disable HTTPS certificate verification'),
help_text=_('Flag to control enable/disable of certificate verification' help_text=_(
' when LOG_AGGREGATOR_PROTOCOL is "https". If enabled, Tower\'s' 'Flag to control enable/disable of certificate verification'
' log handler will verify certificate sent by external log aggregator' ' when LOG_AGGREGATOR_PROTOCOL is "https". If enabled, Tower\'s'
' before establishing connection.'), ' log handler will verify certificate sent by external log aggregator'
' before establishing connection.'
),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
) )
@@ -725,10 +742,12 @@ register(
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
default='WARNING', default='WARNING',
label=_('Logging Aggregator Level Threshold'), label=_('Logging Aggregator Level Threshold'),
help_text=_('Level threshold used by log handler. Severities from lowest to highest' help_text=_(
' are DEBUG, INFO, WARNING, ERROR, CRITICAL. Messages less severe ' 'Level threshold used by log handler. Severities from lowest to highest'
'than the threshold will be ignored by log handler. (messages under category ' ' are DEBUG, INFO, WARNING, ERROR, CRITICAL. Messages less severe '
'awx.anlytics ignore this setting)'), 'than the threshold will be ignored by log handler. (messages under category '
'awx.anlytics ignore this setting)'
),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
) )
@@ -738,9 +757,11 @@ register(
default=1, default=1,
min_value=1, min_value=1,
label=_('Maximum disk persistance for external log aggregation (in GB)'), label=_('Maximum disk persistance for external log aggregation (in GB)'),
help_text=_('Amount of data to store (in gigabytes) during an outage of ' help_text=_(
'the external log aggregator (defaults to 1). ' 'Amount of data to store (in gigabytes) during an outage of '
'Equivalent to the rsyslogd queue.maxdiskspace setting.'), 'the external log aggregator (defaults to 1). '
'Equivalent to the rsyslogd queue.maxdiskspace setting.'
),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
) )
@@ -749,9 +770,11 @@ register(
field_class=fields.CharField, field_class=fields.CharField,
default='/var/lib/awx', default='/var/lib/awx',
label=_('File system location for rsyslogd disk persistence'), label=_('File system location for rsyslogd disk persistence'),
help_text=_('Location to persist logs that should be retried after an outage ' help_text=_(
'of the external log aggregator (defaults to /var/lib/awx). ' 'Location to persist logs that should be retried after an outage '
'Equivalent to the rsyslogd queue.spoolDirectory setting.'), 'of the external log aggregator (defaults to /var/lib/awx). '
'Equivalent to the rsyslogd queue.spoolDirectory setting.'
),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
) )
@@ -760,21 +783,19 @@ register(
field_class=fields.BooleanField, field_class=fields.BooleanField,
default=False, default=False,
label=_('Enable rsyslogd debugging'), label=_('Enable rsyslogd debugging'),
help_text=_('Enabled high verbosity debugging for rsyslogd. ' help_text=_('Enabled high verbosity debugging for rsyslogd. ' 'Useful for debugging connection issues for external log aggregation.'),
'Useful for debugging connection issues for external log aggregation.'),
category=_('Logging'), category=_('Logging'),
category_slug='logging', category_slug='logging',
) )
register( register(
'AUTOMATION_ANALYTICS_LAST_GATHER', 'AUTOMATION_ANALYTICS_LAST_GATHER',
field_class=fields.DateTimeField, field_class=fields.DateTimeField,
label=_('Last gather date for Automation Analytics.'), label=_('Last gather date for Automation Analytics.'),
allow_null=True, allow_null=True,
category=_('System'), category=_('System'),
category_slug='system' category_slug='system',
) )
@@ -783,8 +804,8 @@ register(
field_class=fields.IntegerField, field_class=fields.IntegerField,
label=_('Automation Analytics Gather Interval'), label=_('Automation Analytics Gather Interval'),
help_text=_('Interval (in seconds) between data gathering.'), help_text=_('Interval (in seconds) between data gathering.'),
default=14400, # every 4 hours default=14400, # every 4 hours
min_value=1800, # every 30 minutes min_value=1800, # every 30 minutes
category=_('System'), category=_('System'),
category_slug='system', category_slug='system',
unit=_('seconds'), unit=_('seconds'),
@@ -792,17 +813,23 @@ register(
def logging_validate(serializer, attrs): def logging_validate(serializer, attrs):
if not serializer.instance or \ if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or \
not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
return attrs return attrs
errors = [] errors = []
if attrs.get('LOG_AGGREGATOR_ENABLED', False): if attrs.get('LOG_AGGREGATOR_ENABLED', False):
if not serializer.instance.LOG_AGGREGATOR_HOST and not attrs.get('LOG_AGGREGATOR_HOST', None) or\ if (
serializer.instance.LOG_AGGREGATOR_HOST and not attrs.get('LOG_AGGREGATOR_HOST', True): not serializer.instance.LOG_AGGREGATOR_HOST
and not attrs.get('LOG_AGGREGATOR_HOST', None)
or serializer.instance.LOG_AGGREGATOR_HOST
and not attrs.get('LOG_AGGREGATOR_HOST', True)
):
errors.append('Cannot enable log aggregator without providing host.') errors.append('Cannot enable log aggregator without providing host.')
if not serializer.instance.LOG_AGGREGATOR_TYPE and not attrs.get('LOG_AGGREGATOR_TYPE', None) or\ if (
serializer.instance.LOG_AGGREGATOR_TYPE and not attrs.get('LOG_AGGREGATOR_TYPE', True): not serializer.instance.LOG_AGGREGATOR_TYPE
and not attrs.get('LOG_AGGREGATOR_TYPE', None)
or serializer.instance.LOG_AGGREGATOR_TYPE
and not attrs.get('LOG_AGGREGATOR_TYPE', True)
):
errors.append('Cannot enable log aggregator without providing type.') errors.append('Cannot enable log aggregator without providing type.')
if errors: if errors:
raise serializers.ValidationError(_('\n'.join(errors))) raise serializers.ValidationError(_('\n'.join(errors)))

View File

@@ -6,17 +6,33 @@ import re
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
__all__ = [ __all__ = [
'CLOUD_PROVIDERS', 'SCHEDULEABLE_PROVIDERS', 'PRIVILEGE_ESCALATION_METHODS', 'CLOUD_PROVIDERS',
'ANSI_SGR_PATTERN', 'CAN_CANCEL', 'ACTIVE_STATES', 'STANDARD_INVENTORY_UPDATE_ENV' 'SCHEDULEABLE_PROVIDERS',
'PRIVILEGE_ESCALATION_METHODS',
'ANSI_SGR_PATTERN',
'CAN_CANCEL',
'ACTIVE_STATES',
'STANDARD_INVENTORY_UPDATE_ENV',
] ]
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'tower') CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'tower')
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',) SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + (
'custom',
'scm',
)
PRIVILEGE_ESCALATION_METHODS = [ PRIVILEGE_ESCALATION_METHODS = [
('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')), ('sudo', _('Sudo')),
('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas')), ('su', _('Su')),
('enable', _('Enable')), ('doas', _('Doas')), ('ksu', _('Ksu')), ('pbrun', _('Pbrun')),
('machinectl', _('Machinectl')), ('sesu', _('Sesu')), ('pfexec', _('Pfexec')),
('dzdo', _('DZDO')),
('pmrun', _('Pmrun')),
('runas', _('Runas')),
('enable', _('Enable')),
('doas', _('Doas')),
('ksu', _('Ksu')),
('machinectl', _('Machinectl')),
('sesu', _('Sesu')),
] ]
CHOICES_PRIVILEGE_ESCALATION_METHODS = [('', _('None'))] + PRIVILEGE_ESCALATION_METHODS CHOICES_PRIVILEGE_ESCALATION_METHODS = [('', _('None'))] + PRIVILEGE_ESCALATION_METHODS
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m') ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
@@ -26,19 +42,35 @@ STANDARD_INVENTORY_UPDATE_ENV = {
# Always use the --export option for ansible-inventory # Always use the --export option for ansible-inventory
'ANSIBLE_INVENTORY_EXPORT': 'True', 'ANSIBLE_INVENTORY_EXPORT': 'True',
# Redirecting output to stderr allows JSON parsing to still work with -vvv # Redirecting output to stderr allows JSON parsing to still work with -vvv
'ANSIBLE_VERBOSE_TO_STDERR': 'True' 'ANSIBLE_VERBOSE_TO_STDERR': 'True',
} }
CAN_CANCEL = ('new', 'pending', 'waiting', 'running') CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL ACTIVE_STATES = CAN_CANCEL
CENSOR_VALUE = '************' CENSOR_VALUE = '************'
ENV_BLOCKLIST = frozenset(( ENV_BLOCKLIST = frozenset(
'VIRTUAL_ENV', 'PATH', 'PYTHONPATH', 'PROOT_TMP_DIR', 'JOB_ID', (
'INVENTORY_ID', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID', 'VIRTUAL_ENV',
'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES', 'PATH',
'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE', 'PYTHONPATH',
'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS', 'PROOT_TMP_DIR',
'AWX_HOST', 'PROJECT_REVISION', 'SUPERVISOR_WEB_CONFIG_PATH' 'JOB_ID',
)) 'INVENTORY_ID',
'INVENTORY_SOURCE_ID',
'INVENTORY_UPDATE_ID',
'AD_HOC_COMMAND_ID',
'REST_API_URL',
'REST_API_TOKEN',
'MAX_EVENT_RES',
'CALLBACK_QUEUE',
'CALLBACK_CONNECTION',
'CACHE',
'JOB_CALLBACK_DEBUG',
'INVENTORY_HOSTVARS',
'AWX_HOST',
'PROJECT_REVISION',
'SUPERVISOR_WEB_CONFIG_PATH',
)
)
# loggers that may be called in process of emitting a log # loggers that may be called in process of emitting a log
LOGGER_BLOCKLIST = ( LOGGER_BLOCKLIST = (
@@ -48,5 +80,5 @@ LOGGER_BLOCKLIST = (
'awx.main.utils.encryption', 'awx.main.utils.encryption',
'awx.main.utils.log', 'awx.main.utils.log',
# loggers that may be called getting logging settings # loggers that may be called getting logging settings
'awx.conf' 'awx.conf',
) )

View File

@@ -22,7 +22,7 @@ class WebsocketSecretAuthHelper:
""" """
Middlewareish for websockets to verify node websocket broadcast interconnect. Middlewareish for websockets to verify node websocket broadcast interconnect.
Note: The "ish" is due to the channels routing interface. Routing occurs Note: The "ish" is due to the channels routing interface. Routing occurs
_after_ authentication; making it hard to apply this auth to _only_ a subset of _after_ authentication; making it hard to apply this auth to _only_ a subset of
websocket endpoints. websocket endpoints.
""" """
@@ -30,19 +30,13 @@ class WebsocketSecretAuthHelper:
@classmethod @classmethod
def construct_secret(cls): def construct_secret(cls):
nonce_serialized = f"{int(time.time())}" nonce_serialized = f"{int(time.time())}"
payload_dict = { payload_dict = {'secret': settings.BROADCAST_WEBSOCKET_SECRET, 'nonce': nonce_serialized}
'secret': settings.BROADCAST_WEBSOCKET_SECRET,
'nonce': nonce_serialized
}
payload_serialized = json.dumps(payload_dict) payload_serialized = json.dumps(payload_dict)
secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET), secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET), msg=force_bytes(payload_serialized), digestmod='sha256').hexdigest()
msg=force_bytes(payload_serialized),
digestmod='sha256').hexdigest()
return 'HMAC-SHA256 {}:{}'.format(nonce_serialized, secret_serialized) return 'HMAC-SHA256 {}:{}'.format(nonce_serialized, secret_serialized)
@classmethod @classmethod
def verify_secret(cls, s, nonce_tolerance=300): def verify_secret(cls, s, nonce_tolerance=300):
try: try:
@@ -62,9 +56,7 @@ class WebsocketSecretAuthHelper:
except Exception: except Exception:
raise ValueError("Failed to create hash to compare to secret.") raise ValueError("Failed to create hash to compare to secret.")
secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET), secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET), msg=force_bytes(payload_serialized), digestmod='sha256').hexdigest()
msg=force_bytes(payload_serialized),
digestmod='sha256').hexdigest()
if secret_serialized != secret_parsed: if secret_serialized != secret_parsed:
raise ValueError("Invalid secret") raise ValueError("Invalid secret")
@@ -90,7 +82,6 @@ class WebsocketSecretAuthHelper:
class BroadcastConsumer(AsyncJsonWebsocketConsumer): class BroadcastConsumer(AsyncJsonWebsocketConsumer):
async def connect(self): async def connect(self):
try: try:
WebsocketSecretAuthHelper.is_authorized(self.scope) WebsocketSecretAuthHelper.is_authorized(self.scope)
@@ -151,13 +142,10 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
async def receive_json(self, data): async def receive_json(self, data):
from awx.main.access import consumer_access from awx.main.access import consumer_access
user = self.scope['user'] user = self.scope['user']
xrftoken = data.get('xrftoken') xrftoken = data.get('xrftoken')
if ( if not xrftoken or XRF_KEY not in self.scope["session"] or xrftoken != self.scope["session"][XRF_KEY]:
not xrftoken or
XRF_KEY not in self.scope["session"] or
xrftoken != self.scope["session"][XRF_KEY]
):
logger.error(f"access denied to channel, XRF mismatch for {user.username}") logger.error(f"access denied to channel, XRF mismatch for {user.username}")
await self.send_json({"error": "access denied to channel"}) await self.send_json({"error": "access denied to channel"})
return return
@@ -166,7 +154,7 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
groups = data['groups'] groups = data['groups']
new_groups = set() new_groups = set()
current_groups = set(self.scope['session'].pop('groups') if 'groups' in self.scope['session'] else []) current_groups = set(self.scope['session'].pop('groups') if 'groups' in self.scope['session'] else [])
for group_name,v in groups.items(): for group_name, v in groups.items():
if type(v) is list: if type(v) is list:
for oid in v: for oid in v:
name = '{}-{}'.format(group_name, oid) name = '{}-{}'.format(group_name, oid)
@@ -191,16 +179,9 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
new_groups_exclusive = new_groups - current_groups new_groups_exclusive = new_groups - current_groups
for group_name in new_groups_exclusive: for group_name in new_groups_exclusive:
await self.channel_layer.group_add( await self.channel_layer.group_add(group_name, self.channel_name)
group_name,
self.channel_name
)
self.scope['session']['groups'] = new_groups self.scope['session']['groups'] = new_groups
await self.send_json({ await self.send_json({"groups_current": list(new_groups), "groups_left": list(old_groups), "groups_joined": list(new_groups_exclusive)})
"groups_current": list(new_groups),
"groups_left": list(old_groups),
"groups_joined": list(new_groups_exclusive)
})
async def internal_message(self, event): async def internal_message(self, event):
await self.send(event['text']) await self.send(event['text'])
@@ -221,7 +202,7 @@ def _dump_payload(payload):
def emit_channel_notification(group, payload): def emit_channel_notification(group, payload):
from awx.main.wsbroadcast import wrap_broadcast_msg # noqa from awx.main.wsbroadcast import wrap_broadcast_msg # noqa
payload_dumped = _dump_payload(payload) payload_dumped = _dump_payload(payload)
if payload_dumped is None: if payload_dumped is None:
@@ -229,18 +210,19 @@ def emit_channel_notification(group, payload):
channel_layer = get_channel_layer() channel_layer = get_channel_layer()
run_sync(channel_layer.group_send( run_sync(
group, channel_layer.group_send(
{ group,
"type": "internal.message", {"type": "internal.message", "text": payload_dumped},
"text": payload_dumped )
}, )
))
run_sync(channel_layer.group_send( run_sync(
settings.BROADCAST_WEBSOCKET_GROUP_NAME, channel_layer.group_send(
{ settings.BROADCAST_WEBSOCKET_GROUP_NAME,
"type": "internal.message", {
"text": wrap_broadcast_msg(group, payload_dumped), "type": "internal.message",
}, "text": wrap_broadcast_msg(group, payload_dumped),
)) },
)
)

View File

@@ -6,51 +6,55 @@ from django.utils.translation import ugettext_lazy as _
import requests import requests
aim_inputs = { aim_inputs = {
'fields': [{ 'fields': [
'id': 'url', {
'label': _('CyberArk AIM URL'), 'id': 'url',
'type': 'string', 'label': _('CyberArk AIM URL'),
'format': 'url', 'type': 'string',
}, { 'format': 'url',
'id': 'app_id', },
'label': _('Application ID'), {
'type': 'string', 'id': 'app_id',
'secret': True, 'label': _('Application ID'),
}, { 'type': 'string',
'id': 'client_key', 'secret': True,
'label': _('Client Key'), },
'type': 'string', {
'secret': True, 'id': 'client_key',
'multiline': True, 'label': _('Client Key'),
}, { 'type': 'string',
'id': 'client_cert', 'secret': True,
'label': _('Client Certificate'), 'multiline': True,
'type': 'string', },
'secret': True, {
'multiline': True, 'id': 'client_cert',
}, { 'label': _('Client Certificate'),
'id': 'verify', 'type': 'string',
'label': _('Verify SSL Certificates'), 'secret': True,
'type': 'boolean', 'multiline': True,
'default': True, },
}], {
'metadata': [{ 'id': 'verify',
'id': 'object_query', 'label': _('Verify SSL Certificates'),
'label': _('Object Query'), 'type': 'boolean',
'type': 'string', 'default': True,
'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'), },
}, { ],
'id': 'object_query_format', 'metadata': [
'label': _('Object Query Format'), {
'type': 'string', 'id': 'object_query',
'default': 'Exact', 'label': _('Object Query'),
'choices': ['Exact', 'Regexp'] 'type': 'string',
}, { 'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
'id': 'reason', },
'label': _('Reason'), {'id': 'object_query_format', 'label': _('Object Query Format'), 'type': 'string', 'default': 'Exact', 'choices': ['Exact', 'Regexp']},
'type': 'string', {
'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.') 'id': 'reason',
}], 'label': _('Reason'),
'type': 'string',
'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.'),
},
],
'required': ['url', 'app_id', 'object_query'], 'required': ['url', 'app_id', 'object_query'],
} }
@@ -88,8 +92,4 @@ def aim_backend(**kwargs):
return res.json()['Content'] return res.json()['Content']
aim_plugin = CredentialPlugin( aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
'CyberArk AIM Central Credential Provider Lookup',
inputs=aim_inputs,
backend=aim_backend
)

View File

@@ -7,51 +7,48 @@ from msrestazure import azure_cloud
# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py # https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
clouds = [ clouds = [vars(azure_cloud)[n] for n in dir(azure_cloud) if n.startswith("AZURE_") and n.endswith("_CLOUD")]
vars(azure_cloud)[n]
for n in dir(azure_cloud)
if n.startswith("AZURE_") and n.endswith("_CLOUD")
]
default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"] default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"]
azure_keyvault_inputs = { azure_keyvault_inputs = {
'fields': [{ 'fields': [
'id': 'url', {
'label': _('Vault URL (DNS Name)'), 'id': 'url',
'type': 'string', 'label': _('Vault URL (DNS Name)'),
'format': 'url', 'type': 'string',
}, { 'format': 'url',
'id': 'client', },
'label': _('Client ID'), {'id': 'client', 'label': _('Client ID'), 'type': 'string'},
'type': 'string' {
}, { 'id': 'secret',
'id': 'secret', 'label': _('Client Secret'),
'label': _('Client Secret'), 'type': 'string',
'type': 'string', 'secret': True,
'secret': True, },
}, { {'id': 'tenant', 'label': _('Tenant ID'), 'type': 'string'},
'id': 'tenant', {
'label': _('Tenant ID'), 'id': 'cloud_name',
'type': 'string' 'label': _('Cloud Environment'),
}, { 'help_text': _('Specify which azure cloud environment to use.'),
'id': 'cloud_name', 'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
'label': _('Cloud Environment'), 'default': default_cloud.name,
'help_text': _('Specify which azure cloud environment to use.'), },
'choices': list(set([default_cloud.name] + [c.name for c in clouds])), ],
'default': default_cloud.name 'metadata': [
}], {
'metadata': [{ 'id': 'secret_field',
'id': 'secret_field', 'label': _('Secret Name'),
'label': _('Secret Name'), 'type': 'string',
'type': 'string', 'help_text': _('The name of the secret to look up.'),
'help_text': _('The name of the secret to look up.'), },
}, { {
'id': 'secret_version', 'id': 'secret_version',
'label': _('Secret Version'), 'label': _('Secret Version'),
'type': 'string', 'type': 'string',
'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'), 'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
}], },
],
'required': ['url', 'client', 'secret', 'tenant', 'secret_field'], 'required': ['url', 'client', 'secret', 'tenant', 'secret_field'],
} }
@@ -62,11 +59,11 @@ def azure_keyvault_backend(**kwargs):
def auth_callback(server, resource, scope): def auth_callback(server, resource, scope):
credentials = ServicePrincipalCredentials( credentials = ServicePrincipalCredentials(
url = url, url=url,
client_id = kwargs['client'], client_id=kwargs['client'],
secret = kwargs['secret'], secret=kwargs['secret'],
tenant = kwargs['tenant'], tenant=kwargs['tenant'],
resource = f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}", resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
) )
token = credentials.token token = credentials.token
return token['token_type'], token['access_token'] return token['token_type'], token['access_token']
@@ -75,8 +72,4 @@ def azure_keyvault_backend(**kwargs):
return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value
azure_keyvault_plugin = CredentialPlugin( azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)
'Microsoft Azure Key Vault',
inputs=azure_keyvault_inputs,
backend=azure_keyvault_backend
)

View File

@@ -2,68 +2,68 @@ from .plugin import CredentialPlugin, raise_for_status
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
from urllib.parse import urljoin from urllib.parse import urljoin
import requests import requests
pas_inputs = {
'fields': [{
'id': 'url',
'label': _('Centrify Tenant URL'),
'type': 'string',
'help_text': _('Centrify Tenant URL'),
'format': 'url',
}, {
'id':'client_id',
'label':_('Centrify API User'),
'type':'string',
'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'),
}, { pas_inputs = {
'id':'client_password', 'fields': [
'label':_('Centrify API Password'), {
'type':'string', 'id': 'url',
'help_text': _('Password of Centrify API User with necessary permissions'), 'label': _('Centrify Tenant URL'),
'secret':True, 'type': 'string',
},{ 'help_text': _('Centrify Tenant URL'),
'id':'oauth_application_id', 'format': 'url',
'label':_('OAuth2 Application ID'), },
'type':'string', {
'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'), 'id': 'client_id',
'default': 'awx', 'label': _('Centrify API User'),
},{ 'type': 'string',
'id':'oauth_scope', 'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'),
'label':_('OAuth2 Scope'), },
'type':'string', {
'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'), 'id': 'client_password',
'default': 'awx', 'label': _('Centrify API Password'),
}], 'type': 'string',
'metadata': [{ 'help_text': _('Password of Centrify API User with necessary permissions'),
'id': 'account-name', 'secret': True,
'label': _('Account Name'), },
'type': 'string', {
'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'), 'id': 'oauth_application_id',
},{ 'label': _('OAuth2 Application ID'),
'id': 'system-name', 'type': 'string',
'label': _('System Name'), 'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'),
'type': 'string', 'default': 'awx',
'help_text': _('Machine Name enrolled with in Centrify Portal'), },
}], {
'required': ['url', 'account-name', 'system-name','client_id','client_password'], 'id': 'oauth_scope',
'label': _('OAuth2 Scope'),
'type': 'string',
'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'),
'default': 'awx',
},
],
'metadata': [
{
'id': 'account-name',
'label': _('Account Name'),
'type': 'string',
'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'),
},
{
'id': 'system-name',
'label': _('System Name'),
'type': 'string',
'help_text': _('Machine Name enrolled with in Centrify Portal'),
},
],
'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'],
} }
# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret # generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret
def handle_auth(**kwargs): def handle_auth(**kwargs):
post_data = { post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']}
"grant_type": "client_credentials", response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30))
"scope": kwargs['oauth_scope']
}
response = requests.post(
kwargs['endpoint'],
data = post_data,
auth = (kwargs['client_id'],kwargs['client_password']),
verify = True,
timeout = (5, 30)
)
raise_for_status(response) raise_for_status(response)
try: try:
return response.json()['access_token'] return response.json()['access_token']
except KeyError: except KeyError:
raise RuntimeError('OAuth request to tenant was unsuccessful') raise RuntimeError('OAuth request to tenant was unsuccessful')
@@ -71,20 +71,11 @@ def handle_auth(**kwargs):
# fetch the ID of system with RedRock query, Input : System Name, Account Name # fetch the ID of system with RedRock query, Input : System Name, Account Name
def get_ID(**kwargs): def get_ID(**kwargs):
endpoint = urljoin(kwargs['url'],'/Redrock/query') endpoint = urljoin(kwargs['url'], '/Redrock/query')
name=" Name='{0}' and User='{1}'".format(kwargs['system_name'],kwargs['acc_name']) name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name'])
query = 'Select ID from VaultAccount where {0}'.format(name) query = 'Select ID from VaultAccount where {0}'.format(name)
post_headers = { post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"}
"Authorization": "Bearer " + kwargs['access_token'], response = requests.post(endpoint, json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30))
"X-CENTRIFY-NATIVE-CLIENT":"true"
}
response = requests.post(
endpoint,
json = {'Script': query},
headers = post_headers,
verify = True,
timeout = (5, 30)
)
raise_for_status(response) raise_for_status(response)
try: try:
result_str = response.json()["Result"]["Results"] result_str = response.json()["Result"]["Results"]
@@ -95,23 +86,14 @@ def get_ID(**kwargs):
# CheckOut Password from Centrify Vault, Input : ID # CheckOut Password from Centrify Vault, Input : ID
def get_passwd(**kwargs): def get_passwd(**kwargs):
endpoint = urljoin(kwargs['url'],'/ServerManage/CheckoutPassword') endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword')
post_headers = { post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"}
"Authorization": "Bearer " + kwargs['access_token'], response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30))
"X-CENTRIFY-NATIVE-CLIENT":"true"
}
response = requests.post(
endpoint,
json = {'ID': kwargs['acc_id']},
headers = post_headers,
verify = True,
timeout = (5, 30)
)
raise_for_status(response) raise_for_status(response)
try: try:
return response.json()["Result"]["Password"] return response.json()["Result"]["Password"]
except KeyError: except KeyError:
raise RuntimeError("Password Not Found") raise RuntimeError("Password Not Found")
def centrify_backend(**kwargs): def centrify_backend(**kwargs):
@@ -122,21 +104,12 @@ def centrify_backend(**kwargs):
client_password = kwargs.get('client_password') client_password = kwargs.get('client_password')
app_id = kwargs.get('oauth_application_id', 'awx') app_id = kwargs.get('oauth_application_id', 'awx')
endpoint = urljoin(url, f'/oauth2/token/{app_id}') endpoint = urljoin(url, f'/oauth2/token/{app_id}')
endpoint = { endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')}
'endpoint': endpoint,
'client_id': client_id,
'client_password': client_password,
'oauth_scope': kwargs.get('oauth_scope', 'awx')
}
token = handle_auth(**endpoint) token = handle_auth(**endpoint)
get_id_args = {'system_name':system_name,'acc_name':acc_name,'url':url,'access_token':token} get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token}
acc_id = get_ID(**get_id_args) acc_id = get_ID(**get_id_args)
get_pwd_args = {'url':url,'acc_id':acc_id,'access_token':token} get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token}
return get_passwd(**get_pwd_args) return get_passwd(**get_pwd_args)
centrify_plugin = CredentialPlugin( centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend)
'Centrify Vault Credential Provider Lookup',
inputs=pas_inputs,
backend=centrify_backend
)

View File

@@ -8,41 +8,45 @@ import requests
conjur_inputs = { conjur_inputs = {
'fields': [{ 'fields': [
'id': 'url', {
'label': _('Conjur URL'), 'id': 'url',
'type': 'string', 'label': _('Conjur URL'),
'format': 'url', 'type': 'string',
}, { 'format': 'url',
'id': 'api_key', },
'label': _('API Key'), {
'type': 'string', 'id': 'api_key',
'secret': True, 'label': _('API Key'),
}, { 'type': 'string',
'id': 'account', 'secret': True,
'label': _('Account'), },
'type': 'string', {
}, { 'id': 'account',
'id': 'username', 'label': _('Account'),
'label': _('Username'), 'type': 'string',
'type': 'string', },
}, { {
'id': 'cacert', 'id': 'username',
'label': _('Public Key Certificate'), 'label': _('Username'),
'type': 'string', 'type': 'string',
'multiline': True },
}], {'id': 'cacert', 'label': _('Public Key Certificate'), 'type': 'string', 'multiline': True},
'metadata': [{ ],
'id': 'secret_path', 'metadata': [
'label': _('Secret Identifier'), {
'type': 'string', 'id': 'secret_path',
'help_text': _('The identifier for the secret e.g., /some/identifier'), 'label': _('Secret Identifier'),
}, { 'type': 'string',
'id': 'secret_version', 'help_text': _('The identifier for the secret e.g., /some/identifier'),
'label': _('Secret Version'), },
'type': 'string', {
'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'), 'id': 'secret_version',
}], 'label': _('Secret Version'),
'type': 'string',
'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
},
],
'required': ['url', 'api_key', 'account', 'username'], 'required': ['url', 'api_key', 'account', 'username'],
} }
@@ -50,7 +54,7 @@ conjur_inputs = {
def conjur_backend(**kwargs): def conjur_backend(**kwargs):
url = kwargs['url'] url = kwargs['url']
api_key = kwargs['api_key'] api_key = kwargs['api_key']
account = quote(kwargs['account'], safe='') account = quote(kwargs['account'], safe='')
username = quote(kwargs['username'], safe='') username = quote(kwargs['username'], safe='')
secret_path = quote(kwargs['secret_path'], safe='') secret_path = quote(kwargs['secret_path'], safe='')
version = kwargs.get('secret_version') version = kwargs.get('secret_version')
@@ -65,10 +69,7 @@ def conjur_backend(**kwargs):
with CertFiles(cacert) as cert: with CertFiles(cacert) as cert:
# https://www.conjur.org/api.html#authentication-authenticate-post # https://www.conjur.org/api.html#authentication-authenticate-post
auth_kwargs['verify'] = cert auth_kwargs['verify'] = cert
resp = requests.post( resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
urljoin(url, '/'.join(['authn', account, username, 'authenticate'])),
**auth_kwargs
)
raise_for_status(resp) raise_for_status(resp)
token = base64.b64encode(resp.content).decode('utf-8') token = base64.b64encode(resp.content).decode('utf-8')
@@ -78,12 +79,7 @@ def conjur_backend(**kwargs):
} }
# https://www.conjur.org/api.html#secrets-retrieve-a-secret-get # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
path = urljoin(url, '/'.join([ path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
'secrets',
account,
'variable',
secret_path
]))
if version: if version:
path = '?'.join([path, version]) path = '?'.join([path, version])
@@ -94,8 +90,4 @@ def conjur_backend(**kwargs):
return resp.text return resp.text
conjur_plugin = CredentialPlugin( conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)
'CyberArk Conjur Secret Lookup',
inputs=conjur_inputs,
backend=conjur_backend
)

View File

@@ -9,110 +9,131 @@ import requests
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
base_inputs = { base_inputs = {
'fields': [{ 'fields': [
'id': 'url', {
'label': _('Server URL'), 'id': 'url',
'type': 'string', 'label': _('Server URL'),
'format': 'url', 'type': 'string',
'help_text': _('The URL to the HashiCorp Vault'), 'format': 'url',
}, { 'help_text': _('The URL to the HashiCorp Vault'),
'id': 'token', },
'label': _('Token'), {
'type': 'string', 'id': 'token',
'secret': True, 'label': _('Token'),
'help_text': _('The access token used to authenticate to the Vault server'), 'type': 'string',
}, { 'secret': True,
'id': 'cacert', 'help_text': _('The access token used to authenticate to the Vault server'),
'label': _('CA Certificate'), },
'type': 'string', {
'multiline': True, 'id': 'cacert',
'help_text': _('The CA certificate used to verify the SSL certificate of the Vault server') 'label': _('CA Certificate'),
}, { 'type': 'string',
'id': 'role_id', 'multiline': True,
'label': _('AppRole role_id'), 'help_text': _('The CA certificate used to verify the SSL certificate of the Vault server'),
'type': 'string', },
'multiline': False, {'id': 'role_id', 'label': _('AppRole role_id'), 'type': 'string', 'multiline': False, 'help_text': _('The Role ID for AppRole Authentication')},
'help_text': _('The Role ID for AppRole Authentication') {
}, { 'id': 'secret_id',
'id': 'secret_id', 'label': _('AppRole secret_id'),
'label': _('AppRole secret_id'), 'type': 'string',
'type': 'string', 'multiline': False,
'multiline': False, 'secret': True,
'secret': True, 'help_text': _('The Secret ID for AppRole Authentication'),
'help_text': _('The Secret ID for AppRole Authentication') },
}, { {
'id': 'namespace', 'id': 'namespace',
'label': _('Namespace name (Vault Enterprise only)'), 'label': _('Namespace name (Vault Enterprise only)'),
'type': 'string', 'type': 'string',
'multiline': False, 'multiline': False,
'help_text': _('Name of the namespace to use when authenticate and retrieve secrets') 'help_text': _('Name of the namespace to use when authenticate and retrieve secrets'),
}, { },
'id': 'default_auth_path', {
'label': _('Path to Approle Auth'), 'id': 'default_auth_path',
'type': 'string', 'label': _('Path to Approle Auth'),
'multiline': False, 'type': 'string',
'default': 'approle', 'multiline': False,
'help_text': _('The AppRole Authentication path to use if one isn\'t provided in the metadata when linking to an input field. Defaults to \'approle\'') 'default': 'approle',
} 'help_text': _(
'The AppRole Authentication path to use if one isn\'t provided in the metadata when linking to an input field. Defaults to \'approle\''
),
},
],
'metadata': [
{
'id': 'secret_path',
'label': _('Path to Secret'),
'type': 'string',
'help_text': _('The path to the secret stored in the secret backend e.g, /some/secret/'),
},
{
'id': 'auth_path',
'label': _('Path to Auth'),
'type': 'string',
'multiline': False,
'help_text': _('The path where the Authentication method is mounted e.g, approle'),
},
], ],
'metadata': [{
'id': 'secret_path',
'label': _('Path to Secret'),
'type': 'string',
'help_text': _('The path to the secret stored in the secret backend e.g, /some/secret/')
}, {
'id': 'auth_path',
'label': _('Path to Auth'),
'type': 'string',
'multiline': False,
'help_text': _('The path where the Authentication method is mounted e.g, approle')
}],
'required': ['url', 'secret_path'], 'required': ['url', 'secret_path'],
} }
hashi_kv_inputs = copy.deepcopy(base_inputs) hashi_kv_inputs = copy.deepcopy(base_inputs)
hashi_kv_inputs['fields'].append({ hashi_kv_inputs['fields'].append(
'id': 'api_version', {
'label': _('API Version'), 'id': 'api_version',
'choices': ['v1', 'v2'], 'label': _('API Version'),
'help_text': _('API v1 is for static key/value lookups. API v2 is for versioned key/value lookups.'), 'choices': ['v1', 'v2'],
'default': 'v1', 'help_text': _('API v1 is for static key/value lookups. API v2 is for versioned key/value lookups.'),
}) 'default': 'v1',
hashi_kv_inputs['metadata'] = [{ }
'id': 'secret_backend', )
'label': _('Name of Secret Backend'), hashi_kv_inputs['metadata'] = (
'type': 'string', [
'help_text': _('The name of the kv secret backend (if left empty, the first segment of the secret path will be used).') {
}] + hashi_kv_inputs['metadata'] + [{ 'id': 'secret_backend',
'id': 'secret_key', 'label': _('Name of Secret Backend'),
'label': _('Key Name'), 'type': 'string',
'type': 'string', 'help_text': _('The name of the kv secret backend (if left empty, the first segment of the secret path will be used).'),
'help_text': _('The name of the key to look up in the secret.'), }
}, { ]
'id': 'secret_version', + hashi_kv_inputs['metadata']
'label': _('Secret Version (v2 only)'), + [
'type': 'string', {
'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'), 'id': 'secret_key',
}] 'label': _('Key Name'),
'type': 'string',
'help_text': _('The name of the key to look up in the secret.'),
},
{
'id': 'secret_version',
'label': _('Secret Version (v2 only)'),
'type': 'string',
'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
},
]
)
hashi_kv_inputs['required'].extend(['api_version', 'secret_key']) hashi_kv_inputs['required'].extend(['api_version', 'secret_key'])
hashi_ssh_inputs = copy.deepcopy(base_inputs) hashi_ssh_inputs = copy.deepcopy(base_inputs)
hashi_ssh_inputs['metadata'] = [{ hashi_ssh_inputs['metadata'] = (
'id': 'public_key', [
'label': _('Unsigned Public Key'), {
'type': 'string', 'id': 'public_key',
'multiline': True, 'label': _('Unsigned Public Key'),
}] + hashi_ssh_inputs['metadata'] + [{ 'type': 'string',
'id': 'role', 'multiline': True,
'label': _('Role Name'), }
'type': 'string', ]
'help_text': _('The name of the role used to sign.') + hashi_ssh_inputs['metadata']
}, { + [
'id': 'valid_principals', {'id': 'role', 'label': _('Role Name'), 'type': 'string', 'help_text': _('The name of the role used to sign.')},
'label': _('Valid Principals'), {
'type': 'string', 'id': 'valid_principals',
'help_text': _('Valid principals (either usernames or hostnames) that the certificate should be signed for.'), 'label': _('Valid Principals'),
}] 'type': 'string',
'help_text': _('Valid principals (either usernames or hostnames) that the certificate should be signed for.'),
},
]
)
hashi_ssh_inputs['required'].extend(['public_key', 'role']) hashi_ssh_inputs['required'].extend(['public_key', 'role'])
@@ -209,9 +230,7 @@ def kv_backend(**kwargs):
try: try:
return json['data'][secret_key] return json['data'][secret_key]
except KeyError: except KeyError:
raise RuntimeError( raise RuntimeError('{} is not present at {}'.format(secret_key, secret_path))
'{} is not present at {}'.format(secret_key, secret_path)
)
return json['data'] return json['data']
@@ -248,14 +267,6 @@ def ssh_backend(**kwargs):
return resp.json()['data']['signed_key'] return resp.json()['data']['signed_key']
hashivault_kv_plugin = CredentialPlugin( hashivault_kv_plugin = CredentialPlugin('HashiCorp Vault Secret Lookup', inputs=hashi_kv_inputs, backend=kv_backend)
'HashiCorp Vault Secret Lookup',
inputs=hashi_kv_inputs,
backend=kv_backend
)
hashivault_ssh_plugin = CredentialPlugin( hashivault_ssh_plugin = CredentialPlugin('HashiCorp Vault Signed SSH', inputs=hashi_ssh_inputs, backend=ssh_backend)
'HashiCorp Vault Signed SSH',
inputs=hashi_ssh_inputs,
backend=ssh_backend
)

View File

@@ -16,7 +16,7 @@ def raise_for_status(resp):
raise exc raise exc
class CertFiles(): class CertFiles:
""" """
A context manager used for writing a certificate and (optional) key A context manager used for writing a certificate and (optional) key
to $TMPDIR, and cleaning up afterwards. to $TMPDIR, and cleaning up afterwards.

View File

@@ -16,7 +16,6 @@ __all__ = ['DatabaseWrapper']
class RecordedQueryLog(object): class RecordedQueryLog(object):
def __init__(self, log, db, dest='/var/log/tower/profile'): def __init__(self, log, db, dest='/var/log/tower/profile'):
self.log = log self.log = log
self.db = db self.db = db
@@ -70,10 +69,7 @@ class RecordedQueryLog(object):
break break
else: else:
progname = os.path.basename(sys.argv[0]) progname = os.path.basename(sys.argv[0])
filepath = os.path.join( filepath = os.path.join(self.dest, '{}.sqlite'.format(progname))
self.dest,
'{}.sqlite'.format(progname)
)
version = pkg_resources.get_distribution('awx').version version = pkg_resources.get_distribution('awx').version
log = sqlite3.connect(filepath, timeout=3) log = sqlite3.connect(filepath, timeout=3)
log.execute( log.execute(
@@ -91,9 +87,8 @@ class RecordedQueryLog(object):
) )
log.commit() log.commit()
log.execute( log.execute(
'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) ' 'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) ' 'VALUES (?, ?, ?, ?, ?, ?, ?);',
'VALUES (?, ?, ?, ?, ?, ?, ?);', (os.getpid(), version, ' '.join(sys.argv), seconds, sql, explain, bt),
(os.getpid(), version, ' ' .join(sys.argv), seconds, sql, explain, bt)
) )
log.commit() log.commit()

View File

@@ -47,16 +47,9 @@ class PubSub(object):
@contextmanager @contextmanager
def pg_bus_conn(): def pg_bus_conn():
conf = settings.DATABASES['default'] conf = settings.DATABASES['default']
conn = psycopg2.connect(dbname=conf['NAME'], conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf.get("OPTIONS", {}))
host=conf['HOST'],
user=conf['USER'],
password=conf['PASSWORD'],
port=conf['PORT'],
**conf.get("OPTIONS", {}))
# Django connection.cursor().connection doesn't have autocommit=True on # Django connection.cursor().connection doesn't have autocommit=True on
conn.set_session(autocommit=True) conn.set_session(autocommit=True)
pubsub = PubSub(conn) pubsub = PubSub(conn)
yield pubsub yield pubsub
conn.close() conn.close()

View File

@@ -48,8 +48,7 @@ class Control(object):
with pg_bus_conn() as conn: with pg_bus_conn() as conn:
conn.listen(reply_queue) conn.listen(reply_queue)
conn.notify(self.queuename, conn.notify(self.queuename, json.dumps({'control': command, 'reply_to': reply_queue}))
json.dumps({'control': command, 'reply_to': reply_queue}))
for reply in conn.events(select_timeout=timeout, yield_timeouts=True): for reply in conn.events(select_timeout=timeout, yield_timeouts=True):
if reply is None: if reply is None:

View File

@@ -14,12 +14,8 @@ logger = logging.getLogger('awx.main.dispatch.periodic')
class Scheduler(Scheduler): class Scheduler(Scheduler):
def run_continuously(self): def run_continuously(self):
idle_seconds = max( idle_seconds = max(1, min(self.jobs).period.total_seconds() / 2)
1,
min(self.jobs).period.total_seconds() / 2
)
def run(): def run():
ppid = os.getppid() ppid = os.getppid()
@@ -39,9 +35,7 @@ class Scheduler(Scheduler):
GuidMiddleware.set_guid(GuidMiddleware._generate_guid()) GuidMiddleware.set_guid(GuidMiddleware._generate_guid())
self.run_pending() self.run_pending()
except Exception: except Exception:
logger.exception( logger.exception('encountered an error while scheduling periodic tasks')
'encountered an error while scheduling periodic tasks'
)
time.sleep(idle_seconds) time.sleep(idle_seconds)
process = Process(target=run) process = Process(target=run)

Some files were not shown because too many files have changed in this diff Show More