mirror of https://github.com/ansible/awx.git
synced 2026-01-09 15:02:07 -03:30

move code linting to a stricter pep8-esque auto-formatting tool, black

parent 9b702e46fe
commit c2ef0a6500
@@ -127,7 +127,7 @@ Fixes and Features for AWX will go through the Github pull request process. Subm

Here are a few things you can do to help the visibility of your change, and increase the likelihood that it will be accepted:

* No issues when running linters/code checkers
  * Python: flake8: `(container)/awx_devel$ make flake8`
  * Python: black: `(container)/awx_devel$ make black`
  * Javascript: JsHint: `(container)/awx_devel$ make jshint`
* No issues from unit tests
  * Python: py.test: `(container)/awx_devel$ make test`
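What `make black` changes in practice: black re-wraps code into its own deterministic style, collapsing hand-wrapped constructs that fit within the configured line length. A minimal before/after sketch, modeled on this commit's own change to `ActiveJobConflict` further down:

```python
# Before: wrapped by hand to satisfy a short line limit
self.detail = {
    "error": _("Resource is being used by running jobs."),
    "active_jobs": active_jobs
}

# After `make black` with --line-length 160: the literal fits on one line,
# so black collapses it and normalizes the spacing
self.detail = {"error": _("Resource is being used by running jobs."), "active_jobs": active_jobs}
```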
Makefile (23 lines changed)

@@ -271,20 +271,8 @@ jupyter:

reports:
	mkdir -p $@

pep8: reports
	@(set -o pipefail && $@ | tee reports/$@.report)

flake8: reports
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/awx/bin/activate; \
	fi; \
	(set -o pipefail && $@ | tee reports/$@.report)

pyflakes: reports
	@(set -o pipefail && $@ | tee reports/$@.report)

pylint: reports
	@(set -o pipefail && $@ | tee reports/$@.report)

black: reports
	(set -o pipefail && $@ $(BLACK_ARGS) --skip-string-normalization --fast --line-length 160 awx awxkit awx_collection | tee reports/$@.report)

genschema: reports
	$(MAKE) swagger PYTEST_ARGS="--genschema --create-db "
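A note on the new `black` target's flags: `--line-length 160` raises black's wrap limit well above its default of 88, `--fast` skips black's AST-equivalence safety check for speed, and `--skip-string-normalization` stops black from rewriting quote characters, which keeps the codebase's existing single-quoted strings intact. A small illustration of what that last flag preserves (hypothetical snippet, not taken from the diff):

```python
MODE = 'development'   # kept as-is under --skip-string-normalization
MODE = "development"   # what black would produce without that flag
```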
@@ -296,7 +284,7 @@ swagger: reports
	fi; \
	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)

check: flake8 pep8 # pyflakes pylint
check: black

awx-link:
	[ -d "/awx_devel/awx.egg-info" ] || python3 /awx_devel/setup.py egg_info_dev

@@ -332,10 +320,7 @@ test_collection:
	# Second we will load any libraries out of the virtualenv (if it's unspecified that should be ok because python should not load out of an empty directory)
	# Finally we will add the system path so that the tests can find the ansible libraries

flake8_collection:
	flake8 awx_collection/  # Different settings, in main exclude list

test_collection_all: test_collection flake8_collection
test_collection_all: test_collection

# WARNING: symlinking a collection is fundamentally unstable
# this is for rapid development iteration with playbooks, do not use with other test targets
@@ -15,9 +15,10 @@ __all__ = ['__version__']

# Check for the presence/absence of "devonly" module to determine if running
# from a source code checkout or release package.
try:
    import awx.devonly  # noqa
    import awx.devonly  # noqa

    MODE = 'development'
except ImportError:  # pragma: no cover
except ImportError:  # pragma: no cover
    MODE = 'production'

@@ -25,6 +26,7 @@ import hashlib

try:
    import django  # noqa: F401

    HAS_DJANGO = True
except ImportError:
    HAS_DJANGO = False

@@ -40,6 +42,7 @@ if HAS_DJANGO is True:
    try:
        names_digest('foo', 'bar', 'baz', length=8)
    except ValueError:

        def names_digest(*args, length):
            """
            Generate a 32-bit digest of a set of arguments that can be used to shorten
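Context for the probe above: Django's stock `names_digest` hashes with md5, which raises `ValueError` on FIPS-enabled systems, so AWX detects that failure and substitutes its own implementation (the hunk cuts off before the body). Purely as an illustration of the shape such a fallback takes, under the assumption that a FIPS-approved digest stands in for md5:

```python
import hashlib


def names_digest(*args, length):
    """Sketch only: derive a short, deterministic suffix from the arguments."""
    h = hashlib.sha256()  # assumption: sha256 replaces the md5 default
    for arg in args:
        h.update(arg.encode())
    return h.hexdigest()[:length]  # length=8 hex chars covers the 32-bit digest above
```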
@@ -64,7 +67,7 @@ def find_commands(management_dir):
                continue
            elif f.endswith('.py') and f[:-3] not in commands:
                commands.append(f[:-3])
            elif f.endswith('.pyc') and f[:-4] not in commands:  # pragma: no cover
            elif f.endswith('.pyc') and f[:-4] not in commands:  # pragma: no cover
                commands.append(f[:-4])
    except OSError:
        pass

@@ -75,6 +78,7 @@ def oauth2_getattribute(self, attr):
    # Custom method to override
    # oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__
    from django.conf import settings

    val = None
    if 'migrate' not in sys.argv:
        # certain Django OAuth Toolkit migrations actually reference
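The override above exists so OAuth2 provider settings are read from live Django settings on every attribute access instead of being cached once at import time (the `prepare_env()` hunk below installs it as `OAuth2ProviderSettings.__getattribute__`). A simplified sketch of the pattern, with the lookup details assumed rather than taken from AWX's actual body:

```python
def oauth2_getattribute(self, attr):
    # Consult django.conf.settings first, falling back to the cached default.
    from django.conf import settings

    val = settings.OAUTH2_PROVIDER.get(attr)  # assumption: dict-style lookup
    if val is None:
        val = object.__getattribute__(self, attr)
    return val
```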
@@ -94,33 +98,38 @@ def prepare_env():
    # Hide DeprecationWarnings when running in production. Need to first load
    # settings to apply our filter after Django's own warnings filter.
    from django.conf import settings
    if not settings.DEBUG:  # pragma: no cover

    if not settings.DEBUG:  # pragma: no cover
        warnings.simplefilter('ignore', DeprecationWarning)
    # Monkeypatch Django find_commands to also work with .pyc files.
    import django.core.management

    django.core.management.find_commands = find_commands

    # Monkeypatch Oauth2 toolkit settings class to check for settings
    # in django.conf settings each time, not just once during import
    import oauth2_provider.settings

    oauth2_provider.settings.OAuth2ProviderSettings.__getattribute__ = oauth2_getattribute

    # Use the AWX_TEST_DATABASE_* environment variables to specify the test
    # database settings to use when management command is run as an external
    # program via unit tests.
    for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'):  # pragma: no cover
    for opt in ('ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'):  # pragma: no cover
        if os.environ.get('AWX_TEST_DATABASE_%s' % opt, None):
            settings.DATABASES['default'][opt] = os.environ['AWX_TEST_DATABASE_%s' % opt]
    # Disable capturing all SQL queries in memory when in DEBUG mode.
    if settings.DEBUG and not getattr(settings, 'SQL_DEBUG', True):
        from django.db.backends.base.base import BaseDatabaseWrapper
        from django.db.backends.utils import CursorWrapper

        BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)

    # Use the default devserver addr/port defined in settings for runserver.
    default_addr = getattr(settings, 'DEVSERVER_DEFAULT_ADDR', '127.0.0.1')
    default_port = getattr(settings, 'DEVSERVER_DEFAULT_PORT', 8000)
    from django.core.management.commands import runserver as core_runserver

    original_handle = core_runserver.Command.handle

    def handle(self, *args, **options):

@@ -139,7 +148,8 @@ def manage():
    # Now run the command (or display the version).
    from django.conf import settings
    from django.core.management import execute_from_command_line
    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover

    if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
        sys.stdout.write('%s\n' % __version__)
    # If running as a user without permission to read settings, display an
    # error message. Allow --help to still work.
@@ -18,7 +18,6 @@ logger = logging.getLogger('awx.api.authentication')


class LoggedBasicAuthentication(authentication.BasicAuthentication):

    def authenticate(self, request):
        if not settings.AUTH_BASIC_ENABLED:
            return

@@ -35,22 +34,18 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):


class SessionAuthentication(authentication.SessionAuthentication):

    def authenticate_header(self, request):
        return 'Session'


class LoggedOAuth2Authentication(OAuth2Authentication):

    def authenticate(self, request):
        ret = super(LoggedOAuth2Authentication, self).authenticate(request)
        if ret:
            user, token = ret
            username = user.username if user else '<none>'
            logger.info(smart_text(
                u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(
                    username, request.method, request.path, token.pk
                )
            ))
            logger.info(
                smart_text(u"User {} performed a {} to {} through the API using OAuth 2 token {}.".format(username, request.method, request.path, token.pk))
            )
            setattr(user, 'oauth_scopes', [x for x in token.scope.split() if x])
        return ret
@@ -38,16 +38,20 @@ register(
register(
    'OAUTH2_PROVIDER',
    field_class=OAuth2ProviderField,
    default={'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
             'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
             'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS},
    default={
        'ACCESS_TOKEN_EXPIRE_SECONDS': oauth2_settings.ACCESS_TOKEN_EXPIRE_SECONDS,
        'AUTHORIZATION_CODE_EXPIRE_SECONDS': oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS,
        'REFRESH_TOKEN_EXPIRE_SECONDS': oauth2_settings.REFRESH_TOKEN_EXPIRE_SECONDS,
    },
    label=_('OAuth 2 Timeout Settings'),
    help_text=_('Dictionary for customizing OAuth 2 timeouts, available items are '
                '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
                'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
                'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
                'the duration of refresh tokens, after expired access tokens, '
                'in the number of seconds.'),
    help_text=_(
        'Dictionary for customizing OAuth 2 timeouts, available items are '
        '`ACCESS_TOKEN_EXPIRE_SECONDS`, the duration of access tokens in the number '
        'of seconds, `AUTHORIZATION_CODE_EXPIRE_SECONDS`, the duration of '
        'authorization codes in the number of seconds, and `REFRESH_TOKEN_EXPIRE_SECONDS`, '
        'the duration of refresh tokens, after expired access tokens, '
        'in the number of seconds.'
    ),
    category=_('Authentication'),
    category_slug='authentication',
    unit=_('seconds'),

@@ -57,10 +61,12 @@ register(
    field_class=fields.BooleanField,
    default=False,
    label=_('Allow External Users to Create OAuth2 Tokens'),
    help_text=_('For security reasons, users from external auth providers (LDAP, SAML, '
                'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
                'To change this behavior, enable this setting. Existing tokens will '
                'not be deleted when this setting is toggled off.'),
    help_text=_(
        'For security reasons, users from external auth providers (LDAP, SAML, '
        'SSO, Radius, and others) are not allowed to create OAuth2 tokens. '
        'To change this behavior, enable this setting. Existing tokens will '
        'not be deleted when this setting is toggled off.'
    ),
    category=_('Authentication'),
    category_slug='authentication',
)

@@ -71,8 +77,7 @@ register(
    required=False,
    default='',
    label=_('Login redirect override URL'),
    help_text=_('URL to which unauthorized users will be redirected to log in. '
                'If blank, users will be sent to the Tower login page.'),
    help_text=_('URL to which unauthorized users will be redirected to log in. If blank, users will be sent to the Tower login page.'),
    category=_('Authentication'),
    category_slug='authentication',
)
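For reference, the three keys registered above are what an operator would override in settings; a hypothetical snippet, with the values purely illustrative:

```python
OAUTH2_PROVIDER = {
    'ACCESS_TOKEN_EXPIRE_SECONDS': 3600,        # access tokens live one hour
    'AUTHORIZATION_CODE_EXPIRE_SECONDS': 600,   # auth codes live ten minutes
    'REFRESH_TOKEN_EXPIRE_SECONDS': 2628000,    # refresh tokens live roughly one month
}
```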
@@ -16,7 +16,4 @@ class ActiveJobConflict(ValidationError):
    # turn everything in self.detail into string by using force_text.
    # Declare detail afterwards to circumvent this behavior.
    super(ActiveJobConflict, self).__init__()
    self.detail = {
        "error": _("Resource is being used by running jobs."),
        "active_jobs": active_jobs
    }
    self.detail = {"error": _("Resource is being used by running jobs."), "active_jobs": active_jobs}
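The ordering matters here because DRF's `ValidationError.__init__` coerces everything handed to it into strings via `force_text`; assigning `self.detail` afterwards keeps `active_jobs` as structured data. A sketch of the observable difference (hypothetical usage, argument passing assumed):

```python
exc = ActiveJobConflict(active_jobs=[{"id": 42, "type": "job"}])
# Because detail is set after __init__, the job entries survive as dicts
# rather than being flattened into their string representations:
assert exc.detail["active_jobs"] == [{"id": 42, "type": "job"}]
```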
@@ -16,10 +16,10 @@ __all__ = ['BooleanNullField', 'CharNullField', 'ChoiceNullField', 'VerbatimFiel


class NullFieldMixin(object):
    '''
    """
    Mixin to prevent shortcutting validation when we want to allow null input,
    but coerce the resulting value to another type.
    '''
    """

    def validate_empty_values(self, data):
        (is_empty_value, data) = super(NullFieldMixin, self).validate_empty_values(data)

@@ -29,18 +29,18 @@ class NullFieldMixin(object):


class BooleanNullField(NullFieldMixin, serializers.NullBooleanField):
    '''
    """
    Custom boolean field that allows null and empty string as False values.
    '''
    """

    def to_internal_value(self, data):
        return bool(super(BooleanNullField, self).to_internal_value(data))


class CharNullField(NullFieldMixin, serializers.CharField):
    '''
    """
    Custom char field that allows null as input and coerces to an empty string.
    '''
    """

    def __init__(self, **kwargs):
        kwargs['allow_null'] = True

@@ -51,9 +51,9 @@ class CharNullField(NullFieldMixin, serializers.CharField):


class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):
    '''
    """
    Custom choice field that allows null as input and coerces to an empty string.
    '''
    """

    def __init__(self, **kwargs):
        kwargs['allow_null'] = True

@@ -64,9 +64,9 @@ class ChoiceNullField(NullFieldMixin, serializers.ChoiceField):


class VerbatimField(serializers.Field):
    '''
    """
    Custom field that passes the value through without changes.
    '''
    """

    def to_internal_value(self, data):
        return data

@@ -77,22 +77,19 @@ class VerbatimField(serializers.Field):


class OAuth2ProviderField(fields.DictField):

    default_error_messages = {
        'invalid_key_names': _('Invalid key names: {invalid_key_names}'),
    }
    default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
    valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
    child = fields.IntegerField(min_value=1)

    def to_internal_value(self, data):
        data = super(OAuth2ProviderField, self).to_internal_value(data)
        invalid_flags = (set(data.keys()) - self.valid_key_names)
        invalid_flags = set(data.keys()) - self.valid_key_names
        if invalid_flags:
            self.fail('invalid_key_names', invalid_key_names=', '.join(list(invalid_flags)))
        return data


class DeprecatedCredentialField(serializers.IntegerField):

    def __init__(self, **kwargs):
        kwargs['allow_null'] = True
        kwargs['default'] = None
@@ -27,9 +27,9 @@ from awx.main.utils.db import get_all_field_names


class TypeFilterBackend(BaseFilterBackend):
    '''
    """
    Filter on type field now returned with all objects.
    '''
    """

    def filter_queryset(self, request, queryset, view):
        try:

@@ -64,7 +64,7 @@ class TypeFilterBackend(BaseFilterBackend):


def get_fields_from_path(model, path):
    '''
    """
    Given a Django ORM lookup path (possibly over multiple models)
    Returns the fields in the line, and also the revised lookup path
    ex., given

@@ -73,7 +73,7 @@ def get_fields_from_path(model, path):
    returns tuple of fields traversed as well and a corrected path,
    for special cases we do substitutions
        ([<IntegerField for timeout>], 'project__timeout')
    '''
    """
    # Store of all the fields used to detect repeats
    field_list = []
    new_parts = []

@@ -82,12 +82,9 @@ def get_fields_from_path(model, path):
            raise ParseError(_('No related model for field {}.').format(name))
        # HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
        if model._meta.object_name in ('Project', 'InventorySource'):
            name = {
                'current_update': 'current_job',
                'last_update': 'last_job',
                'last_update_failed': 'last_job_failed',
                'last_updated': 'last_job_run',
            }.get(name, name)
            name = {'current_update': 'current_job', 'last_update': 'last_job', 'last_update_failed': 'last_job_failed', 'last_updated': 'last_job_run'}.get(
                name, name
            )

        if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
            name = 'polymorphic_ctype'
@@ -121,28 +118,42 @@ def get_fields_from_path(model, path):


def get_field_from_path(model, path):
    '''
    """
    Given a Django ORM lookup path (possibly over multiple models)
    Returns the last field in the line, and the revised lookup path
    ex.
        (<IntegerField for timeout>, 'project__timeout')
    '''
    """
    field_list, new_path = get_fields_from_path(model, path)
    return (field_list[-1], new_path)


class FieldLookupBackend(BaseFilterBackend):
    '''
    """
    Filter using field lookups provided via query string parameters.
    '''
    """

    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by',
                      'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')
    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate')

    SUPPORTED_LOOKUPS = ('exact', 'iexact', 'contains', 'icontains',
                         'startswith', 'istartswith', 'endswith', 'iendswith',
                         'regex', 'iregex', 'gt', 'gte', 'lt', 'lte', 'in',
                         'isnull', 'search')
    SUPPORTED_LOOKUPS = (
        'exact',
        'iexact',
        'contains',
        'icontains',
        'startswith',
        'istartswith',
        'endswith',
        'iendswith',
        'regex',
        'iregex',
        'gt',
        'gte',
        'lt',
        'lte',
        'in',
        'isnull',
        'search',
    )

    # A list of fields that we know can be filtered on without the possibility
    # of introducing duplicates
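In practice this backend turns query string parameters directly into ORM lookups, so a request like `/api/v2/jobs/?name__icontains=demo&created__gt=2020-01-01` filters the job list with the corresponding Django lookups. A rough equivalent of what gets built (illustrative only, not the backend's actual code path):

```python
# Each key splits into a field path plus one of the SUPPORTED_LOOKUPS above;
# RESERVED_NAMES such as page/page_size are skipped entirely.
queryset = queryset.filter(name__icontains='demo', created__gt='2020-01-01')
```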
@@ -189,10 +200,7 @@ class FieldLookupBackend(BaseFilterBackend):
            try:
                return self.to_python_related(value)
            except ValueError:
                raise ParseError(_('Invalid {field_name} id: {field_id}').format(
                    field_name=getattr(field, 'name', 'related field'),
                    field_id=value)
                )
                raise ParseError(_('Invalid {field_name} id: {field_id}').format(field_name=getattr(field, 'name', 'related field'), field_id=value))
        else:
            return field.to_python(value)

@@ -205,13 +213,13 @@ class FieldLookupBackend(BaseFilterBackend):
        field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
        field = field_list[-1]

        needs_distinct = (not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list))
        needs_distinct = not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)

        # Type names are stored without underscores internally, but are presented
        # and serialized over the API containing underscores so we remove `_`
        # for polymorphic_ctype__model lookups.
        if new_lookup.startswith('polymorphic_ctype__model'):
            value = value.replace('_','')
            value = value.replace('_', '')
        elif new_lookup.endswith('__isnull'):
            value = to_python_boolean(value)
        elif new_lookup.endswith('__in'):
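A concrete case of the underscore stripping above: API clients filter polymorphic types with underscored names, while the content type table stores them compacted, so the value is rewritten before the lookup runs. Illustrative values:

```python
# A filter like ?type=workflow_job arrives underscored from the API...
value = 'workflow_job'.replace('_', '')
# ...and now matches the internal model name 'workflowjob' in polymorphic_ctype__model.
```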
@@ -329,24 +337,20 @@ class FieldLookupBackend(BaseFilterBackend):
        args = []
        for n, k, v in and_filters:
            if n:
                args.append(~Q(**{k:v}))
                args.append(~Q(**{k: v}))
            else:
                args.append(Q(**{k:v}))
                args.append(Q(**{k: v}))
        for role_name in role_filters:
            if not hasattr(queryset.model, 'accessible_pk_qs'):
                raise ParseError(_(
                    'Cannot apply role_level filter to this list because its model '
                    'does not use roles for access control.'))
            args.append(
                Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name))
            )
                raise ParseError(_('Cannot apply role_level filter to this list because its model ' 'does not use roles for access control.'))
            args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
        if or_filters:
            q = Q()
            for n,k,v in or_filters:
            for n, k, v in or_filters:
                if n:
                    q |= ~Q(**{k:v})
                    q |= ~Q(**{k: v})
                else:
                    q |= Q(**{k:v})
                    q |= Q(**{k: v})
            args.append(q)
        if search_filters and search_filter_relation == 'OR':
            q = Q()

@@ -360,11 +364,11 @@ class FieldLookupBackend(BaseFilterBackend):
            for constrain in constrains:
                q_chain |= Q(**{constrain: term})
            queryset = queryset.filter(q_chain)
        for n,k,v in chain_filters:
        for n, k, v in chain_filters:
            if n:
                q = ~Q(**{k:v})
                q = ~Q(**{k: v})
            else:
                q = Q(**{k:v})
                q = Q(**{k: v})
            queryset = queryset.filter(q)
        queryset = queryset.filter(*args)
        if needs_distinct:

@@ -377,9 +381,9 @@ class FieldLookupBackend(BaseFilterBackend):


class OrderByBackend(BaseFilterBackend):
    '''
    """
    Filter to apply ordering based on query string parameters.
    '''
    """

    def filter_queryset(self, request, queryset, view):
        try:
@@ -35,55 +35,50 @@ from rest_framework.negotiation import DefaultContentNegotiation

# AWX
from awx.api.filters import FieldLookupBackend
from awx.main.models import (
    UnifiedJob, UnifiedJobTemplate, User, Role, Credential,
    WorkflowJobTemplateNode, WorkflowApprovalTemplate
)
from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
from awx.main.access import access_registry
from awx.main.utils import (
    camelcase_to_underscore,
    get_search_fields,
    getattrd,
    get_object_or_400,
    decrypt_field,
    get_awx_version,
)
from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
from awx.main.utils.db import get_all_field_names
from awx.main.views import ApiErrorView
from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
from awx.api.versioning import URLPathVersioning
from awx.api.metadata import SublistAttachDetatchMetadata, Metadata

__all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
           'ListCreateAPIView', 'SubListAPIView', 'SubListCreateAPIView',
           'SubListDestroyAPIView',
           'SubListCreateAttachDetachAPIView', 'RetrieveAPIView',
           'RetrieveUpdateAPIView', 'RetrieveDestroyAPIView',
           'RetrieveUpdateDestroyAPIView',
           'SubDetailAPIView',
           'ResourceAccessList',
           'ParentMixin',
           'DeleteLastUnattachLabelMixin',
           'SubListAttachDetachAPIView',
           'CopyAPIView', 'BaseUsersList',]
__all__ = [
    'APIView',
    'GenericAPIView',
    'ListAPIView',
    'SimpleListAPIView',
    'ListCreateAPIView',
    'SubListAPIView',
    'SubListCreateAPIView',
    'SubListDestroyAPIView',
    'SubListCreateAttachDetachAPIView',
    'RetrieveAPIView',
    'RetrieveUpdateAPIView',
    'RetrieveDestroyAPIView',
    'RetrieveUpdateDestroyAPIView',
    'SubDetailAPIView',
    'ResourceAccessList',
    'ParentMixin',
    'DeleteLastUnattachLabelMixin',
    'SubListAttachDetachAPIView',
    'CopyAPIView',
    'BaseUsersList',
]

logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance')


class LoggedLoginView(auth_views.LoginView):

    def get(self, request, *args, **kwargs):
        # The django.auth.contrib login form doesn't perform the content
        # negotiation we've come to expect from DRF; add in code to catch
        # situations where Accept != text/html (or */*) and reply with
        # an HTTP 406
        try:
            DefaultContentNegotiation().select_renderer(
                request,
                [StaticHTMLRenderer],
                'html'
            )
            DefaultContentNegotiation().select_renderer(request, [StaticHTMLRenderer], 'html')
        except NotAcceptable:
            resp = Response(status=status.HTTP_406_NOT_ACCEPTABLE)
            resp.accepted_renderer = StaticHTMLRenderer()
@@ -96,7 +91,7 @@ class LoggedLoginView(auth_views.LoginView):
        ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
        if request.user.is_authenticated:
            logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username,request.META.get('REMOTE_ADDR', None))))
            logger.info(smart_text(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
            ret.set_cookie('userLoggedIn', 'true')
            current_user = UserSerializer(self.request.user)
            current_user = smart_text(JSONRenderer().render(current_user.data))

@@ -106,29 +101,27 @@ class LoggedLoginView(auth_views.LoginView):
            return ret
        else:
            if 'username' in self.request.POST:
                logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'),request.META.get('REMOTE_ADDR', None))))
                logger.warn(smart_text(u"Login failed for user {} from {}".format(self.request.POST.get('username'), request.META.get('REMOTE_ADDR', None))))
            ret.status_code = 401
            return ret


class LoggedLogoutView(auth_views.LogoutView):

    def dispatch(self, request, *args, **kwargs):
        original_user = getattr(request, 'user', None)
        ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
        current_user = getattr(request, 'user', None)
        ret.set_cookie('userLoggedIn', 'false')
        if (not current_user or not getattr(current_user, 'pk', True)) \
                and current_user != original_user:
        if (not current_user or not getattr(current_user, 'pk', True)) and current_user != original_user:
            logger.info("User {} logged out.".format(original_user.username))
        return ret


def get_view_description(view, html=False):
    '''Wrapper around REST framework get_view_description() to continue
    """Wrapper around REST framework get_view_description() to continue
    to support our historical div.

    '''
    """
    desc = views.get_view_description(view, html=html)
    if html:
        desc = '<div class="description">%s</div>' % desc

@@ -138,6 +131,7 @@ def get_view_description(view, html=False):
def get_default_schema():
    if settings.SETTINGS_MODULE == 'awx.settings.development':
        from awx.api.swagger import AutoSchema

        return AutoSchema()
    else:
        return views.APIView.schema
@@ -149,21 +143,23 @@ class APIView(views.APIView):
    versioning_class = URLPathVersioning

    def initialize_request(self, request, *args, **kwargs):
        '''
        """
        Store the Django REST Framework Request object as an attribute on the
        normal Django request, store time the request started.
        '''
        """
        self.time_started = time.time()
        if getattr(settings, 'SQL_DEBUG', False):
            self.queries_before = len(connection.queries)

        # If there are any custom headers in REMOTE_HOST_HEADERS, make sure
        # they respect the allowed proxy list
        if all([
                settings.PROXY_IP_ALLOWED_LIST,
                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST
        ]):
        if all(
            [
                settings.PROXY_IP_ALLOWED_LIST,
                request.environ.get('REMOTE_ADDR') not in settings.PROXY_IP_ALLOWED_LIST,
                request.environ.get('REMOTE_HOST') not in settings.PROXY_IP_ALLOWED_LIST,
            ]
        ):
            for custom_header in settings.REMOTE_HOST_HEADERS:
                if custom_header.startswith('HTTP_'):
                    request.environ.pop(custom_header, None)

@@ -178,17 +174,19 @@ class APIView(views.APIView):
            request.drf_request_user = None
            self.__init_request_error__ = exc
        except UnsupportedMediaType as exc:
            exc.detail = _('You did not use correct Content-Type in your HTTP request. '
                           'If you are using our REST API, the Content-Type must be application/json')
            exc.detail = _(
                'You did not use correct Content-Type in your HTTP request. ' 'If you are using our REST API, the Content-Type must be application/json'
            )
            self.__init_request_error__ = exc
        return drf_request

    def finalize_response(self, request, response, *args, **kwargs):
        '''
        """
        Log warning for 400 requests. Add header with elapsed time.
        '''
        """
        from awx.main.utils import get_licenser
        from awx.main.utils.licensing import OpenLicense

        #
        # If the URL was rewritten, and we get a 404, we should entirely
        # replace the view in the request context with an ApiErrorView()

@@ -212,8 +210,12 @@ class APIView(views.APIView):
            return response

        if response.status_code >= 400:
            status_msg = "status %s received by user %s attempting to access %s from %s" % \
                         (response.status_code, request.user, request.path, request.META.get('REMOTE_ADDR', None))
            status_msg = "status %s received by user %s attempting to access %s from %s" % (
                response.status_code,
                request.user,
                request.path,
                request.META.get('REMOTE_ADDR', None),
            )
            if hasattr(self, '__init_request_error__'):
                response = self.handle_exception(self.__init_request_error__)
            if response.status_code == 401:

@@ -225,7 +227,7 @@ class APIView(views.APIView):
        time_started = getattr(self, 'time_started', None)
        response['X-API-Product-Version'] = get_awx_version()
        response['X-API-Product-Name'] = 'AWX' if isinstance(get_licenser(), OpenLicense) else 'Red Hat Ansible Tower'

        response['X-API-Node'] = settings.CLUSTER_HOST_ID
        if time_started:
            time_elapsed = time.time() - self.time_started
@@ -311,18 +313,12 @@ class APIView(views.APIView):
        return data

    def determine_version(self, request, *args, **kwargs):
        return (
            getattr(request, 'version', None),
            getattr(request, 'versioning_scheme', None),
        )
        return (getattr(request, 'version', None), getattr(request, 'versioning_scheme', None))

    def dispatch(self, request, *args, **kwargs):
        if self.versioning_class is not None:
            scheme = self.versioning_class()
            request.version, request.versioning_scheme = (
                scheme.determine_version(request, *args, **kwargs),
                scheme
            )
            request.version, request.versioning_scheme = (scheme.determine_version(request, *args, **kwargs), scheme)
        if 'version' in kwargs:
            kwargs.pop('version')
        return super(APIView, self).dispatch(request, *args, **kwargs)

@@ -378,25 +374,22 @@ class GenericAPIView(generics.GenericAPIView, APIView):
        d = super(GenericAPIView, self).get_description_context()
        if hasattr(self.model, "_meta"):
            if hasattr(self.model._meta, "verbose_name"):
                d.update({
                    'model_verbose_name': smart_text(self.model._meta.verbose_name),
                    'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
                })
                d.update(
                    {
                        'model_verbose_name': smart_text(self.model._meta.verbose_name),
                        'model_verbose_name_plural': smart_text(self.model._meta.verbose_name_plural),
                    }
                )
        serializer = self.get_serializer()
        metadata = self.metadata_class()
        metadata.request = self.request
        for method, key in [
            ('GET', 'serializer_fields'),
            ('POST', 'serializer_create_fields'),
            ('PUT', 'serializer_update_fields')
        ]:
        for method, key in [('GET', 'serializer_fields'), ('POST', 'serializer_create_fields'), ('PUT', 'serializer_update_fields')]:
            d[key] = metadata.get_serializer_info(serializer, method=method)
        d['settings'] = settings
        return d


class SimpleListAPIView(generics.ListAPIView, GenericAPIView):

    def get_queryset(self):
        return self.request.user.get_queryset(self.model)

@@ -413,9 +406,7 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
        else:
            order_field = 'name'
        d = super(ListAPIView, self).get_description_context()
        d.update({
            'order_field': order_field,
        })
        d.update({'order_field': order_field})
        return d

    @property

@@ -426,9 +417,13 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
    def related_search_fields(self):
        def skip_related_name(name):
            return (
                name is None or name.endswith('_role') or name.startswith('_') or
                name.startswith('deprecated_') or name.endswith('_set') or
                name == 'polymorphic_ctype')
                name is None
                or name.endswith('_role')
                or name.startswith('_')
                or name.startswith('deprecated_')
                or name.endswith('_set')
                or name == 'polymorphic_ctype'
            )

        fields = set([])
        for field in self.model._meta.fields:
@@ -482,9 +477,7 @@ class ParentMixin(object):
    def get_parent_object(self):
        if self.parent_object is not None:
            return self.parent_object
        parent_filter = {
            self.lookup_field: self.kwargs.get(self.lookup_field, None),
        }
        parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
        self.parent_object = get_object_or_404(self.parent_model, **parent_filter)
        return self.parent_object

@@ -513,10 +506,12 @@ class SubListAPIView(ParentMixin, ListAPIView):

    def get_description_context(self):
        d = super(SubListAPIView, self).get_description_context()
        d.update({
            'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
            'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
        })
        d.update(
            {
                'parent_model_verbose_name': smart_text(self.parent_model._meta.verbose_name),
                'parent_model_verbose_name_plural': smart_text(self.parent_model._meta.verbose_name_plural),
            }
        )
        return d

    def get_queryset(self):

@@ -531,7 +526,6 @@ class SubListAPIView(ParentMixin, ListAPIView):


class DestroyAPIView(generics.DestroyAPIView):

    def has_delete_permission(self, obj):
        return self.request.user.can_access(self.model, 'delete', obj)

@@ -545,12 +539,12 @@ class SubListDestroyAPIView(DestroyAPIView, SubListAPIView):
    """
    Concrete view for deleting everything related by `relationship`.
    """

    check_sub_obj_permission = True

    def destroy(self, request, *args, **kwargs):
        instance_list = self.get_queryset()
        if (not self.check_sub_obj_permission and
                not request.user.can_access(self.parent_model, 'delete', self.get_parent_object())):
        if not self.check_sub_obj_permission and not request.user.can_access(self.parent_model, 'delete', self.get_parent_object()):
            raise PermissionDenied()
        self.perform_list_destroy(instance_list)
        return Response(status=status.HTTP_204_NO_CONTENT)

@@ -574,9 +568,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):

    def get_description_context(self):
        d = super(SubListCreateAPIView, self).get_description_context()
        d.update({
            'parent_key': getattr(self, 'parent_key', None),
        })
        d.update({'parent_key': getattr(self, 'parent_key', None)})
        return d

    def get_queryset(self):

@@ -610,8 +602,7 @@ class SubListCreateAPIView(SubListAPIView, ListCreateAPIView):
        # attempt to deserialize the object
        serializer = self.get_serializer(data=data)
        if not serializer.is_valid():
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

        # Verify we have permission to add the object as given.
        if not request.user.can_access(self.model, 'add', serializer.validated_data):

@@ -635,9 +626,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):

    def get_description_context(self):
        d = super(SubListCreateAttachDetachAPIView, self).get_description_context()
        d.update({
            "has_attach": True,
        })
        d.update({"has_attach": True})
        return d

    def attach_validate(self, request):

@@ -675,9 +664,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
        sub = get_object_or_400(self.model, pk=sub_id)

        # Verify we have permission to attach.
        if not request.user.can_access(self.parent_model, 'attach', parent, sub,
                                       self.relationship, data,
                                       skip_sub_obj_read_check=created):
        if not request.user.can_access(self.parent_model, 'attach', parent, sub, self.relationship, data, skip_sub_obj_read_check=created):
            raise PermissionDenied()

        # Verify that the relationship to be added is valid.

@@ -716,8 +703,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
        relationship = getattrd(parent, self.relationship)
        sub = get_object_or_400(self.model, pk=sub_id)

        if not request.user.can_access(self.parent_model, 'unattach', parent,
                                       sub, self.relationship, request.data):
        if not request.user.can_access(self.parent_model, 'unattach', parent, sub, self.relationship, request.data):
            raise PermissionDenied()

        if parent_key:
@@ -735,28 +721,24 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):

    def post(self, request, *args, **kwargs):
        if not isinstance(request.data, dict):
            return Response('invalid type for post data',
                            status=status.HTTP_400_BAD_REQUEST)
            return Response('invalid type for post data', status=status.HTTP_400_BAD_REQUEST)
        if 'disassociate' in request.data:
            return self.unattach(request, *args, **kwargs)
        else:
            return self.attach(request, *args, **kwargs)


class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):
    '''
    """
    Derived version of SubListCreateAttachDetachAPIView that prohibits creation
    '''
    """

    metadata_class = SublistAttachDetatchMetadata

    def post(self, request, *args, **kwargs):
        sub_id = request.data.get('id', None)
        if not sub_id:
            return Response(
                dict(msg=_("{} 'id' field is missing.".format(
                    self.model._meta.verbose_name.title()))),
                status=status.HTTP_400_BAD_REQUEST)
            return Response(dict(msg=_("{} 'id' field is missing.".format(self.model._meta.verbose_name.title()))), status=status.HTTP_400_BAD_REQUEST)
        return super(SubListAttachDetachAPIView, self).post(request, *args, **kwargs)

    def update_raw_data(self, data):

@@ -768,11 +750,11 @@ class SubListAttachDetachAPIView(SubListCreateAttachDetachAPIView):


class DeleteLastUnattachLabelMixin(object):
    '''
    """
    Models for which you want the last instance to be deleted from the database
    when the last disassociate is called should inherit from this class. Further,
    the model should implement is_detached()
    '''
    """

    def unattach(self, request, *args, **kwargs):
        (sub_id, res) = super(DeleteLastUnattachLabelMixin, self).unattach_validate(request)

@@ -798,7 +780,6 @@ class RetrieveAPIView(generics.RetrieveAPIView, GenericAPIView):


class RetrieveUpdateAPIView(RetrieveAPIView, generics.RetrieveUpdateAPIView):

    def update(self, request, *args, **kwargs):
        self.update_filter(request, *args, **kwargs)
        return super(RetrieveUpdateAPIView, self).update(request, *args, **kwargs)

@@ -839,6 +820,7 @@ class ResourceAccessList(ParentMixin, ListAPIView):

def trigger_delayed_deep_copy(*args, **kwargs):
    from awx.main.tasks import deep_copy_model_obj

    connection.on_commit(lambda: deep_copy_model_obj.delay(*args, **kwargs))


@@ -869,8 +851,7 @@ class CopyAPIView(GenericAPIView):
                    field_val[secret] = decrypt_field(obj, secret)
        elif isinstance(field_val, dict):
            for sub_field in field_val:
                if isinstance(sub_field, str) \
                        and isinstance(field_val[sub_field], str):
                if isinstance(sub_field, str) and isinstance(field_val[sub_field], str):
                    field_val[sub_field] = decrypt_field(obj, field_name, sub_field)
        elif isinstance(field_val, str):
            try:

@@ -882,15 +863,11 @@ class CopyAPIView(GenericAPIView):
    def _build_create_dict(self, obj):
        ret = {}
        if self.copy_return_serializer_class:
            all_fields = Metadata().get_serializer_info(
                self._get_copy_return_serializer(), method='POST'
            )
            all_fields = Metadata().get_serializer_info(self._get_copy_return_serializer(), method='POST')
            for field_name, field_info in all_fields.items():
                if not hasattr(obj, field_name) or field_info.get('read_only', True):
                    continue
                ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(
                    obj, field_name, getattr(obj, field_name)
                )
                ret[field_name] = CopyAPIView._decrypt_model_field_if_needed(obj, field_name, getattr(obj, field_name))
        return ret

    @staticmethod

@@ -908,9 +885,11 @@ class CopyAPIView(GenericAPIView):
            except AttributeError:
                continue
            # Adjust copy blocked fields here.
            if field.name in fields_to_discard or field.name in [
                'id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by'
            ] or field.name.endswith('_role'):
            if (
                field.name in fields_to_discard
                or field.name in ['id', 'pk', 'polymorphic_ctype', 'unifiedjobtemplate_ptr', 'created_by', 'modified_by']
                or field.name.endswith('_role')
            ):
                create_kwargs.pop(field.name, None)
                continue
            if field.one_to_many:
@@ -926,33 +905,24 @@ class CopyAPIView(GenericAPIView):
            elif field.name == 'name' and not old_parent:
                create_kwargs[field.name] = copy_name or field_val + ' copy'
            elif field.name in fields_to_preserve:
                create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(
                    obj, field.name, field_val
                )
                create_kwargs[field.name] = CopyAPIView._decrypt_model_field_if_needed(obj, field.name, field_val)

        # WorkflowJobTemplateNodes that represent an approval are *special*;
        # when we copy them, we actually want to *copy* the UJT they point at
        # rather than share the template reference between nodes in disparate
        # workflows
        if (
            isinstance(obj, WorkflowJobTemplateNode) and
            isinstance(getattr(obj, 'unified_job_template'), WorkflowApprovalTemplate)
        ):
            new_approval_template, sub_objs = CopyAPIView.copy_model_obj(
                None, None, WorkflowApprovalTemplate,
                obj.unified_job_template, creater
            )
        if isinstance(obj, WorkflowJobTemplateNode) and isinstance(getattr(obj, 'unified_job_template'), WorkflowApprovalTemplate):
            new_approval_template, sub_objs = CopyAPIView.copy_model_obj(None, None, WorkflowApprovalTemplate, obj.unified_job_template, creater)
            create_kwargs['unified_job_template'] = new_approval_template

        new_obj = model.objects.create(**create_kwargs)
        logger.debug('Deep copy: Created new object {}({})'.format(
            new_obj, model
        ))
        logger.debug('Deep copy: Created new object {}({})'.format(new_obj, model))
        # Need to save separately because django-crum get_current_user would
        # not work properly in non-request-response-cycle context.
        new_obj.created_by = creater
        new_obj.save()
        from awx.main.signals import disable_activity_stream

        with disable_activity_stream():
            for m2m in m2m_to_preserve:
                for related_obj in m2m_to_preserve[m2m].all():

@@ -978,8 +948,7 @@ class CopyAPIView(GenericAPIView):
        for key in create_kwargs:
            create_kwargs[key] = getattr(create_kwargs[key], 'pk', None) or create_kwargs[key]
        try:
            can_copy = request.user.can_access(self.model, 'add', create_kwargs) and \
                request.user.can_access(self.model, 'copy_related', obj)
            can_copy = request.user.can_access(self.model, 'add', create_kwargs) and request.user.can_access(self.model, 'copy_related', obj)
        except PermissionDenied:
            return Response({'can_copy': False})
        return Response({'can_copy': can_copy})

@@ -998,8 +967,7 @@ class CopyAPIView(GenericAPIView):
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        new_obj, sub_objs = CopyAPIView.copy_model_obj(
            None, None, self.model, obj, request.user, create_kwargs=create_kwargs,
            copy_name=serializer.validated_data.get('name', '')
            None, None, self.model, obj, request.user, create_kwargs=create_kwargs, copy_name=serializer.validated_data.get('name', '')
        )
        if hasattr(new_obj, 'admin_role') and request.user not in new_obj.admin_role.members.all():
            new_obj.admin_role.members.add(request.user)

@@ -1011,13 +979,9 @@ class CopyAPIView(GenericAPIView):
        cache.set(key, sub_objs, timeout=3600)
        permission_check_func = None
        if hasattr(type(self), 'deep_copy_permission_check_func'):
            permission_check_func = (
                type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func'
            )
            permission_check_func = (type(self).__module__, type(self).__name__, 'deep_copy_permission_check_func')
        trigger_delayed_deep_copy(
            self.model.__module__, self.model.__name__,
            obj.pk, new_obj.pk, request.user.pk, key,
            permission_check_func=permission_check_func
            self.model.__module__, self.model.__name__, obj.pk, new_obj.pk, request.user.pk, key, permission_check_func=permission_check_func
        )
        serializer = self._get_copy_return_serializer(new_obj)
        headers = {'Location': new_obj.get_absolute_url(request=request)}

@@ -1026,7 +990,7 @@ class CopyAPIView(GenericAPIView):

class BaseUsersList(SubListCreateAttachDetachAPIView):
    def post(self, request, *args, **kwargs):
        ret = super(BaseUsersList, self).post( request, *args, **kwargs)
        ret = super(BaseUsersList, self).post(request, *args, **kwargs)
        if ret.status_code != 201:
            return ret
        try:
@@ -28,18 +28,23 @@ from awx.main.tasks import AWXReceptorJob


class Metadata(metadata.SimpleMetadata):

    def get_field_info(self, field):
        field_info = OrderedDict()
        field_info['type'] = self.label_lookup[field]
        field_info['required'] = getattr(field, 'required', False)

        text_attrs = [
            'read_only', 'label', 'help_text',
            'min_length', 'max_length',
            'min_value', 'max_value',
            'category', 'category_slug',
            'defined_in_file', 'unit',
            'read_only',
            'label',
            'help_text',
            'min_length',
            'max_length',
            'min_value',
            'max_value',
            'category',
            'category_slug',
            'defined_in_file',
            'unit',
        ]

        for attr in text_attrs:

@@ -61,8 +66,9 @@ class Metadata(metadata.SimpleMetadata):
            'type': _('Data type for this {}.'),
            'url': _('URL for this {}.'),
            'related': _('Data structure with URLs of related resources.'),
            'summary_fields': _('Data structure with name/description for related resources. '
                                'The output for some objects may be limited for performance reasons.'),
            'summary_fields': _(
                'Data structure with name/description for related resources. ' 'The output for some objects may be limited for performance reasons.'
            ),
            'created': _('Timestamp when this {} was created.'),
            'modified': _('Timestamp when this {} was last modified.'),
        }

@@ -101,9 +107,7 @@ class Metadata(metadata.SimpleMetadata):
            field_info['children'] = self.get_serializer_info(field)

        if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
            choices = [
                (choice_value, choice_name) for choice_value, choice_name in field.choices.items()
            ]
            choices = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]
            if not any(choice in ('', None) for choice, _ in choices):
                if field.allow_blank:
                    choices = [("", "---------")] + choices

@@ -131,7 +135,6 @@ class Metadata(metadata.SimpleMetadata):
            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
                field_info[notification_type_name] = notification_type_class.default_messages

        # Update type of fields returned...
        model_field = None
        if serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):

@@ -149,22 +152,19 @@ class Metadata(metadata.SimpleMetadata):
            field_info['type'] = 'integer'
        elif field.field_name in ('created', 'modified'):
            field_info['type'] = 'datetime'
        elif (
            RelatedField in field.__class__.__bases__ or
            isinstance(model_field, ForeignKey)
        ):
        elif RelatedField in field.__class__.__bases__ or isinstance(model_field, ForeignKey):
            field_info['type'] = 'id'
        elif (
            isinstance(field, JSONField) or
            isinstance(model_field, JSONField) or
            isinstance(field, DRFJSONField) or
            isinstance(getattr(field, 'model_field', None), JSONField) or
            field.field_name == 'credential_passwords'
            isinstance(field, JSONField)
            or isinstance(model_field, JSONField)
            or isinstance(field, DRFJSONField)
            or isinstance(getattr(field, 'model_field', None), JSONField)
            or field.field_name == 'credential_passwords'
        ):
            field_info['type'] = 'json'
        elif (
            isinstance(field, ManyRelatedField) and
            field.field_name == 'credentials'
            isinstance(field, ManyRelatedField)
            and field.field_name == 'credentials'
            # launch-time credentials
        ):
            field_info['type'] = 'list_of_ids'

@@ -175,10 +175,7 @@ class Metadata(metadata.SimpleMetadata):

    def get_serializer_info(self, serializer, method=None):
        filterer = getattr(serializer, 'filter_field_metadata', lambda fields, method: fields)
        return filterer(
            super(Metadata, self).get_serializer_info(serializer),
            method
        )
        return filterer(super(Metadata, self).get_serializer_info(serializer), method)

    def determine_actions(self, request, view):
        # Add field information for GET requests (so field names/labels are

@@ -274,6 +271,7 @@ class Metadata(metadata.SimpleMetadata):
        metadata['object_roles'] = roles

        from rest_framework import generics

        if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'):
            metadata['max_page_size'] = view.paginator.max_page_size

@@ -293,7 +291,6 @@ class RoleMetadata(Metadata):


class SublistAttachDetatchMetadata(Metadata):

    def determine_actions(self, request, view):
        actions = super(SublistAttachDetatchMetadata, self).determine_actions(request, view)
        method = 'POST'
@@ -3,13 +3,9 @@

from django.conf.urls import url

from awx.api.views import (
    MetricsView
)
from awx.api.views import MetricsView


urls = [
    url(r'^$', MetricsView.as_view(), name='metrics_view'),
]
urls = [url(r'^$', MetricsView.as_view(), name='metrics_view')]

__all__ = ['urls']
@@ -10,7 +10,6 @@ from rest_framework.utils.urls import replace_query_param


class DisabledPaginator(DjangoPaginator):

    @property
    def num_pages(self):
        return 1

@@ -49,8 +48,7 @@ class Pagination(pagination.PageNumberPagination):

    def get_html_context(self):
        context = super().get_html_context()
        context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url))
                                 for pl in context['page_links']]
        context['page_links'] = [pl._replace(url=self.cap_page_size(pl.url)) for pl in context['page_links']]

        return context
@@ -15,16 +15,25 @@ from awx.main.utils import get_object_or_400

logger = logging.getLogger('awx.api.permissions')

__all__ = ['ModelAccessPermission', 'JobTemplateCallbackPermission', 'VariableDataPermission',
           'TaskPermission', 'ProjectUpdatePermission', 'InventoryInventorySourcesUpdatePermission',
           'UserPermission', 'IsSuperUser', 'InstanceGroupTowerPermission', 'WorkflowApprovalPermission']
__all__ = [
    'ModelAccessPermission',
    'JobTemplateCallbackPermission',
    'VariableDataPermission',
    'TaskPermission',
    'ProjectUpdatePermission',
    'InventoryInventorySourcesUpdatePermission',
    'UserPermission',
    'IsSuperUser',
    'InstanceGroupTowerPermission',
    'WorkflowApprovalPermission',
]


class ModelAccessPermission(permissions.BasePermission):
    '''
    """
    Default permissions class to check user access based on the model and
    request method, optionally verifying the request data.
    '''
    """

    def check_options_permissions(self, request, view, obj=None):
        return self.check_get_permissions(request, view, obj)

@@ -35,8 +44,7 @@ class ModelAccessPermission(permissions.BasePermission):
    def check_get_permissions(self, request, view, obj=None):
        if hasattr(view, 'parent_model'):
            parent_obj = view.get_parent_object()
            if not check_user_access(request.user, view.parent_model, 'read',
                                     parent_obj):
            if not check_user_access(request.user, view.parent_model, 'read', parent_obj):
                return False
        if not obj:
            return True

@@ -45,8 +53,7 @@ class ModelAccessPermission(permissions.BasePermission):
    def check_post_permissions(self, request, view, obj=None):
        if hasattr(view, 'parent_model'):
            parent_obj = view.get_parent_object()
            if not check_user_access(request.user, view.parent_model, 'read',
                                     parent_obj):
            if not check_user_access(request.user, view.parent_model, 'read', parent_obj):
                return False
        if hasattr(view, 'parent_key'):
            if not check_user_access(request.user, view.model, 'add', {view.parent_key: parent_obj}):

@@ -60,10 +67,7 @@ class ModelAccessPermission(permissions.BasePermission):
            extra_kwargs = {}
            if view.obj_permission_type == 'admin':
                extra_kwargs['data'] = {}
            return check_user_access(
                request.user, view.model, view.obj_permission_type, obj,
                **extra_kwargs
            )
            return check_user_access(request.user, view.model, view.obj_permission_type, obj, **extra_kwargs)
        else:
            if obj:
                return True

@@ -74,8 +78,7 @@ class ModelAccessPermission(permissions.BasePermission):
            # FIXME: For some reason this needs to return True
            # because it is first called with obj=None?
            return True
        return check_user_access(request.user, view.model, 'change', obj,
                                 request.data)
        return check_user_access(request.user, view.model, 'change', obj, request.data)

    def check_patch_permissions(self, request, view, obj=None):
        return self.check_put_permissions(request, view, obj)

@@ -89,10 +92,10 @@ class ModelAccessPermission(permissions.BasePermission):
        return check_user_access(request.user, view.model, 'delete', obj)

    def check_permissions(self, request, view, obj=None):
        '''
        """
        Perform basic permissions checking before delegating to the appropriate
        method based on the request method.
        '''
        """

        # Don't allow anonymous users. 401, not 403, hence no raised exception.
        if not request.user or request.user.is_anonymous:
@ -117,9 +120,7 @@ class ModelAccessPermission(permissions.BasePermission):
|
||||
return result
|
||||
|
||||
def has_permission(self, request, view, obj=None):
|
||||
logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)',
|
||||
request.user, request.method, request.data,
|
||||
view.__class__.__name__, obj)
|
||||
logger.debug('has_permission(user=%s method=%s data=%r, %s, %r)', request.user, request.method, request.data, view.__class__.__name__, obj)
|
||||
try:
|
||||
response = self.check_permissions(request, view, obj)
|
||||
except Exception as e:
|
||||
@ -134,10 +135,10 @@ class ModelAccessPermission(permissions.BasePermission):
|
||||
|
||||
|
||||
class JobTemplateCallbackPermission(ModelAccessPermission):
|
||||
'''
|
||||
"""
|
||||
Permission check used by job template callback view for requests from
|
||||
empheral hosts.
|
||||
'''
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view, obj=None):
|
||||
# If another authentication method was used and it's not a POST, return
|
||||
@ -160,18 +161,16 @@ class JobTemplateCallbackPermission(ModelAccessPermission):
|
||||
|
||||
|
||||
class VariableDataPermission(ModelAccessPermission):
|
||||
|
||||
def check_put_permissions(self, request, view, obj=None):
|
||||
if not obj:
|
||||
return True
|
||||
return check_user_access(request.user, view.model, 'change', obj,
|
||||
dict(variables=request.data))
|
||||
return check_user_access(request.user, view.model, 'change', obj, dict(variables=request.data))
|
||||
|
||||
|
||||
class TaskPermission(ModelAccessPermission):
|
||||
'''
|
||||
"""
|
||||
Permission checks used for API callbacks from running a task.
|
||||
'''
|
||||
"""
|
||||
|
||||
def has_permission(self, request, view, obj=None):
|
||||
# If another authentication method was used other than the one for
|
||||
@ -182,8 +181,7 @@ class TaskPermission(ModelAccessPermission):
|
||||
# Verify that the ID present in the auth token is for a valid, active
|
||||
# unified job.
|
||||
try:
|
||||
unified_job = UnifiedJob.objects.get(status='running',
|
||||
pk=int(request.auth.split('-')[0]))
|
||||
unified_job = UnifiedJob.objects.get(status='running', pk=int(request.auth.split('-')[0]))
|
||||
except (UnifiedJob.DoesNotExist, TypeError):
|
||||
return False
|
||||
|
||||
@ -197,10 +195,10 @@ class TaskPermission(ModelAccessPermission):
|
||||
|
||||
|
||||
class WorkflowApprovalPermission(ModelAccessPermission):
|
||||
'''
|
||||
"""
|
||||
Permission check used by workflow `approval` and `deny` views to determine
|
||||
who has access to approve and deny paused workflow nodes
|
||||
'''
|
||||
"""
|
||||
|
||||
def check_post_permissions(self, request, view, obj=None):
|
||||
approval = get_object_or_400(view.model, pk=view.kwargs['pk'])
|
||||
@ -208,9 +206,10 @@ class WorkflowApprovalPermission(ModelAccessPermission):
|
||||
|
||||
|
||||
class ProjectUpdatePermission(ModelAccessPermission):
|
||||
'''
|
||||
"""
|
||||
Permission check used by ProjectUpdateView to determine who can update projects
|
||||
'''
|
||||
"""
|
||||
|
||||
def check_get_permissions(self, request, view, obj=None):
|
||||
project = get_object_or_400(view.model, pk=view.kwargs['pk'])
|
||||
return check_user_access(request.user, view.model, 'read', project)
|
||||
|
||||
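Two recurring patterns in the permissions hunks above are worth calling out. The ''' to """ pairs are black normalizing docstring quotes; ordinary string literals keep their single quotes throughout this diff. And the __all__ rewrite shows the opposite of the join rule: when a literal exceeds the line length, black explodes it to one element per line and appends a trailing comma. A small runnable sketch (names are illustrative, not from AWX):

# Fits on one line: black leaves it alone.
__all__ = ['ModelAccessPermission', 'TaskPermission']

# Too long for the line limit: black rewrites it one element per line,
# with a trailing comma after the last element, as in the hunk above.
EXPLODED = [
    'a_very_long_exported_permission_class_name_one',
    'a_very_long_exported_permission_class_name_two',
    'a_very_long_exported_permission_class_name_three',
]

print(len(__all__), len(EXPLODED))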
@ -11,7 +11,6 @@ from rest_framework.utils import encoders


class SurrogateEncoder(encoders.JSONEncoder):

    def encode(self, obj):
        ret = super(SurrogateEncoder, self).encode(obj)
        try:
@ -28,9 +27,9 @@ class DefaultJSONRenderer(renderers.JSONRenderer):


class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
    '''
    """
    Customizations to the default browsable API renderer.
    '''
    """

    def get_default_renderer(self, view):
        renderer = super(BrowsableAPIRenderer, self).get_default_renderer(view)
@ -48,9 +47,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
            # see: https://github.com/ansible/awx/issues/3108
            # https://code.djangoproject.com/ticket/28121
            return data
        return super(BrowsableAPIRenderer, self).get_content(renderer, data,
                                                             accepted_media_type,
                                                             renderer_context)
        return super(BrowsableAPIRenderer, self).get_content(renderer, data, accepted_media_type, renderer_context)

    def get_context(self, data, accepted_media_type, renderer_context):
        # Store the associated response status to know how to populate the raw
@ -125,18 +122,13 @@ class AnsiDownloadRenderer(PlainTextRenderer):


class PrometheusJSONRenderer(renderers.JSONRenderer):

    def render(self, data, accepted_media_type=None, renderer_context=None):
        if isinstance(data, dict):
            # HTTP errors are {'detail': ErrorDetail(string='...', code=...)}
            return super(PrometheusJSONRenderer, self).render(
                data, accepted_media_type, renderer_context
            )
            return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)
        parsed_metrics = text_string_to_metric_families(data)
        data = {}
        for family in parsed_metrics:
            for sample in family.samples:
                data[sample[0]] = {"labels": sample[1], "value": sample[2]}
        return super(PrometheusJSONRenderer, self).render(
            data, accepted_media_type, renderer_context
        )
        return super(PrometheusJSONRenderer, self).render(data, accepted_media_type, renderer_context)

File diff suppressed because it is too large
@ -14,7 +14,6 @@ from rest_framework_swagger import renderers


class SuperUserSchemaGenerator(SchemaGenerator):

    def has_view_permissions(self, path, method, view):
        #
        # Generate the Swagger schema as if you were a superuser and
@ -25,17 +24,17 @@ class SuperUserSchemaGenerator(SchemaGenerator):


class AutoSchema(DRFAuthSchema):

    def get_link(self, path, method, base_url):
        link = super(AutoSchema, self).get_link(path, method, base_url)
        try:
            serializer = self.view.get_serializer()
        except Exception:
            serializer = None
            warnings.warn('{}.get_serializer() raised an exception during '
                          'schema generation. Serializer fields will not be '
                          'generated for {} {}.'
                          .format(self.view.__class__.__name__, method, path))
            warnings.warn(
                '{}.get_serializer() raised an exception during '
                'schema generation. Serializer fields will not be '
                'generated for {} {}.'.format(self.view.__class__.__name__, method, path)
            )

        link.__dict__['deprecated'] = getattr(self.view, 'deprecated', False)

@ -43,9 +42,7 @@ class AutoSchema(DRFAuthSchema):
        if hasattr(self.view, 'swagger_topic'):
            link.__dict__['topic'] = str(self.view.swagger_topic).title()
        elif serializer and hasattr(serializer, 'Meta'):
            link.__dict__['topic'] = str(
                serializer.Meta.model._meta.verbose_name_plural
            ).title()
            link.__dict__['topic'] = str(serializer.Meta.model._meta.verbose_name_plural).title()
        elif hasattr(self.view, 'model'):
            link.__dict__['topic'] = str(self.view.model._meta.verbose_name_plural).title()
        else:
@ -62,18 +59,10 @@ class SwaggerSchemaView(APIView):
    _ignore_model_permissions = True
    exclude_from_schema = True
    permission_classes = [AllowAny]
    renderer_classes = [
        CoreJSONRenderer,
        renderers.OpenAPIRenderer,
        renderers.SwaggerUIRenderer
    ]
    renderer_classes = [CoreJSONRenderer, renderers.OpenAPIRenderer, renderers.SwaggerUIRenderer]

    def get(self, request):
        generator = SuperUserSchemaGenerator(
            title='Ansible Tower API',
            patterns=None,
            urlconf=None
        )
        generator = SuperUserSchemaGenerator(title='Ansible Tower API', patterns=None, urlconf=None)
        schema = generator.get_schema(request=request)
        # python core-api doesn't support the deprecation yet, so track it
        # ourselves and return it in a response header
@ -103,11 +92,6 @@ class SwaggerSchemaView(APIView):
            schema._data[topic]._data[path] = node

        if not schema:
            raise exceptions.ValidationError(
                'The schema generator did not return a schema Document'
            )
            raise exceptions.ValidationError('The schema generator did not return a schema Document')

        return Response(
            schema,
            headers={'X-Deprecated-Paths': json.dumps(_deprecated)}
        )
        return Response(schema, headers={'X-Deprecated-Paths': json.dumps(_deprecated)})

@ -3,10 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    ActivityStreamList,
    ActivityStreamDetail,
)
from awx.api.views import ActivityStreamList, ActivityStreamDetail


urls = [

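The same single-line rule drives the import rewrite just above and the many like it that follow. Note that the black release used for this commit evidently joins a short parenthesized import even when it ends in a trailing comma; newer releases (20.8b0 and later) treat that "magic trailing comma" as a request to keep the list exploded. A runnable sketch with stdlib names standing in for the AWX views:

# The flake8-era form:
#
#     from os.path import (
#         dirname,
#         join,
#     )
#
# Black's output once the statement fits on one line:
from os.path import dirname, join

print(join(dirname('/awx/api/urls/activity_stream.py'), 'ad_hoc_command_event.py'))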
@ -3,10 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    AdHocCommandEventList,
    AdHocCommandEventDetail,
)
from awx.api.views import AdHocCommandEventList, AdHocCommandEventDetail


urls = [

@ -3,10 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    CredentialInputSourceDetail,
    CredentialInputSourceList,
)
from awx.api.views import CredentialInputSourceDetail, CredentialInputSourceList


urls = [

@ -3,13 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    CredentialTypeList,
    CredentialTypeDetail,
    CredentialTypeCredentialList,
    CredentialTypeActivityStreamList,
    CredentialTypeExternalTest,
)
from awx.api.views import CredentialTypeList, CredentialTypeDetail, CredentialTypeCredentialList, CredentialTypeActivityStreamList, CredentialTypeExternalTest


urls = [

@ -3,20 +3,14 @@

from django.conf.urls import url

from awx.api.views import (
    InstanceList,
    InstanceDetail,
    InstanceUnifiedJobsList,
    InstanceInstanceGroupsList,
)
from awx.api.views import InstanceList, InstanceDetail, InstanceUnifiedJobsList, InstanceInstanceGroupsList


urls = [
    url(r'^$', InstanceList.as_view(), name='instance_list'),
    url(r'^(?P<pk>[0-9]+)/$', InstanceDetail.as_view(), name='instance_detail'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', InstanceUnifiedJobsList.as_view(), name='instance_unified_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(),
        name='instance_instance_groups_list'),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', InstanceInstanceGroupsList.as_view(), name='instance_instance_groups_list'),
]

__all__ = ['urls']

@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    InstanceGroupList,
    InstanceGroupDetail,
    InstanceGroupUnifiedJobsList,
    InstanceGroupInstanceList,
)
from awx.api.views import InstanceGroupList, InstanceGroupDetail, InstanceGroupUnifiedJobsList, InstanceGroupInstanceList


urls = [

@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    InventoryScriptList,
    InventoryScriptDetail,
    InventoryScriptObjectRolesList,
    InventoryScriptCopy,
)
from awx.api.views import InventoryScriptList, InventoryScriptDetail, InventoryScriptObjectRolesList, InventoryScriptCopy


urls = [

@ -29,12 +29,21 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/credentials/$', InventorySourceCredentialsList.as_view(), name='inventory_source_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/groups/$', InventorySourceGroupsList.as_view(), name='inventory_source_groups_list'),
    url(r'^(?P<pk>[0-9]+)/hosts/$', InventorySourceHostsList.as_view(), name='inventory_source_hosts_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', InventorySourceNotificationTemplatesStartedList.as_view(),
        name='inventory_source_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', InventorySourceNotificationTemplatesErrorList.as_view(),
        name='inventory_source_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', InventorySourceNotificationTemplatesSuccessList.as_view(),
        name='inventory_source_notification_templates_success_list'),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_started/$',
        InventorySourceNotificationTemplatesStartedList.as_view(),
        name='inventory_source_notification_templates_started_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_error/$',
        InventorySourceNotificationTemplatesErrorList.as_view(),
        name='inventory_source_notification_templates_error_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_success/$',
        InventorySourceNotificationTemplatesSuccessList.as_view(),
        name='inventory_source_notification_templates_success_list',
    ),
]

__all__ = ['urls']

@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    JobEventList,
    JobEventDetail,
    JobEventChildrenList,
    JobEventHostsList,
)
from awx.api.views import JobEventList, JobEventDetail, JobEventChildrenList, JobEventHostsList


urls = [

@ -3,13 +3,9 @@

from django.conf.urls import url

from awx.api.views import (
    JobHostSummaryDetail,
)
from awx.api.views import JobHostSummaryDetail


urls = [
    url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail'),
]
urls = [url(r'^(?P<pk>[0-9]+)/$', JobHostSummaryDetail.as_view(), name='job_host_summary_detail')]

__all__ = ['urls']

@ -34,12 +34,21 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', JobTemplateActivityStreamList.as_view(), name='job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', JobTemplateNotificationTemplatesStartedList.as_view(),
        name='job_template_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', JobTemplateNotificationTemplatesErrorList.as_view(),
        name='job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', JobTemplateNotificationTemplatesSuccessList.as_view(),
        name='job_template_notification_templates_success_list'),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_started/$',
        JobTemplateNotificationTemplatesStartedList.as_view(),
        name='job_template_notification_templates_started_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_error/$',
        JobTemplateNotificationTemplatesErrorList.as_view(),
        name='job_template_notification_templates_error_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_success/$',
        JobTemplateNotificationTemplatesSuccessList.as_view(),
        name='job_template_notification_templates_success_list',
    ),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', JobTemplateInstanceGroupsList.as_view(), name='job_template_instance_groups_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', JobTemplateAccessList.as_view(), name='job_template_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', JobTemplateObjectRolesList.as_view(), name='job_template_object_roles_list'),

@ -3,15 +3,9 @@

from django.conf.urls import url

from awx.api.views import (
    LabelList,
    LabelDetail,
)
from awx.api.views import LabelList, LabelDetail


urls = [
    url(r'^$', LabelList.as_view(), name='label_list'),
    url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail'),
]
urls = [url(r'^$', LabelList.as_view(), name='label_list'), url(r'^(?P<pk>[0-9]+)/$', LabelDetail.as_view(), name='label_detail')]

__all__ = ['urls']

@ -3,15 +3,9 @@

from django.conf.urls import url

from awx.api.views import (
    NotificationList,
    NotificationDetail,
)
from awx.api.views import NotificationList, NotificationDetail


urls = [
    url(r'^$', NotificationList.as_view(), name='notification_list'),
    url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail'),
]
urls = [url(r'^$', NotificationList.as_view(), name='notification_list'), url(r'^(?P<pk>[0-9]+)/$', NotificationDetail.as_view(), name='notification_detail')]

__all__ = ['urls']

@ -16,32 +16,12 @@ from awx.api.views import (

urls = [
    url(r'^applications/$', OAuth2ApplicationList.as_view(), name='o_auth2_application_list'),
    url(
        r'^applications/(?P<pk>[0-9]+)/$',
        OAuth2ApplicationDetail.as_view(),
        name='o_auth2_application_detail'
    ),
    url(
        r'^applications/(?P<pk>[0-9]+)/tokens/$',
        ApplicationOAuth2TokenList.as_view(),
        name='o_auth2_application_token_list'
    ),
    url(
        r'^applications/(?P<pk>[0-9]+)/activity_stream/$',
        OAuth2ApplicationActivityStreamList.as_view(),
        name='o_auth2_application_activity_stream_list'
    ),
    url(r'^applications/(?P<pk>[0-9]+)/$', OAuth2ApplicationDetail.as_view(), name='o_auth2_application_detail'),
    url(r'^applications/(?P<pk>[0-9]+)/tokens/$', ApplicationOAuth2TokenList.as_view(), name='o_auth2_application_token_list'),
    url(r'^applications/(?P<pk>[0-9]+)/activity_stream/$', OAuth2ApplicationActivityStreamList.as_view(), name='o_auth2_application_activity_stream_list'),
    url(r'^tokens/$', OAuth2TokenList.as_view(), name='o_auth2_token_list'),
    url(
        r'^tokens/(?P<pk>[0-9]+)/$',
        OAuth2TokenDetail.as_view(),
        name='o_auth2_token_detail'
    ),
    url(
        r'^tokens/(?P<pk>[0-9]+)/activity_stream/$',
        OAuth2TokenActivityStreamList.as_view(),
        name='o_auth2_token_activity_stream_list'
    ),
    url(r'^tokens/(?P<pk>[0-9]+)/$', OAuth2TokenDetail.as_view(), name='o_auth2_token_detail'),
    url(r'^tokens/(?P<pk>[0-9]+)/activity_stream/$', OAuth2TokenActivityStreamList.as_view(), name='o_auth2_token_activity_stream_list'),
]

__all__ = ['urls']

@ -10,13 +10,10 @@ from oauthlib import oauth2
from oauth2_provider import views

from awx.main.models import RefreshToken
from awx.api.views import (
    ApiOAuthAuthorizationRootView,
)
from awx.api.views import ApiOAuthAuthorizationRootView


class TokenView(views.TokenView):

    def create_token_response(self, request):
        # Django OAuth2 Toolkit has a bug whereby refresh tokens are *never*
        # properly expired (ugh):
@ -26,9 +23,7 @@ class TokenView(views.TokenView):
        # This code detects and auto-expires them on refresh grant
        # requests.
        if request.POST.get('grant_type') == 'refresh_token' and 'refresh_token' in request.POST:
            refresh_token = RefreshToken.objects.filter(
                token=request.POST['refresh_token']
            ).first()
            refresh_token = RefreshToken.objects.filter(token=request.POST['refresh_token']).first()
            if refresh_token:
                expire_seconds = settings.OAUTH2_PROVIDER.get('REFRESH_TOKEN_EXPIRE_SECONDS', 0)
                if refresh_token.created + timedelta(seconds=expire_seconds) < now():

@ -43,14 +43,26 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/credentials/$', OrganizationCredentialList.as_view(), name='organization_credential_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', OrganizationActivityStreamList.as_view(), name='organization_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates/$', OrganizationNotificationTemplatesList.as_view(), name='organization_notification_templates_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', OrganizationNotificationTemplatesStartedList.as_view(),
        name='organization_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', OrganizationNotificationTemplatesErrorList.as_view(),
        name='organization_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', OrganizationNotificationTemplatesSuccessList.as_view(),
        name='organization_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', OrganizationNotificationTemplatesApprovalList.as_view(),
        name='organization_notification_templates_approvals_list'),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_started/$',
        OrganizationNotificationTemplatesStartedList.as_view(),
        name='organization_notification_templates_started_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_error/$',
        OrganizationNotificationTemplatesErrorList.as_view(),
        name='organization_notification_templates_error_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_success/$',
        OrganizationNotificationTemplatesSuccessList.as_view(),
        name='organization_notification_templates_success_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_approvals/$',
        OrganizationNotificationTemplatesApprovalList.as_view(),
        name='organization_notification_templates_approvals_list',
    ),
    url(r'^(?P<pk>[0-9]+)/instance_groups/$', OrganizationInstanceGroupsList.as_view(), name='organization_instance_groups_list'),
    url(r'^(?P<pk>[0-9]+)/galaxy_credentials/$', OrganizationGalaxyCredentialsList.as_view(), name='organization_galaxy_credentials_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', OrganizationObjectRolesList.as_view(), name='organization_object_roles_list'),

@ -35,10 +35,16 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', ProjectActivityStreamList.as_view(), name='project_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', ProjectSchedulesList.as_view(), name='project_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', ProjectNotificationTemplatesErrorList.as_view(), name='project_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', ProjectNotificationTemplatesSuccessList.as_view(),
        name='project_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', ProjectNotificationTemplatesStartedList.as_view(),
        name='project_notification_templates_started_list'),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_success/$',
        ProjectNotificationTemplatesSuccessList.as_view(),
        name='project_notification_templates_success_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_started/$',
        ProjectNotificationTemplatesStartedList.as_view(),
        name='project_notification_templates_started_list',
    ),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', ProjectObjectRolesList.as_view(), name='project_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/access_list/$', ProjectAccessList.as_view(), name='project_access_list'),
    url(r'^(?P<pk>[0-9]+)/copy/$', ProjectCopy.as_view(), name='project_copy'),

@ -3,14 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    RoleList,
    RoleDetail,
    RoleUsersList,
    RoleTeamsList,
    RoleParentsList,
    RoleChildrenList,
)
from awx.api.views import RoleList, RoleDetail, RoleUsersList, RoleTeamsList, RoleParentsList, RoleChildrenList


urls = [

@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    ScheduleList,
    ScheduleDetail,
    ScheduleUnifiedJobsList,
    ScheduleCredentialsList,
)
from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList


urls = [

@ -3,13 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    SystemJobList,
    SystemJobDetail,
    SystemJobCancel,
    SystemJobNotificationsList,
    SystemJobEventsList
)
from awx.api.views import SystemJobList, SystemJobDetail, SystemJobCancel, SystemJobNotificationsList, SystemJobEventsList


urls = [

@ -21,12 +21,21 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/launch/$', SystemJobTemplateLaunch.as_view(), name='system_job_template_launch'),
    url(r'^(?P<pk>[0-9]+)/jobs/$', SystemJobTemplateJobsList.as_view(), name='system_job_template_jobs_list'),
    url(r'^(?P<pk>[0-9]+)/schedules/$', SystemJobTemplateSchedulesList.as_view(), name='system_job_template_schedules_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', SystemJobTemplateNotificationTemplatesStartedList.as_view(),
        name='system_job_template_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', SystemJobTemplateNotificationTemplatesErrorList.as_view(),
        name='system_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
        name='system_job_template_notification_templates_success_list'),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_started/$',
        SystemJobTemplateNotificationTemplatesStartedList.as_view(),
        name='system_job_template_notification_templates_started_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_error/$',
        SystemJobTemplateNotificationTemplatesErrorList.as_view(),
        name='system_job_template_notification_templates_error_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_success/$',
        SystemJobTemplateNotificationTemplatesSuccessList.as_view(),
        name='system_job_template_notification_templates_success_list',
    ),
]

__all__ = ['urls']

@ -5,10 +5,7 @@ from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url

from awx.api.generics import (
    LoggedLoginView,
    LoggedLogoutView,
)
from awx.api.generics import LoggedLoginView, LoggedLogoutView
from awx.api.views import (
    ApiRootView,
    ApiV2RootView,
@ -33,9 +30,7 @@ from awx.api.views import (
    OAuth2ApplicationDetail,
)

from awx.api.views.metrics import (
    MetricsView,
)
from awx.api.views.metrics import MetricsView

from .organization import urls as organization_urls
from .user import urls as user_urls
@ -146,17 +141,11 @@ app_name = 'api'
urlpatterns = [
    url(r'^$', ApiRootView.as_view(), name='api_root_view'),
    url(r'^(?P<version>(v2))/', include(v2_urls)),
    url(r'^login/$', LoggedLoginView.as_view(
        template_name='rest_framework/login.html',
        extra_context={'inside_login_context': True}
    ), name='login'),
    url(r'^logout/$', LoggedLogoutView.as_view(
        next_page='/api/', redirect_field_name='next'
    ), name='logout'),
    url(r'^login/$', LoggedLoginView.as_view(template_name='rest_framework/login.html', extra_context={'inside_login_context': True}), name='login'),
    url(r'^logout/$', LoggedLogoutView.as_view(next_page='/api/', redirect_field_name='next'), name='logout'),
    url(r'^o/', include(oauth2_root_urls)),
]
if settings.SETTINGS_MODULE == 'awx.settings.development':
    from awx.api.swagger import SwaggerSchemaView
    urlpatterns += [
        url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view'),
    ]

    urlpatterns += [url(r'^swagger/$', SwaggerSchemaView.as_view(), name='swagger_view')]

@ -20,7 +20,7 @@ from awx.api.views import (
    UserAuthorizedTokenList,
)

urls = [
urls = [
    url(r'^$', UserList.as_view(), name='user_list'),
    url(r'^(?P<pk>[0-9]+)/$', UserDetail.as_view(), name='user_detail'),
    url(r'^(?P<pk>[0-9]+)/teams/$', UserTeamsList.as_view(), name='user_teams_list'),
@ -35,7 +35,6 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/tokens/$', OAuth2UserTokenList.as_view(), name='o_auth2_token_list'),
    url(r'^(?P<pk>[0-9]+)/authorized_tokens/$', UserAuthorizedTokenList.as_view(), name='user_authorized_token_list'),
    url(r'^(?P<pk>[0-9]+)/personal_tokens/$', UserPersonalTokenList.as_view(), name='user_personal_token_list'),

]
]

__all__ = ['urls']

@ -1,10 +1,6 @@
from django.conf.urls import url

from awx.api.views import (
    WebhookKeyView,
    GithubWebhookReceiver,
    GitlabWebhookReceiver,
)
from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver


urlpatterns = [

@ -3,12 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    WorkflowApprovalList,
    WorkflowApprovalDetail,
    WorkflowApprovalApprove,
    WorkflowApprovalDeny,
)
from awx.api.views import WorkflowApprovalList, WorkflowApprovalDetail, WorkflowApprovalApprove, WorkflowApprovalDeny


urls = [

@ -3,10 +3,7 @@

from django.conf.urls import url

from awx.api.views import (
    WorkflowApprovalTemplateDetail,
    WorkflowApprovalTemplateJobsList,
)
from awx.api.views import WorkflowApprovalTemplateDetail, WorkflowApprovalTemplateJobsList


urls = [

@ -33,14 +33,26 @@ urls = [
    url(r'^(?P<pk>[0-9]+)/survey_spec/$', WorkflowJobTemplateSurveySpec.as_view(), name='workflow_job_template_survey_spec'),
    url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', WorkflowJobTemplateWorkflowNodesList.as_view(), name='workflow_job_template_workflow_nodes_list'),
    url(r'^(?P<pk>[0-9]+)/activity_stream/$', WorkflowJobTemplateActivityStreamList.as_view(), name='workflow_job_template_activity_stream_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_started/$', WorkflowJobTemplateNotificationTemplatesStartedList.as_view(),
        name='workflow_job_template_notification_templates_started_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
        name='workflow_job_template_notification_templates_error_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
        name='workflow_job_template_notification_templates_success_list'),
    url(r'^(?P<pk>[0-9]+)/notification_templates_approvals/$', WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
        name='workflow_job_template_notification_templates_approvals_list'),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_started/$',
        WorkflowJobTemplateNotificationTemplatesStartedList.as_view(),
        name='workflow_job_template_notification_templates_started_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_error/$',
        WorkflowJobTemplateNotificationTemplatesErrorList.as_view(),
        name='workflow_job_template_notification_templates_error_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_success/$',
        WorkflowJobTemplateNotificationTemplatesSuccessList.as_view(),
        name='workflow_job_template_notification_templates_success_list',
    ),
    url(
        r'^(?P<pk>[0-9]+)/notification_templates_approvals/$',
        WorkflowJobTemplateNotificationTemplatesApprovalList.as_view(),
        name='workflow_job_template_notification_templates_approvals_list',
    ),
    url(r'^(?P<pk>[0-9]+)/access_list/$', WorkflowJobTemplateAccessList.as_view(), name='workflow_job_template_access_list'),
    url(r'^(?P<pk>[0-9]+)/object_roles/$', WorkflowJobTemplateObjectRolesList.as_view(), name='workflow_job_template_object_roles_list'),
    url(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateLabelList.as_view(), name='workflow_job_template_label_list'),

|
||||
|
||||
|
||||
class URLPathVersioning(BaseVersioning):
|
||||
|
||||
def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra):
|
||||
if request.version is not None:
|
||||
kwargs = {} if (kwargs is None) else kwargs
|
||||
kwargs[self.version_param] = request.version
|
||||
request = None
|
||||
|
||||
return super(BaseVersioning, self).reverse(
|
||||
viewname, args, kwargs, request, format, **extra
|
||||
)
|
||||
return super(BaseVersioning, self).reverse(viewname, args, kwargs, request, format, **extra)
|
||||
|
||||
File diff suppressed because it is too large
@ -28,14 +28,7 @@ from awx.main.models import (
    InventorySource,
    CustomInventoryScript,
)
from awx.api.generics import (
    ListCreateAPIView,
    RetrieveUpdateDestroyAPIView,
    SubListAPIView,
    SubListAttachDetachAPIView,
    ResourceAccessList,
    CopyAPIView,
)
from awx.api.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView, SubListAPIView, SubListAttachDetachAPIView, ResourceAccessList, CopyAPIView

from awx.api.serializers import (
    InventorySerializer,
@ -46,10 +39,7 @@ from awx.api.serializers import (
    CustomInventoryScriptSerializer,
    JobTemplateSerializer,
)
from awx.api.views.mixin import (
    RelatedJobsPreventDeleteMixin,
    ControlledByScmMixin,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, ControlledByScmMixin

logger = logging.getLogger('awx.api.views.organization')

@ -101,7 +91,7 @@ class InventoryScriptObjectRolesList(SubListAPIView):
    model = Role
    serializer_class = RoleSerializer
    parent_model = CustomInventoryScript
    search_fields = ('role_field', 'content_type__model',)
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        po = self.get_parent_object()
@ -134,8 +124,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, ControlledByScmMixin, Retri

        # Do not allow changes to an Inventory kind.
        if kind is not None and obj.kind != kind:
            return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')),
                            status=status.HTTP_405_METHOD_NOT_ALLOWED)
            return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED)
        return super(InventoryDetail, self).update(request, *args, **kwargs)

    def destroy(self, request, *args, **kwargs):
@ -175,7 +164,7 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):

class InventoryAccessList(ResourceAccessList):

    model = User # needs to be User for AccessLists's
    model = User  # needs to be User for AccessLists's
    parent_model = Inventory


@ -184,7 +173,7 @@ class InventoryObjectRolesList(SubListAPIView):
    model = Role
    serializer_class = RoleSerializer
    parent_model = Inventory
    search_fields = ('role_field', 'content_type__model',)
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        po = self.get_parent_object()

@ -17,9 +17,7 @@ from rest_framework.exceptions import PermissionDenied
from awx.main.analytics.metrics import metrics
from awx.api import renderers

from awx.api.generics import (
    APIView,
)
from awx.api.generics import APIView


logger = logging.getLogger('awx.analytics')
@ -30,13 +28,10 @@ class MetricsView(APIView):
    name = _('Metrics')
    swagger_topic = 'Metrics'

    renderer_classes = [renderers.PlainTextRenderer,
                        renderers.PrometheusJSONRenderer,
                        renderers.BrowsableAPIRenderer,]
    renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]

    def get(self, request):
        ''' Show Metrics Details '''
        if (request.user.is_superuser or request.user.is_system_auditor):
        if request.user.is_superuser or request.user.is_system_auditor:
            return Response(metrics().decode('UTF-8'))
        raise PermissionDenied()

@ -16,14 +16,8 @@ from rest_framework.response import Response
from rest_framework import status

from awx.main.constants import ACTIVE_STATES
from awx.main.utils import (
    get_object_or_400,
    parse_yaml_or_json,
)
from awx.main.models.ha import (
    Instance,
    InstanceGroup,
)
from awx.main.utils import get_object_or_400, parse_yaml_or_json
from awx.main.models.ha import Instance, InstanceGroup
from awx.main.models.organization import Team
from awx.main.models.projects import Project
from awx.main.models.inventory import Inventory
@ -34,9 +28,10 @@ logger = logging.getLogger('awx.api.views.mixin')


class UnifiedJobDeletionMixin(object):
    '''
    """
    Special handling when deleting a running unified job object.
    '''
    """

    def destroy(self, request, *args, **kwargs):
        obj = self.get_object()
        if not request.user.can_access(self.model, 'delete', obj):
@ -53,22 +48,21 @@ class UnifiedJobDeletionMixin(object):
        # Prohibit deletion if job events are still coming in
        if obj.finished and now() < obj.finished + dateutil.relativedelta.relativedelta(minutes=1):
            # less than 1 minute has passed since job finished and events are not in
            return Response({"error": _("Job has not finished processing events.")},
                            status=status.HTTP_400_BAD_REQUEST)
            return Response({"error": _("Job has not finished processing events.")}, status=status.HTTP_400_BAD_REQUEST)
        else:
            # if it has been > 1 minute, events are probably lost
            logger.warning('Allowing deletion of {} through the API without all events '
                           'processed.'.format(obj.log_format))
            logger.warning('Allowing deletion of {} through the API without all events ' 'processed.'.format(obj.log_format))
        obj.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)


class InstanceGroupMembershipMixin(object):
    '''
    """
    This mixin overloads attach/detach so that it calls InstanceGroup.save(),
    triggering a background recalculation of policy-based instance group
    membership.
    '''
    """

    def attach(self, request, *args, **kwargs):
        response = super(InstanceGroupMembershipMixin, self).attach(request, *args, **kwargs)
        sub_id, res = self.attach_validate(request)
@ -84,9 +78,7 @@ class InstanceGroupMembershipMixin(object):
            ig_obj = get_object_or_400(ig_qs, pk=sub_id)
        else:
            # similar to get_parent_object, but selected for update
            parent_filter = {
                self.lookup_field: self.kwargs.get(self.lookup_field, None),
            }
            parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
            ig_obj = get_object_or_404(ig_qs, **parent_filter)
        if inst_name not in ig_obj.policy_instance_list:
            ig_obj.policy_instance_list.append(inst_name)
@ -126,9 +118,7 @@ class InstanceGroupMembershipMixin(object):
            ig_obj = get_object_or_400(ig_qs, pk=sub_id)
        else:
            # similar to get_parent_object, but selected for update
            parent_filter = {
                self.lookup_field: self.kwargs.get(self.lookup_field, None),
            }
            parent_filter = {self.lookup_field: self.kwargs.get(self.lookup_field, None)}
            ig_obj = get_object_or_404(ig_qs, **parent_filter)
        if inst_name in ig_obj.policy_instance_list:
            ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
@ -146,16 +136,13 @@ class RelatedJobsPreventDeleteMixin(object):
        if len(active_jobs) > 0:
            raise ActiveJobConflict(active_jobs)
        time_cutoff = now() - dateutil.relativedelta.relativedelta(minutes=1)
        recent_jobs = obj._get_related_jobs().filter(finished__gte = time_cutoff)
        recent_jobs = obj._get_related_jobs().filter(finished__gte=time_cutoff)
        for unified_job in recent_jobs.get_real_instances():
            if not unified_job.event_processing_finished:
                raise PermissionDenied(_(
                    'Related job {} is still processing events.'
                ).format(unified_job.log_format))
                raise PermissionDenied(_('Related job {} is still processing events.').format(unified_job.log_format))


class OrganizationCountsMixin(object):

    def get_serializer_context(self, *args, **kwargs):
        full_context = super(OrganizationCountsMixin, self).get_serializer_context(*args, **kwargs)

@ -177,26 +164,23 @@ class OrganizationCountsMixin(object):
        # Produce counts of Foreign Key relationships
        db_results['inventories'] = inv_qs.values('organization').annotate(Count('organization')).order_by('organization')

        db_results['teams'] = Team.accessible_objects(
            self.request.user, 'read_role').values('organization').annotate(
            Count('organization')).order_by('organization')
        db_results['teams'] = (
            Team.accessible_objects(self.request.user, 'read_role').values('organization').annotate(Count('organization')).order_by('organization')
        )

        db_results['job_templates'] = jt_qs.values('organization').annotate(Count('organization')).order_by('organization')

        db_results['projects'] = project_qs.values('organization').annotate(Count('organization')).order_by('organization')

        # Other members and admins of organization are always viewable
        db_results['users'] = org_qs.annotate(
            users=Count('member_role__members', distinct=True),
            admins=Count('admin_role__members', distinct=True)
        ).values('id', 'users', 'admins')
        db_results['users'] = org_qs.annotate(users=Count('member_role__members', distinct=True), admins=Count('admin_role__members', distinct=True)).values(
            'id', 'users', 'admins'
        )

        count_context = {}
        for org in org_id_list:
            org_id = org['id']
            count_context[org_id] = {
                'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0,
                'admins': 0, 'projects': 0}
            count_context[org_id] = {'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0, 'admins': 0, 'projects': 0}

        for res, count_qs in db_results.items():
            if res == 'users':
@ -218,21 +202,20 @@ class OrganizationCountsMixin(object):


class ControlledByScmMixin(object):
    '''
    """
    Special method to reset SCM inventory commit hash
    if anything that it manages changes.
    '''
    """

    def _reset_inv_src_rev(self, obj):
        if self.request.method in SAFE_METHODS or not obj:
            return
        project_following_sources = obj.inventory_sources.filter(
            update_on_project_update=True, source='scm')
        project_following_sources = obj.inventory_sources.filter(update_on_project_update=True, source='scm')
        if project_following_sources:
            # Allow inventory changes unrelated to variables
            if self.model == Inventory and (
                    not self.request or not self.request.data or
                    parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)):
                not self.request or not self.request.data or parse_yaml_or_json(self.request.data.get('variables', '')) == parse_yaml_or_json(obj.variables)
            ):
                return
            project_following_sources.update(scm_last_revision='')

@ -24,7 +24,7 @@ from awx.main.models import (
    User,
    Team,
    InstanceGroup,
    Credential
    Credential,
)
from awx.api.generics import (
    ListCreateAPIView,
@ -47,13 +47,12 @@ from awx.api.serializers import (
    NotificationTemplateSerializer,
    InstanceGroupSerializer,
    ExecutionEnvironmentSerializer,
    ProjectSerializer, JobTemplateSerializer, WorkflowJobTemplateSerializer,
    CredentialSerializer
)
from awx.api.views.mixin import (
    RelatedJobsPreventDeleteMixin,
    OrganizationCountsMixin,
    ProjectSerializer,
    JobTemplateSerializer,
    WorkflowJobTemplateSerializer,
    CredentialSerializer,
)
from awx.api.views.mixin import RelatedJobsPreventDeleteMixin, OrganizationCountsMixin

logger = logging.getLogger('awx.api.views.organization')

@ -84,23 +83,20 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI

        org_counts = {}
        access_kwargs = {'accessor': self.request.user, 'role_field': 'read_role'}
        direct_counts = Organization.objects.filter(id=org_id).annotate(
            users=Count('member_role__members', distinct=True),
            admins=Count('admin_role__members', distinct=True)
        ).values('users', 'admins')
        direct_counts = (
            Organization.objects.filter(id=org_id)
            .annotate(users=Count('member_role__members', distinct=True), admins=Count('admin_role__members', distinct=True))
            .values('users', 'admins')
        )

        if not direct_counts:
            return full_context

        org_counts = direct_counts[0]
        org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(
            organization__id=org_id).count()
        org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(
            organization__id=org_id).count()
        org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
            organization__id=org_id).count()
        org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
            organization__id=org_id).count()
        org_counts['inventories'] = Inventory.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
        org_counts['teams'] = Team.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
        org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
        org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(organization__id=org_id).count()
        org_counts['hosts'] = Host.objects.org_active_count(org_id)

        full_context['related_field_counts'] = {}
@ -240,14 +236,12 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):

    def is_valid_relation(self, parent, sub, created=False):
        if sub.kind != 'galaxy_api_token':
            return {'msg': _(
                f"Credential must be a Galaxy credential, not {sub.credential_type.name}."
            )}
            return {'msg': _(f"Credential must be a Galaxy credential, not {sub.credential_type.name}.")}


class OrganizationAccessList(ResourceAccessList):

    model = User # needs to be User for AccessLists's
    model = User  # needs to be User for AccessLists's
    parent_model = Organization


@ -256,7 +250,7 @@ class OrganizationObjectRolesList(SubListAPIView):
    model = Role
    serializer_class = RoleSerializer
    parent_model = Organization
    search_fields = ('role_field', 'content_type__model',)
    search_fields = ('role_field', 'content_type__model')

    def get_queryset(self):
        po = self.get_parent_object()

@ -24,22 +24,11 @@ from awx.api.generics import APIView
|
||||
from awx.conf.registry import settings_registry
|
||||
from awx.main.analytics import all_collectors
|
||||
from awx.main.ha import is_ha_environment
|
||||
from awx.main.utils import (
|
||||
get_awx_version,
|
||||
get_ansible_version,
|
||||
get_custom_venv_choices,
|
||||
to_python_boolean,
|
||||
)
|
||||
from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, to_python_boolean
|
||||
from awx.main.utils.licensing import validate_entitlement_manifest
|
||||
from awx.api.versioning import reverse, drf_reverse
|
||||
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
|
||||
from awx.main.models import (
|
||||
Project,
|
||||
Organization,
|
||||
Instance,
|
||||
InstanceGroup,
|
||||
JobTemplate,
|
||||
)
|
||||
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
|
||||
from awx.main.utils import set_environ
|
||||
|
||||
logger = logging.getLogger('awx.api.views.root')
|
||||
@ -60,7 +49,7 @@ class ApiRootView(APIView):
|
||||
data = OrderedDict()
|
||||
data['description'] = _('AWX REST API')
|
||||
data['current_version'] = v2
|
||||
data['available_versions'] = dict(v2 = v2)
|
||||
data['available_versions'] = dict(v2=v2)
|
||||
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
|
||||
data['custom_logo'] = settings.CUSTOM_LOGO
|
||||
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
|
||||
@ -146,6 +135,7 @@ class ApiV2PingView(APIView):
|
||||
"""A simple view that reports very basic information about this
|
||||
instance, which is acceptable to be public information.
|
||||
"""
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
authentication_classes = ()
|
||||
name = _('Ping')
|
||||
@ -157,23 +147,19 @@ class ApiV2PingView(APIView):
|
||||
Everything returned here should be considered public / insecure, as
|
||||
this requires no auth and is intended for use by the installer process.
|
||||
"""
|
||||
response = {
|
||||
'ha': is_ha_environment(),
|
||||
'version': get_awx_version(),
|
||||
'active_node': settings.CLUSTER_HOST_ID,
|
||||
'install_uuid': settings.INSTALL_UUID,
|
||||
}
|
||||
response = {'ha': is_ha_environment(), 'version': get_awx_version(), 'active_node': settings.CLUSTER_HOST_ID, 'install_uuid': settings.INSTALL_UUID}
|
||||
|
||||
response['instances'] = []
|
||||
for instance in Instance.objects.all():
|
||||
response['instances'].append(dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified,
|
||||
capacity=instance.capacity, version=instance.version))
|
||||
response['instances'].append(
|
||||
dict(node=instance.hostname, uuid=instance.uuid, heartbeat=instance.modified, capacity=instance.capacity, version=instance.version)
|
||||
)
|
||||
sorted(response['instances'], key=operator.itemgetter('node'))
|
||||
response['instance_groups'] = []
|
||||
for instance_group in InstanceGroup.objects.prefetch_related('instances'):
|
||||
response['instance_groups'].append(dict(name=instance_group.name,
|
||||
capacity=instance_group.capacity,
|
||||
instances=[x.hostname for x in instance_group.instances.all()]))
|
||||
response['instance_groups'].append(
|
||||
dict(name=instance_group.name, capacity=instance_group.capacity, instances=[x.hostname for x in instance_group.instances.all()])
|
||||
)
|
||||
return Response(response)
|
||||
|
||||
|
||||
@ -190,6 +176,7 @@ class ApiV2SubscriptionView(APIView):
|
||||
|
||||
def post(self, request):
|
||||
from awx.main.utils.common import get_licenser
|
||||
|
||||
data = request.data.copy()
|
||||
if data.get('subscriptions_password') == '$encrypted$':
|
||||
data['subscriptions_password'] = settings.SUBSCRIPTIONS_PASSWORD
|
||||
@ -203,10 +190,7 @@ class ApiV2SubscriptionView(APIView):
|
||||
settings.SUBSCRIPTIONS_PASSWORD = data['subscriptions_password']
|
||||
except Exception as exc:
|
||||
msg = _("Invalid Subscription")
|
||||
if (
|
||||
isinstance(exc, requests.exceptions.HTTPError) and
|
||||
getattr(getattr(exc, 'response', None), 'status_code', None) == 401
|
||||
):
|
||||
if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
|
||||
msg = _("The provided credentials are invalid (HTTP 401).")
|
||||
elif isinstance(exc, requests.exceptions.ProxyError):
|
||||
msg = _("Unable to connect to proxy server.")
|
||||
@ -215,8 +199,7 @@ class ApiV2SubscriptionView(APIView):
|
||||
elif isinstance(exc, (ValueError, OSError)) and exc.args:
|
||||
msg = exc.args[0]
|
||||
else:
|
||||
logger.exception(smart_text(u"Invalid subscription submitted."),
|
||||
extra=dict(actor=request.user.username))
|
||||
logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
|
||||
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
return Response(validated)
|
||||
@ -242,16 +225,14 @@ class ApiV2AttachView(APIView):
|
||||
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
|
||||
if pool_id and user and pw:
|
||||
from awx.main.utils.common import get_licenser
|
||||
|
||||
data = request.data.copy()
|
||||
try:
|
||||
with set_environ(**settings.AWX_TASK_ENV):
|
||||
validated = get_licenser().validate_rh(user, pw)
|
||||
except Exception as exc:
|
||||
msg = _("Invalid Subscription")
|
||||
if (
|
||||
isinstance(exc, requests.exceptions.HTTPError) and
|
||||
getattr(getattr(exc, 'response', None), 'status_code', None) == 401
|
||||
):
|
||||
if isinstance(exc, requests.exceptions.HTTPError) and getattr(getattr(exc, 'response', None), 'status_code', None) == 401:
|
||||
msg = _("The provided credentials are invalid (HTTP 401).")
|
||||
elif isinstance(exc, requests.exceptions.ProxyError):
|
||||
msg = _("Unable to connect to proxy server.")
|
||||
@ -260,8 +241,7 @@ class ApiV2AttachView(APIView):
|
||||
elif isinstance(exc, (ValueError, OSError)) and exc.args:
|
||||
msg = exc.args[0]
|
||||
else:
|
||||
logger.exception(smart_text(u"Invalid subscription submitted."),
|
||||
extra=dict(actor=request.user.username))
|
||||
logger.exception(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
|
||||
return Response({"error": msg}, status=status.HTTP_400_BAD_REQUEST)
|
||||
for sub in validated:
|
||||
if sub['pool_id'] == pool_id:
|
||||
@@ -287,6 +267,7 @@ class ApiV2ConfigView(APIView):
'''Return various sitewide configuration settings'''

from awx.main.utils.common import get_licenser

license_data = get_licenser().validate()

if not license_data.get('valid_key', False):
@@ -314,22 +295,23 @@ class ApiV2ConfigView(APIView):
user_ldap_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
data['user_ldap_fields'] = user_ldap_fields

if request.user.is_superuser \
or request.user.is_system_auditor \
or Organization.accessible_objects(request.user, 'admin_role').exists() \
or Organization.accessible_objects(request.user, 'auditor_role').exists() \
or Organization.accessible_objects(request.user, 'project_admin_role').exists():
data.update(dict(
project_base_dir = settings.PROJECTS_ROOT,
project_local_paths = Project.get_local_path_choices(),
custom_virtualenvs = get_custom_venv_choices()
))
if (
request.user.is_superuser
or request.user.is_system_auditor
or Organization.accessible_objects(request.user, 'admin_role').exists()
or Organization.accessible_objects(request.user, 'auditor_role').exists()
or Organization.accessible_objects(request.user, 'project_admin_role').exists()
):
data.update(
dict(
project_base_dir=settings.PROJECTS_ROOT, project_local_paths=Project.get_local_path_choices(), custom_virtualenvs=get_custom_venv_choices()
)
)
elif JobTemplate.accessible_objects(request.user, 'admin_role').exists():
data['custom_virtualenvs'] = get_custom_venv_choices()

return Response(data)


def post(self, request):
if not isinstance(request.data, dict):
return Response({"error": _("Invalid subscription data")}, status=status.HTTP_400_BAD_REQUEST)
@@ -346,11 +328,11 @@ class ApiV2ConfigView(APIView):
try:
data_actual = json.dumps(request.data)
except Exception:
logger.info(smart_text(u"Invalid JSON submitted for license."),
extra=dict(actor=request.user.username))
logger.info(smart_text(u"Invalid JSON submitted for license."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)

from awx.main.utils.common import get_licenser

license_data = json.loads(data_actual)
if 'license_key' in license_data:
return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
@@ -358,10 +340,7 @@ class ApiV2ConfigView(APIView):
try:
json_actual = json.loads(base64.b64decode(license_data['manifest']))
if 'license_key' in json_actual:
return Response(
{"error": _('Legacy license submitted. A subscription manifest is now required.')},
status=status.HTTP_400_BAD_REQUEST
)
return Response({"error": _('Legacy license submitted. A subscription manifest is now required.')}, status=status.HTTP_400_BAD_REQUEST)
except Exception:
pass
try:
@@ -375,8 +354,7 @@ class ApiV2ConfigView(APIView):
try:
license_data_validated = get_licenser().license_from_manifest(license_data)
except Exception:
logger.warning(smart_text(u"Invalid subscription submitted."),
extra=dict(actor=request.user.username))
logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
else:
license_data_validated = get_licenser().validate()
@@ -387,8 +365,7 @@ class ApiV2ConfigView(APIView):
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
return Response(license_data_validated)

logger.warning(smart_text(u"Invalid subscription submitted."),
extra=dict(actor=request.user.username))
logger.warning(smart_text(u"Invalid subscription submitted."), extra=dict(actor=request.user.username))
return Response({"error": _("Invalid subscription")}, status=status.HTTP_400_BAD_REQUEST)

def delete(self, request):
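
The `@@ -314` hunk shows one of black's few hard rules: it never emits backslash continuations, so the chained `\` condition becomes a parenthesized one with each `or` leading its line. A runnable sketch of the target shape, with stand-in flags (black would collapse a condition this short; the split form only survives past the column limit):

    class RequestUser:
        is_superuser = False
        is_system_auditor = True

    user = RequestUser()
    # Parenthesized multi-line condition instead of "\" continuations:
    if (
        user.is_superuser
        or user.is_system_auditor
    ):
        print('config details visible')
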
@@ -26,10 +26,7 @@ class WebhookKeyView(GenericAPIView):
permission_classes = (WebhookKeyPermission,)

def get_queryset(self):
qs_models = {
'job_templates': JobTemplate,
'workflow_job_templates': WorkflowJobTemplate,
}
qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
self.model = qs_models.get(self.kwargs['model_kwarg'])

return super().get_queryset()
@@ -57,10 +54,7 @@ class WebhookReceiverBase(APIView):
ref_keys = {}

def get_queryset(self):
qs_models = {
'job_templates': JobTemplate,
'workflow_job_templates': WorkflowJobTemplate,
}
qs_models = {'job_templates': JobTemplate, 'workflow_job_templates': WorkflowJobTemplate}
model = qs_models.get(self.kwargs['model_kwarg'])
if model is None:
raise PermissionDenied
@@ -120,10 +114,7 @@ class WebhookReceiverBase(APIView):
# Ensure that the full contents of the request are captured for multiple uses.
request.body

logger.debug(
"headers: {}\n"
"data: {}\n".format(request.headers, request.data)
)
logger.debug("headers: {}\n" "data: {}\n".format(request.headers, request.data))
obj = self.get_object()
self.check_signature(obj)

@@ -132,16 +123,11 @@ class WebhookReceiverBase(APIView):
event_ref = self.get_event_ref()
status_api = self.get_event_status_api()

kwargs = {
'unified_job_template_id': obj.id,
'webhook_service': obj.webhook_service,
'webhook_guid': event_guid,
}
kwargs = {'unified_job_template_id': obj.id, 'webhook_service': obj.webhook_service, 'webhook_guid': event_guid}
if WorkflowJob.objects.filter(**kwargs).exists() or Job.objects.filter(**kwargs).exists():
# Short circuit if this webhook has already been received and acted upon.
logger.debug("Webhook previously received, returning without action.")
return Response({'message': _("Webhook previously received, aborting.")},
status=status.HTTP_202_ACCEPTED)
return Response({'message': _("Webhook previously received, aborting.")}, status=status.HTTP_202_ACCEPTED)

kwargs = {
'_eager_fields': {
@@ -156,7 +142,7 @@ class WebhookReceiverBase(APIView):
'tower_webhook_event_ref': event_ref,
'tower_webhook_status_api': status_api,
'tower_webhook_payload': request.data,
}
},
}

new_job = obj.create_unified_job(**kwargs)
@@ -205,11 +191,7 @@ class GithubWebhookReceiver(WebhookReceiverBase):
class GitlabWebhookReceiver(WebhookReceiverBase):
service = 'gitlab'

ref_keys = {
'Push Hook': 'checkout_sha',
'Tag Push Hook': 'checkout_sha',
'Merge Request Hook': 'object_attributes.last_commit.id',
}
ref_keys = {'Push Hook': 'checkout_sha', 'Tag Push Hook': 'checkout_sha', 'Merge Request Hook': 'object_attributes.last_commit.id'}

def get_event_type(self):
return self.request.META.get('HTTP_X_GITLAB_EVENT')
@@ -229,8 +211,7 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
return
parsed = urllib.parse.urlparse(repo_url)

return "{}://{}/api/v4/projects/{}/statuses/{}".format(
parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())

def get_signature(self):
return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
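
In the `@@ -120` hunk above, black joins the two `logger.debug` lines but deliberately leaves the adjacent string literals unmerged. Implicit concatenation still produces a single format string, as this small check shows:

    # The two literals are one string at parse time, so .format sees
    # both placeholders:
    msg = "headers: {}\n" "data: {}\n".format({'X-GitLab-Event': 'Push Hook'}, {'ref': 'main'})
    print(msg)
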
@@ -4,11 +4,12 @@ import os
import logging
import django
from awx import __version__ as tower_version

# Prepare the AWX environment.
from awx import prepare_env, MODE
from channels.routing import get_default_application # noqa
prepare_env() # NOQA

prepare_env() # NOQA


"""
@@ -10,12 +10,12 @@ from awx.conf.models import Setting


class SettingAccess(BaseAccess):
'''
"""
- I can see settings when I am a super user or system auditor.
- I can edit settings when I am a super user.
- I can clear settings when I am a super user.
- I can always see/edit/clear my own user settings.
'''
"""

model = Setting
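
Alongside black's layout changes, the hunks in this commit also move docstrings from triple single quotes to the PEP 257 preferred triple double quotes, as in SettingAccess above. The target form, sketched:

    class ExampleAccess:
        """
        - Docstrings use triple double quotes.
        - The content is otherwise untouched.
        """
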
@@ -1,5 +1,6 @@
# Django
from django.apps import AppConfig

# from django.core import checks
from django.utils.translation import ugettext_lazy as _

@@ -12,4 +13,5 @@ class ConfConfig(AppConfig):
def ready(self):
self.module.autodiscover()
from .settings import SettingsWrapper

SettingsWrapper.initialize()
@@ -10,10 +10,7 @@ from django.core.validators import URLValidator, _lazy_re_compile
from django.utils.translation import ugettext_lazy as _

# Django REST Framework
from rest_framework.fields import ( # noqa
BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField,
IntegerField, ListField, NullBooleanField
)
from rest_framework.fields import BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField # noqa
from rest_framework.serializers import PrimaryKeyRelatedField # noqa

logger = logging.getLogger('awx.conf.fields')
@@ -27,7 +24,6 @@ logger = logging.getLogger('awx.conf.fields')


class CharField(CharField):

def to_representation(self, value):
# django_rest_frameworks' default CharField implementation casts `None`
# to a string `"None"`:
@@ -39,7 +35,6 @@ class CharField(CharField):


class IntegerField(IntegerField):

def get_value(self, dictionary):
ret = super(IntegerField, self).get_value(dictionary)
# Handle UI corner case
@@ -60,9 +55,7 @@ class StringListField(ListField):

class StringListBooleanField(ListField):

default_error_messages = {
'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.'),
}
default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
child = CharField()

def to_representation(self, value):
@@ -101,10 +94,7 @@ class StringListBooleanField(ListField):

class StringListPathField(StringListField):

default_error_messages = {
'type_error': _('Expected list of strings but got {input_type} instead.'),
'path_error': _('{path} is not a valid path choice.'),
}
default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}

def to_internal_value(self, paths):
if isinstance(paths, (list, tuple)):
@@ -123,12 +113,12 @@ class URLField(CharField):
# these lines set up a custom regex that allow numbers in the
# top-level domain
tld_re = (
r'\.' # dot
r'(?!-)' # can't start with a dash
r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
r'|xn--[a-z0-9]{1,59})' # or punycode label
r'(?<!-)' # can't end with a dash
r'\.?' # may have a trailing dot
r'\.' # dot
r'(?!-)' # can't start with a dash
r'(?:[a-z' + URLValidator.ul + r'0-9' + '-]{2,63}' # domain label, this line was changed from the original URLValidator
r'|xn--[a-z0-9]{1,59})' # or punycode label
r'(?<!-)' # can't end with a dash
r'\.?' # may have a trailing dot
)

host_re = '(' + URLValidator.hostname_re + URLValidator.domain_re + tld_re + '|localhost)'
@@ -139,7 +129,9 @@ class URLField(CharField):
r'(?:' + URLValidator.ipv4_re + '|' + URLValidator.ipv6_re + '|' + host_re + ')'
r'(?::\d{2,5})?' # port
r'(?:[/?#][^\s]*)?' # resource path
r'\Z', re.IGNORECASE)
r'\Z',
re.IGNORECASE,
)

def __init__(self, **kwargs):
schemes = kwargs.pop('schemes', None)
@@ -184,9 +176,7 @@ class URLField(CharField):

class KeyValueField(DictField):
child = CharField()
default_error_messages = {
'invalid_child': _('"{input}" is not a valid string.')
}
default_error_messages = {'invalid_child': _('"{input}" is not a valid string.')}

def to_internal_value(self, data):
ret = super(KeyValueField, self).to_internal_value(data)
@@ -199,9 +189,7 @@ class KeyValueField(DictField):


class ListTuplesField(ListField):
default_error_messages = {
'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.'),
}
default_error_messages = {'type_error': _('Expected a list of tuples of max length 2 but got {input_type} instead.')}

def to_representation(self, value):
if isinstance(value, (list, tuple)):
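
The fields.py hunks show parenthesized import lists being flattened once they fit in 160 columns, with trailing `# noqa` markers kept on the line. Sketched with stdlib names so it runs anywhere:

    # A one-line import that previously spanned a parenthesized block;
    # the "# noqa" suppression marker stays attached to the line:
    from os.path import basename, dirname, join  # noqa

    print(join(dirname('/tmp/x/y.txt'), basename('/tmp/x/y.txt')))
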
@@ -6,6 +6,7 @@ __all__ = ['get_license']

def _get_validated_license_data():
from awx.main.utils import get_licenser

return get_licenser().validate()
@@ -8,9 +8,7 @@ from django.conf import settings

class Migration(migrations.Migration):

dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]

operations = [
migrations.CreateModel(
@@ -21,11 +19,11 @@ class Migration(migrations.Migration):
('modified', models.DateTimeField(default=None, editable=False)),
('key', models.CharField(max_length=255)),
('value', jsonfield.fields.JSONField(null=True)),
('user', models.ForeignKey(related_name='settings', default=None, editable=False,
to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True)),
(
'user',
models.ForeignKey(related_name='settings', default=None, editable=False, to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=True),
),
],
options={
'abstract': False,
},
),
options={'abstract': False},
)
]
@@ -15,11 +15,7 @@ def copy_tower_settings(apps, schema_editor):
if tower_setting.key == 'LICENSE':
value = json.loads(value)
setting, created = Setting.objects.get_or_create(
key=tower_setting.key,
user=tower_setting.user,
created=tower_setting.created,
modified=tower_setting.modified,
defaults=dict(value=value),
key=tower_setting.key, user=tower_setting.user, created=tower_setting.created, modified=tower_setting.modified, defaults=dict(value=value)
)
if not created and setting.value != value:
setting.value = value
@@ -36,18 +32,9 @@ def revert_tower_settings(apps, schema_editor):
# LICENSE is stored as a JSON object; convert it back to a string.
if setting.key == 'LICENSE':
value = json.dumps(value)
defaults = dict(
value=value,
value_type='string',
description='',
category='',
)
defaults = dict(value=value, value_type='string', description='', category='')
try:
tower_setting, created = TowerSettings.objects.get_or_create(
key=setting.key,
user=setting.user,
defaults=defaults,
)
tower_setting, created = TowerSettings.objects.get_or_create(key=setting.key, user=setting.user, defaults=defaults)
if not created:
update_fields = []
for k, v in defaults.items():
@@ -62,15 +49,8 @@ def revert_tower_settings(apps, schema_editor):

class Migration(migrations.Migration):

dependencies = [
('conf', '0001_initial'),
('main', '0004_squashed_v310_release'),
]
dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]

run_before = [
('main', '0005_squashed_v310_v313_updates'),
]
run_before = [('main', '0005_squashed_v310_v313_updates')]

operations = [
migrations.RunPython(copy_tower_settings, revert_tower_settings),
]
operations = [migrations.RunPython(copy_tower_settings, revert_tower_settings)]
@@ -7,14 +7,6 @@ import awx.main.fields

class Migration(migrations.Migration):

dependencies = [
('conf', '0002_v310_copy_tower_settings'),
]
dependencies = [('conf', '0002_v310_copy_tower_settings')]

operations = [
migrations.AlterField(
model_name='setting',
name='value',
field=awx.main.fields.JSONField(null=True),
),
]
operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONField(null=True))]
@@ -6,9 +6,7 @@ from django.db import migrations

class Migration(migrations.Migration):

dependencies = [
('conf', '0003_v310_JSONField_changes'),
]
dependencies = [('conf', '0003_v310_JSONField_changes')]

operations = [
# This list is intentionally empty.
@@ -2,8 +2,8 @@
from __future__ import unicode_literals
from django.db import migrations
from awx.conf.migrations import _rename_setting



def copy_session_settings(apps, schema_editor):
_rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_PER_USER', new_key='SESSIONS_PER_USER')
_rename_setting.rename_setting(apps, schema_editor, old_key='AUTH_TOKEN_EXPIRATION', new_key='SESSION_COOKIE_AGE')
@@ -16,11 +16,6 @@ def reverse_copy_session_settings(apps, schema_editor):

class Migration(migrations.Migration):

dependencies = [
('conf', '0004_v320_reencrypt'),
]

operations = [
migrations.RunPython(copy_session_settings, reverse_copy_session_settings),
]
dependencies = [('conf', '0004_v320_reencrypt')]

operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]
@@ -9,10 +9,6 @@ from django.db import migrations

class Migration(migrations.Migration):

dependencies = [
('conf', '0005_v330_rename_two_session_settings'),
]
dependencies = [('conf', '0005_v330_rename_two_session_settings')]

operations = [
migrations.RunPython(fill_ldap_group_type_params),
]
operations = [migrations.RunPython(fill_ldap_group_type_params)]
@@ -10,10 +10,6 @@ def copy_allowed_ips(apps, schema_editor):

class Migration(migrations.Migration):

dependencies = [
('conf', '0006_v331_ldap_group_type'),
]
dependencies = [('conf', '0006_v331_ldap_group_type')]

operations = [
migrations.RunPython(copy_allowed_ips),
]
operations = [migrations.RunPython(copy_allowed_ips)]
@@ -15,12 +15,6 @@ def _noop(apps, schema_editor):

class Migration(migrations.Migration):

dependencies = [
('conf', '0007_v380_rename_more_settings'),
]
dependencies = [('conf', '0007_v380_rename_more_settings')]


operations = [
migrations.RunPython(clear_old_license, _noop),
migrations.RunPython(prefill_rh_credentials, _noop)
]
operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]
@@ -1,4 +1,3 @@

import inspect

from django.conf import settings
@@ -16,10 +15,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
entry = qs[0]
group_type_params = entry.value
else:
entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS',
value=group_type_params,
created=now(),
modified=now())
entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())

init_attrs = set(inspect.getargspec(group_type.__init__).args[1:])
for k in list(group_type_params.keys()):
@@ -11,15 +11,16 @@ __all__ = ['get_encryption_key', 'decrypt_field']


def get_encryption_key(field_name, pk=None):
'''
"""
Generate key for encrypted password based on field name,
``settings.SECRET_KEY``, and instance pk (if available).

:param pk: (optional) the primary key of the ``awx.conf.model.Setting``;
can be omitted in situations where you're encrypting a setting
that is not database-persistent (like a read-only setting)
'''
"""
from django.conf import settings

h = hashlib.sha1()
h.update(settings.SECRET_KEY)
if pk is not None:
@@ -29,11 +30,11 @@ def get_encryption_key(field_name, pk=None):


def decrypt_value(encryption_key, value):
raw_data = value[len('$encrypted$'):]
raw_data = value[len('$encrypted$') :]
# If the encrypted string contains a UTF8 marker, discard it
utf8 = raw_data.startswith('UTF8$')
if utf8:
raw_data = raw_data[len('UTF8$'):]
raw_data = raw_data[len('UTF8$') :]
algo, b64data = raw_data.split('$', 1)
if algo != 'AES':
raise ValueError('unsupported algorithm: %s' % algo)
@@ -48,9 +49,9 @@ def decrypt_value(encryption_key, value):


def decrypt_field(instance, field_name, subfield=None):
'''
"""
Return content of the given instance and field name decrypted.
'''
"""
value = getattr(instance, field_name)
if isinstance(value, dict) and subfield is not None:
value = value[subfield]
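
The `decrypt_value` hunk shows black's PEP 8 slice treatment: when a slice bound is a complex expression such as `len('$encrypted$')`, the colon is spaced like a binary operator, giving `value[len('$encrypted$') :]`. A runnable check:

    value = '$encrypted$UTF8$AES$payload'
    raw = value[len('$encrypted$') :]  # spaced colon, complex lower bound
    assert raw == 'UTF8$AES$payload'
    print(raw)
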
@@ -6,11 +6,11 @@ from django.conf import settings

logger = logging.getLogger('awx.conf.settings')

__all__ = ['rename_setting']


__all__ = ['rename_setting']


def rename_setting(apps, schema_editor, old_key, new_key):

old_setting = None
Setting = apps.get_model('conf', 'Setting')
if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):
@@ -24,9 +24,4 @@ def rename_setting(apps, schema_editor, old_key, new_key):
if hasattr(settings, old_key):
old_setting = getattr(settings, old_key)
if old_setting is not None:
Setting.objects.create(key=new_key,
value=old_setting,
created=now(),
modified=now()
)

Setting.objects.create(key=new_key, value=old_setting, created=now(), modified=now())
@@ -6,7 +6,7 @@ from awx.main.utils.encryption import decrypt_field, encrypt_field
logger = logging.getLogger('awx.conf.settings')

__all__ = ['clear_old_license', 'prefill_rh_credentials']


def clear_old_license(apps, schema_editor):
Setting = apps.get_model('conf', 'Setting')
@@ -17,10 +17,7 @@ def _migrate_setting(apps, old_key, new_key, encrypted=False):
Setting = apps.get_model('conf', 'Setting')
if not Setting.objects.filter(key=old_key).exists():
return
new_setting = Setting.objects.create(key=new_key,
created=now(),
modified=now()
)
new_setting = Setting.objects.create(key=new_key, created=now(), modified=now())
if encrypted:
new_setting.value = decrypt_field(Setting.objects.filter(key=old_key).first(), 'value')
new_setting.value = encrypt_field(new_setting, 'value')
@@ -18,20 +18,9 @@ __all__ = ['Setting']

class Setting(CreatedModifiedModel):

key = models.CharField(
max_length=255,
)
value = JSONField(
null=True,
)
user = prevent_search(models.ForeignKey(
'auth.User',
related_name='settings',
default=None,
null=True,
editable=False,
on_delete=models.CASCADE,
))
key = models.CharField(max_length=255)
value = JSONField(null=True)
user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))

def __str__(self):
try:
@@ -66,6 +55,7 @@ class Setting(CreatedModifiedModel):
# field and save again.
if encrypted and new_instance:
from awx.main.signals import disable_activity_stream

with disable_activity_stream():
self.value = self._saved_value
self.save(update_fields=['value'])
@@ -82,6 +72,7 @@ class Setting(CreatedModifiedModel):
import awx.conf.signals # noqa

from awx.main.registrar import activity_stream_registrar # noqa

activity_stream_registrar.connect(Setting)

import awx.conf.access # noqa
@@ -69,10 +69,7 @@ class SettingsRegistry(object):
return self._dependent_settings.get(setting, set())

def get_registered_categories(self):
categories = {
'all': _('All'),
'changed': _('Changed'),
}
categories = {'all': _('All'), 'changed': _('Changed')}
for setting, kwargs in self._registry.items():
category_slug = kwargs.get('category_slug', None)
if category_slug is None or category_slug in categories:
@@ -95,8 +92,11 @@ class SettingsRegistry(object):
continue
if kwargs.get('category_slug', None) in slugs_to_ignore:
continue
if (read_only in {True, False} and kwargs.get('read_only', False) != read_only and
setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')):
if (
read_only in {True, False}
and kwargs.get('read_only', False) != read_only
and setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')
):
# Note: Doesn't catch fields that set read_only via __init__;
# read-only field kwargs should always include read_only=True.
continue
@@ -117,6 +117,7 @@ class SettingsRegistry(object):

def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
from rest_framework.fields import empty

field_kwargs = {}
field_kwargs.update(self._registry[setting])
field_kwargs.update(kwargs)
@@ -141,11 +142,7 @@ class SettingsRegistry(object):
field_instance.placeholder = placeholder
field_instance.defined_in_file = defined_in_file
if field_instance.defined_in_file:
field_instance.help_text = (
str(_('This value has been set manually in a settings file.')) +
'\n\n' +
str(field_instance.help_text)
)
field_instance.help_text = str(_('This value has been set manually in a settings file.')) + '\n\n' + str(field_instance.help_text)
field_instance.encrypted = encrypted
original_field_instance = field_instance
if field_class != original_field_class:
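
The registry hunk above shows the complementary case to the collapses elsewhere: a condition that still overflows 160 columns is wrapped in parentheses with each `and` leading its line, matching PEP 8's preference for breaking before binary operators. A sketch of the shape with stand-in values:

    kwargs = {'read_only': False}
    read_only = True
    setting = 'SOME_SETTING'
    if (
        read_only in {True, False}
        and kwargs.get('read_only', False) != read_only
        and setting not in ('INSTALL_UUID', 'AWX_ISOLATED_PRIVATE_KEY', 'AWX_ISOLATED_PUBLIC_KEY')
    ):
        print('skipped')
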
@@ -30,15 +30,9 @@ class SettingSerializer(BaseSerializer):
class SettingCategorySerializer(serializers.Serializer):
"""Serialize setting category """

url = serializers.CharField(
read_only=True,
)
slug = serializers.CharField(
read_only=True,
)
name = serializers.CharField(
read_only=True,
)
url = serializers.CharField(read_only=True)
slug = serializers.CharField(read_only=True)
name = serializers.CharField(read_only=True)


class SettingFieldMixin(object):
@@ -62,12 +62,12 @@ __all__ = ['SettingsWrapper', 'get_settings_to_cache', 'SETTING_CACHE_NOTSET']

@contextlib.contextmanager
def _ctit_db_wrapper(trans_safe=False):
'''
"""
Wrapper to avoid undesired actions by Django ORM when managing settings
if only getting a setting, can use trans_safe=True, which will avoid
throwing errors if the prior context was a broken transaction.
Any database errors will be logged, but exception will be suppressed.
'''
"""
rollback_set = None
is_atomic = None
try:
@@ -115,7 +115,6 @@ class TransientSetting(object):


class EncryptedCacheProxy(object):

def __init__(self, cache, registry, encrypter=None, decrypter=None):
"""
This proxy wraps a Django cache backend and overwrites the
@@ -145,19 +144,11 @@ class EncryptedCacheProxy(object):

def set(self, key, value, log=True, **kwargs):
if log is True:
logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value),
SETTING_CACHE_TIMEOUT)
self.cache.set(
key,
self._handle_encryption(self.encrypter, key, value),
**kwargs
)
logger.debug('cache set(%r, %r, %r)', key, filter_sensitive(self.registry, key, value), SETTING_CACHE_TIMEOUT)
self.cache.set(key, self._handle_encryption(self.encrypter, key, value), **kwargs)

def set_many(self, data, **kwargs):
filtered_data = dict(
(key, filter_sensitive(self.registry, key, value))
for key, value in data.items()
)
filtered_data = dict((key, filter_sensitive(self.registry, key, value)) for key, value in data.items())
logger.debug('cache set_many(%r, %r)', filtered_data, SETTING_CACHE_TIMEOUT)
for key, value in data.items():
self.set(key, value, log=False, **kwargs)
@@ -168,18 +159,11 @@ class EncryptedCacheProxy(object):
# as part of the AES key when encrypting/decrypting
obj_id = self.cache.get(Setting.get_cache_id_key(key), default=empty)
if obj_id is empty:
logger.info('Efficiency notice: Corresponding id not stored in cache %s',
Setting.get_cache_id_key(key))
logger.info('Efficiency notice: Corresponding id not stored in cache %s', Setting.get_cache_id_key(key))
obj_id = getattr(self._get_setting_from_db(key), 'pk', None)
elif obj_id == SETTING_CACHE_NONE:
obj_id = None
return method(
TransientSetting(
pk=obj_id,
value=value
),
'value'
)
return method(TransientSetting(pk=obj_id, value=value), 'value')

# If the field in question isn't an "encrypted" field, this function is
# a no-op; it just returns the provided value
@@ -206,9 +190,9 @@ def get_settings_to_cache(registry):


def get_cache_value(value):
'''Returns the proper special cache setting for a value
"""Returns the proper special cache setting for a value
based on instance type.
'''
"""
if value is None:
value = SETTING_CACHE_NONE
elif isinstance(value, (list, tuple)) and len(value) == 0:
@@ -219,7 +203,6 @@ def get_cache_value(value):


class SettingsWrapper(UserSettingsHolder):

@classmethod
def initialize(cls, cache=None, registry=None):
"""
@@ -231,11 +214,7 @@ class SettingsWrapper(UserSettingsHolder):
``awx.conf.settings_registry`` is used by default.
"""
if not getattr(settings, '_awx_conf_settings', False):
settings_wrapper = cls(
settings._wrapped,
cache=cache or django_cache,
registry=registry or settings_registry
)
settings_wrapper = cls(settings._wrapped, cache=cache or django_cache, registry=registry or settings_registry)
settings._wrapped = settings_wrapper

def __init__(self, default_settings, cache, registry):
@@ -322,7 +301,7 @@ class SettingsWrapper(UserSettingsHolder):
try:
value = decrypt_field(setting, 'value')
except ValueError as e:
#TODO: Remove in Tower 3.3
# TODO: Remove in Tower 3.3
logger.debug('encountered error decrypting field: %s - attempting fallback to old', e)
value = old_decrypt_field(setting, 'value')

@@ -345,8 +324,7 @@ class SettingsWrapper(UserSettingsHolder):
# Generate a cache key for each setting and store them all at once.
settings_to_cache = dict([(Setting.get_cache_key(k), v) for k, v in settings_to_cache.items()])
for k, id_val in setting_ids.items():
logger.debug('Saving id in cache for encrypted setting %s, %s',
Setting.get_cache_id_key(k), id_val)
logger.debug('Saving id in cache for encrypted setting %s, %s', Setting.get_cache_id_key(k), id_val)
self.cache.cache.set(Setting.get_cache_id_key(k), id_val)
settings_to_cache['_awx_conf_preload_expires'] = self._awx_conf_preload_expires
self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT)
@@ -420,9 +398,7 @@ class SettingsWrapper(UserSettingsHolder):
else:
return value
except Exception:
logger.warning(
'The current value "%r" for setting "%s" is invalid.',
value, name, exc_info=True)
logger.warning('The current value "%r" for setting "%s" is invalid.', value, name, exc_info=True)
return empty

def _get_default(self, name):
@@ -453,8 +429,7 @@ class SettingsWrapper(UserSettingsHolder):
setting_value = field.run_validation(data)
db_value = field.to_representation(setting_value)
except Exception as e:
logger.exception('Unable to assign value "%r" to setting "%s".',
value, name, exc_info=True)
logger.exception('Unable to assign value "%r" to setting "%s".', value, name, exc_info=True)
raise e

setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
@@ -492,8 +467,7 @@ class SettingsWrapper(UserSettingsHolder):
def __dir__(self):
keys = []
with _ctit_db_wrapper(trans_safe=True):
for setting in Setting.objects.filter(
key__in=self.all_supported_settings, user__isnull=True):
for setting in Setting.objects.filter(key__in=self.all_supported_settings, user__isnull=True):
# Skip returning settings that have been overridden but are
# considered to be "not set".
if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
@@ -511,7 +485,7 @@ class SettingsWrapper(UserSettingsHolder):
with _ctit_db_wrapper(trans_safe=True):
set_locally = Setting.objects.filter(key=setting, user__isnull=True).exists()
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
return (set_locally or set_on_default)
return set_locally or set_on_default

def __getattr_without_cache__(self, name):
@@ -30,12 +30,7 @@ def handle_setting_change(key, for_delete=False):

# Send setting_changed signal with new value for each setting.
for setting_key in setting_keys:
setting_changed.send(
sender=Setting,
setting=setting_key,
value=getattr(settings, setting_key, None),
enter=not bool(for_delete),
)
setting_changed.send(sender=Setting, setting=setting_key, value=getattr(settings, setting_key, None), enter=not bool(for_delete))


@receiver(post_save, sender=Setting)
@@ -5,10 +5,7 @@ import pytest
from django.urls import resolve
from django.contrib.auth.models import User

from rest_framework.test import (
APIRequestFactory,
force_authenticate,
)
from rest_framework.test import APIRequestFactory, force_authenticate


@pytest.fixture
@@ -41,4 +38,5 @@ def api_request(admin):
response = view(request, *view_args, **view_kwargs)
response.render()
return response

return rf
@@ -45,44 +45,19 @@ def dummy_validate():

@pytest.mark.django_db
def test_non_admin_user_does_not_see_categories(api_request, dummy_setting, normal_user):
with dummy_setting(
'FOO_BAR',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
):
response = api_request(
'get',
reverse('api:setting_category_list',
kwargs={'version': 'v2'})
)
with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'):
response = api_request('get', reverse('api:setting_category_list', kwargs={'version': 'v2'}))
assert response.data['results']
response = api_request(
'get',
reverse('api:setting_category_list',
kwargs={'version': 'v2'}),
user=normal_user
)
response = api_request('get', reverse('api:setting_category_list', kwargs={'version': 'v2'}), user=normal_user)
assert not response.data['results']


@pytest.mark.django_db
def test_setting_singleton_detail_retrieve(api_request, dummy_setting):
with dummy_setting(
'FOO_BAR_1',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
), dummy_setting(
'FOO_BAR_2',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
with dummy_setting('FOO_BAR_1', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_setting(
'FOO_BAR_2', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'
):
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.status_code == 200
assert 'FOO_BAR_1' in response.data and response.data['FOO_BAR_1'] is None
assert 'FOO_BAR_2' in response.data and response.data['FOO_BAR_2'] is None
@@ -90,97 +65,43 @@ def test_setting_singleton_detail_retrieve(api_request, dummy_setting):

@pytest.mark.django_db
def test_setting_singleton_detail_invalid_retrieve(api_request, dummy_setting, normal_user):
with dummy_setting(
'FOO_BAR_1',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
), dummy_setting(
'FOO_BAR_2',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
with dummy_setting('FOO_BAR_1', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_setting(
'FOO_BAR_2', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'
):
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'})
)
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'barfoo'}))
assert response.status_code == 404
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
user = normal_user
)
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), user=normal_user)
assert response.status_code == 403


@pytest.mark.django_db
def test_setting_signleton_retrieve_hierachy(api_request, dummy_setting):
with dummy_setting(
'FOO_BAR',
field_class=fields.IntegerField,
default=0,
category='FooBar',
category_slug='foobar'
):
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
with dummy_setting('FOO_BAR', field_class=fields.IntegerField, default=0, category='FooBar', category_slug='foobar'):
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 0
s = Setting(key='FOO_BAR', value=1)
s.save()
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 1


@pytest.mark.django_db
def test_setting_singleton_retrieve_readonly(api_request, dummy_setting):
with dummy_setting(
'FOO_BAR',
field_class=fields.IntegerField,
read_only=True,
default=2,
category='FooBar',
category_slug='foobar'
):
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=2, category='FooBar', category_slug='foobar'):
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 2


@pytest.mark.django_db
def test_setting_singleton_update(api_request, dummy_setting):
with dummy_setting(
'FOO_BAR',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 3}
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
'awx.conf.views.handle_setting_changes'
):
api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 3})
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 3
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 4}
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 4})
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 4


@@ -190,138 +111,70 @@ def test_setting_singleton_update_hybriddictfield_with_forbidden(api_request, du
# indicating that only the defined fields can be filled in. Make
# sure that the _Forbidden validator doesn't get used for the
# fields. See also https://github.com/ansible/awx/issues/4099.
with dummy_setting(
'FOO_BAR',
field_class=sso_fields.SAMLOrgAttrField,
category='FooBar',
category_slug='foobar',
), mock.patch('awx.conf.views.handle_setting_changes'):
with dummy_setting('FOO_BAR', field_class=sso_fields.SAMLOrgAttrField, category='FooBar', category_slug='foobar'), mock.patch(
'awx.conf.views.handle_setting_changes'
):
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}}
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
data={'FOO_BAR': {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}},
)
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == {'saml_admin_attr': 'Admins', 'saml_attr': 'Orgs'}


@pytest.mark.django_db
def test_setting_singleton_update_dont_change_readonly_fields(api_request, dummy_setting):
with dummy_setting(
'FOO_BAR',
field_class=fields.IntegerField,
read_only=True,
default=4,
category='FooBar',
category_slug='foobar'
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 5}
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=4, category='FooBar', category_slug='foobar'), mock.patch(
'awx.conf.views.handle_setting_changes'
):
api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 5})
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 4


@pytest.mark.django_db
def test_setting_singleton_update_dont_change_encrypted_mark(api_request, dummy_setting):
with dummy_setting(
'FOO_BAR',
field_class=fields.CharField,
encrypted=True,
category='FooBar',
category_slug='foobar'
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 'password'}
)
with dummy_setting('FOO_BAR', field_class=fields.CharField, encrypted=True, category='FooBar', category_slug='foobar'), mock.patch(
'awx.conf.views.handle_setting_changes'
):
api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'password'})
assert Setting.objects.get(key='FOO_BAR').value.startswith('$encrypted$')
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == '$encrypted$'
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': '$encrypted$'}
)
api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': '$encrypted$'})
assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'password'
api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 'new_pw'}
)
api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 'new_pw'})
assert decrypt_field(Setting.objects.get(key='FOO_BAR'), 'value') == 'new_pw'


@pytest.mark.django_db
def test_setting_singleton_update_runs_custom_validate(api_request, dummy_setting, dummy_validate):

def func_raising_exception(serializer, attrs):
raise serializers.ValidationError('Error')

with dummy_setting(
'FOO_BAR',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
), dummy_validate(
with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), dummy_validate(
'foobar', func_raising_exception
), mock.patch('awx.conf.views.handle_setting_changes'):
response = api_request(
'patch',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}),
data={'FOO_BAR': 23}
)
response = api_request('patch', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}), data={'FOO_BAR': 23})
assert response.status_code == 400


@pytest.mark.django_db
def test_setting_singleton_delete(api_request, dummy_setting):
with dummy_setting(
'FOO_BAR',
field_class=fields.IntegerField,
category='FooBar',
category_slug='foobar'
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'delete',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
with dummy_setting('FOO_BAR', field_class=fields.IntegerField, category='FooBar', category_slug='foobar'), mock.patch(
'awx.conf.views.handle_setting_changes'
):
api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert not response.data['FOO_BAR']


@pytest.mark.django_db
def test_setting_singleton_delete_no_read_only_fields(api_request, dummy_setting):
with dummy_setting(
'FOO_BAR',
field_class=fields.IntegerField,
read_only=True,
default=23,
category='FooBar',
category_slug='foobar'
), mock.patch('awx.conf.views.handle_setting_changes'):
api_request(
'delete',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
response = api_request(
'get',
reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'})
)
with dummy_setting('FOO_BAR', field_class=fields.IntegerField, read_only=True, default=23, category='FooBar', category_slug='foobar'), mock.patch(
'awx.conf.views.handle_setting_changes'
):
api_request('delete', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
response = api_request('get', reverse('api:setting_singleton_detail', kwargs={'category_slug': 'foobar'}))
assert response.data['FOO_BAR'] == 23
@@ -1,5 +1,3 @@


# Ensure that our autouse overwrites are working
def test_cache(settings):
assert settings.CACHES['default']['BACKEND'] == 'django.core.cache.backends.locmem.LocMemCache'
@ -4,7 +4,7 @@ from rest_framework.fields import ValidationError
|
||||
from awx.conf.fields import StringListBooleanField, StringListPathField, ListTuplesField, URLField
|
||||
|
||||
|
||||
class TestStringListBooleanField():
|
||||
class TestStringListBooleanField:
|
||||
|
||||
FIELD_VALUES = [
|
||||
("hello", "hello"),
|
||||
@ -23,10 +23,7 @@ class TestStringListBooleanField():
|
||||
("NULL", None),
|
||||
]
|
||||
|
||||
FIELD_VALUES_INVALID = [
|
||||
1.245,
|
||||
{"a": "b"},
|
||||
]
|
||||
FIELD_VALUES_INVALID = [1.245, {"a": "b"}]
|
||||
|
||||
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
|
||||
def test_to_internal_value_valid(self, value_in, value_known):
|
||||
@ -39,8 +36,7 @@ class TestStringListBooleanField():
|
||||
field = StringListBooleanField()
|
||||
with pytest.raises(ValidationError) as e:
|
||||
field.to_internal_value(value)
|
||||
assert e.value.detail[0] == "Expected None, True, False, a string or list " \
|
||||
"of strings but got {} instead.".format(type(value))
|
||||
assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
|
||||
|
||||
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
|
||||
def test_to_representation_valid(self, value_in, value_known):
|
||||
@ -53,22 +49,14 @@ class TestStringListBooleanField():
|
||||
field = StringListBooleanField()
|
||||
with pytest.raises(ValidationError) as e:
|
||||
field.to_representation(value)
|
||||
assert e.value.detail[0] == "Expected None, True, False, a string or list " \
|
||||
"of strings but got {} instead.".format(type(value))
|
||||
assert e.value.detail[0] == "Expected None, True, False, a string or list " "of strings but got {} instead.".format(type(value))
|
||||
|
||||
|
||||
class TestListTuplesField():
|
||||
class TestListTuplesField:
|
||||
|
||||
FIELD_VALUES = [
|
||||
([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")]),
|
||||
]
|
||||
FIELD_VALUES = [([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")])]
|
||||
|
||||
FIELD_VALUES_INVALID = [
|
||||
("abc", type("abc")),
|
||||
([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))),
|
||||
(['a', 'b'], type('a')),
|
||||
(123, type(123)),
|
||||
]
|
||||
FIELD_VALUES_INVALID = [("abc", type("abc")), ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))), (['a', 'b'], type('a')), (123, type(123))]
|
||||
|
||||
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
|
||||
def test_to_internal_value_valid(self, value_in, value_known):
|
||||
@ -81,11 +69,10 @@ class TestListTuplesField():
|
||||
field = ListTuplesField()
|
||||
with pytest.raises(ValidationError) as e:
|
||||
field.to_internal_value(value)
|
||||
assert e.value.detail[0] == "Expected a list of tuples of max length 2 " \
|
||||
"but got {} instead.".format(t)
|
||||
assert e.value.detail[0] == "Expected a list of tuples of max length 2 " "but got {} instead.".format(t)
|
||||
|
||||
|
||||
class TestStringListPathField():
|
||||
class TestStringListPathField:
|
||||
|
||||
FIELD_VALUES = [
|
||||
((".", "..", "/"), [".", "..", "/"]),
|
||||
@ -93,22 +80,12 @@ class TestStringListPathField():
|
||||
(("///home///",), ["/home"]),
|
||||
(("/home/././././",), ["/home"]),
|
||||
(("/home", "/home", "/home/"), ["/home"]),
|
||||
(["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"])
|
||||
(["/home/", "/home/", "/opt/", "/opt/", "/var/"], ["/home", "/opt", "/var"]),
|
||||
]
|
||||
|
||||
FIELD_VALUES_INVALID_TYPE = [
|
||||
1.245,
|
||||
{"a": "b"},
|
||||
("/home"),
|
||||
]
|
||||
FIELD_VALUES_INVALID_TYPE = [1.245, {"a": "b"}, ("/home")]
|
||||
|
||||
FIELD_VALUES_INVALID_PATH = [
|
||||
"",
|
||||
"~/",
|
||||
"home",
|
||||
"/invalid_path",
|
||||
"/home/invalid_path",
|
||||
]
|
||||
FIELD_VALUES_INVALID_PATH = ["", "~/", "home", "/invalid_path", "/home/invalid_path"]
|
||||
|
||||
@pytest.mark.parametrize("value_in, value_known", FIELD_VALUES)
|
||||
def test_to_internal_value_valid(self, value_in, value_known):
|
||||
@ -131,16 +108,19 @@ class TestStringListPathField():
|
||||
assert e.value.detail[0] == "{} is not a valid path choice.".format(value)
|
||||
|
||||
|
||||
class TestURLField():
|
||||
class TestURLField:
|
||||
regex = "^https://www.example.org$"
|
||||
|
||||
@pytest.mark.parametrize("url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",[
|
||||
("ldap://www.example.org42", "ldap", None, True, True),
|
||||
("https://www.example.org42", "https", None, False, False),
|
||||
("https://www.example.org", None, regex, None, True),
|
||||
("https://www.example3.org", None, regex, None, False),
|
||||
("ftp://www.example.org", "https", None, None, False)
|
||||
])
|
||||
@pytest.mark.parametrize(
|
||||
"url,schemes,regex, allow_numbers_in_top_level_domain, expect_no_error",
|
||||
[
|
||||
("ldap://www.example.org42", "ldap", None, True, True),
|
||||
("https://www.example.org42", "https", None, False, False),
|
||||
("https://www.example.org", None, regex, None, True),
|
||||
("https://www.example3.org", None, regex, None, False),
|
||||
("ftp://www.example.org", "https", None, None, False),
|
||||
],
|
||||
)
|
||||
def test_urls(self, url, schemes, regex, allow_numbers_in_top_level_domain, expect_no_error):
|
||||
kwargs = {}
|
||||
kwargs.setdefault("allow_numbers_in_top_level_domain", allow_numbers_in_top_level_domain)
|
||||
|
||||
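Conversely, when a call no longer fits on one line, black explodes it one argument per line and appends a trailing comma to the last element; Python ignores the extra comma, so the parameter sets are unchanged. A hedged, standalone sketch:

compact = (("ldap", True), ("https", False))
exploded = (
    ("ldap", True),
    ("https", False),  # trailing comma added when black splits lines
)
assert compact == exploded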
@ -33,30 +33,18 @@ def reg(request):
if marker.name == 'defined_in_file':
settings.configure(**marker.kwargs)

settings._wrapped = SettingsWrapper(settings._wrapped,
cache,
registry)
settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry)
return registry

def test_simple_setting_registration(reg):
assert reg.get_registered_settings() == []
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
)
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED']

def test_simple_setting_unregistration(reg):
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
)
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED']

reg.unregister('AWX_SOME_SETTING_ENABLED')
@ -67,12 +55,7 @@ def test_duplicate_setting_registration(reg):
"ensure that settings cannot be registered twice."
with pytest.raises(ImproperlyConfigured):
for i in range(2):
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
)
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')

def test_field_class_required_for_registration(reg):
@ -82,110 +65,42 @@ def test_field_class_required_for_registration(reg):

def test_get_registered_settings_by_slug(reg):
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
)
assert reg.get_registered_settings(category_slug='system') == [
'AWX_SOME_SETTING_ENABLED'
]
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
assert reg.get_registered_settings(category_slug='system') == ['AWX_SOME_SETTING_ENABLED']
assert reg.get_registered_settings(category_slug='other') == []

def test_get_registered_read_only_settings(reg):
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system'
)
reg.register(
'AWX_SOME_READ_ONLY',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
read_only=True
)
assert reg.get_registered_settings(read_only=True) ==[
'AWX_SOME_READ_ONLY'
]
assert reg.get_registered_settings(read_only=False) == [
'AWX_SOME_SETTING_ENABLED'
]
assert reg.get_registered_settings() == [
'AWX_SOME_SETTING_ENABLED',
'AWX_SOME_READ_ONLY'
]
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
reg.register('AWX_SOME_READ_ONLY', field_class=fields.BooleanField, category=_('System'), category_slug='system', read_only=True)
assert reg.get_registered_settings(read_only=True) == ['AWX_SOME_READ_ONLY']
assert reg.get_registered_settings(read_only=False) == ['AWX_SOME_SETTING_ENABLED']
assert reg.get_registered_settings() == ['AWX_SOME_SETTING_ENABLED', 'AWX_SOME_READ_ONLY']
def test_get_dependent_settings(reg):
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system'
)
reg.register(
'AWX_SOME_DEPENDENT_SETTING',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system',
depends_on=['AWX_SOME_SETTING_ENABLED']
)
assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == set([
'AWX_SOME_DEPENDENT_SETTING'
])
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
reg.register(
'AWX_SOME_DEPENDENT_SETTING', field_class=fields.BooleanField, category=_('System'), category_slug='system', depends_on=['AWX_SOME_SETTING_ENABLED']
)
assert reg.get_dependent_settings('AWX_SOME_SETTING_ENABLED') == set(['AWX_SOME_DEPENDENT_SETTING'])
def test_get_registered_categories(reg):
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('System'),
category_slug='system'
)
reg.register(
'AWX_SOME_OTHER_SETTING_ENABLED',
field_class=fields.BooleanField,
category=_('OtherSystem'),
category_slug='other-system'
)
assert reg.get_registered_categories() == {
'all': _('All'),
'changed': _('Changed'),
'system': _('System'),
'other-system': _('OtherSystem'),
}
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category=_('System'), category_slug='system')
reg.register('AWX_SOME_OTHER_SETTING_ENABLED', field_class=fields.BooleanField, category=_('OtherSystem'), category_slug='other-system')
assert reg.get_registered_categories() == {'all': _('All'), 'changed': _('Changed'), 'system': _('System'), 'other-system': _('OtherSystem')}

def test_is_setting_encrypted(reg):
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
reg.register(
'AWX_SOME_ENCRYPTED_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
encrypted=True
)
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.CharField, category=_('System'), category_slug='system')
reg.register('AWX_SOME_ENCRYPTED_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
assert reg.is_setting_encrypted('AWX_SOME_SETTING_ENABLED') is False
assert reg.is_setting_encrypted('AWX_SOME_ENCRYPTED_SETTING') is True

def test_simple_field(reg):
reg.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
placeholder='Example Value',
)
reg.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', placeholder='Example Value')

field = reg.get_setting_field('AWX_SOME_SETTING')
assert isinstance(field, fields.CharField)
@ -196,31 +111,20 @@ def test_simple_field(reg):

def test_field_with_custom_attribute(reg):
reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category_slug='system',
)
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category_slug='system')

field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED',
category_slug='other-system')
field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', category_slug='other-system')
assert field.category_slug == 'other-system'

def test_field_with_custom_mixin(reg):
class GreatMixin(object):

def is_great(self):
return True

reg.register(
'AWX_SOME_SETTING_ENABLED',
field_class=fields.BooleanField,
category_slug='system',
)
reg.register('AWX_SOME_SETTING_ENABLED', field_class=fields.BooleanField, category_slug='system')

field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED',
mixin_class=GreatMixin)
field = reg.get_setting_field('AWX_SOME_SETTING_ENABLED', mixin_class=GreatMixin)
assert isinstance(field, fields.BooleanField)
assert isinstance(field, GreatMixin)
assert field.is_great() is True
@ -228,12 +132,7 @@ def test_field_with_custom_mixin(reg):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings(reg):
reg.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
)
reg.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

field = reg.get_setting_field('AWX_SOME_SETTING')
assert field.default == 'DEFAULT'
@ -242,16 +141,10 @@ def test_default_value_from_settings(reg):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings_with_custom_representation(reg):
class LowercaseCharField(fields.CharField):

def to_representation(self, value):
return value.lower()

reg.register(
'AWX_SOME_SETTING',
field_class=LowercaseCharField,
category=_('System'),
category_slug='system',
)
reg.register('AWX_SOME_SETTING', field_class=LowercaseCharField, category=_('System'), category_slug='system')

field = reg.get_setting_field('AWX_SOME_SETTING')
assert field.default == 'default'
@ -53,9 +53,7 @@ def settings(request):

defaults['DEFAULTS_SNAPSHOT'] = {}
settings.configure(**defaults)
settings._wrapped = SettingsWrapper(settings._wrapped,
cache,
registry)
settings._wrapped = SettingsWrapper(settings._wrapped, cache, registry)
return settings

@ -67,14 +65,7 @@ def test_unregistered_setting(settings):

def test_read_only_setting(settings):
settings.registry.register(
'AWX_READ_ONLY',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='NO-EDITS',
read_only=True
)
settings.registry.register('AWX_READ_ONLY', field_class=fields.CharField, category=_('System'), category_slug='system', default='NO-EDITS', read_only=True)
assert settings.AWX_READ_ONLY == 'NO-EDITS'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True)
@ -85,13 +76,7 @@ def test_read_only_setting(settings):

@pytest.mark.parametrize('read_only', [True, False])
def test_setting_defined_in_file(settings, read_only):
kwargs = {'read_only': True} if read_only else {}
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
**kwargs
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', **kwargs)
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True)
@ -100,13 +85,7 @@ def test_setting_defined_in_file(settings, read_only):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_empty_default(settings):
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='',
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True)
@ -115,13 +94,7 @@ def test_setting_defined_in_file_with_empty_default(settings):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_specific_default(settings):
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default=123
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default=123)
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert len(settings.registry.get_registered_settings(read_only=False)) == 0
settings = settings.registry.get_registered_settings(read_only=True)
@ -131,12 +104,7 @@ def test_setting_defined_in_file_with_specific_default(settings):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_defaults_are_cached(settings):
"read-only settings are stored in the cache"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'

@ -144,12 +112,7 @@ def test_read_only_defaults_are_cached(settings):
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_cache_respects_timeout(settings):
"only preload the cache every SETTING_CACHE_TIMEOUT settings"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

assert settings.AWX_SOME_SETTING == 'DEFAULT'
cache_expiration = settings.cache.get('_awx_conf_preload_expires')
@ -161,13 +124,7 @@ def test_cache_respects_timeout(settings):

def test_default_setting(settings, mocker):
"settings that specify a default are inserted into the cache"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='DEFAULT'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
@ -177,24 +134,13 @@ def test_default_setting(settings, mocker):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_is_from_setting_file(settings, mocker):
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
assert settings.registry.get_setting_field('AWX_SOME_SETTING').defined_in_file is True

def test_setting_is_not_from_setting_file(settings, mocker):
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='DEFAULT'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

settings_to_cache = mocker.Mock(**{'order_by.return_value': []})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=settings_to_cache):
@ -204,19 +150,9 @@ def test_setting_is_not_from_setting_file(settings, mocker):

def test_empty_setting(settings, mocker):
"settings with no default and no defined value are not valid"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

mocks = mocker.Mock(**{
'order_by.return_value': mocker.Mock(**{
'__iter__': lambda self: iter([]),
'first.return_value': None
}),
})
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([]), 'first.return_value': None})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
with pytest.raises(AttributeError):
settings.AWX_SOME_SETTING
@ -225,21 +161,10 @@ test_empty_setting(settings, mocker):
def test_setting_from_db(settings, mocker):
"settings can be loaded from the database"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
default='DEFAULT'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', default='DEFAULT')

setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
mocks = mocker.Mock(**{
'order_by.return_value': mocker.Mock(**{
'__iter__': lambda self: iter([setting_from_db]),
'first.return_value': setting_from_db
}),
})
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
assert settings.AWX_SOME_SETTING == 'FROM_DB'
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
@ -248,12 +173,7 @@ def test_setting_from_db(settings, mocker):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_assignment(settings):
"read-only settings cannot be overwritten"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
with pytest.raises(ImproperlyConfigured):
settings.AWX_SOME_SETTING = 'CHANGED'
@ -262,41 +182,26 @@ def test_read_only_setting_assignment(settings):

def test_db_setting_create(settings, mocker):
"settings are stored in the database when set for the first time"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
with apply_patches([
mocker.patch('awx.conf.models.Setting.objects.filter',
return_value=setting_list),
mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock())
]):
with apply_patches(
[
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list),
mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()),
]
):
settings.AWX_SOME_SETTING = 'NEW-VALUE'

models.Setting.objects.create.assert_called_with(
key='AWX_SOME_SETTING',
user=None,
value='NEW-VALUE'
)
models.Setting.objects.create.assert_called_with(key='AWX_SOME_SETTING', user=None, value='NEW-VALUE')

def test_db_setting_update(settings, mocker):
"settings are updated in the database when their value changes"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
setting_list = mocker.Mock(**{
'order_by.return_value.first.return_value': existing_setting
})
setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': existing_setting})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list):
settings.AWX_SOME_SETTING = 'NEW-VALUE'

@ -306,12 +211,7 @@ def test_db_setting_update(settings, mocker):

def test_db_setting_deletion(settings, mocker):
"settings are auto-deleted from the database"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')

existing_setting = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_setting]):
@ -323,12 +223,7 @@ def test_db_setting_deletion(settings, mocker):

@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_deletion(settings):
"read-only settings cannot be deleted"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system')
assert settings.AWX_SOME_SETTING == 'DEFAULT'
with pytest.raises(ImproperlyConfigured):
del settings.AWX_SOME_SETTING
@ -337,36 +232,22 @@ def test_read_only_setting_deletion(settings):

def test_charfield_properly_sets_none(settings, mocker):
"see: https://github.com/ansible/ansible-tower/issues/5322"
settings.registry.register(
'AWX_SOME_SETTING',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
allow_null=True
)
settings.registry.register('AWX_SOME_SETTING', field_class=fields.CharField, category=_('System'), category_slug='system', allow_null=True)

setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
with apply_patches([
mocker.patch('awx.conf.models.Setting.objects.filter',
return_value=setting_list),
mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock())
]):
with apply_patches(
[
mocker.patch('awx.conf.models.Setting.objects.filter', return_value=setting_list),
mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock()),
]
):
settings.AWX_SOME_SETTING = None

models.Setting.objects.create.assert_called_with(
key='AWX_SOME_SETTING',
user=None,
value=None
)
models.Setting.objects.create.assert_called_with(key='AWX_SOME_SETTING', user=None, value=None)

def test_settings_use_cache(settings, mocker):
settings.registry.register(
'AWX_VAR',
field_class=fields.CharField,
category=_('System'),
category_slug='system'
)
settings.registry.register('AWX_VAR', field_class=fields.CharField, category=_('System'), category_slug='system')
settings.cache.set('AWX_VAR', 'foobar')
settings.cache.set('_awx_conf_preload_expires', 100)
# Will fail test if database is used
@ -374,13 +255,7 @@ def test_settings_use_cache(settings, mocker):

def test_settings_use_an_encrypted_cache(settings, mocker):
settings.registry.register(
'AWX_ENCRYPTED',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
encrypted=True
)
settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)
assert isinstance(settings.cache, EncryptedCacheProxy)
assert settings.cache.__dict__['encrypter'] == encrypt_field
assert settings.cache.__dict__['decrypter'] == decrypt_field
@ -393,34 +268,18 @@ def test_settings_use_an_encrypted_cache(settings, mocker):

def test_sensitive_cache_data_is_encrypted(settings, mocker):
"fields marked as `encrypted` are stored in the cache with encryption"
settings.registry.register(
'AWX_ENCRYPTED',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
encrypted=True
)
settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', encrypted=True)

def rot13(obj, attribute):
assert obj.pk == 123
return codecs.encode(getattr(obj, attribute), 'rot_13')

native_cache = LocMemCache(str(uuid4()), {})
cache = EncryptedCacheProxy(
native_cache,
settings.registry,
encrypter=rot13,
decrypter=rot13
)
cache = EncryptedCacheProxy(native_cache, settings.registry, encrypter=rot13, decrypter=rot13)
# Insert the setting value into the database; the encryption process will
# use its primary key as part of the encryption key
setting_from_db = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
mocks = mocker.Mock(**{
'order_by.return_value': mocker.Mock(**{
'__iter__': lambda self: iter([setting_from_db]),
'first.return_value': setting_from_db
}),
})
mocks = mocker.Mock(**{'order_by.return_value': mocker.Mock(**{'__iter__': lambda self: iter([setting_from_db]), 'first.return_value': setting_from_db})})
with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
@ -429,26 +288,14 @@ def test_sensitive_cache_data_is_encrypted(settings, mocker):

def test_readonly_sensitive_cache_data_is_encrypted(settings):
"readonly fields marked as `encrypted` are stored in the cache with encryption"
settings.registry.register(
'AWX_ENCRYPTED',
field_class=fields.CharField,
category=_('System'),
category_slug='system',
read_only=True,
encrypted=True
)
settings.registry.register('AWX_ENCRYPTED', field_class=fields.CharField, category=_('System'), category_slug='system', read_only=True, encrypted=True)

def rot13(obj, attribute):
assert obj.pk is None
return codecs.encode(getattr(obj, attribute), 'rot_13')

native_cache = LocMemCache(str(uuid4()), {})
cache = EncryptedCacheProxy(
native_cache,
settings.registry,
encrypter=rot13,
decrypter=rot13
)
cache = EncryptedCacheProxy(native_cache, settings.registry, encrypter=rot13, decrypter=rot13)
cache.set('AWX_ENCRYPTED', 'SECRET!')
assert cache.get('AWX_ENCRYPTED') == 'SECRET!'
assert native_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
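These tests can pass the same rot13 function as both encrypter and decrypter because ROT13 is its own inverse: applying it twice restores the original text. A quick standalone check:

import codecs

# 'SECRET!' encodes to 'FRPERG!' (as asserted against native_cache above),
# and encoding the result again yields the plaintext.
assert codecs.encode('SECRET!', 'rot_13') == 'FRPERG!'
assert codecs.encode('FRPERG!', 'rot_13') == 'SECRET!'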
@ -3,14 +3,10 @@

from django.conf.urls import url
from awx.conf.views import (
SettingCategoryList,
SettingSingletonDetail,
SettingLoggingTest,
)
from awx.conf.views import SettingCategoryList, SettingSingletonDetail, SettingLoggingTest

urlpatterns = [
url(r'^$', SettingCategoryList.as_view(), name='setting_category_list'),
url(r'^(?P<category_slug>[a-z0-9-]+)/$', SettingSingletonDetail.as_view(), name='setting_singleton_detail'),
url(r'^logging/test/$', SettingLoggingTest.as_view(), name='setting_logging_test'),
@ -7,7 +7,4 @@ __all__ = ['conf_to_dict']

def conf_to_dict(obj):
return {
'category': settings_registry.get_setting_category(obj.key),
'name': obj.key,
}
return {'category': settings_registry.get_setting_category(obj.key), 'name': obj.key}

@ -22,12 +22,7 @@ from rest_framework import serializers
from rest_framework import status

# Tower
from awx.api.generics import (
APIView,
GenericAPIView,
ListAPIView,
RetrieveUpdateDestroyAPIView,
)
from awx.api.generics import APIView, GenericAPIView, ListAPIView, RetrieveUpdateDestroyAPIView
from awx.api.permissions import IsSuperUser
from awx.api.versioning import reverse
from awx.main.utils import camelcase_to_underscore
@ -81,9 +76,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if self.category_slug not in category_slugs:
raise PermissionDenied()

registered_settings = settings_registry.get_registered_settings(
category_slug=self.category_slug, read_only=False,
)
registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug, read_only=False)
if self.category_slug == 'user':
return Setting.objects.filter(key__in=registered_settings, user=self.request.user)
else:
@ -91,9 +84,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):

def get_object(self):
settings_qs = self.get_queryset()
registered_settings = settings_registry.get_registered_settings(
category_slug=self.category_slug,
)
registered_settings = settings_registry.get_registered_settings(category_slug=self.category_slug)
all_settings = {}
for setting in settings_qs:
all_settings[setting.key] = setting.value
@ -117,9 +108,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
for key, value in serializer.validated_data.items():
if key == 'LICENSE' or settings_registry.is_setting_read_only(key):
continue
if settings_registry.is_setting_encrypted(key) and \
isinstance(value, str) and \
value.startswith('$encrypted$'):
if settings_registry.is_setting_encrypted(key) and isinstance(value, str) and value.startswith('$encrypted$'):
continue
setattr(serializer.instance, key, value)
setting = settings_qs.filter(key=key).order_by('pk').first()
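The backslash-continued condition and black's one-liner are the same boolean expression; only the continuation style changes. A standalone sketch with hypothetical values:

value = '$encrypted$abc123'
is_encrypted = True

# Equivalent to the multi-line form chained with backslashes:
if is_encrypted and isinstance(value, str) and value.startswith('$encrypted$'):
    skipped = True
assert skipped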
@ -133,7 +122,6 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
if settings_change_list:
connection.on_commit(lambda: handle_setting_changes.delay(settings_change_list))

def destroy(self, request, *args, **kwargs):
instance = self.get_object()
self.perform_destroy(instance)
@ -170,7 +158,7 @@ class SettingLoggingTest(GenericAPIView):
enabled = getattr(settings, 'LOG_AGGREGATOR_ENABLED', False)
if not enabled:
return Response({'error': 'Logging not enabled'}, status=status.HTTP_409_CONFLICT)

# Send test message to configured logger based on db settings
try:
default_logger = settings.LOG_AGGREGATOR_LOGGERS[0]
@ -179,18 +167,15 @@ class SettingLoggingTest(GenericAPIView):
except IndexError:
default_logger = 'awx'
logging.getLogger(default_logger).error('AWX Connection Test Message')

hostname = getattr(settings, 'LOG_AGGREGATOR_HOST', None)
protocol = getattr(settings, 'LOG_AGGREGATOR_PROTOCOL', None)

try:
subprocess.check_output(
['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'],
stderr=subprocess.STDOUT
)
subprocess.check_output(['rsyslogd', '-N1', '-f', '/var/lib/awx/rsyslog/rsyslog.conf'], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as exc:
return Response({'error': exc.output}, status=status.HTTP_400_BAD_REQUEST)

# Check to ensure port is open at host
if protocol in ['udp', 'tcp']:
port = getattr(settings, 'LOG_AGGREGATOR_PORT', None)
@ -206,7 +191,7 @@ class SettingLoggingTest(GenericAPIView):
else:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.settimeout(.5)
s.settimeout(0.5)
s.connect((hostname, int(port)))
s.shutdown(SHUT_RDWR)
s.close()
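black also normalizes numeric literals, rewriting the bare float .5 as 0.5; the two literals denote the same value, so the socket timeout is unaffected. A one-line standalone check:

assert .5 == 0.5 == float('0.5')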
awx/main/access.py (1002 changed lines)
File diff suppressed because it is too large
@ -24,7 +24,7 @@ logger = logging.getLogger('awx.analytics.broadcast_websocket')

def dt_to_seconds(dt):
return int((dt - datetime.datetime(1970,1,1)).total_seconds())
return int((dt - datetime.datetime(1970, 1, 1)).total_seconds())

def now_seconds():
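Only the comma spacing changes here; the epoch arithmetic is untouched. A standalone sanity check of the same computation, with a hypothetical input:

import datetime

# One day after the Unix epoch is exactly 86400 seconds.
delta = datetime.datetime(1970, 1, 2) - datetime.datetime(1970, 1, 1)
assert int(delta.total_seconds()) == 86400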
@ -37,7 +37,7 @@ def safe_name(s):

# Second granularity; Per-minute
class FixedSlidingWindow():
class FixedSlidingWindow:
def __init__(self, start_time=None):
self.buckets = dict()
self.start_time = start_time or now_seconds()
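The empty parentheses after the class name are redundant in Python 3: with or without them, the class inherits from object, so dropping them is purely cosmetic. A standalone sketch:

class WithParens():
    pass


class Bare:
    pass


# Both are ordinary new-style classes with the same MRO root.
assert WithParens.__mro__[-1] is object and Bare.__mro__[-1] is object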
@ -65,7 +65,7 @@ class FixedSlidingWindow():
return sum(self.buckets.values()) or 0

class BroadcastWebsocketStatsManager():
class BroadcastWebsocketStatsManager:
def __init__(self, event_loop, local_hostname):
self._local_hostname = local_hostname

@ -74,8 +74,7 @@ class BroadcastWebsocketStatsManager():
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME

def new_remote_host_stats(self, remote_hostname):
self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname,
remote_hostname)
self._stats[remote_hostname] = BroadcastWebsocketStats(self._local_hostname, remote_hostname)
return self._stats[remote_hostname]

def delete_remote_host_stats(self, remote_hostname):
@ -100,15 +99,15 @@ class BroadcastWebsocketStatsManager():

@classmethod
def get_stats_sync(cls):
'''
"""
Stringified verion of all the stats
'''
"""
redis_conn = redis.Redis.from_url(settings.BROKER_URL)
stats_str = redis_conn.get(BROADCAST_WEBSOCKET_REDIS_KEY_NAME) or b''
return parser.text_string_to_metric_families(stats_str.decode('UTF-8'))

class BroadcastWebsocketStats():
class BroadcastWebsocketStats:
def __init__(self, local_hostname, remote_hostname):
self._local_hostname = local_hostname
self._remote_hostname = remote_hostname
@ -118,24 +117,25 @@ class BroadcastWebsocketStats():
self.name = safe_name(self._local_hostname)
self.remote_name = safe_name(self._remote_hostname)

self._messages_received_total = Counter(f'awx_{self.remote_name}_messages_received_total',
'Number of messages received, to be forwarded, by the broadcast websocket system',
registry=self._registry)
self._messages_received = Gauge(f'awx_{self.remote_name}_messages_received',
'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
registry=self._registry)
self._connection = Enum(f'awx_{self.remote_name}_connection',
'Websocket broadcast connection',
states=['disconnected', 'connected'],
registry=self._registry)
self._messages_received_total = Counter(
f'awx_{self.remote_name}_messages_received_total',
'Number of messages received, to be forwarded, by the broadcast websocket system',
registry=self._registry,
)
self._messages_received = Gauge(
f'awx_{self.remote_name}_messages_received',
'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
registry=self._registry,
)
self._connection = Enum(
f'awx_{self.remote_name}_connection', 'Websocket broadcast connection', states=['disconnected', 'connected'], registry=self._registry
)
self._connection.state('disconnected')
self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start',
'Time the connection was established',
registry=self._registry)
self._connection_start = Gauge(f'awx_{self.remote_name}_connection_start', 'Time the connection was established', registry=self._registry)

self._messages_received_per_minute = Gauge(f'awx_{self.remote_name}_messages_received_per_minute',
'Messages received per minute',
registry=self._registry)
self._messages_received_per_minute = Gauge(
f'awx_{self.remote_name}_messages_received_per_minute', 'Messages received per minute', registry=self._registry
)
self._internal_messages_received_per_minute = FixedSlidingWindow()

def unregister(self):

@ -10,8 +10,7 @@ from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _

from awx.conf.license import get_license
from awx.main.utils import (get_awx_version, get_ansible_version,
get_custom_venv_choices, camelcase_to_underscore)
from awx.main.utils import get_awx_version, get_ansible_version, get_custom_venv_choices, camelcase_to_underscore
from awx.main import models
from django.contrib.sessions.models import Session
from awx.main.analytics import register
@ -68,96 +67,99 @@ def config(since, **kwargs):
@register('counts', '1.0', description=_('Counts of objects such as organizations, inventories, and projects'))
def counts(since, **kwargs):
counts = {}
for cls in (models.Organization, models.Team, models.User,
models.Inventory, models.Credential, models.Project,
models.JobTemplate, models.WorkflowJobTemplate,
models.Host, models.Schedule, models.CustomInventoryScript,
models.NotificationTemplate):
for cls in (
models.Organization,
models.Team,
models.User,
models.Inventory,
models.Credential,
models.Project,
models.JobTemplate,
models.WorkflowJobTemplate,
models.Host,
models.Schedule,
models.CustomInventoryScript,
models.NotificationTemplate,
):
counts[camelcase_to_underscore(cls.__name__)] = cls.objects.count()

venvs = get_custom_venv_choices()
counts['custom_virtualenvs'] = len([
v for v in venvs
if os.path.basename(v.rstrip('/')) != 'ansible'
])
counts['custom_virtualenvs'] = len([v for v in venvs if os.path.basename(v.rstrip('/')) != 'ansible'])

inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind')))
inv_counts['normal'] = inv_counts.get('', 0)
inv_counts.pop('', None)
inv_counts['smart'] = inv_counts.get('smart', 0)
counts['inventories'] = inv_counts

counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count() # excludes implicit project_updates
counts['active_host_count'] = models.Host.objects.active_count()

counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()  # excludes implicit project_updates
counts['active_host_count'] = models.Host.objects.active_count()
active_sessions = Session.objects.filter(expire_date__gte=now()).count()
active_user_sessions = models.UserSessionMembership.objects.select_related('session').filter(session__expire_date__gte=now()).count()
active_anonymous_sessions = active_sessions - active_user_sessions
counts['active_sessions'] = active_sessions
counts['active_user_sessions'] = active_user_sessions
counts['active_anonymous_sessions'] = active_anonymous_sessions
counts['running_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('running', 'waiting',)).count()
counts['running_jobs'] = (
models.UnifiedJob.objects.exclude(launch_type='sync')
.filter(
status__in=(
'running',
'waiting',
)
)
.count()
)
counts['pending_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('pending',)).count()
return counts
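Wrapping the call chain in parentheses lets black break it one method per line without backslash continuations; the parenthesized and single-line forms are the same expression. A standalone sketch using plain lists (the Django queryset itself is not reproduced here):

values = [3, 1, 2]
one_line = sorted(values, reverse=True)
wrapped = (
    sorted(
        values,
        reverse=True,
    )
)
assert one_line == wrapped == [3, 2, 1]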
@register('org_counts', '1.0', description=_('Counts of users and teams by organization'))
def org_counts(since, **kwargs):
counts = {}
for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True),
num_teams=Count('teams', distinct=True)).values('name', 'id', 'num_users', 'num_teams'):
counts[org['id']] = {'name': org['name'],
'users': org['num_users'],
'teams': org['num_teams']
}
for org in models.Organization.objects.annotate(num_users=Count('member_role__members', distinct=True), num_teams=Count('teams', distinct=True)).values(
'name', 'id', 'num_users', 'num_teams'
):
counts[org['id']] = {'name': org['name'], 'users': org['num_users'], 'teams': org['num_teams']}
return counts

@register('cred_type_counts', '1.0', description=_('Counts of credentials by credential type'))
def cred_type_counts(since, **kwargs):
counts = {}
for cred_type in models.CredentialType.objects.annotate(num_credentials=Count(
'credentials', distinct=True)).values('name', 'id', 'managed_by_tower', 'num_credentials'):
counts[cred_type['id']] = {'name': cred_type['name'],
'credential_count': cred_type['num_credentials'],
'managed_by_tower': cred_type['managed_by_tower']
}
for cred_type in models.CredentialType.objects.annotate(num_credentials=Count('credentials', distinct=True)).values(
'name', 'id', 'managed_by_tower', 'num_credentials'
):
counts[cred_type['id']] = {
'name': cred_type['name'],
'credential_count': cred_type['num_credentials'],
'managed_by_tower': cred_type['managed_by_tower'],
}
return counts

@register('inventory_counts', '1.2', description=_('Inventories, their inventory sources, and host counts'))
def inventory_counts(since, **kwargs):
counts = {}
for inv in models.Inventory.objects.filter(kind='').annotate(num_sources=Count('inventory_sources', distinct=True),
num_hosts=Count('hosts', distinct=True)).only('id', 'name', 'kind'):
for inv in (
models.Inventory.objects.filter(kind='')
.annotate(num_sources=Count('inventory_sources', distinct=True), num_hosts=Count('hosts', distinct=True))
.only('id', 'name', 'kind')
):
source_list = []
for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name','source', 'num_hosts'):
for source in inv.inventory_sources.filter().annotate(num_hosts=Count('hosts', distinct=True)).values('name', 'source', 'num_hosts'):
source_list.append(source)
counts[inv.id] = {'name': inv.name,
'kind': inv.kind,
'hosts': inv.num_hosts,
'sources': inv.num_sources,
'source_list': source_list
}
counts[inv.id] = {'name': inv.name, 'kind': inv.kind, 'hosts': inv.num_hosts, 'sources': inv.num_sources, 'source_list': source_list}

for smart_inv in models.Inventory.objects.filter(kind='smart'):
counts[smart_inv.id] = {'name': smart_inv.name,
'kind': smart_inv.kind,
'hosts': smart_inv.hosts.count(),
'sources': 0,
'source_list': []
}
counts[smart_inv.id] = {'name': smart_inv.name, 'kind': smart_inv.kind, 'hosts': smart_inv.hosts.count(), 'sources': 0, 'source_list': []}
return counts

@register('projects_by_scm_type', '1.0', description=_('Counts of projects by source control type'))
def projects_by_scm_type(since, **kwargs):
counts = dict(
(t[0] or 'manual', 0)
for t in models.Project.SCM_TYPE_CHOICES
)
for result in models.Project.objects.values('scm_type').annotate(
count=Count('scm_type')
).order_by('scm_type'):
counts = dict((t[0] or 'manual', 0) for t in models.Project.SCM_TYPE_CHOICES)
for result in models.Project.objects.values('scm_type').annotate(count=Count('scm_type')).order_by('scm_type'):
counts[result['scm_type'] or 'manual'] = result['count']
return counts

@ -172,10 +174,10 @@ def _get_isolated_datetime(last_check):
def instance_info(since, include_hostnames=False, **kwargs):
info = {}
instances = models.Instance.objects.values_list('hostname').values(
'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled')
'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled'
)
for instance in instances:
consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'],
status__in=('running', 'waiting')))
consumed_capacity = sum(x.task_impact for x in models.UnifiedJob.objects.filter(execution_node=instance['hostname'], status__in=('running', 'waiting')))
instance_info = {
'uuid': instance['uuid'],
'version': instance['version'],
@ -186,7 +188,7 @@ def instance_info(since, include_hostnames=False, **kwargs):
'last_isolated_check': _get_isolated_datetime(instance['last_isolated_check']),
'enabled': instance['enabled'],
'consumed_capacity': consumed_capacity,
'remaining_capacity': instance['capacity'] - consumed_capacity
'remaining_capacity': instance['capacity'] - consumed_capacity,
}
if include_hostnames is True:
instance_info['hostname'] = instance['hostname']
@ -198,20 +200,22 @@ def job_counts(since, **kwargs):
counts = {}
counts['total_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').count()
counts['status'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('status').annotate(Count('status')).order_by())
counts['launch_type'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
'launch_type').annotate(Count('launch_type')).order_by())
counts['launch_type'] = dict(models.UnifiedJob.objects.exclude(launch_type='sync').values_list('launch_type').annotate(Count('launch_type')).order_by())
return counts

def job_instance_counts(since, **kwargs):
counts = {}
job_types = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
'execution_node', 'launch_type').annotate(job_launch_type=Count('launch_type')).order_by()
job_types = (
models.UnifiedJob.objects.exclude(launch_type='sync')
.values_list('execution_node', 'launch_type')
.annotate(job_launch_type=Count('launch_type'))
.order_by()
)
for job in job_types:
counts.setdefault(job[0], {}).setdefault('launch_type', {})[job[1]] = job[2]

job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
'execution_node', 'status').annotate(job_status=Count('status')).order_by()

job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list('execution_node', 'status').annotate(job_status=Count('status')).order_by()
for job in job_statuses:
counts.setdefault(job[0], {}).setdefault('status', {})[job[1]] = job[2]
return counts
@ -261,12 +265,12 @@ class FileSplitter(io.StringIO):
self.files = self.files[:-1]
# If we only have one file, remove the suffix
if len(self.files) == 1:
os.rename(self.files[0],self.files[0].replace('_split0',''))
os.rename(self.files[0], self.files[0].replace('_split0', ''))
return self.files

def write(self, s):
if not self.header:
self.header = s[0:s.index('\n')]
self.header = s[0 : s.index('\n')]
self.counter += self.currentfile.write(s)
if self.counter >= MAX_TABLE_SIZE:
self.cycle_file()
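PEP 8 treats a slice bound that is an expression (here, s.index('\n')) as complex, so black spaces the colon; simple bounds such as s[0:5] stay tight. Either spelling slices identically. A standalone check:

s = 'header\nbody'
assert s[0 : s.index('\n')] == s[0:s.index('\n')] == 'header'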
@ -307,7 +311,9 @@ def events_table(since, full_path, until, **kwargs):
FROM main_jobevent
WHERE (main_jobevent.created > '{}' AND main_jobevent.created <= '{}')
ORDER BY main_jobevent.id ASC) TO STDOUT WITH CSV HEADER
'''.format(since.isoformat(),until.isoformat())
'''.format(
since.isoformat(), until.isoformat()
)
return _copy_table(table='events', query=events_query, path=full_path)

@ -346,7 +352,9 @@ def unified_jobs_table(since, full_path, until, **kwargs):
OR (main_unifiedjob.finished > '{0}' AND main_unifiedjob.finished <= '{1}'))
AND main_unifiedjob.launch_type != 'sync'
ORDER BY main_unifiedjob.id ASC) TO STDOUT WITH CSV HEADER
'''.format(since.isoformat(),until.isoformat())
'''.format(
since.isoformat(), until.isoformat()
)
return _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)

@ -369,7 +377,7 @@ def unified_job_template_table(since, full_path, **kwargs):
main_unifiedjobtemplate.status
FROM main_unifiedjobtemplate, django_content_type
WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id
ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''
ORDER BY main_unifiedjobtemplate.id ASC) TO STDOUT WITH CSV HEADER'''
return _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)

@ -405,7 +413,9 @@ def workflow_job_node_table(since, full_path, until, **kwargs):
) always_nodes ON main_workflowjobnode.id = always_nodes.from_workflowjobnode_id
WHERE (main_workflowjobnode.modified > '{}' AND main_workflowjobnode.modified <= '{}')
ORDER BY main_workflowjobnode.id ASC) TO STDOUT WITH CSV HEADER
'''.format(since.isoformat(),until.isoformat())
'''.format(
since.isoformat(), until.isoformat()
)
return _copy_table(table='workflow_job_node', query=workflow_job_node_query, path=full_path)

@ -437,5 +447,5 @@ def workflow_job_template_node_table(since, full_path, **kwargs):
FROM main_workflowjobtemplatenode_always_nodes
GROUP BY from_workflowjobtemplatenode_id
) always_nodes ON main_workflowjobtemplatenode.id = always_nodes.from_workflowjobtemplatenode_id
ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
ORDER BY main_workflowjobtemplatenode.id ASC) TO STDOUT WITH CSV HEADER'''
return _copy_table(table='workflow_job_template_node', query=workflow_job_template_node_query, path=full_path)

@ -43,7 +43,7 @@ def all_collectors():
key = func.__awx_analytics_key__
desc = func.__awx_analytics_description__ or ''
version = func.__awx_analytics_version__
collector_dict[key] = { 'name': key, 'version': version, 'description': desc}
collector_dict[key] = {'name': key, 'version': version, 'description': desc}
return collector_dict

@ -82,7 +82,7 @@ def register(key, version, description=None, format='json', expensive=False):
return decorate

def gather(dest=None, module=None, subset = None, since = None, until = now(), collection_type='scheduled'):
def gather(dest=None, module=None, subset=None, since=None, until=now(), collection_type='scheduled'):
"""
Gather all defined metrics and write them as JSON files in a .tgz

@ -90,6 +90,7 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
:param module: the module to search for registered analytic collector
functions; defaults to awx.main.analytics.collectors
"""

def _write_manifest(destdir, manifest):
path = os.path.join(destdir, 'manifest.json')
with open(path, 'w', encoding='utf-8') as f:
@ -116,13 +117,10 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
collector_module = module
else:
from awx.main.analytics import collectors

collector_module = collectors
for name, func in inspect.getmembers(collector_module):
if (
inspect.isfunction(func) and
hasattr(func, '__awx_analytics_key__') and
(not subset or name in subset)
):
if inspect.isfunction(func) and hasattr(func, '__awx_analytics_key__') and (not subset or name in subset):
collector_list.append((name, func))

manifest = dict()
@ -162,6 +160,7 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
# Always include config.json if we're using our collectors
if 'config.json' not in manifest.keys() and not module:
from awx.main.analytics import collectors

config = collectors.config
path = '{}.json'.format(os.path.join(gather_dir, config.__awx_analytics_key__))
with open(path, 'w', encoding='utf-8') as f:
@ -204,22 +203,14 @@ def gather(dest=None, module=None, subset = None, since = None, until = now(), c
for i in range(0, len(stage_dirs)):
stage_dir = stage_dirs[i]
# can't use isoformat() since it has colons, which GNU tar doesn't like
tarname = '_'.join([
settings.SYSTEM_UUID,
until.strftime('%Y-%m-%d-%H%M%S%z'),
str(i)
])
tgz = shutil.make_archive(
os.path.join(os.path.dirname(dest), tarname),
'gztar',
stage_dir
)
tarname = '_'.join([settings.SYSTEM_UUID, until.strftime('%Y-%m-%d-%H%M%S%z'), str(i)])
tgz = shutil.make_archive(os.path.join(os.path.dirname(dest), tarname), 'gztar', stage_dir)
tarfiles.append(tgz)
except Exception:
shutil.rmtree(stage_dir, ignore_errors = True)
shutil.rmtree(stage_dir, ignore_errors=True)
logger.exception("Failed to write analytics archive file")
finally:
shutil.rmtree(dest, ignore_errors = True)
shutil.rmtree(dest, ignore_errors=True)
return tarfiles
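PEP 8 forbids spaces around '=' in keyword arguments, so black rewrites ignore_errors = True as ignore_errors=True; the call is identical either way. A standalone sketch:

import shutil
import tempfile

# With ignore_errors=True, rmtree swallows errors such as a missing path.
shutil.rmtree(tempfile.mkdtemp(), ignore_errors=True)
shutil.rmtree('/nonexistent-path-for-demo', ignore_errors=True)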
@ -253,16 +244,17 @@ def ship(path):
s.headers = get_awx_http_client_headers()
s.headers.pop('Content-Type')
with set_environ(**settings.AWX_TASK_ENV):
response = s.post(url,
files=files,
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
auth=(rh_user, rh_password),
headers=s.headers,
timeout=(31, 31))
response = s.post(
url,
files=files,
verify="/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",
auth=(rh_user, rh_password),
headers=s.headers,
timeout=(31, 31),
)
# Accept 2XX status_codes
if response.status_code >= 300:
return logger.exception('Upload failed with status {}, {}'.format(response.status_code,
response.text))
return logger.exception('Upload failed with status {}, {}'.format(response.status_code, response.text))
finally:
# cleanup tar.gz
if os.path.exists(path):

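For context on the collapsed s.post(...) call: requests takes a (connect, read) timeout tuple, and the files= mapping drives a multipart upload, which is why the session's Content-Type header is popped first. A rough standalone equivalent (URL and credentials are placeholders):

```python
import requests

with open('bundle.tar.gz', 'rb') as f:
    files = {'file': ('bundle.tar.gz', f, 'application/gzip')}
    # timeout=(31, 31) means: 31s to establish the connection, 31s between bytes read
    response = requests.post(
        'https://example.invalid/api/ingress/upload',  # placeholder endpoint
        files=files,
        auth=('rh_user', 'rh_password'),  # placeholder credentials
        timeout=(31, 31),
    )
if response.status_code >= 300:  # anything outside 2XX is treated as failure
    print('Upload failed with status {}, {}'.format(response.status_code, response.text))
```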
@ -1,16 +1,8 @@
from django.conf import settings
from prometheus_client import (
REGISTRY,
PROCESS_COLLECTOR,
PLATFORM_COLLECTOR,
GC_COLLECTOR,
Gauge,
Info,
generate_latest
)
from prometheus_client import REGISTRY, PROCESS_COLLECTOR, PLATFORM_COLLECTOR, GC_COLLECTOR, Gauge, Info, generate_latest

from awx.conf.license import get_license
from awx.main.utils import (get_awx_version, get_ansible_version)
from awx.main.utils import get_awx_version, get_ansible_version
from awx.main.analytics.collectors import (
counts,
instance_info,
@ -31,23 +23,97 @@ INV_COUNT = Gauge('awx_inventories_total', 'Number of inventories')
PROJ_COUNT = Gauge('awx_projects_total', 'Number of projects')
JT_COUNT = Gauge('awx_job_templates_total', 'Number of job templates')
WFJT_COUNT = Gauge('awx_workflow_job_templates_total', 'Number of workflow job templates')
HOST_COUNT = Gauge('awx_hosts_total', 'Number of hosts', ['type',])
HOST_COUNT = Gauge(
'awx_hosts_total',
'Number of hosts',
[
'type',
],
)
SCHEDULE_COUNT = Gauge('awx_schedules_total', 'Number of schedules')
INV_SCRIPT_COUNT = Gauge('awx_inventory_scripts_total', 'Number of invetory scripts')
USER_SESSIONS = Gauge('awx_sessions_total', 'Number of sessions', ['type',])
USER_SESSIONS = Gauge(
'awx_sessions_total',
'Number of sessions',
[
'type',
],
)
CUSTOM_VENVS = Gauge('awx_custom_virtualenvs_total', 'Number of virtualenvs')
RUNNING_JOBS = Gauge('awx_running_jobs_total', 'Number of running jobs on the Tower system')
PENDING_JOBS = Gauge('awx_pending_jobs_total', 'Number of pending jobs on the Tower system')
STATUS = Gauge('awx_status_total', 'Status of Job launched', ['status',])
STATUS = Gauge(
'awx_status_total',
'Status of Job launched',
[
'status',
],
)

INSTANCE_CAPACITY = Gauge('awx_instance_capacity', 'Capacity of each node in a Tower system', ['hostname', 'instance_uuid',])
INSTANCE_CPU = Gauge('awx_instance_cpu', 'CPU cores on each node in a Tower system', ['hostname', 'instance_uuid',])
INSTANCE_MEMORY = Gauge('awx_instance_memory', 'RAM (Kb) on each node in a Tower system', ['hostname', 'instance_uuid',])
INSTANCE_INFO = Info('awx_instance', 'Info about each node in a Tower system', ['hostname', 'instance_uuid',])
INSTANCE_LAUNCH_TYPE = Gauge('awx_instance_launch_type_total', 'Type of Job launched', ['node', 'launch_type',])
INSTANCE_STATUS = Gauge('awx_instance_status_total', 'Status of Job launched', ['node', 'status',])
INSTANCE_CONSUMED_CAPACITY = Gauge('awx_instance_consumed_capacity', 'Consumed capacity of each node in a Tower system', ['hostname', 'instance_uuid',])
INSTANCE_REMAINING_CAPACITY = Gauge('awx_instance_remaining_capacity', 'Remaining capacity of each node in a Tower system', ['hostname', 'instance_uuid',])
INSTANCE_CAPACITY = Gauge(
'awx_instance_capacity',
'Capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_CPU = Gauge(
'awx_instance_cpu',
'CPU cores on each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_MEMORY = Gauge(
'awx_instance_memory',
'RAM (Kb) on each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_INFO = Info(
'awx_instance',
'Info about each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_LAUNCH_TYPE = Gauge(
'awx_instance_launch_type_total',
'Type of Job launched',
[
'node',
'launch_type',
],
)
INSTANCE_STATUS = Gauge(
'awx_instance_status_total',
'Status of Job launched',
[
'node',
'status',
],
)
INSTANCE_CONSUMED_CAPACITY = Gauge(
'awx_instance_consumed_capacity',
'Consumed capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)
INSTANCE_REMAINING_CAPACITY = Gauge(
'awx_instance_remaining_capacity',
'Remaining capacity of each node in a Tower system',
[
'hostname',
'instance_uuid',
],
)

LICENSE_INSTANCE_TOTAL = Gauge('awx_license_instance_total', 'Total number of managed hosts provided by your license')
LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining managed hosts provided by your license')
@ -55,18 +121,20 @@ LICENSE_INSTANCE_FREE = Gauge('awx_license_instance_free', 'Number of remaining

def metrics():
license_info = get_license()
SYSTEM_INFO.info({
'install_uuid': settings.INSTALL_UUID,
'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
'tower_url_base': settings.TOWER_URL_BASE,
'tower_version': get_awx_version(),
'ansible_version': get_ansible_version(),
'license_type': license_info.get('license_type', 'UNLICENSED'),
'license_expiry': str(license_info.get('time_remaining', 0)),
'pendo_tracking': settings.PENDO_TRACKING_STATE,
'external_logger_enabled': str(settings.LOG_AGGREGATOR_ENABLED),
'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None')
})
SYSTEM_INFO.info(
{
'install_uuid': settings.INSTALL_UUID,
'insights_analytics': str(settings.INSIGHTS_TRACKING_STATE),
'tower_url_base': settings.TOWER_URL_BASE,
'tower_version': get_awx_version(),
'ansible_version': get_ansible_version(),
'license_type': license_info.get('license_type', 'UNLICENSED'),
'license_expiry': str(license_info.get('time_remaining', 0)),
'pendo_tracking': settings.PENDO_TRACKING_STATE,
'external_logger_enabled': str(settings.LOG_AGGREGATOR_ENABLED),
'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', 'None'),
}
)

LICENSE_INSTANCE_TOTAL.set(str(license_info.get('instance_count', 0)))
LICENSE_INSTANCE_FREE.set(str(license_info.get('free_instances', 0)))
@ -108,16 +176,18 @@ def metrics():
INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info({
'enabled': str(instance_data[uuid]['enabled']),
'last_isolated_check': getattr(instance_data[uuid], 'last_isolated_check', 'None'),
'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
'version': instance_data[uuid]['version']
})
INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info(
{
'enabled': str(instance_data[uuid]['enabled']),
'last_isolated_check': getattr(instance_data[uuid], 'last_isolated_check', 'None'),
'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),
'version': instance_data[uuid]['version'],
}
)

instance_data = job_instance_counts(None)
for node in instance_data:
# skipping internal execution node (for system jobs)
# skipping internal execution node (for system jobs)
if node == '':
continue
types = instance_data[node].get('launch_type', {})
@ -127,7 +197,6 @@ def metrics():
for status, value in statuses.items():
INSTANCE_STATUS.labels(node=node, status=status).set(value)


return generate_latest()

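The labeled gauges above expand into one sample per label value at scrape time; metrics() fills them in with .labels(...).set(...). A toy illustration with prometheus_client (an isolated registry and invented values, so the sketch is self-contained):

```python
from prometheus_client import CollectorRegistry, Gauge, generate_latest

registry = CollectorRegistry()  # avoid touching the process-wide default REGISTRY
status = Gauge('awx_status_total', 'Status of Job launched', ['status'], registry=registry)
status.labels(status='successful').set(42)
status.labels(status='failed').set(3)

print(generate_latest(registry).decode())
# awx_status_total{status="successful"} 42.0
# awx_status_total{status="failed"} 3.0
```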
231
awx/main/conf.py
@ -37,8 +37,7 @@ register(
'ORG_ADMINS_CAN_SEE_ALL_USERS',
field_class=fields.BooleanField,
label=_('All Users Visible to Organization Admins'),
help_text=_('Controls whether any Organization Admin can view all users and teams, '
'even those not associated with their Organization.'),
help_text=_('Controls whether any Organization Admin can view all users and teams, ' 'even those not associated with their Organization.'),
category=_('System'),
category_slug='system',
)
@ -47,8 +46,10 @@ register(
'MANAGE_ORGANIZATION_AUTH',
field_class=fields.BooleanField,
label=_('Organization Admins Can Manage Users and Teams'),
help_text=_('Controls whether any Organization Admin has the privileges to create and manage users and teams. '
'You may want to disable this ability if you are using an LDAP or SAML integration.'),
help_text=_(
'Controls whether any Organization Admin has the privileges to create and manage users and teams. '
'You may want to disable this ability if you are using an LDAP or SAML integration.'
),
category=_('System'),
category_slug='system',
)
@ -59,8 +60,7 @@ register(
schemes=('http', 'https'),
allow_plain_hostname=True, # Allow hostname only without TLD.
label=_('Base URL of the Tower host'),
help_text=_('This setting is used by services like notifications to render '
'a valid url to the Tower host.'),
help_text=_('This setting is used by services like notifications to render ' 'a valid url to the Tower host.'),
category=_('System'),
category_slug='system',
)
@ -69,11 +69,13 @@ register(
'REMOTE_HOST_HEADERS',
field_class=fields.StringListField,
label=_('Remote Host Headers'),
help_text=_('HTTP headers and meta keys to search to determine remote host '
'name or IP. Add additional items to this list, such as '
'"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. '
'See the "Proxy Support" section of the Adminstrator guide for '
'more details.'),
help_text=_(
'HTTP headers and meta keys to search to determine remote host '
'name or IP. Add additional items to this list, such as '
'"HTTP_X_FORWARDED_FOR", if behind a reverse proxy. '
'See the "Proxy Support" section of the Adminstrator guide for '
'more details.'
),
category=_('System'),
category_slug='system',
)
@ -82,11 +84,13 @@ register(
'PROXY_IP_ALLOWED_LIST',
field_class=fields.StringListField,
label=_('Proxy IP Allowed List'),
help_text=_("If Tower is behind a reverse proxy/load balancer, use this setting "
"to configure the proxy IP addresses from which Tower should trust "
"custom REMOTE_HOST_HEADERS header values. "
"If this setting is an empty list (the default), the headers specified by "
"REMOTE_HOST_HEADERS will be trusted unconditionally')"),
help_text=_(
"If Tower is behind a reverse proxy/load balancer, use this setting "
"to configure the proxy IP addresses from which Tower should trust "
"custom REMOTE_HOST_HEADERS header values. "
"If this setting is an empty list (the default), the headers specified by "
"REMOTE_HOST_HEADERS will be trusted unconditionally')"
),
category=_('System'),
category_slug='system',
)
@ -97,9 +101,7 @@ register(
field_class=fields.DictField,
default=lambda: {},
label=_('License'),
help_text=_('The license controls which features and functionality are '
'enabled. Use /api/v2/config/ to update or change '
'the license.'),
help_text=_('The license controls which features and functionality are ' 'enabled. Use /api/v2/config/ to update or change ' 'the license.'),
category=_('System'),
category_slug='system',
)
@ -193,8 +195,7 @@ register(
'CUSTOM_VENV_PATHS',
field_class=fields.StringListPathField,
label=_('Custom virtual environment paths'),
help_text=_('Paths where Tower will look for custom virtual environments '
'(in addition to /var/lib/awx/venv/). Enter one path per line.'),
help_text=_('Paths where Tower will look for custom virtual environments ' '(in addition to /var/lib/awx/venv/). Enter one path per line.'),
category=_('System'),
category_slug='system',
default=[],
@ -244,9 +245,11 @@ register(
'AWX_PROOT_BASE_PATH',
field_class=fields.CharField,
label=_('Job execution path'),
help_text=_('The directory in which Tower will create new temporary '
'directories for job execution and isolation '
'(such as credential files and custom inventory scripts).'),
help_text=_(
'The directory in which Tower will create new temporary '
'directories for job execution and isolation '
'(such as credential files and custom inventory scripts).'
),
category=_('Jobs'),
category_slug='jobs',
)
@ -287,8 +290,10 @@ register(
field_class=fields.IntegerField,
min_value=0,
label=_('Isolated launch timeout'),
help_text=_('The timeout (in seconds) for launching jobs on isolated instances. '
'This includes the time needed to copy source control files (playbooks) to the isolated instance.'),
help_text=_(
'The timeout (in seconds) for launching jobs on isolated instances. '
'This includes the time needed to copy source control files (playbooks) to the isolated instance.'
),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@ -300,8 +305,10 @@ register(
min_value=0,
default=10,
label=_('Isolated connection timeout'),
help_text=_('Ansible SSH connection timeout (in seconds) to use when communicating with isolated instances. '
'Value should be substantially greater than expected network latency.'),
help_text=_(
'Ansible SSH connection timeout (in seconds) to use when communicating with isolated instances. '
'Value should be substantially greater than expected network latency.'
),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@ -314,7 +321,7 @@ register(
help_text=_('When set to True, AWX will enforce strict host key checking for communication with isolated nodes.'),
category=_('Jobs'),
category_slug='jobs',
default=False
default=False,
)

register(
@ -322,9 +329,11 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Generate RSA keys for isolated instances'),
help_text=_('If set, a random RSA key will be generated and distributed to '
'isolated instances. To disable this behavior and manage authentication '
'for isolated instances outside of Tower, disable this setting.'), # noqa
help_text=_(
'If set, a random RSA key will be generated and distributed to '
'isolated instances. To disable this behavior and manage authentication '
'for isolated instances outside of Tower, disable this setting.'
), # noqa
category=_('Jobs'),
category_slug='jobs',
)
@ -359,8 +368,7 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Enable detailed resource profiling on all playbook runs'),
help_text=_('If set, detailed resource profiling data will be collected on all jobs. '
'This data can be gathered with `sosreport`.'), # noqa
help_text=_('If set, detailed resource profiling data will be collected on all jobs. ' 'This data can be gathered with `sosreport`.'), # noqa
category=_('Jobs'),
category_slug='jobs',
)
@ -370,8 +378,7 @@ register(
field_class=FloatField,
default='0.25',
label=_('Interval (in seconds) between polls for cpu usage.'),
help_text=_('Interval (in seconds) between polls for cpu usage. '
'Setting this lower than the default will affect playbook performance.'),
help_text=_('Interval (in seconds) between polls for cpu usage. ' 'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'),
category_slug='jobs',
required=False,
@ -382,8 +389,7 @@ register(
field_class=FloatField,
default='0.25',
label=_('Interval (in seconds) between polls for memory usage.'),
help_text=_('Interval (in seconds) between polls for memory usage. '
'Setting this lower than the default will affect playbook performance.'),
help_text=_('Interval (in seconds) between polls for memory usage. ' 'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'),
category_slug='jobs',
required=False,
@ -394,8 +400,7 @@ register(
field_class=FloatField,
default='0.25',
label=_('Interval (in seconds) between polls for PID count.'),
help_text=_('Interval (in seconds) between polls for PID count. '
'Setting this lower than the default will affect playbook performance.'),
help_text=_('Interval (in seconds) between polls for PID count. ' 'Setting this lower than the default will affect playbook performance.'),
category=_('Jobs'),
category_slug='jobs',
required=False,
@ -469,10 +474,9 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Ignore Ansible Galaxy SSL Certificate Verification'),
help_text=_('If set to true, certificate validation will not be done when '
'installing content from any Galaxy server.'),
help_text=_('If set to true, certificate validation will not be done when ' 'installing content from any Galaxy server.'),
category=_('Jobs'),
category_slug='jobs'
category_slug='jobs',
)

register(
@ -491,7 +495,8 @@ register(
min_value=0,
label=_('Job Event Standard Output Maximum Display Size'),
help_text=_(
u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'),
u'Maximum Size of Standard Output in bytes to display for a single job or ad hoc command event. `stdout` will end with `\u2026` when truncated.'
),
category=_('Jobs'),
category_slug='jobs',
)
@ -522,8 +527,10 @@ register(
min_value=0,
default=0,
label=_('Default Job Timeout'),
help_text=_('Maximum time in seconds to allow jobs to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual job template will override this.'),
help_text=_(
'Maximum time in seconds to allow jobs to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual job template will override this.'
),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@ -535,8 +542,10 @@ register(
min_value=0,
default=0,
label=_('Default Inventory Update Timeout'),
help_text=_('Maximum time in seconds to allow inventory updates to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual inventory source will override this.'),
help_text=_(
'Maximum time in seconds to allow inventory updates to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual inventory source will override this.'
),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@ -548,8 +557,10 @@ register(
min_value=0,
default=0,
label=_('Default Project Update Timeout'),
help_text=_('Maximum time in seconds to allow project updates to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual project will override this.'),
help_text=_(
'Maximum time in seconds to allow project updates to run. Use value of 0 to indicate that no '
'timeout should be imposed. A timeout set on an individual project will override this.'
),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@ -561,10 +572,12 @@ register(
min_value=0,
default=0,
label=_('Per-Host Ansible Fact Cache Timeout'),
help_text=_('Maximum time, in seconds, that stored Ansible facts are considered valid since '
'the last time they were modified. Only valid, non-stale, facts will be accessible by '
'a playbook. Note, this does not influence the deletion of ansible_facts from the database. '
'Use a value of 0 to indicate that no timeout should be imposed.'),
help_text=_(
'Maximum time, in seconds, that stored Ansible facts are considered valid since '
'the last time they were modified. Only valid, non-stale, facts will be accessible by '
'a playbook. Note, this does not influence the deletion of ansible_facts from the database. '
'Use a value of 0 to indicate that no timeout should be imposed.'
),
category=_('Jobs'),
category_slug='jobs',
unit=_('seconds'),
@ -576,8 +589,7 @@ register(
allow_null=False,
default=200,
label=_('Maximum number of forks per job'),
help_text=_('Saving a Job Template with more than this number of forks will result in an error. '
'When set to 0, no limit is applied.'),
help_text=_('Saving a Job Template with more than this number of forks will result in an error. ' 'When set to 0, no limit is applied.'),
category=_('Jobs'),
category_slug='jobs',
)
@ -598,11 +610,10 @@ register(
allow_null=True,
default=None,
label=_('Logging Aggregator Port'),
help_text=_('Port on Logging Aggregator to send logs to (if required and not'
' provided in Logging Aggregator).'),
help_text=_('Port on Logging Aggregator to send logs to (if required and not' ' provided in Logging Aggregator).'),
category=_('Logging'),
category_slug='logging',
required=False
required=False,
)
register(
'LOG_AGGREGATOR_TYPE',
@ -643,12 +654,14 @@ register(
field_class=fields.StringListField,
default=['awx', 'activity_stream', 'job_events', 'system_tracking'],
label=_('Loggers Sending Data to Log Aggregator Form'),
help_text=_('List of loggers that will send HTTP logs to the collector, these can '
'include any or all of: \n'
'awx - service logs\n'
'activity_stream - activity stream records\n'
'job_events - callback data from Ansible job events\n'
'system_tracking - facts gathered from scan jobs.'),
help_text=_(
'List of loggers that will send HTTP logs to the collector, these can '
'include any or all of: \n'
'awx - service logs\n'
'activity_stream - activity stream records\n'
'job_events - callback data from Ansible job events\n'
'system_tracking - facts gathered from scan jobs.'
),
category=_('Logging'),
category_slug='logging',
)
@ -657,10 +670,12 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Log System Tracking Facts Individually'),
help_text=_('If set, system tracking facts will be sent for each package, service, or '
'other item found in a scan, allowing for greater search query granularity. '
'If unset, facts will be sent as a single dictionary, allowing for greater '
'efficiency in fact processing.'),
help_text=_(
'If set, system tracking facts will be sent for each package, service, or '
'other item found in a scan, allowing for greater search query granularity. '
'If unset, facts will be sent as a single dictionary, allowing for greater '
'efficiency in fact processing.'
),
category=_('Logging'),
category_slug='logging',
)
@ -689,9 +704,11 @@ register(
choices=[('https', 'HTTPS/HTTP'), ('tcp', 'TCP'), ('udp', 'UDP')],
default='https',
label=_('Logging Aggregator Protocol'),
help_text=_('Protocol used to communicate with log aggregator. '
'HTTPS/HTTP assumes HTTPS unless http:// is explicitly used in '
'the Logging Aggregator hostname.'),
help_text=_(
'Protocol used to communicate with log aggregator. '
'HTTPS/HTTP assumes HTTPS unless http:// is explicitly used in '
'the Logging Aggregator hostname.'
),
category=_('Logging'),
category_slug='logging',
)
@ -700,9 +717,7 @@ register(
field_class=fields.IntegerField,
default=5,
label=_('TCP Connection Timeout'),
help_text=_('Number of seconds for a TCP connection to external log '
'aggregator to timeout. Applies to HTTPS and TCP log '
'aggregator protocols.'),
help_text=_('Number of seconds for a TCP connection to external log ' 'aggregator to timeout. Applies to HTTPS and TCP log ' 'aggregator protocols.'),
category=_('Logging'),
category_slug='logging',
unit=_('seconds'),
@ -712,10 +727,12 @@ register(
field_class=fields.BooleanField,
default=True,
label=_('Enable/disable HTTPS certificate verification'),
help_text=_('Flag to control enable/disable of certificate verification'
' when LOG_AGGREGATOR_PROTOCOL is "https". If enabled, Tower\'s'
' log handler will verify certificate sent by external log aggregator'
' before establishing connection.'),
help_text=_(
'Flag to control enable/disable of certificate verification'
' when LOG_AGGREGATOR_PROTOCOL is "https". If enabled, Tower\'s'
' log handler will verify certificate sent by external log aggregator'
' before establishing connection.'
),
category=_('Logging'),
category_slug='logging',
)
@ -725,10 +742,12 @@ register(
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
default='WARNING',
label=_('Logging Aggregator Level Threshold'),
help_text=_('Level threshold used by log handler. Severities from lowest to highest'
' are DEBUG, INFO, WARNING, ERROR, CRITICAL. Messages less severe '
'than the threshold will be ignored by log handler. (messages under category '
'awx.anlytics ignore this setting)'),
help_text=_(
'Level threshold used by log handler. Severities from lowest to highest'
' are DEBUG, INFO, WARNING, ERROR, CRITICAL. Messages less severe '
'than the threshold will be ignored by log handler. (messages under category '
'awx.anlytics ignore this setting)'
),
category=_('Logging'),
category_slug='logging',
)
@ -738,9 +757,11 @@ register(
default=1,
min_value=1,
label=_('Maximum disk persistance for external log aggregation (in GB)'),
help_text=_('Amount of data to store (in gigabytes) during an outage of '
'the external log aggregator (defaults to 1). '
'Equivalent to the rsyslogd queue.maxdiskspace setting.'),
help_text=_(
'Amount of data to store (in gigabytes) during an outage of '
'the external log aggregator (defaults to 1). '
'Equivalent to the rsyslogd queue.maxdiskspace setting.'
),
category=_('Logging'),
category_slug='logging',
)
@ -749,9 +770,11 @@ register(
field_class=fields.CharField,
default='/var/lib/awx',
label=_('File system location for rsyslogd disk persistence'),
help_text=_('Location to persist logs that should be retried after an outage '
'of the external log aggregator (defaults to /var/lib/awx). '
'Equivalent to the rsyslogd queue.spoolDirectory setting.'),
help_text=_(
'Location to persist logs that should be retried after an outage '
'of the external log aggregator (defaults to /var/lib/awx). '
'Equivalent to the rsyslogd queue.spoolDirectory setting.'
),
category=_('Logging'),
category_slug='logging',
)
@ -760,21 +783,19 @@ register(
field_class=fields.BooleanField,
default=False,
label=_('Enable rsyslogd debugging'),
help_text=_('Enabled high verbosity debugging for rsyslogd. '
'Useful for debugging connection issues for external log aggregation.'),
help_text=_('Enabled high verbosity debugging for rsyslogd. ' 'Useful for debugging connection issues for external log aggregation.'),
category=_('Logging'),
category_slug='logging',
)



register(
'AUTOMATION_ANALYTICS_LAST_GATHER',
field_class=fields.DateTimeField,
label=_('Last gather date for Automation Analytics.'),
allow_null=True,
category=_('System'),
category_slug='system'
category_slug='system',
)


@ -783,8 +804,8 @@ register(
field_class=fields.IntegerField,
label=_('Automation Analytics Gather Interval'),
help_text=_('Interval (in seconds) between data gathering.'),
default=14400, # every 4 hours
min_value=1800, # every 30 minutes
default=14400,  # every 4 hours
min_value=1800,  # every 30 minutes
category=_('System'),
category_slug='system',
unit=_('seconds'),
@ -792,17 +813,23 @@ register(


def logging_validate(serializer, attrs):
if not serializer.instance or \
not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or \
not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
return attrs
errors = []
if attrs.get('LOG_AGGREGATOR_ENABLED', False):
if not serializer.instance.LOG_AGGREGATOR_HOST and not attrs.get('LOG_AGGREGATOR_HOST', None) or\
serializer.instance.LOG_AGGREGATOR_HOST and not attrs.get('LOG_AGGREGATOR_HOST', True):
if (
not serializer.instance.LOG_AGGREGATOR_HOST
and not attrs.get('LOG_AGGREGATOR_HOST', None)
or serializer.instance.LOG_AGGREGATOR_HOST
and not attrs.get('LOG_AGGREGATOR_HOST', True)
):
errors.append('Cannot enable log aggregator without providing host.')
if not serializer.instance.LOG_AGGREGATOR_TYPE and not attrs.get('LOG_AGGREGATOR_TYPE', None) or\
serializer.instance.LOG_AGGREGATOR_TYPE and not attrs.get('LOG_AGGREGATOR_TYPE', True):
if (
not serializer.instance.LOG_AGGREGATOR_TYPE
and not attrs.get('LOG_AGGREGATOR_TYPE', None)
or serializer.instance.LOG_AGGREGATOR_TYPE
and not attrs.get('LOG_AGGREGATOR_TYPE', True)
):
errors.append('Cannot enable log aggregator without providing type.')
if errors:
raise serializers.ValidationError(_('\n'.join(errors)))

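A reading aid for the help_text churn above: several strings now appear as two or three adjacent literals on a single line. That is Python's implicit string concatenation, which black preserves rather than merging, so the translated text itself is unchanged:

```python
# Both spellings produce the same single string:
wrapped = ('Controls whether any Organization Admin can view all users and teams, '
           'even those not associated with their Organization.')
flat = 'Controls whether any Organization Admin can view all users and teams, ' 'even those not associated with their Organization.'
assert wrapped == flat
```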
@ -6,17 +6,33 @@ import re
from django.utils.translation import ugettext_lazy as _

__all__ = [
'CLOUD_PROVIDERS', 'SCHEDULEABLE_PROVIDERS', 'PRIVILEGE_ESCALATION_METHODS',
'ANSI_SGR_PATTERN', 'CAN_CANCEL', 'ACTIVE_STATES', 'STANDARD_INVENTORY_UPDATE_ENV'
'CLOUD_PROVIDERS',
'SCHEDULEABLE_PROVIDERS',
'PRIVILEGE_ESCALATION_METHODS',
'ANSI_SGR_PATTERN',
'CAN_CANCEL',
'ACTIVE_STATES',
'STANDARD_INVENTORY_UPDATE_ENV',
]

CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'tower')
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + (
'custom',
'scm',
)
PRIVILEGE_ESCALATION_METHODS = [
('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),
('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas')),
('enable', _('Enable')), ('doas', _('Doas')), ('ksu', _('Ksu')),
('machinectl', _('Machinectl')), ('sesu', _('Sesu')),
('sudo', _('Sudo')),
('su', _('Su')),
('pbrun', _('Pbrun')),
('pfexec', _('Pfexec')),
('dzdo', _('DZDO')),
('pmrun', _('Pmrun')),
('runas', _('Runas')),
('enable', _('Enable')),
('doas', _('Doas')),
('ksu', _('Ksu')),
('machinectl', _('Machinectl')),
('sesu', _('Sesu')),
]
CHOICES_PRIVILEGE_ESCALATION_METHODS = [('', _('None'))] + PRIVILEGE_ESCALATION_METHODS
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
@ -26,19 +42,35 @@ STANDARD_INVENTORY_UPDATE_ENV = {
# Always use the --export option for ansible-inventory
'ANSIBLE_INVENTORY_EXPORT': 'True',
# Redirecting output to stderr allows JSON parsing to still work with -vvv
'ANSIBLE_VERBOSE_TO_STDERR': 'True'
'ANSIBLE_VERBOSE_TO_STDERR': 'True',
}
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL
CENSOR_VALUE = '************'
ENV_BLOCKLIST = frozenset((
'VIRTUAL_ENV', 'PATH', 'PYTHONPATH', 'PROOT_TMP_DIR', 'JOB_ID',
'INVENTORY_ID', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID',
'AD_HOC_COMMAND_ID', 'REST_API_URL', 'REST_API_TOKEN', 'MAX_EVENT_RES',
'CALLBACK_QUEUE', 'CALLBACK_CONNECTION', 'CACHE',
'JOB_CALLBACK_DEBUG', 'INVENTORY_HOSTVARS',
'AWX_HOST', 'PROJECT_REVISION', 'SUPERVISOR_WEB_CONFIG_PATH'
))
ENV_BLOCKLIST = frozenset(
(
'VIRTUAL_ENV',
'PATH',
'PYTHONPATH',
'PROOT_TMP_DIR',
'JOB_ID',
'INVENTORY_ID',
'INVENTORY_SOURCE_ID',
'INVENTORY_UPDATE_ID',
'AD_HOC_COMMAND_ID',
'REST_API_URL',
'REST_API_TOKEN',
'MAX_EVENT_RES',
'CALLBACK_QUEUE',
'CALLBACK_CONNECTION',
'CACHE',
'JOB_CALLBACK_DEBUG',
'INVENTORY_HOSTVARS',
'AWX_HOST',
'PROJECT_REVISION',
'SUPERVISOR_WEB_CONFIG_PATH',
)
)

# loggers that may be called in process of emitting a log
LOGGER_BLOCKLIST = (
@ -48,5 +80,5 @@ LOGGER_BLOCKLIST = (
'awx.main.utils.encryption',
'awx.main.utils.log',
# loggers that may be called getting logging settings
'awx.conf'
'awx.conf',
)

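The explosions of SCHEDULEABLE_PROVIDERS, PRIVILEGE_ESCALATION_METHODS, and ENV_BLOCKLIST above come from black's "magic trailing comma": a trailing comma inside brackets tells black to keep one element per line, while omitting it lets a collection collapse onto a single line if it fits the configured length (160 characters in this repo). A sketch:

```python
# Trailing comma present: black keeps the tuple exploded, one element per line.
exploded = (
    'custom',
    'scm',
)

# No trailing comma: black is free to collapse it onto one line if it fits.
collapsed = ('custom', 'scm')

assert exploded == collapsed
```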
@ -22,7 +22,7 @@ class WebsocketSecretAuthHelper:
"""
Middlewareish for websockets to verify node websocket broadcast interconnect.

Note: The "ish" is due to the channels routing interface. Routing occurs
Note: The "ish" is due to the channels routing interface. Routing occurs
_after_ authentication; making it hard to apply this auth to _only_ a subset of
websocket endpoints.
"""
@ -30,19 +30,13 @@ class WebsocketSecretAuthHelper:
@classmethod
def construct_secret(cls):
nonce_serialized = f"{int(time.time())}"
payload_dict = {
'secret': settings.BROADCAST_WEBSOCKET_SECRET,
'nonce': nonce_serialized
}
payload_dict = {'secret': settings.BROADCAST_WEBSOCKET_SECRET, 'nonce': nonce_serialized}
payload_serialized = json.dumps(payload_dict)

secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET),
msg=force_bytes(payload_serialized),
digestmod='sha256').hexdigest()
secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET), msg=force_bytes(payload_serialized), digestmod='sha256').hexdigest()

return 'HMAC-SHA256 {}:{}'.format(nonce_serialized, secret_serialized)


@classmethod
def verify_secret(cls, s, nonce_tolerance=300):
try:
@ -62,9 +56,7 @@ class WebsocketSecretAuthHelper:
except Exception:
raise ValueError("Failed to create hash to compare to secret.")

secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET),
msg=force_bytes(payload_serialized),
digestmod='sha256').hexdigest()
secret_serialized = hmac.new(force_bytes(settings.BROADCAST_WEBSOCKET_SECRET), msg=force_bytes(payload_serialized), digestmod='sha256').hexdigest()

if secret_serialized != secret_parsed:
raise ValueError("Invalid secret")
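Condensed or not, the handshake above stays the same: serialize a timestamp nonce alongside the shared secret, HMAC-SHA256 the payload, and send `HMAC-SHA256 <nonce>:<digest>`; the verifier recomputes the digest and rejects stale nonces. A standalone sketch under the assumption that the secret is a plain string (it stands in for settings.BROADCAST_WEBSOCKET_SECRET; the real code also uses force_bytes and a plain `!=` comparison):

```python
import hmac
import json
import time

SECRET = 'not-the-real-secret'  # placeholder for settings.BROADCAST_WEBSOCKET_SECRET

def construct_secret():
    nonce = f"{int(time.time())}"
    payload = json.dumps({'secret': SECRET, 'nonce': nonce})
    digest = hmac.new(SECRET.encode(), msg=payload.encode(), digestmod='sha256').hexdigest()
    return 'HMAC-SHA256 {}:{}'.format(nonce, digest)

def verify_secret(s, nonce_tolerance=300):
    nonce, digest = s.split(' ', 1)[1].split(':')
    # Recompute the digest from the claimed nonce and compare.
    payload = json.dumps({'secret': SECRET, 'nonce': nonce})
    expected = hmac.new(SECRET.encode(), msg=payload.encode(), digestmod='sha256').hexdigest()
    if not hmac.compare_digest(expected, digest):
        raise ValueError("Invalid secret")
    if time.time() - int(nonce) > nonce_tolerance:
        raise ValueError("Stale nonce")

verify_secret(construct_secret())
```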
@ -90,7 +82,6 @@ class WebsocketSecretAuthHelper:


class BroadcastConsumer(AsyncJsonWebsocketConsumer):

async def connect(self):
try:
WebsocketSecretAuthHelper.is_authorized(self.scope)
@ -151,13 +142,10 @@ class EventConsumer(AsyncJsonWebsocketConsumer):

async def receive_json(self, data):
from awx.main.access import consumer_access

user = self.scope['user']
xrftoken = data.get('xrftoken')
if (
not xrftoken or
XRF_KEY not in self.scope["session"] or
xrftoken != self.scope["session"][XRF_KEY]
):
if not xrftoken or XRF_KEY not in self.scope["session"] or xrftoken != self.scope["session"][XRF_KEY]:
logger.error(f"access denied to channel, XRF mismatch for {user.username}")
await self.send_json({"error": "access denied to channel"})
return
@ -166,7 +154,7 @@ class EventConsumer(AsyncJsonWebsocketConsumer):
groups = data['groups']
new_groups = set()
current_groups = set(self.scope['session'].pop('groups') if 'groups' in self.scope['session'] else [])
for group_name,v in groups.items():
for group_name, v in groups.items():
if type(v) is list:
for oid in v:
name = '{}-{}'.format(group_name, oid)
@ -191,16 +179,9 @@ class EventConsumer(AsyncJsonWebsocketConsumer):

new_groups_exclusive = new_groups - current_groups
for group_name in new_groups_exclusive:
await self.channel_layer.group_add(
group_name,
self.channel_name
)
await self.channel_layer.group_add(group_name, self.channel_name)
self.scope['session']['groups'] = new_groups
await self.send_json({
"groups_current": list(new_groups),
"groups_left": list(old_groups),
"groups_joined": list(new_groups_exclusive)
})
await self.send_json({"groups_current": list(new_groups), "groups_left": list(old_groups), "groups_joined": list(new_groups_exclusive)})

async def internal_message(self, event):
await self.send(event['text'])
@ -221,7 +202,7 @@ def _dump_payload(payload):


def emit_channel_notification(group, payload):
from awx.main.wsbroadcast import wrap_broadcast_msg # noqa
from awx.main.wsbroadcast import wrap_broadcast_msg  # noqa

payload_dumped = _dump_payload(payload)
if payload_dumped is None:
@ -229,18 +210,19 @@ def emit_channel_notification(group, payload):

channel_layer = get_channel_layer()

run_sync(channel_layer.group_send(
group,
{
"type": "internal.message",
"text": payload_dumped
},
))
run_sync(
channel_layer.group_send(
group,
{"type": "internal.message", "text": payload_dumped},
)
)

run_sync(channel_layer.group_send(
settings.BROADCAST_WEBSOCKET_GROUP_NAME,
{
"type": "internal.message",
"text": wrap_broadcast_msg(group, payload_dumped),
},
))
run_sync(
channel_layer.group_send(
settings.BROADCAST_WEBSOCKET_GROUP_NAME,
{
"type": "internal.message",
"text": wrap_broadcast_msg(group, payload_dumped),
},
)
)

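The two run_sync(...) calls above fan one payload out to the subscribers' group and to the cross-node broadcast group. With channels' stock helpers the same send looks roughly like this (run_sync is AWX's own wrapper; asgiref's async_to_sync is the generic equivalent, and a channel layer must already be configured; the group name is hypothetical):

```python
from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer

channel_layer = get_channel_layer()
# "type": "internal.message" routes to the consumer's internal_message handler.
async_to_sync(channel_layer.group_send)(
    'jobs',  # hypothetical group name
    {"type": "internal.message", "text": '{"status": "running"}'},
)
```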
@ -6,51 +6,55 @@ from django.utils.translation import ugettext_lazy as _
import requests

aim_inputs = {
'fields': [{
'id': 'url',
'label': _('CyberArk AIM URL'),
'type': 'string',
'format': 'url',
}, {
'id': 'app_id',
'label': _('Application ID'),
'type': 'string',
'secret': True,
}, {
'id': 'client_key',
'label': _('Client Key'),
'type': 'string',
'secret': True,
'multiline': True,
}, {
'id': 'client_cert',
'label': _('Client Certificate'),
'type': 'string',
'secret': True,
'multiline': True,
}, {
'id': 'verify',
'label': _('Verify SSL Certificates'),
'type': 'boolean',
'default': True,
}],
'metadata': [{
'id': 'object_query',
'label': _('Object Query'),
'type': 'string',
'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
}, {
'id': 'object_query_format',
'label': _('Object Query Format'),
'type': 'string',
'default': 'Exact',
'choices': ['Exact', 'Regexp']
}, {
'id': 'reason',
'label': _('Reason'),
'type': 'string',
'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.')
}],
'fields': [
{
'id': 'url',
'label': _('CyberArk AIM URL'),
'type': 'string',
'format': 'url',
},
{
'id': 'app_id',
'label': _('Application ID'),
'type': 'string',
'secret': True,
},
{
'id': 'client_key',
'label': _('Client Key'),
'type': 'string',
'secret': True,
'multiline': True,
},
{
'id': 'client_cert',
'label': _('Client Certificate'),
'type': 'string',
'secret': True,
'multiline': True,
},
{
'id': 'verify',
'label': _('Verify SSL Certificates'),
'type': 'boolean',
'default': True,
},
],
'metadata': [
{
'id': 'object_query',
'label': _('Object Query'),
'type': 'string',
'help_text': _('Lookup query for the object. Ex: Safe=TestSafe;Object=testAccountName123'),
},
{'id': 'object_query_format', 'label': _('Object Query Format'), 'type': 'string', 'default': 'Exact', 'choices': ['Exact', 'Regexp']},
{
'id': 'reason',
'label': _('Reason'),
'type': 'string',
'help_text': _('Object request reason. This is only needed if it is required by the object\'s policy.'),
},
],
'required': ['url', 'app_id', 'object_query'],
}

@ -88,8 +92,4 @@ def aim_backend(**kwargs):
return res.json()['Content']


aim_plugin = CredentialPlugin(
'CyberArk AIM Central Credential Provider Lookup',
inputs=aim_inputs,
backend=aim_backend
)
aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)

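All of the lookup plugins in this file and the ones below share the same contract: `inputs` declares the credential form (fields, metadata, required) and `backend` receives those inputs as keyword arguments and returns the secret. A toy plugin in the same shape (CredentialPlugin is modeled here as a simple namedtuple, an assumption for illustration; the field names are illustrative too):

```python
from collections import namedtuple

CredentialPlugin = namedtuple('CredentialPlugin', ['name', 'inputs', 'backend'])

toy_inputs = {
    'fields': [{'id': 'url', 'label': 'Server URL', 'type': 'string'}],
    'metadata': [{'id': 'key', 'label': 'Key', 'type': 'string'}],
    'required': ['url', 'key'],
}

def toy_backend(**kwargs):
    # A real backend would call out to kwargs['url']; this one just echoes.
    return 'secret-for-' + kwargs['key']

toy_plugin = CredentialPlugin('Toy Lookup', inputs=toy_inputs, backend=toy_backend)
print(toy_plugin.backend(url='https://example.invalid', key='demo'))
```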
@ -7,51 +7,48 @@ from msrestazure import azure_cloud


# https://github.com/Azure/msrestazure-for-python/blob/master/msrestazure/azure_cloud.py
clouds = [
vars(azure_cloud)[n]
for n in dir(azure_cloud)
if n.startswith("AZURE_") and n.endswith("_CLOUD")
]
clouds = [vars(azure_cloud)[n] for n in dir(azure_cloud) if n.startswith("AZURE_") and n.endswith("_CLOUD")]
default_cloud = vars(azure_cloud)["AZURE_PUBLIC_CLOUD"]


azure_keyvault_inputs = {
'fields': [{
'id': 'url',
'label': _('Vault URL (DNS Name)'),
'type': 'string',
'format': 'url',
}, {
'id': 'client',
'label': _('Client ID'),
'type': 'string'
}, {
'id': 'secret',
'label': _('Client Secret'),
'type': 'string',
'secret': True,
}, {
'id': 'tenant',
'label': _('Tenant ID'),
'type': 'string'
}, {
'id': 'cloud_name',
'label': _('Cloud Environment'),
'help_text': _('Specify which azure cloud environment to use.'),
'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
'default': default_cloud.name
}],
'metadata': [{
'id': 'secret_field',
'label': _('Secret Name'),
'type': 'string',
'help_text': _('The name of the secret to look up.'),
}, {
'id': 'secret_version',
'label': _('Secret Version'),
'type': 'string',
'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
}],
'fields': [
{
'id': 'url',
'label': _('Vault URL (DNS Name)'),
'type': 'string',
'format': 'url',
},
{'id': 'client', 'label': _('Client ID'), 'type': 'string'},
{
'id': 'secret',
'label': _('Client Secret'),
'type': 'string',
'secret': True,
},
{'id': 'tenant', 'label': _('Tenant ID'), 'type': 'string'},
{
'id': 'cloud_name',
'label': _('Cloud Environment'),
'help_text': _('Specify which azure cloud environment to use.'),
'choices': list(set([default_cloud.name] + [c.name for c in clouds])),
'default': default_cloud.name,
},
],
'metadata': [
{
'id': 'secret_field',
'label': _('Secret Name'),
'type': 'string',
'help_text': _('The name of the secret to look up.'),
},
{
'id': 'secret_version',
'label': _('Secret Version'),
'type': 'string',
'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
},
],
'required': ['url', 'client', 'secret', 'tenant', 'secret_field'],
}

@ -62,11 +59,11 @@ def azure_keyvault_backend(**kwargs):

def auth_callback(server, resource, scope):
credentials = ServicePrincipalCredentials(
url = url,
client_id = kwargs['client'],
secret = kwargs['secret'],
tenant = kwargs['tenant'],
resource = f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
url=url,
client_id=kwargs['client'],
secret=kwargs['secret'],
tenant=kwargs['tenant'],
resource=f"https://{cloud.suffixes.keyvault_dns.split('.', 1).pop()}",
)
token = credentials.token
return token['token_type'], token['access_token']
@ -75,8 +72,4 @@ def azure_keyvault_backend(**kwargs):
return kv.get_secret(url, kwargs['secret_field'], kwargs.get('secret_version', '')).value


azure_keyvault_plugin = CredentialPlugin(
'Microsoft Azure Key Vault',
inputs=azure_keyvault_inputs,
backend=azure_keyvault_backend
)
azure_keyvault_plugin = CredentialPlugin('Microsoft Azure Key Vault', inputs=azure_keyvault_inputs, backend=azure_keyvault_backend)

@ -2,68 +2,68 @@ from .plugin import CredentialPlugin, raise_for_status
from django.utils.translation import ugettext_lazy as _
from urllib.parse import urljoin
import requests
pas_inputs = {
'fields': [{
'id': 'url',
'label': _('Centrify Tenant URL'),
'type': 'string',
'help_text': _('Centrify Tenant URL'),
'format': 'url',
}, {
'id':'client_id',
'label':_('Centrify API User'),
'type':'string',
'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'),

}, {
'id':'client_password',
'label':_('Centrify API Password'),
'type':'string',
'help_text': _('Password of Centrify API User with necessary permissions'),
'secret':True,
},{
'id':'oauth_application_id',
'label':_('OAuth2 Application ID'),
'type':'string',
'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'),
'default': 'awx',
},{
'id':'oauth_scope',
'label':_('OAuth2 Scope'),
'type':'string',
'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'),
'default': 'awx',
}],
'metadata': [{
'id': 'account-name',
'label': _('Account Name'),
'type': 'string',
'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'),
},{
'id': 'system-name',
'label': _('System Name'),
'type': 'string',
'help_text': _('Machine Name enrolled with in Centrify Portal'),
}],
'required': ['url', 'account-name', 'system-name','client_id','client_password'],
pas_inputs = {
'fields': [
{
'id': 'url',
'label': _('Centrify Tenant URL'),
'type': 'string',
'help_text': _('Centrify Tenant URL'),
'format': 'url',
},
{
'id': 'client_id',
'label': _('Centrify API User'),
'type': 'string',
'help_text': _('Centrify API User, having necessary permissions as mentioned in support doc'),
},
{
'id': 'client_password',
'label': _('Centrify API Password'),
'type': 'string',
'help_text': _('Password of Centrify API User with necessary permissions'),
'secret': True,
},
{
'id': 'oauth_application_id',
'label': _('OAuth2 Application ID'),
'type': 'string',
'help_text': _('Application ID of the configured OAuth2 Client (defaults to \'awx\')'),
'default': 'awx',
},
{
'id': 'oauth_scope',
'label': _('OAuth2 Scope'),
'type': 'string',
'help_text': _('Scope of the configured OAuth2 Client (defaults to \'awx\')'),
'default': 'awx',
},
],
'metadata': [
{
'id': 'account-name',
'label': _('Account Name'),
'type': 'string',
'help_text': _('Local system account or Domain account name enrolled in Centrify Vault. eg. (root or DOMAIN/Administrator)'),
},
{
'id': 'system-name',
'label': _('System Name'),
'type': 'string',
'help_text': _('Machine Name enrolled with in Centrify Portal'),
},
],
'required': ['url', 'account-name', 'system-name', 'client_id', 'client_password'],
}


# generate bearer token to authenticate with PAS portal, Input : Client ID, Client Secret
def handle_auth(**kwargs):
post_data = {
"grant_type": "client_credentials",
"scope": kwargs['oauth_scope']
}
response = requests.post(
kwargs['endpoint'],
data = post_data,
auth = (kwargs['client_id'],kwargs['client_password']),
verify = True,
timeout = (5, 30)
)
post_data = {"grant_type": "client_credentials", "scope": kwargs['oauth_scope']}
response = requests.post(kwargs['endpoint'], data=post_data, auth=(kwargs['client_id'], kwargs['client_password']), verify=True, timeout=(5, 30))
raise_for_status(response)
try:
try:
return response.json()['access_token']
except KeyError:
raise RuntimeError('OAuth request to tenant was unsuccessful')
@ -71,20 +71,11 @@ def handle_auth(**kwargs):

# fetch the ID of system with RedRock query, Input : System Name, Account Name
def get_ID(**kwargs):
endpoint = urljoin(kwargs['url'],'/Redrock/query')
name=" Name='{0}' and User='{1}'".format(kwargs['system_name'],kwargs['acc_name'])
query = 'Select ID from VaultAccount where {0}'.format(name)
post_headers = {
"Authorization": "Bearer " + kwargs['access_token'],
"X-CENTRIFY-NATIVE-CLIENT":"true"
}
response = requests.post(
endpoint,
json = {'Script': query},
headers = post_headers,
verify = True,
timeout = (5, 30)
)
endpoint = urljoin(kwargs['url'], '/Redrock/query')
name = " Name='{0}' and User='{1}'".format(kwargs['system_name'], kwargs['acc_name'])
query = 'Select ID from VaultAccount where {0}'.format(name)
post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"}
response = requests.post(endpoint, json={'Script': query}, headers=post_headers, verify=True, timeout=(5, 30))
raise_for_status(response)
try:
result_str = response.json()["Result"]["Results"]
@ -95,23 +86,14 @@ def get_ID(**kwargs):

# CheckOut Password from Centrify Vault, Input : ID
def get_passwd(**kwargs):
endpoint = urljoin(kwargs['url'],'/ServerManage/CheckoutPassword')
post_headers = {
"Authorization": "Bearer " + kwargs['access_token'],
"X-CENTRIFY-NATIVE-CLIENT":"true"
}
response = requests.post(
endpoint,
json = {'ID': kwargs['acc_id']},
headers = post_headers,
verify = True,
timeout = (5, 30)
)
endpoint = urljoin(kwargs['url'], '/ServerManage/CheckoutPassword')
post_headers = {"Authorization": "Bearer " + kwargs['access_token'], "X-CENTRIFY-NATIVE-CLIENT": "true"}
response = requests.post(endpoint, json={'ID': kwargs['acc_id']}, headers=post_headers, verify=True, timeout=(5, 30))
raise_for_status(response)
try:
return response.json()["Result"]["Password"]
except KeyError:
raise RuntimeError("Password Not Found")
raise RuntimeError("Password Not Found")


def centrify_backend(**kwargs):
@ -122,21 +104,12 @@ def centrify_backend(**kwargs):
client_password = kwargs.get('client_password')
app_id = kwargs.get('oauth_application_id', 'awx')
endpoint = urljoin(url, f'/oauth2/token/{app_id}')
endpoint = {
'endpoint': endpoint,
'client_id': client_id,
'client_password': client_password,
'oauth_scope': kwargs.get('oauth_scope', 'awx')
}
endpoint = {'endpoint': endpoint, 'client_id': client_id, 'client_password': client_password, 'oauth_scope': kwargs.get('oauth_scope', 'awx')}
token = handle_auth(**endpoint)
get_id_args = {'system_name':system_name,'acc_name':acc_name,'url':url,'access_token':token}
get_id_args = {'system_name': system_name, 'acc_name': acc_name, 'url': url, 'access_token': token}
acc_id = get_ID(**get_id_args)
get_pwd_args = {'url':url,'acc_id':acc_id,'access_token':token}
get_pwd_args = {'url': url, 'acc_id': acc_id, 'access_token': token}
return get_passwd(**get_pwd_args)


centrify_plugin = CredentialPlugin(
'Centrify Vault Credential Provider Lookup',
inputs=pas_inputs,
backend=centrify_backend
)
centrify_plugin = CredentialPlugin('Centrify Vault Credential Provider Lookup', inputs=pas_inputs, backend=centrify_backend)

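The Centrify flow above is three requests: an OAuth2 client-credentials token grant, a RedRock query for the account ID, then a password checkout, each carrying the bearer token. The token step in miniature (endpoint and credentials are placeholders):

```python
import requests

resp = requests.post(
    'https://tenant.example.invalid/oauth2/token/awx',  # placeholder tenant endpoint
    data={'grant_type': 'client_credentials', 'scope': 'awx'},
    auth=('api-user', 'api-password'),  # placeholder API user
    timeout=(5, 30),
)
token = resp.json()['access_token']
# Subsequent calls carry the bearer token:
post_headers = {'Authorization': 'Bearer ' + token, 'X-CENTRIFY-NATIVE-CLIENT': 'true'}
```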
@@ -8,41 +8,45 @@ import requests


conjur_inputs = {
    'fields': [{
        'id': 'url',
        'label': _('Conjur URL'),
        'type': 'string',
        'format': 'url',
    }, {
        'id': 'api_key',
        'label': _('API Key'),
        'type': 'string',
        'secret': True,
    }, {
        'id': 'account',
        'label': _('Account'),
        'type': 'string',
    }, {
        'id': 'username',
        'label': _('Username'),
        'type': 'string',
    }, {
        'id': 'cacert',
        'label': _('Public Key Certificate'),
        'type': 'string',
        'multiline': True
    }],
    'metadata': [{
        'id': 'secret_path',
        'label': _('Secret Identifier'),
        'type': 'string',
        'help_text': _('The identifier for the secret e.g., /some/identifier'),
    }, {
        'id': 'secret_version',
        'label': _('Secret Version'),
        'type': 'string',
        'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
    }],
    'fields': [
        {
            'id': 'url',
            'label': _('Conjur URL'),
            'type': 'string',
            'format': 'url',
        },
        {
            'id': 'api_key',
            'label': _('API Key'),
            'type': 'string',
            'secret': True,
        },
        {
            'id': 'account',
            'label': _('Account'),
            'type': 'string',
        },
        {
            'id': 'username',
            'label': _('Username'),
            'type': 'string',
        },
        {'id': 'cacert', 'label': _('Public Key Certificate'), 'type': 'string', 'multiline': True},
    ],
    'metadata': [
        {
            'id': 'secret_path',
            'label': _('Secret Identifier'),
            'type': 'string',
            'help_text': _('The identifier for the secret e.g., /some/identifier'),
        },
        {
            'id': 'secret_version',
            'label': _('Secret Version'),
            'type': 'string',
            'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
        },
    ],
    'required': ['url', 'api_key', 'account', 'username'],
}

@@ -50,7 +54,7 @@ conjur_inputs = {
def conjur_backend(**kwargs):
    url = kwargs['url']
    api_key = kwargs['api_key']
    account = quote(kwargs['account'], safe='')
    account = quote(kwargs['account'], safe='')
    username = quote(kwargs['username'], safe='')
    secret_path = quote(kwargs['secret_path'], safe='')
    version = kwargs.get('secret_version')
@@ -65,10 +69,7 @@ def conjur_backend(**kwargs):
    with CertFiles(cacert) as cert:
        # https://www.conjur.org/api.html#authentication-authenticate-post
        auth_kwargs['verify'] = cert
        resp = requests.post(
            urljoin(url, '/'.join(['authn', account, username, 'authenticate'])),
            **auth_kwargs
        )
        resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
    raise_for_status(resp)
    token = base64.b64encode(resp.content).decode('utf-8')

@@ -78,12 +79,7 @@ def conjur_backend(**kwargs):
    }

    # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
    path = urljoin(url, '/'.join([
        'secrets',
        account,
        'variable',
        secret_path
    ]))
    path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
    if version:
        path = '?'.join([path, version])

@@ -94,8 +90,4 @@ def conjur_backend(**kwargs):
    return resp.text


conjur_plugin = CredentialPlugin(
    'CyberArk Conjur Secret Lookup',
    inputs=conjur_inputs,
    backend=conjur_backend
)
conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)

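For orientation, a hedged sketch of how the backend above would be exercised; every value is a placeholder, a real call performs two HTTPS round-trips against the Conjur API, and `cacert=None` assumes the CA certificate input may be left empty:

```python
# Hypothetical lookup via the plugin backend; not values from this repository.
secret = conjur_backend(
    url='https://conjur.example.com',
    api_key='placeholder-api-key',
    account='myorg',
    username='host/awx',
    secret_path='prod/db/password',
    cacert=None,  # assumption: the CA cert input is optional
)
```
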
@@ -9,110 +9,131 @@ import requests
from django.utils.translation import ugettext_lazy as _

base_inputs = {
    'fields': [{
        'id': 'url',
        'label': _('Server URL'),
        'type': 'string',
        'format': 'url',
        'help_text': _('The URL to the HashiCorp Vault'),
    }, {
        'id': 'token',
        'label': _('Token'),
        'type': 'string',
        'secret': True,
        'help_text': _('The access token used to authenticate to the Vault server'),
    }, {
        'id': 'cacert',
        'label': _('CA Certificate'),
        'type': 'string',
        'multiline': True,
        'help_text': _('The CA certificate used to verify the SSL certificate of the Vault server')
    }, {
        'id': 'role_id',
        'label': _('AppRole role_id'),
        'type': 'string',
        'multiline': False,
        'help_text': _('The Role ID for AppRole Authentication')
    }, {
        'id': 'secret_id',
        'label': _('AppRole secret_id'),
        'type': 'string',
        'multiline': False,
        'secret': True,
        'help_text': _('The Secret ID for AppRole Authentication')
    }, {
        'id': 'namespace',
        'label': _('Namespace name (Vault Enterprise only)'),
        'type': 'string',
        'multiline': False,
        'help_text': _('Name of the namespace to use when authenticate and retrieve secrets')
    }, {
        'id': 'default_auth_path',
        'label': _('Path to Approle Auth'),
        'type': 'string',
        'multiline': False,
        'default': 'approle',
        'help_text': _('The AppRole Authentication path to use if one isn\'t provided in the metadata when linking to an input field. Defaults to \'approle\'')
    }
    'fields': [
        {
            'id': 'url',
            'label': _('Server URL'),
            'type': 'string',
            'format': 'url',
            'help_text': _('The URL to the HashiCorp Vault'),
        },
        {
            'id': 'token',
            'label': _('Token'),
            'type': 'string',
            'secret': True,
            'help_text': _('The access token used to authenticate to the Vault server'),
        },
        {
            'id': 'cacert',
            'label': _('CA Certificate'),
            'type': 'string',
            'multiline': True,
            'help_text': _('The CA certificate used to verify the SSL certificate of the Vault server'),
        },
        {'id': 'role_id', 'label': _('AppRole role_id'), 'type': 'string', 'multiline': False, 'help_text': _('The Role ID for AppRole Authentication')},
        {
            'id': 'secret_id',
            'label': _('AppRole secret_id'),
            'type': 'string',
            'multiline': False,
            'secret': True,
            'help_text': _('The Secret ID for AppRole Authentication'),
        },
        {
            'id': 'namespace',
            'label': _('Namespace name (Vault Enterprise only)'),
            'type': 'string',
            'multiline': False,
            'help_text': _('Name of the namespace to use when authenticate and retrieve secrets'),
        },
        {
            'id': 'default_auth_path',
            'label': _('Path to Approle Auth'),
            'type': 'string',
            'multiline': False,
            'default': 'approle',
            'help_text': _(
                'The AppRole Authentication path to use if one isn\'t provided in the metadata when linking to an input field. Defaults to \'approle\''
            ),
        },
    ],
    'metadata': [
        {
            'id': 'secret_path',
            'label': _('Path to Secret'),
            'type': 'string',
            'help_text': _('The path to the secret stored in the secret backend e.g, /some/secret/'),
        },
        {
            'id': 'auth_path',
            'label': _('Path to Auth'),
            'type': 'string',
            'multiline': False,
            'help_text': _('The path where the Authentication method is mounted e.g, approle'),
        },
    ],
    'metadata': [{
        'id': 'secret_path',
        'label': _('Path to Secret'),
        'type': 'string',
        'help_text': _('The path to the secret stored in the secret backend e.g, /some/secret/')
    }, {
        'id': 'auth_path',
        'label': _('Path to Auth'),
        'type': 'string',
        'multiline': False,
        'help_text': _('The path where the Authentication method is mounted e.g, approle')
    }],
    'required': ['url', 'secret_path'],
}

hashi_kv_inputs = copy.deepcopy(base_inputs)
hashi_kv_inputs['fields'].append({
    'id': 'api_version',
    'label': _('API Version'),
    'choices': ['v1', 'v2'],
    'help_text': _('API v1 is for static key/value lookups. API v2 is for versioned key/value lookups.'),
    'default': 'v1',
})
hashi_kv_inputs['metadata'] = [{
    'id': 'secret_backend',
    'label': _('Name of Secret Backend'),
    'type': 'string',
    'help_text': _('The name of the kv secret backend (if left empty, the first segment of the secret path will be used).')
}] + hashi_kv_inputs['metadata'] + [{
    'id': 'secret_key',
    'label': _('Key Name'),
    'type': 'string',
    'help_text': _('The name of the key to look up in the secret.'),
}, {
    'id': 'secret_version',
    'label': _('Secret Version (v2 only)'),
    'type': 'string',
    'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
}]
hashi_kv_inputs['fields'].append(
    {
        'id': 'api_version',
        'label': _('API Version'),
        'choices': ['v1', 'v2'],
        'help_text': _('API v1 is for static key/value lookups. API v2 is for versioned key/value lookups.'),
        'default': 'v1',
    }
)
hashi_kv_inputs['metadata'] = (
    [
        {
            'id': 'secret_backend',
            'label': _('Name of Secret Backend'),
            'type': 'string',
            'help_text': _('The name of the kv secret backend (if left empty, the first segment of the secret path will be used).'),
        }
    ]
    + hashi_kv_inputs['metadata']
    + [
        {
            'id': 'secret_key',
            'label': _('Key Name'),
            'type': 'string',
            'help_text': _('The name of the key to look up in the secret.'),
        },
        {
            'id': 'secret_version',
            'label': _('Secret Version (v2 only)'),
            'type': 'string',
            'help_text': _('Used to specify a specific secret version (if left empty, the latest version will be used).'),
        },
    ]
)
hashi_kv_inputs['required'].extend(['api_version', 'secret_key'])

hashi_ssh_inputs = copy.deepcopy(base_inputs)
hashi_ssh_inputs['metadata'] = [{
    'id': 'public_key',
    'label': _('Unsigned Public Key'),
    'type': 'string',
    'multiline': True,
}] + hashi_ssh_inputs['metadata'] + [{
    'id': 'role',
    'label': _('Role Name'),
    'type': 'string',
    'help_text': _('The name of the role used to sign.')
}, {
    'id': 'valid_principals',
    'label': _('Valid Principals'),
    'type': 'string',
    'help_text': _('Valid principals (either usernames or hostnames) that the certificate should be signed for.'),
}]
hashi_ssh_inputs['metadata'] = (
    [
        {
            'id': 'public_key',
            'label': _('Unsigned Public Key'),
            'type': 'string',
            'multiline': True,
        }
    ]
    + hashi_ssh_inputs['metadata']
    + [
        {'id': 'role', 'label': _('Role Name'), 'type': 'string', 'help_text': _('The name of the role used to sign.')},
        {
            'id': 'valid_principals',
            'label': _('Valid Principals'),
            'type': 'string',
            'help_text': _('Valid principals (either usernames or hostnames) that the certificate should be signed for.'),
        },
    ]
)
hashi_ssh_inputs['required'].extend(['public_key', 'role'])

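The parenthesized concatenations above only sandwich the shared base metadata between plugin-specific entries; black changed the layout, not the ordering. A minimal standalone echo of the resulting sequence (stand-in dicts, ids abbreviated):

```python
# Only the ordering matters for this illustration.
base = [{'id': 'secret_path'}, {'id': 'auth_path'}]
combined = [{'id': 'secret_backend'}] + base + [{'id': 'secret_key'}, {'id': 'secret_version'}]
print([f['id'] for f in combined])
# ['secret_backend', 'secret_path', 'auth_path', 'secret_key', 'secret_version']
```
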
@@ -209,9 +230,7 @@ def kv_backend(**kwargs):
        try:
            return json['data'][secret_key]
        except KeyError:
            raise RuntimeError(
                '{} is not present at {}'.format(secret_key, secret_path)
            )
            raise RuntimeError('{} is not present at {}'.format(secret_key, secret_path))
    return json['data']


@@ -248,14 +267,6 @@ def ssh_backend(**kwargs):
    return resp.json()['data']['signed_key']


hashivault_kv_plugin = CredentialPlugin(
    'HashiCorp Vault Secret Lookup',
    inputs=hashi_kv_inputs,
    backend=kv_backend
)
hashivault_kv_plugin = CredentialPlugin('HashiCorp Vault Secret Lookup', inputs=hashi_kv_inputs, backend=kv_backend)

hashivault_ssh_plugin = CredentialPlugin(
    'HashiCorp Vault Signed SSH',
    inputs=hashi_ssh_inputs,
    backend=ssh_backend
)
hashivault_ssh_plugin = CredentialPlugin('HashiCorp Vault Signed SSH', inputs=hashi_ssh_inputs, backend=ssh_backend)

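Since `CredentialPlugin` in this codebase is a plain namedtuple of name, inputs, and backend, exercising a lookup amounts to calling the backend with the collected inputs. A hedged sketch with placeholder values (a real call reaches out to a Vault server):

```python
# Hypothetical values; 'token' auth is assumed here rather than AppRole.
secret = hashivault_kv_plugin.backend(
    url='https://vault.example.com',
    token='s.placeholder-token',
    api_version='v2',
    secret_path='/secret/my-app',
    secret_key='password',
)
```
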
@@ -16,7 +16,7 @@ def raise_for_status(resp):
        raise exc


class CertFiles():
class CertFiles:
    """
    A context manager used for writing a certificate and (optional) key
    to $TMPDIR, and cleaning up afterwards.

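A hedged sketch of how the plugins above consume this context manager: the PEM contents are written to a temporary file whose path is handed to requests' `verify` argument, then cleaned up on exit (the URL and certificate below are placeholders):

```python
cacert = '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----'  # placeholder PEM
with CertFiles(cacert) as cert:
    resp = requests.get('https://vault.example.com/v1/sys/health', verify=cert, timeout=(5, 30))
```
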
@@ -16,7 +16,6 @@ __all__ = ['DatabaseWrapper']


class RecordedQueryLog(object):

    def __init__(self, log, db, dest='/var/log/tower/profile'):
        self.log = log
        self.db = db
@@ -70,10 +69,7 @@ class RecordedQueryLog(object):
                break
        else:
            progname = os.path.basename(sys.argv[0])
        filepath = os.path.join(
            self.dest,
            '{}.sqlite'.format(progname)
        )
        filepath = os.path.join(self.dest, '{}.sqlite'.format(progname))
        version = pkg_resources.get_distribution('awx').version
        log = sqlite3.connect(filepath, timeout=3)
        log.execute(
@@ -91,9 +87,8 @@ class RecordedQueryLog(object):
        )
        log.commit()
        log.execute(
            'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) '
            'VALUES (?, ?, ?, ?, ?, ?, ?);',
            (os.getpid(), version, ' ' .join(sys.argv), seconds, sql, explain, bt)
            'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) ' 'VALUES (?, ?, ?, ?, ?, ?, ?);',
            (os.getpid(), version, ' '.join(sys.argv), seconds, sql, explain, bt),
        )
        log.commit()

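The recording pattern above is plain `sqlite3`: one connection per destination file, one parameterized INSERT per profiled query. A self-contained sketch with placeholder values (the schema is abbreviated to match the columns named in the INSERT):

```python
import sqlite3

log = sqlite3.connect('/tmp/profile-example.sqlite', timeout=3)
log.execute('CREATE TABLE IF NOT EXISTS queries (pid INT, version TEXT, argv TEXT, time REAL, sql TEXT, explain TEXT, bt TEXT);')
log.execute(
    'INSERT INTO queries (pid, version, argv, time, sql, explain, bt) VALUES (?, ?, ?, ?, ?, ?, ?);',
    (1234, '0.0.0', 'example', 0.01, 'SELECT 1;', '', ''),  # placeholder row
)
log.commit()
```
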
@@ -47,16 +47,9 @@ class PubSub(object):
@contextmanager
def pg_bus_conn():
    conf = settings.DATABASES['default']
    conn = psycopg2.connect(dbname=conf['NAME'],
                            host=conf['HOST'],
                            user=conf['USER'],
                            password=conf['PASSWORD'],
                            port=conf['PORT'],
                            **conf.get("OPTIONS", {}))
    conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf.get("OPTIONS", {}))
    # Django connection.cursor().connection doesn't have autocommit=True on
    conn.set_session(autocommit=True)
    pubsub = PubSub(conn)
    yield pubsub
    conn.close()

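A hedged usage sketch for the context manager above (the channel name is illustrative, and it assumes a reachable Postgres in `settings.DATABASES['default']`); `listen`, `notify`, and `events` are the methods the dispatcher code below relies on:

```python
import json

with pg_bus_conn() as pubsub:
    pubsub.listen('example_channel')
    pubsub.notify('example_channel', json.dumps({'control': 'ping'}))
    for event in pubsub.events(select_timeout=5, yield_timeouts=True):
        if event is None:  # select timed out with nothing to read
            break
```
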
@@ -48,8 +48,7 @@ class Control(object):

        with pg_bus_conn() as conn:
            conn.listen(reply_queue)
            conn.notify(self.queuename,
                        json.dumps({'control': command, 'reply_to': reply_queue}))
            conn.notify(self.queuename, json.dumps({'control': command, 'reply_to': reply_queue}))

            for reply in conn.events(select_timeout=timeout, yield_timeouts=True):
                if reply is None:

@@ -14,12 +14,8 @@ logger = logging.getLogger('awx.main.dispatch.periodic')


class Scheduler(Scheduler):

    def run_continuously(self):
        idle_seconds = max(
            1,
            min(self.jobs).period.total_seconds() / 2
        )
        idle_seconds = max(1, min(self.jobs).period.total_seconds() / 2)

        def run():
            ppid = os.getppid()
@@ -39,9 +35,7 @@ class Scheduler(Scheduler):
                    GuidMiddleware.set_guid(GuidMiddleware._generate_guid())
                    self.run_pending()
                except Exception:
                    logger.exception(
                        'encountered an error while scheduling periodic tasks'
                    )
                    logger.exception('encountered an error while scheduling periodic tasks')
                time.sleep(idle_seconds)

        process = Process(target=run)

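The collapsed `idle_seconds` expression above sleeps for half of the shortest job period, floored at one second. A standalone illustration, with plain timedeltas standing in for the scheduler's job objects (an assumption made purely for the arithmetic):

```python
from datetime import timedelta

periods = [timedelta(seconds=20), timedelta(seconds=6)]
idle_seconds = max(1, min(periods).total_seconds() / 2)
print(idle_seconds)  # 3.0
```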