Merge branch 'stable' into devel

* stable: (275 commits)
  Install correct rpm-sign package in RPM builder
  Updating changelog for 3.1 release
  Switch job_type to check from sync when detecting delete_on_update
  use Unicode apostrophes - not single quotes - for French i18n strings
  pin appdirs==1.4.2
  only cancel deps if we can cancel the inv update
  fixing module_name check and adding support for the debug module
  cancel jobs dependent on inv update
  update tests
  CSS tweaks to workflow results panels
  like inventory updates, check if project update deps already processed
  Revert "Merge pull request #5553 from chrismeyersfsu/fix-waiting_blocked"
  Add awx/ui/client/languages to .gitignore
  Delete awx/ui/client/languages/*.json
  refactor based on review
  Add missing permission check.
  Make current_groups a set to easily avoid duplicates, update asgi-amqp requirement
  avoid duplicated related search fields
  Fix workflow audit items
  fixing module name, json blob, and stdout-for-yum-module on host event
  ...
This commit is contained in:
Matthew Jones 2017-02-28 11:37:05 -05:00
commit 1fefa4cfdd
205 changed files with 4246 additions and 2404 deletions

2
.gitignore vendored
View File

@ -22,6 +22,7 @@ tower/tower_warnings.log
celerybeat-schedule
awx/ui/static
awx/ui/build_test
awx/ui/client/languages
# Tower setup playbook testing
setup/test/roles/postgresql
@ -112,3 +113,4 @@ local/
awx/lib/.deps_built
awx/lib/site-packages
venv/*
use_dev_supervisor.txt

View File

@ -378,6 +378,12 @@ server: server_noattach
servercc: server_noattach
tmux -2 -CC attach-session -t tower
supervisor:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
supervisord --configuration /supervisor.conf --pidfile=/tmp/supervisor_pid
# Alternate approach to tmux to run all development tasks specified in
# Procfile. https://youtu.be/OPMgaibszjk
honcho:

View File

@ -9,9 +9,11 @@ from django.core.exceptions import FieldError, ValidationError
from django.db import models
from django.db.models import Q
from django.db.models.fields import FieldDoesNotExist
from django.db.models.fields.related import ForeignObjectRel
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied
@ -88,8 +90,8 @@ class FieldLookupBackend(BaseFilterBackend):
# those lookups combined with request.user.get_queryset(Model) to make
# sure user cannot query using objects he could not view.
new_parts = []
for n, name in enumerate(parts[:-1]):
for name in parts[:-1]:
# HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
if model._meta.object_name in ('Project', 'InventorySource'):
name = {
@ -99,15 +101,28 @@ class FieldLookupBackend(BaseFilterBackend):
'last_updated': 'last_job_run',
}.get(name, name)
new_parts.append(name)
if name == 'type' and 'polymorphic_ctype' in model._meta.get_all_field_names():
name = 'polymorphic_ctype'
new_parts.append('polymorphic_ctype__model')
else:
new_parts.append(name)
if name in getattr(model, 'PASSWORD_FIELDS', ()):
raise PermissionDenied('Filtering on password fields is not allowed.')
raise PermissionDenied(_('Filtering on password fields is not allowed.'))
elif name == 'pk':
field = model._meta.pk
else:
field = model._meta.get_field_by_name(name)[0]
name_alt = name.replace("_", "")
if name_alt in model._meta.fields_map.keys():
field = model._meta.fields_map[name_alt]
new_parts.pop()
new_parts.append(name_alt)
else:
field = model._meta.get_field_by_name(name)[0]
if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
elif getattr(field, '__prevent_search__', False):
raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
model = getattr(field, 'related_model', None) or field.model
if parts:
@ -127,14 +142,20 @@ class FieldLookupBackend(BaseFilterBackend):
return to_python_boolean(value, allow_none=True)
elif isinstance(field, models.BooleanField):
return to_python_boolean(value)
elif isinstance(field, ForeignObjectRel):
elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
return self.to_python_related(value)
else:
return field.to_python(value)
def value_to_python(self, model, lookup, value):
field, new_lookup = self.get_field_from_lookup(model, lookup)
if new_lookup.endswith('__isnull'):
# Type names are stored without underscores internally, but are presented
# and serialized over the API containing underscores so we remove `_`
# for polymorphic_ctype__model lookups.
if new_lookup.startswith('polymorphic_ctype__model'):
value = value.replace('_','')
elif new_lookup.endswith('__isnull'):
value = to_python_boolean(value)
elif new_lookup.endswith('__in'):
items = []

View File

@ -9,6 +9,7 @@ import time
# Django
from django.conf import settings
from django.db import connection
from django.db.models.fields import FieldDoesNotExist
from django.http import QueryDict
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
@ -26,6 +27,7 @@ from rest_framework import status
from rest_framework import views
# AWX
from awx.api.filters import FieldLookupBackend
from awx.main.models import * # noqa
from awx.main.utils import * # noqa
from awx.api.serializers import ResourceAccessListElementSerializer
@ -41,6 +43,7 @@ __all__ = ['APIView', 'GenericAPIView', 'ListAPIView', 'SimpleListAPIView',
'DeleteLastUnattachLabelMixin',]
logger = logging.getLogger('awx.api.generics')
analytics_logger = logging.getLogger('awx.analytics.performance')
def get_view_name(cls, suffix=None):
@ -117,6 +120,8 @@ class APIView(views.APIView):
q_times = [float(q['time']) for q in connection.queries[queries_before:]]
response['X-API-Query-Count'] = len(q_times)
response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)
analytics_logger.info("api response", extra=dict(python_objects=dict(request=request, response=response)))
return response
def get_authenticate_header(self, request):
@ -274,22 +279,48 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
@property
def related_search_fields(self):
fields = []
def skip_related_name(name):
return (
name is None or name.endswith('_role') or name.startswith('_') or
name.startswith('deprecated_') or name.endswith('_set') or
name == 'polymorphic_ctype')
fields = set([])
for field in self.model._meta.fields:
if field.name.endswith('_role'):
if skip_related_name(field.name):
continue
if getattr(field, 'related_model', None):
fields.append('{}__search'.format(field.name))
fields.add('{}__search'.format(field.name))
for rel in self.model._meta.related_objects:
name = rel.get_accessor_name()
if name.endswith('_set'):
name = rel.related_model._meta.verbose_name.replace(" ", "_")
if skip_related_name(name):
continue
fields.add('{}__search'.format(name))
m2m_rel = []
m2m_rel += self.model._meta.local_many_to_many
if issubclass(self.model, UnifiedJobTemplate) and self.model != UnifiedJobTemplate:
m2m_rel += UnifiedJobTemplate._meta.local_many_to_many
if issubclass(self.model, UnifiedJob) and self.model != UnifiedJob:
m2m_rel += UnifiedJob._meta.local_many_to_many
for relationship in m2m_rel:
if skip_related_name(relationship.name):
continue
fields.append('{}__search'.format(name))
for relationship in self.model._meta.local_many_to_many:
if relationship.related_model._meta.app_label != 'main':
continue
fields.append('{}__search'.format(relationship.name))
return fields
fields.add('{}__search'.format(relationship.name))
fields = list(fields)
allowed_fields = []
for field in fields:
try:
FieldLookupBackend().get_field_from_lookup(self.model, field)
except PermissionDenied:
pass
except FieldDoesNotExist:
allowed_fields.append(field)
else:
allowed_fields.append(field)
return allowed_fields
class ListCreateAPIView(ListAPIView, generics.ListCreateAPIView):

View File

@ -67,7 +67,10 @@ class Metadata(metadata.SimpleMetadata):
# Indicate if a field has a default value.
# FIXME: Still isn't showing all default values?
try:
field_info['default'] = field.get_default()
default = field.get_default()
if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
default = '{}://{}'.format(self.request.scheme, self.request.get_host())
field_info['default'] = default
except serializers.SkipField:
pass
@ -120,19 +123,20 @@ class Metadata(metadata.SimpleMetadata):
actions = {}
for method in {'GET', 'PUT', 'POST'} & set(view.allowed_methods):
view.request = clone_request(request, method)
obj = None
try:
# Test global permissions
if hasattr(view, 'check_permissions'):
view.check_permissions(view.request)
# Test object permissions
if method == 'PUT' and hasattr(view, 'get_object'):
view.get_object()
obj = view.get_object()
except (exceptions.APIException, PermissionDenied, Http404):
continue
else:
# If user has appropriate permissions for the view, include
# appropriate metadata about the fields that should be supplied.
serializer = view.get_serializer()
serializer = view.get_serializer(instance=obj)
actions[method] = self.get_serializer_info(serializer)
finally:
view.request = request
@ -167,6 +171,10 @@ class Metadata(metadata.SimpleMetadata):
return actions
def determine_metadata(self, request, view):
# store request on self so we can use it to generate field defaults
# (such as TOWER_URL_BASE)
self.request = request
metadata = super(Metadata, self).determine_metadata(request, view)
# Add version number in which view was added to Tower.

View File

@ -42,7 +42,9 @@ from awx.main.constants import SCHEDULEABLE_PROVIDERS
from awx.main.models import * # noqa
from awx.main.access import get_user_capabilities
from awx.main.fields import ImplicitRoleField
from awx.main.utils import get_type_for_model, get_model_for_type, build_url, timestamp_apiformat, camelcase_to_underscore, getattrd
from awx.main.utils import (
get_type_for_model, get_model_for_type, build_url, timestamp_apiformat,
camelcase_to_underscore, getattrd, parse_yaml_or_json)
from awx.main.validators import vars_validate_or_raise
from awx.conf.license import feature_enabled
@ -1307,10 +1309,7 @@ class BaseVariableDataSerializer(BaseSerializer):
if obj is None:
return {}
ret = super(BaseVariableDataSerializer, self).to_representation(obj)
try:
return json.loads(ret.get('variables', '') or '{}')
except ValueError:
return yaml.safe_load(ret.get('variables', ''))
return parse_yaml_or_json(ret.get('variables', '') or '{}')
def to_internal_value(self, data):
data = {'variables': json.dumps(data)}
@ -1622,8 +1621,11 @@ class ResourceAccessListElementSerializer(UserSerializer):
role_dict['user_capabilities'] = {'unattach': False}
return { 'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, role)}
def format_team_role_perm(team_role, permissive_role_ids):
def format_team_role_perm(naive_team_role, permissive_role_ids):
ret = []
team_role = naive_team_role
if naive_team_role.role_field == 'admin_role':
team_role = naive_team_role.content_object.member_role
for role in team_role.children.filter(id__in=permissive_role_ids).all():
role_dict = {
'id': role.id,
@ -1682,11 +1684,11 @@ class ResourceAccessListElementSerializer(UserSerializer):
ret['summary_fields']['direct_access'] \
= [format_role_perm(r) for r in direct_access_roles.distinct()] \
+ [y for x in (format_team_role_perm(r, direct_permissive_role_ids) for r in direct_team_roles.distinct()) for y in x]
+ [y for x in (format_team_role_perm(r, direct_permissive_role_ids) for r in direct_team_roles.distinct()) for y in x] \
+ [y for x in (format_team_role_perm(r, all_permissive_role_ids) for r in indirect_team_roles.distinct()) for y in x]
ret['summary_fields']['indirect_access'] \
= [format_role_perm(r) for r in indirect_access_roles.distinct()] \
+ [y for x in (format_team_role_perm(r, all_permissive_role_ids) for r in indirect_team_roles.distinct()) for y in x]
= [format_role_perm(r) for r in indirect_access_roles.distinct()]
return ret

View File

@ -22,7 +22,7 @@ from django.contrib.auth.models import User, AnonymousUser
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.core.exceptions import FieldError
from django.db.models import Q, Count
from django.db.models import Q, Count, F
from django.db import IntegrityError, transaction, connection
from django.shortcuts import get_object_or_404
from django.utils.encoding import smart_text, force_text
@ -518,7 +518,7 @@ class AuthView(APIView):
def get(self, request):
data = OrderedDict()
err_backend, err_message = request.session.get('social_auth_error', (None, None))
auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS).items()
auth_backends = load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True).items()
# Return auth backends in consistent order: Google, GitHub, SAML.
auth_backends.sort(key=lambda x: 'g' if x[0] == 'google-oauth2' else x[0])
for name, backend in auth_backends:
@ -646,15 +646,16 @@ class OrganizationCountsMixin(object):
self.request.user, 'read_role').values('organization').annotate(
Count('organization')).order_by('organization')
JT_reference = 'project__organization'
db_results['job_templates'] = JobTemplate.accessible_objects(
self.request.user, 'read_role').exclude(job_type='scan').values(JT_reference).annotate(
Count(JT_reference)).order_by(JT_reference)
JT_project_reference = 'project__organization'
JT_inventory_reference = 'inventory__organization'
db_results['job_templates_project'] = JobTemplate.accessible_objects(
self.request.user, 'read_role').exclude(
project__organization=F(JT_inventory_reference)).values(JT_project_reference).annotate(
Count(JT_project_reference)).order_by(JT_project_reference)
JT_scan_reference = 'inventory__organization'
db_results['job_templates_scan'] = JobTemplate.accessible_objects(
self.request.user, 'read_role').filter(job_type='scan').values(JT_scan_reference).annotate(
Count(JT_scan_reference)).order_by(JT_scan_reference)
db_results['job_templates_inventory'] = JobTemplate.accessible_objects(
self.request.user, 'read_role').values(JT_inventory_reference).annotate(
Count(JT_inventory_reference)).order_by(JT_inventory_reference)
db_results['projects'] = project_qs\
.values('organization').annotate(Count('organization')).order_by('organization')
@ -672,16 +673,16 @@ class OrganizationCountsMixin(object):
'inventories': 0, 'teams': 0, 'users': 0, 'job_templates': 0,
'admins': 0, 'projects': 0}
for res in db_results:
if res == 'job_templates':
org_reference = JT_reference
elif res == 'job_templates_scan':
org_reference = JT_scan_reference
for res, count_qs in db_results.items():
if res == 'job_templates_project':
org_reference = JT_project_reference
elif res == 'job_templates_inventory':
org_reference = JT_inventory_reference
elif res == 'users':
org_reference = 'id'
else:
org_reference = 'organization'
for entry in db_results[res]:
for entry in count_qs:
org_id = entry[org_reference]
if org_id in count_context:
if res == 'users':
@ -690,11 +691,13 @@ class OrganizationCountsMixin(object):
continue
count_context[org_id][res] = entry['%s__count' % org_reference]
# Combine the counts for job templates with scan job templates
# Combine the counts for job templates by project and inventory
for org in org_id_list:
org_id = org['id']
if 'job_templates_scan' in count_context[org_id]:
count_context[org_id]['job_templates'] += count_context[org_id].pop('job_templates_scan')
count_context[org_id]['job_templates'] = 0
for related_path in ['job_templates_project', 'job_templates_inventory']:
if related_path in count_context[org_id]:
count_context[org_id]['job_templates'] += count_context[org_id].pop(related_path)
full_context['related_field_counts'] = count_context
@ -1865,6 +1868,16 @@ class GroupChildrenList(EnforceParentRelationshipMixin, SubListCreateAttachDetac
relationship = 'children'
enforce_parent_relationship = 'inventory'
def unattach(self, request, *args, **kwargs):
sub_id = request.data.get('id', None)
if sub_id is not None:
return super(GroupChildrenList, self).unattach(request, *args, **kwargs)
parent = self.get_parent_object()
if not request.user.can_access(self.model, 'delete', parent):
raise PermissionDenied()
parent.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class GroupPotentialChildrenList(SubListAPIView):
@ -2484,7 +2497,7 @@ class JobTemplateSurveySpec(GenericAPIView):
return Response(dict(error=_("'required' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if survey_item["type"] == "password":
if "default" in survey_item and survey_item["default"].startswith('$encrypted$'):
if survey_item.get("default") and survey_item["default"].startswith('$encrypted$'):
old_spec = obj.survey_spec
for old_item in old_spec['spec']:
if old_item['variable'] == survey_item['variable']:
@ -3039,6 +3052,9 @@ class WorkflowJobTemplateWorkflowNodesList(WorkflowsEnforcementMixin, SubListCre
data[fd] = None
return super(WorkflowJobTemplateWorkflowNodesList, self).update_raw_data(data)
def get_queryset(self):
return super(WorkflowJobTemplateWorkflowNodesList, self).get_queryset().order_by('id')
class WorkflowJobTemplateJobsList(WorkflowsEnforcementMixin, SubListAPIView):
@ -3149,6 +3165,9 @@ class WorkflowJobWorkflowNodesList(WorkflowsEnforcementMixin, SubListAPIView):
parent_key = 'workflow_job'
new_in_310 = True
def get_queryset(self):
return super(WorkflowJobWorkflowNodesList, self).get_queryset().order_by('id')
class WorkflowJobCancel(WorkflowsEnforcementMixin, RetrieveAPIView):

View File

@ -2,7 +2,7 @@
from django.apps import AppConfig
# from django.core import checks
from django.utils.translation import ugettext_lazy as _
from django.utils.log import configure_logging
from awx.main.utils.handlers import configure_external_logger
from django.conf import settings
@ -15,10 +15,4 @@ class ConfConfig(AppConfig):
self.module.autodiscover()
from .settings import SettingsWrapper
SettingsWrapper.initialize()
if settings.LOG_AGGREGATOR_ENABLED:
LOGGING_DICT = settings.LOGGING
LOGGING_DICT['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler'
if 'awx' in settings.LOG_AGGREGATOR_LOGGERS:
if 'http_receiver' not in LOGGING_DICT['loggers']['awx']['handlers']:
LOGGING_DICT['loggers']['awx']['handlers'] += ['http_receiver']
configure_logging(settings.LOGGING_CONFIG, LOGGING_DICT)
configure_external_logger(settings)

View File

@ -19,6 +19,18 @@ logger = logging.getLogger('awx.conf.fields')
# appropriate Python type to be used in settings.
# Settings values may legitimately be null; work around django-rest-framework's
# stock CharField, which casts `None` to the string `"None"`:
# https://github.com/tomchristie/django-rest-framework/blob/cbad236f6d817d992873cd4df6527d46ab243ed1/rest_framework/fields.py#L761
class CharField(CharField):
    def to_representation(self, value):
        """Serialize *value*, passing ``None`` through untouched."""
        if value is not None:
            return super(CharField, self).to_representation(value)
        return None
class StringListField(ListField):
    # Each list element is handled by the local None-preserving CharField
    # defined above. NOTE(review): assumes this class body continues with no
    # further members in the full file — confirm against the original source.
    child = CharField()

View File

@ -8,7 +8,7 @@ import json
from django.db import models
# Tower
from awx.main.models.base import CreatedModifiedModel
from awx.main.models.base import CreatedModifiedModel, prevent_search
from awx.main.fields import JSONField
from awx.main.utils import encrypt_field
from awx.conf import settings_registry
@ -24,14 +24,14 @@ class Setting(CreatedModifiedModel):
value = JSONField(
null=True,
)
user = models.ForeignKey(
user = prevent_search(models.ForeignKey(
'auth.User',
related_name='settings',
default=None,
null=True,
editable=False,
on_delete=models.CASCADE,
)
))
def __unicode__(self):
try:

View File

@ -6,6 +6,8 @@ import sys
import threading
import time
import six
# Django
from django.conf import settings, UserSettingsHolder
from django.core.cache import cache as django_cache
@ -17,6 +19,7 @@ from rest_framework.fields import empty, SkipField
# Tower
from awx.main.utils import encrypt_field, decrypt_field
from awx.main.utils.db import get_tower_migration_version
from awx.conf import settings_registry
from awx.conf.models import Setting
@ -57,7 +60,10 @@ def _log_database_error():
try:
yield
except (ProgrammingError, OperationalError) as e:
logger.warning('Database settings are not available, using defaults (%s)', e, exc_info=True)
if get_tower_migration_version() < '310':
logger.info('Using default settings until version 3.1 migration.')
else:
logger.warning('Database settings are not available, using defaults (%s)', e, exc_info=True)
finally:
pass
@ -88,7 +94,17 @@ class EncryptedCacheProxy(object):
def get(self, key, **kwargs):
value = self.cache.get(key, **kwargs)
return self._handle_encryption(self.decrypter, key, value)
value = self._handle_encryption(self.decrypter, key, value)
# python-memcached auto-encodes unicode on cache set in python2
# https://github.com/linsomniac/python-memcached/issues/79
# https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961
if six.PY2 and isinstance(value, six.binary_type):
try:
six.text_type(value)
except UnicodeDecodeError:
value = value.decode('utf-8')
return value
def set(self, key, value, **kwargs):
self.cache.set(

View File

@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
@ -9,10 +11,10 @@ from django.conf import LazySettings
from django.core.cache.backends.locmem import LocMemCache
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
from rest_framework import fields
import pytest
import six
from awx.conf import models
from awx.conf import models, fields
from awx.conf.settings import SettingsWrapper, EncryptedCacheProxy, SETTING_CACHE_NOTSET
from awx.conf.registry import SettingsRegistry
@ -61,6 +63,15 @@ def test_unregistered_setting(settings):
assert settings.cache.get('DEBUG') is None
def test_cached_settings_unicode_is_auto_decoded(settings):
    # python-memcached hands back raw utf-8 bytes under python 2; the cache
    # wrapper must transparently decode them back to unicode.
    # https://github.com/linsomniac/python-memcached/issues/79
    # https://github.com/linsomniac/python-memcached/blob/288c159720eebcdf667727a859ef341f1e908308/memcache.py#L961
    encoded = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')  # this simulates what python-memcached does on cache.set()
    settings.cache.set('DEBUG', encoded)
    assert settings.cache.get('DEBUG') == six.u('Iñtërnâtiônàlizætiøn')
def test_read_only_setting(settings):
settings.registry.register(
'AWX_READ_ONLY',
@ -240,6 +251,31 @@ def test_setting_from_db(settings, mocker):
assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
@pytest.mark.parametrize('encrypted', (True, False))
def test_setting_from_db_with_unicode(settings, mocker, encrypted):
    """A DB-backed setting whose value arrives as utf-8 bytes is surfaced as
    unicode text, for both plain and encrypted settings."""
    settings.registry.register(
        'AWX_SOME_SETTING',
        field_class=fields.CharField,
        category=_('System'),
        category_slug='system',
        default='DEFAULT',
        encrypted=encrypted
    )
    # this simulates a bug in python-memcached; see https://github.com/linsomniac/python-memcached/issues/79
    value = six.u('Iñtërnâtiônàlizætiøn').encode('utf-8')
    # Stand-in for the Setting row the settings wrapper would load from the DB.
    setting_from_db = mocker.Mock(key='AWX_SOME_SETTING', value=value)
    # Fake queryset: iterating it and calling .order_by(...).first() both
    # yield the mocked row above.
    mocks = mocker.Mock(**{
        'order_by.return_value': mocker.Mock(**{
            '__iter__': lambda self: iter([setting_from_db]),
            'first.return_value': setting_from_db
        }),
    })
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=mocks):
        # Both the attribute access and the cached copy must be decoded text.
        assert settings.AWX_SOME_SETTING == six.u('Iñtërnâtiônàlizætiøn')
        assert settings.cache.get('AWX_SOME_SETTING') == six.u('Iñtërnâtiônàlizætiøn')
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_assignment(settings):
"read-only settings cannot be overwritten"
@ -330,6 +366,31 @@ def test_read_only_setting_deletion(settings):
assert settings.AWX_SOME_SETTING == 'DEFAULT'
def test_charfield_properly_sets_none(settings, mocker):
    """Assigning None to a nullable CharField setting persists a null value
    (not the string "None").

    see: https://github.com/ansible/ansible-tower/issues/5322
    """
    settings.registry.register(
        'AWX_SOME_SETTING',
        field_class=fields.CharField,
        category=_('System'),
        category_slug='system',
        allow_null=True
    )
    # No existing Setting row, so assignment falls through to objects.create().
    setting_list = mocker.Mock(**{'order_by.return_value.first.return_value': None})
    with apply_patches([
        mocker.patch('awx.conf.models.Setting.objects.filter',
                     return_value=setting_list),
        mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock())
    ]):
        settings.AWX_SOME_SETTING = None
        # The created row must carry a literal None value.
        models.Setting.objects.create.assert_called_with(
            key='AWX_SOME_SETTING',
            user=None,
            value=None
        )
def test_settings_use_an_encrypted_cache(settings):
settings.registry.register(
'AWX_ENCRYPTED',

File diff suppressed because it is too large Load Diff

View File

@ -343,6 +343,9 @@ class BaseAccess(object):
if validation_errors:
user_capabilities[display_method] = False
continue
elif display_method == 'copy' and isinstance(obj, WorkflowJobTemplate) and obj.organization_id is None:
user_capabilities[display_method] = self.user.is_superuser
continue
elif display_method in ['start', 'schedule'] and isinstance(obj, Group):
if obj.inventory_source and not obj.inventory_source._can_update():
user_capabilities[display_method] = False
@ -355,6 +358,9 @@ class BaseAccess(object):
# Grab the answer from the cache, if available
if hasattr(obj, 'capabilities_cache') and display_method in obj.capabilities_cache:
user_capabilities[display_method] = obj.capabilities_cache[display_method]
if self.user.is_superuser and not user_capabilities[display_method]:
# Cache override for models with bad orphaned state
user_capabilities[display_method] = True
continue
# Aliases for going form UI language to API language
@ -1223,6 +1229,13 @@ class JobTemplateAccess(BaseAccess):
"active_jobs": active_jobs})
return True
@check_superuser
def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
if isinstance(sub_obj, NotificationTemplate):
return self.check_related('organization', Organization, {}, obj=sub_obj, mandatory=True)
return super(JobTemplateAccess, self).can_attach(
obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
class JobAccess(BaseAccess):
'''
@ -1952,13 +1965,12 @@ class ScheduleAccess(BaseAccess):
qs = qs.prefetch_related('unified_job_template')
if self.user.is_superuser or self.user.is_system_auditor:
return qs.all()
job_template_qs = self.user.get_queryset(JobTemplate)
inventory_source_qs = self.user.get_queryset(InventorySource)
project_qs = self.user.get_queryset(Project)
unified_qs = UnifiedJobTemplate.objects.filter(jobtemplate__in=job_template_qs) | \
UnifiedJobTemplate.objects.filter(Q(project__in=project_qs)) | \
UnifiedJobTemplate.objects.filter(Q(inventorysource__in=inventory_source_qs))
return qs.filter(unified_job_template__in=unified_qs)
unified_pk_qs = UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role')
inv_src_qs = InventorySource.objects.filter(inventory_id=Inventory._accessible_pk_qs(Inventory, self.user, 'read_role'))
return qs.filter(
Q(unified_job_template_id__in=unified_pk_qs) |
Q(unified_job_template_id__in=inv_src_qs.values_list('pk', flat=True)))
@check_superuser
def can_read(self, obj):

View File

@ -111,6 +111,7 @@ register(
help_text=_('List of modules allowed to be used by ad-hoc jobs.'),
category=_('Jobs'),
category_slug='jobs',
required=False,
)
register(
@ -258,7 +259,8 @@ register(
register(
'LOG_AGGREGATOR_USERNAME',
field_class=fields.CharField,
allow_null=True,
allow_blank=True,
default='',
label=_('Logging Aggregator Username'),
help_text=_('Username for external log aggregator (if required).'),
category=_('Logging'),
@ -268,7 +270,8 @@ register(
register(
'LOG_AGGREGATOR_PASSWORD',
field_class=fields.CharField,
allow_null=True,
allow_blank=True,
default='',
encrypted=True,
label=_('Logging Aggregator Password/Token'),
help_text=_('Password or authentication token for external log aggregator (if required).'),
@ -311,3 +314,13 @@ register(
category=_('Logging'),
category_slug='logging',
)
register(
'LOG_AGGREGATOR_TOWER_UUID',
field_class=fields.CharField,
allow_blank=True,
label=_('Cluster-wide Tower unique identifier.'),
help_text=_('Useful to uniquely identify Tower instances.'),
category=_('Logging'),
category_slug='logging',
default=None,
)

View File

@ -63,7 +63,7 @@ def ws_receive(message):
if 'groups' in data:
discard_groups(message)
groups = data['groups']
current_groups = message.channel_session.pop('groups') if 'groups' in message.channel_session else []
current_groups = set(message.channel_session.pop('groups') if 'groups' in message.channel_session else [])
for group_name,v in groups.items():
if type(v) is list:
for oid in v:
@ -74,12 +74,12 @@ def ws_receive(message):
if not user_access.get_queryset().filter(pk=oid).exists():
message.reply_channel.send({"text": json.dumps({"error": "access denied to channel {0} for resource id {1}".format(group_name, oid)})})
continue
current_groups.append(name)
current_groups.add(name)
Group(name).add(message.reply_channel)
else:
current_groups.append(group_name)
current_groups.add(group_name)
Group(group_name).add(message.reply_channel)
message.channel_session['groups'] = current_groups
message.channel_session['groups'] = list(current_groups)
def emit_channel_notification(group, payload):

View File

@ -12,7 +12,17 @@ from django.db import transaction
from django.utils.timezone import now
# AWX
from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob, WorkflowJob, Notification
from awx.main.models import (
Job, AdHocCommand, ProjectUpdate, InventoryUpdate,
SystemJob, WorkflowJob, Notification
)
from awx.main.signals import ( # noqa
emit_update_inventory_on_created_or_deleted,
emit_update_inventory_computed_fields,
disable_activity_stream,
disable_computed_fields
)
from django.db.models.signals import post_save, post_delete, m2m_changed # noqa
class Command(NoArgsCommand):
@ -237,10 +247,11 @@ class Command(NoArgsCommand):
models_to_cleanup.add(m)
if not models_to_cleanup:
models_to_cleanup.update(model_names)
for m in model_names:
if m in models_to_cleanup:
skipped, deleted = getattr(self, 'cleanup_%s' % m)()
if self.dry_run:
self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped)
else:
self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped)
with disable_activity_stream(), disable_computed_fields():
for m in model_names:
if m in models_to_cleanup:
skipped, deleted = getattr(self, 'cleanup_%s' % m)()
if self.dry_run:
self.logger.log(99, '%s: %d would be deleted, %d would be skipped.', m.replace('_', ' '), deleted, skipped)
else:
self.logger.log(99, '%s: %d deleted, %d skipped.', m.replace('_', ' '), deleted, skipped)

View File

@ -1,6 +1,7 @@
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved
import subprocess
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from awx.main.models import Instance
@ -22,7 +23,11 @@ class Command(BaseCommand):
instance = Instance.objects.filter(hostname=options.get('name'))
if instance.exists():
instance.delete()
print('Successfully removed')
result = subprocess.Popen("rabbitmqctl forget_cluster_node rabbitmq@{}".format(options.get('name')), shell=True).wait()
if result != 0:
print("Node deprovisioning may have failed when attempting to remove the RabbitMQ instance from the cluster")
else:
print('Successfully deprovisioned {}'.format(options.get('name')))
else:
print('No instance found matching name {}'.format(options.get('name')))

View File

@ -70,8 +70,11 @@ class CallbackBrokerWorker(ConsumerMixin):
callbacks=[self.process_task])]
def process_task(self, body, message):
if "uuid" in body:
queue = UUID(body['uuid']).int % settings.JOB_EVENT_WORKERS
if "uuid" in body and body['uuid']:
try:
queue = UUID(body['uuid']).int % settings.JOB_EVENT_WORKERS
except Exception:
queue = self.total_messages % settings.JOB_EVENT_WORKERS
else:
queue = self.total_messages % settings.JOB_EVENT_WORKERS
self.write_queue_worker(queue, body)

View File

@ -42,7 +42,7 @@ _PythonSerializer.handle_m2m_field = _new_handle_m2m_field
# Add custom methods to User model for permissions checks.
from django.contrib.auth.models import User # noqa
from django.contrib.auth.models import User # noqa
from awx.main.access import * # noqa
@ -128,3 +128,6 @@ activity_stream_registrar.connect(User)
activity_stream_registrar.connect(WorkflowJobTemplate)
activity_stream_registrar.connect(WorkflowJobTemplateNode)
activity_stream_registrar.connect(WorkflowJob)
# prevent API filtering on certain Django-supplied sensitive fields
prevent_search(User._meta.get_field('password'))

View File

@ -83,10 +83,10 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
editable=False,
through='AdHocCommandEvent',
)
extra_vars = models.TextField(
extra_vars = prevent_search(models.TextField(
blank=True,
default='',
)
))
extra_vars_dict = VarsDictProperty('extra_vars', True)

View File

@ -23,7 +23,7 @@ from crum import get_current_user
# Ansible Tower
from awx.main.utils import encrypt_field
__all__ = ['VarsDictProperty', 'BaseModel', 'CreatedModifiedModel',
__all__ = ['prevent_search', 'VarsDictProperty', 'BaseModel', 'CreatedModifiedModel',
'PasswordFieldsModel', 'PrimordialModel', 'CommonModel',
'CommonModelNameNotUnique', 'NotificationFieldsModel',
'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ',
@ -343,3 +343,21 @@ class NotificationFieldsModel(BaseModel):
blank=True,
related_name='%(class)s_notification_templates_for_any'
)
def prevent_search(relation):
"""
Used to mark a model field or relation as "restricted from filtering"
e.g.,
class AuthToken(BaseModel):
user = prevent_search(models.ForeignKey(...))
sensitive_data = prevent_search(models.CharField(...))
The flag set by this function is used by
`awx.api.filters.FieldLookupBackend` to blacklist fields and relations that
should not be searchable/filterable via search query params
"""
setattr(relation, '__prevent_search__', True)
return relation

View File

@ -345,6 +345,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
if self.has_encrypted_ssh_key_data and not self.ssh_key_unlock:
raise ValidationError(_('SSH key unlock must be set when SSH key '
'is encrypted.'))
if not self.has_encrypted_ssh_key_data and self.ssh_key_unlock:
raise ValidationError(_('SSH key unlock should not be set when '
'SSH key is not encrypted.'))
return self.ssh_key_unlock
def clean(self):

View File

@ -23,6 +23,7 @@ from awx.main.fields import AutoOneToOneField, ImplicitRoleField
from awx.main.managers import HostManager
from awx.main.models.base import * # noqa
from awx.main.models.unified_jobs import * # noqa
from awx.main.models.jobs import Job
from awx.main.models.mixins import ResourceMixin
from awx.main.models.notifications import (
NotificationTemplate,
@ -1276,6 +1277,12 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin):
def get_notification_friendly_name(self):
return "Inventory Update"
def cancel(self):
res = super(InventoryUpdate, self).cancel()
if res:
map(lambda x: x.cancel(), Job.objects.filter(dependent_jobs__in=[self.id]))
return res
class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
@ -1284,11 +1291,11 @@ class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin):
unique_together = [('name', 'organization')]
ordering = ('name',)
script = models.TextField(
script = prevent_search(models.TextField(
blank=True,
default='',
help_text=_('Inventory script contents'),
)
))
organization = models.ForeignKey(
'Organization',
related_name='custom_inventory_scripts',

View File

@ -117,10 +117,10 @@ class JobOptions(BaseModel):
blank=True,
default=0,
)
extra_vars = models.TextField(
extra_vars = prevent_search(models.TextField(
blank=True,
default='',
)
))
job_tags = models.CharField(
max_length=1024,
blank=True,
@ -1252,10 +1252,10 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin):
on_delete=models.SET_NULL,
)
extra_vars = models.TextField(
extra_vars = prevent_search(models.TextField(
blank=True,
default='',
)
))
extra_vars_dict = VarsDictProperty('extra_vars', True)

View File

@ -7,6 +7,7 @@ from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User # noqa
# AWX
from awx.main.models.base import prevent_search
from awx.main.models.rbac import (
Role, RoleAncestorEntry, get_roles_on_resource
)
@ -86,10 +87,10 @@ class SurveyJobTemplateMixin(models.Model):
survey_enabled = models.BooleanField(
default=False,
)
survey_spec = JSONField(
survey_spec = prevent_search(JSONField(
blank=True,
default={},
)
))
def survey_password_variables(self):
vars = []
@ -215,11 +216,11 @@ class SurveyJobMixin(models.Model):
class Meta:
abstract = True
survey_passwords = JSONField(
survey_passwords = prevent_search(JSONField(
blank=True,
default={},
editable=False,
)
))
def display_extra_vars(self):
'''

View File

@ -75,7 +75,7 @@ class NotificationTemplate(CommonModel):
setattr(self, '_saved_{}_{}'.format("config", field), value)
self.notification_configuration[field] = ''
else:
encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
encrypted = encrypt_field(self, 'notification_configuration', subfield=field, skip_utf8=True)
self.notification_configuration[field] = encrypted
if 'notification_configuration' not in update_fields:
update_fields.append('notification_configuration')

View File

@ -220,12 +220,13 @@ class AuthToken(BaseModel):
app_label = 'main'
key = models.CharField(max_length=40, primary_key=True)
user = models.ForeignKey('auth.User', related_name='auth_tokens',
on_delete=models.CASCADE)
user = prevent_search(models.ForeignKey('auth.User',
related_name='auth_tokens', on_delete=models.CASCADE))
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
expires = models.DateTimeField(default=tz_now)
request_hash = models.CharField(max_length=40, blank=True, default='')
request_hash = prevent_search(models.CharField(max_length=40, blank=True,
default=''))
reason = models.CharField(
max_length=1024,
blank=True,

View File

@ -174,6 +174,13 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
return []
return ['project', 'inventorysource', 'systemjobtemplate']
@classmethod
def _submodels_with_roles(cls):
ujt_classes = [c for c in cls.__subclasses__()
if c._meta.model_name not in ['inventorysource', 'systemjobtemplate']]
ct_dict = ContentType.objects.get_for_models(*ujt_classes)
return [ct.id for ct in ct_dict.values()]
@classmethod
def accessible_pk_qs(cls, accessor, role_field):
'''
@ -184,12 +191,8 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
# do not use this if in a subclass
if cls != UnifiedJobTemplate:
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
ujt_names = [c.__name__.lower() for c in cls.__subclasses__()
if c.__name__.lower() not in ['inventorysource', 'systemjobtemplate']]
subclass_content_types = list(ContentType.objects.filter(
model__in=ujt_names).values_list('id', flat=True))
return ResourceMixin._accessible_pk_qs(cls, accessor, role_field, content_types=subclass_content_types)
return ResourceMixin._accessible_pk_qs(
cls, accessor, role_field, content_types=cls._submodels_with_roles())
def _perform_unique_checks(self, unique_checks):
# Handle the list of unique fields returned above. Replace with an
@ -500,33 +503,33 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
editable=False,
help_text=_("Elapsed time in seconds that the job ran."),
)
job_args = models.TextField(
job_args = prevent_search(models.TextField(
blank=True,
default='',
editable=False,
)
))
job_cwd = models.CharField(
max_length=1024,
blank=True,
default='',
editable=False,
)
job_env = JSONField(
job_env = prevent_search(JSONField(
blank=True,
default={},
editable=False,
)
))
job_explanation = models.TextField(
blank=True,
default='',
editable=False,
help_text=_("A status field to indicate the state of the job if it wasn't able to run and capture stdout"),
)
start_args = models.TextField(
start_args = prevent_search(models.TextField(
blank=True,
default='',
editable=False,
)
))
result_stdout_text = models.TextField(
blank=True,
default='',

View File

@ -11,7 +11,7 @@ from django.core.urlresolvers import reverse
#from django import settings as tower_settings
# AWX
from awx.main.models import UnifiedJobTemplate, UnifiedJob
from awx.main.models import prevent_search, UnifiedJobTemplate, UnifiedJob
from awx.main.models.notifications import (
NotificationTemplate,
JobNotificationMixin
@ -280,10 +280,10 @@ class WorkflowJobOptions(BaseModel):
class Meta:
abstract = True
extra_vars = models.TextField(
extra_vars = prevent_search(models.TextField(
blank=True,
default='',
)
))
extra_vars_dict = VarsDictProperty('extra_vars', True)

View File

@ -37,6 +37,7 @@ class HipChatBackend(TowerBaseEmailBackend):
for rcp in m.recipients():
r = requests.post("{}/v2/room/{}/notification".format(self.api_url, rcp),
params={"auth_token": self.token},
verify=False,
json={"color": self.color,
"message": m.subject,
"notify": self.notify,

View File

@ -251,6 +251,18 @@ class TaskManager():
dep.save()
inventory_task = InventoryUpdateDict.get_partial(dep.id)
'''
Update internal datastructures with the newly created inventory update
'''
# Should be only 1 inventory update. The one for the job (task)
latest_inventory_updates = self.get_latest_inventory_update_tasks([task])
self.process_latest_inventory_updates(latest_inventory_updates)
inventory_sources = self.get_inventory_source_tasks([task])
self.process_inventory_sources(inventory_sources)
self.graph.add_job(inventory_task)
return inventory_task
@ -271,9 +283,15 @@ class TaskManager():
def capture_chain_failure_dependencies(self, task, dependencies):
for dep in dependencies:
dep_obj = task.get_full()
dep_obj = dep.get_full()
dep_obj.dependent_jobs.add(task['id'])
dep_obj.save()
'''
if not 'dependent_jobs__id' in task.data:
task.data['dependent_jobs__id'] = [dep_obj.data['id']]
else:
task.data['dependent_jobs__id'].append(dep_obj.data['id'])
'''
def generate_dependencies(self, task):
dependencies = []
@ -291,6 +309,9 @@ class TaskManager():
'''
inventory_sources_already_updated = task.get_inventory_sources_already_updated()
'''
get_inventory_sources() only return update on launch sources
'''
for inventory_source_task in self.graph.get_inventory_sources(task['inventory_id']):
if inventory_source_task['id'] in inventory_sources_already_updated:
continue
@ -346,10 +367,14 @@ class TaskManager():
for task in all_running_sorted_tasks:
if (task['celery_task_id'] not in active_tasks and not hasattr(settings, 'IGNORE_CELERY_INSPECTOR')):
# NOTE: Pull status again and make sure it didn't finish in
# the meantime?
# TODO: try catch the getting of the job. The job COULD have been deleted
task_obj = task.get_full()
# Ensure job did not finish running between the time we get the
# list of task id's from celery and now.
# Note: This is an actual fix, not a reduction in the time
# window that this can happen.
if task_obj.status != 'running':
continue
task_obj.status = 'failed'
task_obj.job_explanation += ' '.join((
'Task was marked as running in Tower but was not present in',

View File

@ -83,6 +83,11 @@ class DependencyGraph(object):
'''
def should_update_related_project(self, job):
now = self.get_now()
# Already processed dependencies for this job
if job.data['dependent_jobs__id'] is not None:
return False
latest_project_update = self.data[self.LATEST_PROJECT_UPDATES].get(job['project_id'], None)
if not latest_project_update:
return True
@ -113,21 +118,15 @@ class DependencyGraph(object):
def should_update_related_inventory_source(self, job, inventory_source_id):
now = self.get_now()
# Already processed dependencies for this job
if job.data['dependent_jobs__id'] is not None:
return False
latest_inventory_update = self.data[self.LATEST_INVENTORY_UPDATES].get(inventory_source_id, None)
if not latest_inventory_update:
return True
'''
This is a bit of fuzzy logic.
If the latest inventory update has a created time == job_created_time-2
then consider the inventory update found. This is so we don't enter an infinite loop
of updating the project when cache timeout is 0.
'''
if latest_inventory_update['inventory_source__update_cache_timeout'] == 0 and \
latest_inventory_update['launch_type'] == 'dependency' and \
latest_inventory_update['created'] == job['created'] - timedelta(seconds=2):
return False
'''
Normal, expected, cache timeout logic
'''

View File

@ -180,8 +180,16 @@ def rbac_activity_stream(instance, sender, **kwargs):
elif sender.__name__ == 'Role_parents':
role = kwargs['model'].objects.filter(pk__in=kwargs['pk_set']).first()
# don't record implicit creation / parents
if role is not None and role.content_type is not None:
parent = role.content_type.name + "." + role.role_field
if role is not None:
if role.content_type is None:
if role.is_singleton():
parent = 'singleton:' + role.singleton_name
else:
# Ill-defined role, may need additional logic in the
# case of future expansions of the RBAC system
parent = str(role.role_field)
else:
parent = role.content_type.name + "." + role.role_field
# Get the list of implicit parents that were defined at the class level.
# We have to take this list from the class property to avoid including parents
# that may have been added since the creation of the ImplicitRoleField
@ -210,18 +218,24 @@ def cleanup_detached_labels_on_deleted_parent(sender, instance, **kwargs):
l.delete()
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.hosts.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.parents.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Host.inventory_sources.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.inventory_sources.through)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Job)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Job)
def connect_computed_field_signals():
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Group)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.hosts.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.parents.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Host.inventory_sources.through)
m2m_changed.connect(emit_update_inventory_computed_fields, sender=Group.inventory_sources.through)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_save.connect(emit_update_inventory_on_created_or_deleted, sender=Job)
post_delete.connect(emit_update_inventory_on_created_or_deleted, sender=Job)
connect_computed_field_signals()
post_save.connect(emit_job_event_detail, sender=JobEvent)
post_save.connect(emit_ad_hoc_command_event_detail, sender=AdHocCommandEvent)
m2m_changed.connect(rebuild_role_ancestor_list, Role.parents.through)
@ -340,6 +354,24 @@ def disable_activity_stream():
activity_stream_enabled.enabled = previous_value
@contextlib.contextmanager
def disable_computed_fields():
post_save.disconnect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_delete.disconnect(emit_update_inventory_on_created_or_deleted, sender=Host)
post_save.disconnect(emit_update_inventory_on_created_or_deleted, sender=Group)
post_delete.disconnect(emit_update_inventory_on_created_or_deleted, sender=Group)
m2m_changed.disconnect(emit_update_inventory_computed_fields, sender=Group.hosts.through)
m2m_changed.disconnect(emit_update_inventory_computed_fields, sender=Group.parents.through)
m2m_changed.disconnect(emit_update_inventory_computed_fields, sender=Host.inventory_sources.through)
m2m_changed.disconnect(emit_update_inventory_computed_fields, sender=Group.inventory_sources.through)
post_save.disconnect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_delete.disconnect(emit_update_inventory_on_created_or_deleted, sender=InventorySource)
post_save.disconnect(emit_update_inventory_on_created_or_deleted, sender=Job)
post_delete.disconnect(emit_update_inventory_on_created_or_deleted, sender=Job)
yield
connect_computed_field_signals()
model_serializer_mapping = {
Organization: OrganizationSerializer,
Inventory: InventorySerializer,

View File

@ -32,8 +32,7 @@ import pexpect
# Celery
from celery import Task, task
from celery.signals import celeryd_init, worker_ready
from celery import current_app
from celery.signals import celeryd_init, worker_process_init
# Django
from django.conf import settings
@ -54,6 +53,8 @@ from awx.main.task_engine import TaskEnhancer
from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url,
check_proot_installed, build_proot_temp_dir, wrap_args_with_proot,
get_system_task_capacity, OutputEventFilter, parse_yaml_or_json)
from awx.main.utils.reload import restart_local_services
from awx.main.utils.handlers import configure_external_logger
from awx.main.consumers import emit_channel_notification
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
@ -86,41 +87,10 @@ def celery_startup(conf=None, **kwargs):
logger.error("Failed to rebuild schedule {}: {}".format(sch, e))
def _setup_tower_logger():
global logger
from django.utils.log import configure_logging
LOGGING_DICT = settings.LOGGING
if settings.LOG_AGGREGATOR_ENABLED:
LOGGING_DICT['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler'
LOGGING_DICT['handlers']['http_receiver']['async'] = False
if 'awx' in settings.LOG_AGGREGATOR_LOGGERS:
if 'http_receiver' not in LOGGING_DICT['loggers']['awx']['handlers']:
LOGGING_DICT['loggers']['awx']['handlers'] += ['http_receiver']
configure_logging(settings.LOGGING_CONFIG, LOGGING_DICT)
logger = logging.getLogger('awx.main.tasks')
@worker_ready.connect
@worker_process_init.connect
def task_set_logger_pre_run(*args, **kwargs):
cache.close()
if settings.LOG_AGGREGATOR_ENABLED:
_setup_tower_logger()
logger.debug('Custom Tower logger configured for worker process.')
def _uwsgi_reload():
# http://uwsgi-docs.readthedocs.io/en/latest/MasterFIFO.html#available-commands
logger.warn('Initiating uWSGI chain reload of server')
TRIGGER_CHAIN_RELOAD = 'c'
with open('/var/lib/awx/awxfifo', 'w') as awxfifo:
awxfifo.write(TRIGGER_CHAIN_RELOAD)
def _reset_celery_logging():
# Worker logger reloaded, now send signal to restart pool
app = current_app._get_current_object()
app.control.broadcast('pool_restart', arguments={'reload': True},
destination=['celery@{}'.format(settings.CLUSTER_HOST_ID)], reply=False)
configure_external_logger(settings, async_flag=False, is_startup=False)
def _clear_cache_keys(set_of_keys):
@ -136,8 +106,7 @@ def process_cache_changes(cache_keys):
_clear_cache_keys(set_of_keys)
for setting_key in set_of_keys:
if setting_key.startswith('LOG_AGGREGATOR_'):
_uwsgi_reload()
_reset_celery_logging()
restart_local_services(['uwsgi', 'celery', 'beat', 'callback', 'fact'])
break
@ -864,6 +833,7 @@ class RunJob(BaseTask):
env['INVENTORY_ID'] = str(job.inventory.pk)
if job.project:
env['PROJECT_REVISION'] = job.project.scm_revision
env['ANSIBLE_RETRY_FILES_ENABLED'] = "False"
env['ANSIBLE_CALLBACK_PLUGINS'] = plugin_path
env['ANSIBLE_STDOUT_CALLBACK'] = 'tower_display'
env['REST_API_URL'] = settings.INTERNAL_API_URL
@ -1159,6 +1129,7 @@ class RunProjectUpdate(BaseTask):
'''
Return SSH private key data needed for this project update.
'''
handle, self.revision_path = tempfile.mkstemp()
private_data = {}
if project_update.credential:
credential = project_update.credential
@ -1247,9 +1218,9 @@ class RunProjectUpdate(BaseTask):
'scm_url': scm_url,
'scm_branch': scm_branch,
'scm_clean': project_update.scm_clean,
'scm_delete_on_update': project_update.scm_delete_on_update,
'scm_delete_on_update': project_update.scm_delete_on_update if project_update.job_type == 'check' else False,
'scm_full_checkout': True if project_update.job_type == 'run' else False,
'scm_revision_output': '/tmp/_{}_syncrev'.format(project_update.id) # TODO: TempFile
'scm_revision_output': self.revision_path
})
args.extend(['-e', json.dumps(extra_vars)])
args.append('project_update.yml')
@ -1335,7 +1306,7 @@ class RunProjectUpdate(BaseTask):
def post_run_hook(self, instance, status, **kwargs):
if instance.job_type == 'check' and status not in ('failed', 'canceled',):
p = instance.project
fd = open('/tmp/_{}_syncrev'.format(instance.id), 'r')
fd = open(self.revision_path, 'r')
lines = fd.readlines()
if lines:
p.scm_revision = lines[0].strip()
@ -1343,6 +1314,10 @@ class RunProjectUpdate(BaseTask):
p.save()
else:
logger.error("Could not find scm revision in check")
try:
os.remove(self.revision_path)
except Exception, e:
logger.error("Failed removing revision tmp file: {}".format(e))
class RunInventoryUpdate(BaseTask):

View File

@ -181,6 +181,29 @@ def test_scan_JT_counted(resourced_organization, user, get):
assert detail_response.data['summary_fields']['related_field_counts'] == counts_dict
@pytest.mark.django_db
def test_JT_not_double_counted(resourced_organization, user, get):
admin_user = user('admin', True)
# Add a scan job template to the org
resourced_organization.projects.all()[0].jobtemplates.create(
job_type='run',
inventory=resourced_organization.inventories.all()[0],
project=resourced_organization.projects.all()[0],
name='double-linked-job-template')
counts_dict = COUNTS_PRIMES
counts_dict['job_templates'] += 1
# Test list view
list_response = get(reverse('api:organization_list', args=[]), admin_user)
assert list_response.status_code == 200
assert list_response.data['results'][0]['summary_fields']['related_field_counts'] == counts_dict
# Test detail view
detail_response = get(reverse('api:organization_detail', args=[resourced_organization.pk]), admin_user)
assert detail_response.status_code == 200
assert detail_response.data['summary_fields']['related_field_counts'] == counts_dict
@pytest.mark.django_db
def test_JT_associated_with_project(organizations, project, user, get):
# Check that adding a project to an organization gets the project's JT

View File

@ -7,53 +7,52 @@ from awx.main.models import Role
@pytest.mark.django_db
def test_indirect_access_list(get, organization, project, team_factory, user, admin):
project_admin = user('project_admin')
org_admin_team_member = user('org_admin_team_member')
project_admin_team_member = user('project_admin_team_member')
org_admin_team = team_factory('org-admin-team')
team_admin = user('team_admin')
project_admin_team = team_factory('project-admin-team')
project.admin_role.members.add(project_admin)
org_admin_team.member_role.members.add(org_admin_team_member)
org_admin_team.member_role.children.add(organization.admin_role)
project_admin_team.member_role.members.add(project_admin_team_member)
project_admin_team.member_role.children.add(project.admin_role)
project_admin_team.admin_role.members.add(team_admin)
result = get(reverse('api:project_access_list', args=(project.id,)), admin)
assert result.status_code == 200
# Result should be:
# project_admin should have direct access,
# project_team_admin should have "direct" access through being a team member -> project admin,
# org_admin_team_member should have indirect access through being a team member -> org admin -> project admin,
# team_admin should have direct access the same as the project_team_admin,
# admin should have access through system admin -> org admin -> project admin
assert result.data['count'] == 4
project_admin_res = [r for r in result.data['results'] if r['id'] == project_admin.id][0]
org_admin_team_member_res = [r for r in result.data['results'] if r['id'] == org_admin_team_member.id][0]
team_admin_res = [r for r in result.data['results'] if r['id'] == team_admin.id][0]
project_admin_team_member_res = [r for r in result.data['results'] if r['id'] == project_admin_team_member.id][0]
admin_res = [r for r in result.data['results'] if r['id'] == admin.id][0]
assert len(project_admin_res['summary_fields']['direct_access']) == 1
assert len(project_admin_res['summary_fields']['indirect_access']) == 0
assert len(org_admin_team_member_res['summary_fields']['direct_access']) == 0
assert len(org_admin_team_member_res['summary_fields']['indirect_access']) == 1
assert len(team_admin_res['summary_fields']['direct_access']) == 1
assert len(team_admin_res['summary_fields']['indirect_access']) == 0
assert len(admin_res['summary_fields']['direct_access']) == 0
assert len(admin_res['summary_fields']['indirect_access']) == 1
project_admin_entry = project_admin_res['summary_fields']['direct_access'][0]['role']
assert project_admin_entry['id'] == project.admin_role.id
# assure that results for team admin are the same as for team member
team_admin_entry = team_admin_res['summary_fields']['direct_access'][0]['role']
assert team_admin_entry['id'] == project.admin_role.id
assert team_admin_entry['name'] == 'Admin'
project_admin_team_member_entry = project_admin_team_member_res['summary_fields']['direct_access'][0]['role']
assert project_admin_team_member_entry['id'] == project.admin_role.id
assert project_admin_team_member_entry['team_id'] == project_admin_team.id
assert project_admin_team_member_entry['team_name'] == project_admin_team.name
org_admin_team_member_entry = org_admin_team_member_res['summary_fields']['indirect_access'][0]['role']
assert org_admin_team_member_entry['id'] == organization.admin_role.id
assert org_admin_team_member_entry['team_id'] == org_admin_team.id
assert org_admin_team_member_entry['team_name'] == org_admin_team.name
admin_entry = admin_res['summary_fields']['indirect_access'][0]['role']
assert admin_entry['name'] == Role.singleton('system_administrator').name

View File

@ -30,6 +30,7 @@ def test_license_cannot_be_removed_via_system_settings(mock_no_license_file, get
url = reverse('api:setting_singleton_detail', args=('system',))
response = get(url, user=admin, expect=200)
assert not response.data['LICENSE']
Setting.objects.create(key='TOWER_URL_BASE', value='https://towerhost')
Setting.objects.create(key='LICENSE', value=enterprise_license)
response = get(url, user=admin, expect=200)
assert response.data['LICENSE']
@ -44,6 +45,13 @@ def test_license_cannot_be_removed_via_system_settings(mock_no_license_file, get
assert response.data['LICENSE']
@pytest.mark.django_db
def test_url_base_defaults_to_request(options, admin):
# If TOWER_URL_BASE is not set, default to the Tower request hostname
resp = options(reverse('api:setting_singleton_detail', args=('system',)), user=admin, expect=200)
assert resp.data['actions']['PUT']['TOWER_URL_BASE']['default'] == 'http://testserver'
@pytest.mark.django_db
def test_jobs_settings(get, put, patch, delete, admin):
url = reverse('api:setting_singleton_detail', args=('jobs',))

View File

@ -0,0 +1,11 @@
import pytest
from django.core.urlresolvers import reverse
@pytest.mark.django_db
def test_aliased_forward_reverse_field_searches(instance, options, get, admin):
url = reverse('api:unified_job_template_list')
response = options(url, None, admin)
assert 'job_template__search' in response.data['related_search_fields']
get(reverse("api:unified_job_template_list") + "?job_template__search=anything", user=admin, expect=200)

View File

@ -56,15 +56,6 @@ def clear_cache():
cache.clear()
@pytest.fixture(scope="session", autouse=False)
def disable_signals():
'''
Disable all django model signals.
'''
mocked = mock.patch('django.dispatch.Signal.send', autospec=True)
mocked.start()
@pytest.fixture(scope="session", autouse=True)
def celery_memory_broker():
'''

View File

@ -0,0 +1,13 @@
import pytest
# AWX models
from awx.main.models.organization import Organization
from awx.main.models import ActivityStream
@pytest.mark.django_db
def test_activity_stream_create_entries():
Organization.objects.create(name='test-organization2')
assert ActivityStream.objects.filter(organization__isnull=False).count() == 1

View File

@ -0,0 +1,47 @@
import pytest
# AWX context managers for testing
from awx.main.models.rbac import batch_role_ancestor_rebuilding
from awx.main.signals import (
disable_activity_stream,
disable_computed_fields,
update_inventory_computed_fields
)
# AWX models
from awx.main.models.organization import Organization
from awx.main.models import ActivityStream, Job
@pytest.mark.django_db
def test_rbac_batch_rebuilding(rando, organization):
with batch_role_ancestor_rebuilding():
organization.admin_role.members.add(rando)
inventory = organization.inventories.create(name='test-inventory')
assert rando not in inventory.admin_role
assert rando in inventory.admin_role
@pytest.mark.django_db
def test_disable_activity_stream():
with disable_activity_stream():
Organization.objects.create(name='test-organization')
assert ActivityStream.objects.filter(organization__isnull=False).count() == 0
@pytest.mark.django_db
class TestComputedFields:
def test_computed_fields_normal_use(self, mocker, inventory):
job = Job.objects.create(name='fake-job', inventory=inventory)
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
job.delete()
update_inventory_computed_fields.delay.assert_called_once_with(inventory.id, True)
def test_disable_computed_fields(self, mocker, inventory):
job = Job.objects.create(name='fake-job', inventory=inventory)
with disable_computed_fields():
with mocker.patch.object(update_inventory_computed_fields, 'delay'):
job.delete()
update_inventory_computed_fields.delay.assert_not_called()

View File

@ -1,5 +1,20 @@
import pytest
# Django
from django.contrib.contenttypes.models import ContentType
# AWX
from awx.main.models import UnifiedJobTemplate, JobTemplate, WorkflowJobTemplate, Project
@pytest.mark.django_db
def test_subclass_types(rando):
assert set(UnifiedJobTemplate._submodels_with_roles()) == set([
ContentType.objects.get_for_model(JobTemplate).id,
ContentType.objects.get_for_model(Project).id,
ContentType.objects.get_for_model(WorkflowJobTemplate).id
])
class TestCreateUnifiedJob:
'''

View File

@ -5,13 +5,15 @@ from awx.main.models import (
Permission,
Host,
CustomInventoryScript,
Schedule
)
from awx.main.access import (
InventoryAccess,
InventorySourceAccess,
HostAccess,
InventoryUpdateAccess,
CustomInventoryScriptAccess
CustomInventoryScriptAccess,
ScheduleAccess
)
from django.apps import apps
@ -277,3 +279,14 @@ def test_inventory_source_credential_check(rando, inventory_source, credential):
inventory_source.group.inventory.admin_role.members.add(rando)
access = InventorySourceAccess(rando)
assert not access.can_change(inventory_source, {'credential': credential})
@pytest.mark.django_db
def test_inventory_source_org_admin_schedule_access(org_admin, inventory_source):
schedule = Schedule.objects.create(
unified_job_template=inventory_source,
rrule='DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1')
access = ScheduleAccess(org_admin)
assert access.get_queryset()
assert access.can_read(schedule)
assert access.can_change(schedule, {'rrule': 'DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=2'})

View File

@ -227,11 +227,19 @@ def test_job_template_access_org_admin(jt_objects, rando):
@pytest.mark.django_db
def test_orphan_JT_readable_by_system_auditor(job_template, system_auditor):
assert system_auditor.is_system_auditor
assert job_template.project is None
access = JobTemplateAccess(system_auditor)
assert access.can_read(job_template)
class TestOrphanJobTemplate:
def test_orphan_JT_readable_by_system_auditor(self, job_template, system_auditor):
assert system_auditor.is_system_auditor
assert job_template.project is None
access = JobTemplateAccess(system_auditor)
assert access.can_read(job_template)
def test_system_admin_orphan_capabilities(self, job_template, admin_user):
job_template.capabilities_cache = {'edit': False}
access = JobTemplateAccess(admin_user)
capabilities = access.get_user_capabilities(job_template, method_list=['edit'])
assert capabilities['edit']
@pytest.mark.django_db

View File

@ -2,7 +2,8 @@ import pytest
from awx.main.access import (
NotificationTemplateAccess,
NotificationAccess
NotificationAccess,
JobTemplateAccess
)
@ -119,6 +120,15 @@ def test_notification_access_system_admin(notification, admin):
assert access.can_delete(notification)
@pytest.mark.django_db
def test_system_auditor_JT_attach(system_auditor, job_template, notification_template):
job_template.admin_role.members.add(system_auditor)
access = JobTemplateAccess(system_auditor)
assert not access.can_attach(
job_template, notification_template, 'notification_templates_success',
{'id': notification_template.id})
@pytest.mark.django_db
def test_notification_access_org_admin(notification, org_admin):
access = NotificationAccess(org_admin)

View File

@ -2,21 +2,25 @@ import pytest
from rest_framework.exceptions import PermissionDenied
from awx.api.filters import FieldLookupBackend
from awx.main.models import Credential, JobTemplate
from awx.main.models import (AdHocCommand, AuthToken, CustomInventoryScript,
Credential, Job, JobTemplate, SystemJob,
UnifiedJob, User, WorkflowJob,
WorkflowJobTemplate, WorkflowJobOptions)
from awx.main.models.jobs import JobOptions
@pytest.mark.parametrize(u"empty_value", [u'', ''])
def test_empty_in(empty_value):
field_lookup = FieldLookupBackend()
with pytest.raises(ValueError) as excinfo:
field_lookup.value_to_python(JobTemplate, 'project__in', empty_value)
field_lookup.value_to_python(JobTemplate, 'project__name__in', empty_value)
assert 'empty value for __in' in str(excinfo.value)
@pytest.mark.parametrize(u"valid_value", [u'foo', u'foo,'])
def test_valid_in(valid_value):
field_lookup = FieldLookupBackend()
value, new_lookup = field_lookup.value_to_python(JobTemplate, 'project__in', valid_value)
value, new_lookup = field_lookup.value_to_python(JobTemplate, 'project__name__in', valid_value)
assert 'foo' in value
@ -38,3 +42,28 @@ def test_filter_on_related_password_field(password_field, lookup_suffix):
with pytest.raises(PermissionDenied) as excinfo:
field, new_lookup = field_lookup.get_field_from_lookup(JobTemplate, lookup)
assert 'not allowed' in str(excinfo.value)
@pytest.mark.parametrize('model, query', [
(AuthToken, 'request_hash__icontains'),
(User, 'password__icontains'),
(User, 'auth_tokens__key__icontains'),
(User, 'settings__value__icontains'),
(UnifiedJob, 'job_args__icontains'),
(UnifiedJob, 'job_env__icontains'),
(UnifiedJob, 'start_args__icontains'),
(AdHocCommand, 'extra_vars__icontains'),
(JobOptions, 'extra_vars__icontains'),
(SystemJob, 'extra_vars__icontains'),
(WorkflowJobOptions, 'extra_vars__icontains'),
(Job, 'survey_passwords__icontains'),
(WorkflowJob, 'survey_passwords__icontains'),
(JobTemplate, 'survey_spec__icontains'),
(WorkflowJobTemplate, 'survey_spec__icontains'),
(CustomInventoryScript, 'script__icontains')
])
def test_filter_sensitive_fields_and_relations(model, query):
field_lookup = FieldLookupBackend()
with pytest.raises(PermissionDenied) as excinfo:
field, new_lookup = field_lookup.get_field_from_lookup(model, query)
assert 'not allowed' in str(excinfo.value)

View File

@ -223,7 +223,8 @@ def job_factory(epoch):
'celery_task_id': '',
'project__scm_update_on_launch': project__scm_update_on_launch,
'inventory__inventory_sources': inventory__inventory_sources,
'forks': 5
'forks': 5,
'dependent_jobs__id': None,
})
return fn

View File

@ -17,8 +17,10 @@ def graph():
@pytest.fixture
def job():
return dict(project_id=1)
def job(job_factory):
j = job_factory()
j.project_id = 1
return j
@pytest.fixture
@ -36,13 +38,11 @@ def unsuccessful_last_project(graph, job):
@pytest.fixture
def last_dependent_project(graph):
def last_dependent_project(graph, job):
now = tz_now()
job = {
'project_id': 1,
'created': now,
}
job['project_id'] = 1
job['created'] = now
pu = ProjectUpdateDict(dict(id=1, project_id=1, status='waiting',
project__scm_update_cache_timeout=0,
launch_type='dependency',
@ -57,10 +57,8 @@ def last_dependent_project(graph):
def timedout_project_update(graph, job):
now = tz_now()
job = {
'project_id': 1,
'created': now,
}
job['project_id'] = 1
job['created'] = now
pu = ProjectUpdateDict(dict(id=1, project_id=1, status='successful',
project__scm_update_cache_timeout=10,
launch_type='dependency',
@ -76,10 +74,8 @@ def timedout_project_update(graph, job):
def not_timedout_project_update(graph, job):
now = tz_now()
job = {
'project_id': 1,
'created': now,
}
job['project_id'] = 1
job['created'] = now
pu = ProjectUpdateDict(dict(id=1, project_id=1, status='successful',
project__scm_update_cache_timeout=3600,
launch_type='dependency',

View File

View File

@ -1,24 +1,45 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from awx.conf.models import Setting
from awx.main.utils import common
def test_encrypt_field():
field = Setting(pk=123, value='ANSIBLE')
encrypted = common.encrypt_field(field, 'value')
encrypted = field.value = common.encrypt_field(field, 'value')
assert encrypted == '$encrypted$AES$Ey83gcmMuBBT1OEq2lepnw=='
assert common.decrypt_field(field, 'value') == 'ANSIBLE'
def test_encrypt_field_without_pk():
field = Setting(value='ANSIBLE')
encrypted = common.encrypt_field(field, 'value')
encrypted = field.value = common.encrypt_field(field, 'value')
assert encrypted == '$encrypted$AES$8uIzEoGyY6QJwoTWbMFGhw=='
assert common.decrypt_field(field, 'value') == 'ANSIBLE'
def test_encrypt_field_with_unicode_string():
value = u'Iñtërnâtiônàlizætiøn'
field = Setting(value=value)
encrypted = field.value = common.encrypt_field(field, 'value')
assert encrypted == '$encrypted$UTF8$AES$AESQbqOefpYcLC7x8yZ2aWG4FlXlS66JgavLbDp/DSM='
assert common.decrypt_field(field, 'value') == value
def test_encrypt_field_force_disable_unicode():
value = u"NothingSpecial"
field = Setting(value=value)
encrypted = field.value = common.encrypt_field(field, 'value', skip_utf8=True)
assert "UTF8" not in encrypted
assert common.decrypt_field(field, 'value') == value
def test_encrypt_subfield():
field = Setting(value={'name': 'ANSIBLE'})
encrypted = common.encrypt_field(field, 'value', subfield='name')
encrypted = field.value = common.encrypt_field(field, 'value', subfield='name')
assert encrypted == '$encrypted$AES$8uIzEoGyY6QJwoTWbMFGhw=='
assert common.decrypt_field(field, 'value', subfield='name') == 'ANSIBLE'

View File

@ -0,0 +1,234 @@
import base64
import json
import logging
from django.conf import LazySettings
import pytest
import requests
from requests_futures.sessions import FuturesSession
from awx.main.utils.handlers import BaseHTTPSHandler as HTTPSHandler, PARAM_NAMES
from awx.main.utils.formatters import LogstashFormatter
@pytest.fixture()
def dummy_log_record():
return logging.LogRecord(
'awx', # logger name
20, # loglevel INFO
'./awx/some/module.py', # pathname
100, # lineno
'User joe logged in', # msg
tuple(), # args,
None # exc_info
)
@pytest.fixture()
def ok200_adapter():
class OK200Adapter(requests.adapters.HTTPAdapter):
requests = []
def send(self, request, **kwargs):
self.requests.append(request)
resp = requests.models.Response()
resp.status_code = 200
resp.raw = '200 OK'
resp.request = request
return resp
return OK200Adapter()
def test_https_logging_handler_requests_sync_implementation():
handler = HTTPSHandler(async=False)
assert not isinstance(handler.session, FuturesSession)
assert isinstance(handler.session, requests.Session)
def test_https_logging_handler_requests_async_implementation():
handler = HTTPSHandler(async=True)
assert isinstance(handler.session, FuturesSession)
@pytest.mark.parametrize('param', PARAM_NAMES.keys())
def test_https_logging_handler_defaults(param):
handler = HTTPSHandler()
assert hasattr(handler, param) and getattr(handler, param) is None
@pytest.mark.parametrize('param', PARAM_NAMES.keys())
def test_https_logging_handler_kwargs(param):
handler = HTTPSHandler(**{param: 'EXAMPLE'})
assert hasattr(handler, param) and getattr(handler, param) == 'EXAMPLE'
@pytest.mark.parametrize('param, django_settings_name', PARAM_NAMES.items())
def test_https_logging_handler_from_django_settings(param, django_settings_name):
settings = LazySettings()
settings.configure(**{
django_settings_name: 'EXAMPLE'
})
handler = HTTPSHandler.from_django_settings(settings)
assert hasattr(handler, param) and getattr(handler, param) == 'EXAMPLE'
def test_https_logging_handler_logstash_auth_info():
handler = HTTPSHandler(message_type='logstash', username='bob', password='ansible')
handler.add_auth_information()
assert isinstance(handler.session.auth, requests.auth.HTTPBasicAuth)
assert handler.session.auth.username == 'bob'
assert handler.session.auth.password == 'ansible'
def test_https_logging_handler_splunk_auth_info():
handler = HTTPSHandler(message_type='splunk', password='ansible')
handler.add_auth_information()
assert handler.session.headers['Authorization'] == 'Splunk ansible'
assert handler.session.headers['Content-Type'] == 'application/json'
@pytest.mark.parametrize('host, port, normalized', [
('localhost', None, 'http://localhost'),
('localhost', 80, 'http://localhost'),
('localhost', 8080, 'http://localhost:8080'),
('http://localhost', None, 'http://localhost'),
('http://localhost', 80, 'http://localhost'),
('http://localhost', 8080, 'http://localhost:8080'),
('https://localhost', 443, 'https://localhost:443')
])
def test_https_logging_handler_http_host_format(host, port, normalized):
handler = HTTPSHandler(host=host, port=port)
assert handler.get_http_host() == normalized
@pytest.mark.parametrize('params, logger_name, expected', [
({'enabled_flag': False}, 'awx.main', True), # skip all records if enabled_flag = False
({'host': '', 'enabled_flag': True}, 'awx.main', True), # skip all records if the host is undefined
({'host': '127.0.0.1', 'enabled_flag': True}, 'awx.main', False),
({'host': '127.0.0.1', 'enabled_flag': True, 'enabled_loggers': ['abc']}, 'awx.analytics.xyz', True),
({'host': '127.0.0.1', 'enabled_flag': True, 'enabled_loggers': ['xyz']}, 'awx.analytics.xyz', False),
])
def test_https_logging_handler_skip_log(params, logger_name, expected):
handler = HTTPSHandler(**params)
assert handler.skip_log(logger_name) is expected
@pytest.mark.parametrize('message_type, async', [
('logstash', False),
('logstash', True),
('splunk', False),
('splunk', True),
])
def test_https_logging_handler_emit(ok200_adapter, dummy_log_record,
message_type, async):
handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True,
message_type=message_type,
enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'],
async=async)
handler.setFormatter(LogstashFormatter())
handler.session.mount('http://', ok200_adapter)
async_futures = handler.emit(dummy_log_record)
[future.result() for future in async_futures]
assert len(ok200_adapter.requests) == 1
request = ok200_adapter.requests[0]
assert request.url == 'http://127.0.0.1/'
assert request.method == 'POST'
body = json.loads(request.body)
if message_type == 'logstash':
# A username + password weren't used, so this header should be missing
assert 'Authorization' not in request.headers
if message_type == 'splunk':
# splunk messages are nested under the 'event' key
body = body['event']
assert request.headers['Authorization'] == 'Splunk None'
assert body['level'] == 'INFO'
assert body['logger_name'] == 'awx'
assert body['message'] == 'User joe logged in'
@pytest.mark.parametrize('async', (True, False))
def test_https_logging_handler_emit_logstash_with_creds(ok200_adapter,
dummy_log_record, async):
handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True,
username='user', password='pass',
message_type='logstash',
enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'],
async=async)
handler.setFormatter(LogstashFormatter())
handler.session.mount('http://', ok200_adapter)
async_futures = handler.emit(dummy_log_record)
[future.result() for future in async_futures]
assert len(ok200_adapter.requests) == 1
request = ok200_adapter.requests[0]
assert request.headers['Authorization'] == 'Basic %s' % base64.b64encode("user:pass")
@pytest.mark.parametrize('async', (True, False))
def test_https_logging_handler_emit_splunk_with_creds(ok200_adapter,
dummy_log_record, async):
handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True,
password='pass', message_type='splunk',
enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'],
async=async)
handler.setFormatter(LogstashFormatter())
handler.session.mount('http://', ok200_adapter)
async_futures = handler.emit(dummy_log_record)
[future.result() for future in async_futures]
assert len(ok200_adapter.requests) == 1
request = ok200_adapter.requests[0]
assert request.headers['Authorization'] == 'Splunk pass'
def test_https_logging_handler_emit_one_record_per_fact(ok200_adapter):
handler = HTTPSHandler(host='127.0.0.1', enabled_flag=True,
message_type='logstash', indv_facts=True,
enabled_loggers=['awx', 'activity_stream', 'job_events', 'system_tracking'])
handler.setFormatter(LogstashFormatter())
handler.session.mount('http://', ok200_adapter)
record = logging.LogRecord(
'awx.analytics.system_tracking', # logger name
20, # loglevel INFO
'./awx/some/module.py', # pathname
100, # lineno
None, # msg
tuple(), # args,
None # exc_info
)
record.module_name = 'packages'
record.facts_data = [{
"name": "ansible",
"version": "2.2.1.0"
}, {
"name": "ansible-tower",
"version": "3.1.0"
}]
async_futures = handler.emit(record)
[future.result() for future in async_futures]
assert len(ok200_adapter.requests) == 2
requests = sorted(ok200_adapter.requests, key=lambda request: json.loads(request.body)['version'])
request = requests[0]
assert request.url == 'http://127.0.0.1/'
assert request.method == 'POST'
body = json.loads(request.body)
assert body['level'] == 'INFO'
assert body['logger_name'] == 'awx.analytics.system_tracking'
assert body['name'] == 'ansible'
assert body['version'] == '2.2.1.0'
request = requests[1]
assert request.url == 'http://127.0.0.1/'
assert request.method == 'POST'
body = json.loads(request.body)
assert body['level'] == 'INFO'
assert body['logger_name'] == 'awx.analytics.system_tracking'
assert body['name'] == 'ansible-tower'
assert body['version'] == '3.1.0'

View File

@ -0,0 +1,38 @@
# awx.main.utils.reload
from awx.main.utils import reload
def test_produce_supervisor_command(mocker):
with mocker.patch.object(reload.subprocess, 'Popen'):
reload._supervisor_service_restart(['beat', 'callback', 'fact'])
reload.subprocess.Popen.assert_called_once_with(
['supervisorctl', 'restart', 'tower-processes:receiver', 'tower-processes:factcacher'])
def test_routing_of_service_restarts_works(mocker):
'''
This tests that the parent restart method will call the appropriate
service restart methods, depending on which services are given in args
'''
with mocker.patch.object(reload, '_uwsgi_reload'),\
mocker.patch.object(reload, '_reset_celery_thread_pool'),\
mocker.patch.object(reload, '_supervisor_service_restart'):
reload.restart_local_services(['uwsgi', 'celery', 'flower', 'daphne'])
reload._uwsgi_reload.assert_called_once_with()
reload._reset_celery_thread_pool.assert_called_once_with()
reload._supervisor_service_restart.assert_called_once_with(['flower', 'daphne'])
def test_routing_of_service_restarts_diables(mocker):
'''
Test that methods are not called if not in the args
'''
with mocker.patch.object(reload, '_uwsgi_reload'),\
mocker.patch.object(reload, '_reset_celery_thread_pool'),\
mocker.patch.object(reload, '_supervisor_service_restart'):
reload.restart_local_services(['flower'])
reload._uwsgi_reload.assert_not_called()
reload._reset_celery_thread_pool.assert_not_called()
reload._supervisor_service_restart.assert_called_once_with(['flower'])

View File

@ -21,6 +21,8 @@ import tempfile
# Decorator
from decorator import decorator
import six
# Django
from django.utils.translation import ugettext_lazy as _
from django.db.models import ManyToManyField
@ -181,7 +183,7 @@ def get_encryption_key(field_name, pk=None):
return h.digest()[:16]
def encrypt_field(instance, field_name, ask=False, subfield=None):
def encrypt_field(instance, field_name, ask=False, subfield=None, skip_utf8=False):
'''
Return content of the given instance and field name encrypted.
'''
@ -190,6 +192,10 @@ def encrypt_field(instance, field_name, ask=False, subfield=None):
value = value[subfield]
if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'):
return value
if skip_utf8:
utf8 = False
else:
utf8 = type(value) == six.text_type
value = smart_str(value)
key = get_encryption_key(field_name, getattr(instance, 'pk', None))
cipher = AES.new(key, AES.MODE_ECB)
@ -197,17 +203,31 @@ def encrypt_field(instance, field_name, ask=False, subfield=None):
value += '\x00'
encrypted = cipher.encrypt(value)
b64data = base64.b64encode(encrypted)
return '$encrypted$%s$%s' % ('AES', b64data)
tokens = ['$encrypted', 'AES', b64data]
if utf8:
# If the value to encrypt is utf-8, we need to add a marker so we
# know to decode the data when it's decrypted later
tokens.insert(1, 'UTF8')
return '$'.join(tokens)
def decrypt_value(encryption_key, value):
algo, b64data = value[len('$encrypted$'):].split('$', 1)
raw_data = value[len('$encrypted$'):]
# If the encrypted string contains a UTF8 marker, discard it
utf8 = raw_data.startswith('UTF8$')
if utf8:
raw_data = raw_data[len('UTF8$'):]
algo, b64data = raw_data.split('$', 1)
if algo != 'AES':
raise ValueError('unsupported algorithm: %s' % algo)
encrypted = base64.b64decode(b64data)
cipher = AES.new(encryption_key, AES.MODE_ECB)
value = cipher.decrypt(encrypted)
return value.rstrip('\x00')
value = value.rstrip('\x00')
# If the encrypted string contained a UTF8 marker, decode the data
if utf8:
value = value.decode('utf-8')
return value
def decrypt_field(instance, field_name, subfield=None):

22
awx/main/utils/db.py Normal file
View File

@ -0,0 +1,22 @@
# Copyright (c) 2017 Ansible by Red Hat
# All Rights Reserved.
# Django database
from django.db.migrations.loader import MigrationLoader
from django.db import connection
# Python
import re
def get_tower_migration_version():
loader = MigrationLoader(connection, ignore_no_migrations=True)
v = '000'
for app_name, migration_name in loader.applied_migrations:
if app_name == 'main':
version_captures = re.findall('^[0-9]{4}_v([0-9]{3})_', migration_name)
if len(version_captures) == 1:
migration_version = version_captures[0]
if migration_version > v:
v = migration_version
return v

View File

@ -2,7 +2,6 @@
# All Rights Reserved.
from logstash.formatter import LogstashFormatterVersion1
from django.conf import settings
from copy import copy
import json
import time
@ -10,8 +9,11 @@ import time
class LogstashFormatter(LogstashFormatterVersion1):
def __init__(self, **kwargs):
settings_module = kwargs.pop('settings_module', None)
ret = super(LogstashFormatter, self).__init__(**kwargs)
self.host_id = settings.CLUSTER_HOST_ID
if settings_module:
self.host_id = settings_module.CLUSTER_HOST_ID
self.tower_uuid = settings_module.LOG_AGGREGATOR_TOWER_UUID
return ret
def reformat_data_for_log(self, raw_data, kind=None):
@ -56,6 +58,21 @@ class LogstashFormatter(LogstashFormatterVersion1):
adict[name] = subdict
return adict
def convert_to_type(t, val):
if t is float:
val = val[:-1] if val.endswith('s') else val
try:
return float(val)
except ValueError:
return val
elif t is int:
try:
return int(val)
except ValueError:
return val
elif t is str:
return val
if kind == 'job_events':
data.update(data.get('event_data', {}))
for fd in data:
@ -81,12 +98,35 @@ class LogstashFormatter(LogstashFormatterVersion1):
else:
data_for_log['facts'] = data
data_for_log['module_name'] = module_name
elif kind == 'performance':
request = raw_data['python_objects']['request']
response = raw_data['python_objects']['response']
# Note: All of the below keys may not be in the response "dict"
# For example, X-API-Query-Time and X-API-Query-Count will only
# exist if SQL_DEBUG is turned on in settings.
headers = [
(float, 'X-API-Time'), # may end with an 's' "0.33s"
(int, 'X-API-Query-Count'),
(float, 'X-API-Query-Time'), # may also end with an 's'
(str, 'X-API-Node'),
]
data_for_log['x_api'] = {k: convert_to_type(t, response[k]) for (t, k) in headers if k in response}
data_for_log['request'] = {
'method': request.method,
'path': request.path,
'path_info': request.path_info,
'query_string': request.META['QUERY_STRING'],
'data': request.data,
}
return data_for_log
def get_extra_fields(self, record):
fields = super(LogstashFormatter, self).get_extra_fields(record)
if record.name.startswith('awx.analytics'):
log_kind = record.name.split('.')[-1]
log_kind = record.name[len('awx.analytics.'):]
fields = self.reformat_data_for_log(fields, kind=log_kind)
return fields
@ -104,9 +144,13 @@ class LogstashFormatter(LogstashFormatterVersion1):
# Extra Fields
'level': record.levelname,
'logger_name': record.name,
'cluster_host_id': self.host_id
}
if getattr(self, 'tower_uuid', None):
message['tower_uuid'] = self.tower_uuid
if getattr(self, 'host_id', None):
message['cluster_host_id'] = self.host_id
# Add extra fields
message.update(self.get_extra_fields(record))

View File

@ -12,9 +12,11 @@ import traceback
from requests_futures.sessions import FuturesSession
# custom
from django.conf import settings as django_settings
from django.utils.log import NullHandler
# AWX
from awx.main.utils.formatters import LogstashFormatter
__all__ = ['HTTPSNullHandler', 'BaseHTTPSHandler', 'configure_external_logger']
# AWX external logging handler, generally designed to be used
# with the accompanying LogstashHandler, derives from python-logstash library
@ -38,31 +40,32 @@ def unused_callback(sess, resp):
pass
class HTTPSNullHandler(NullHandler):
class HTTPSNullHandler(logging.NullHandler):
"Placeholder null handler to allow loading without database access"
def __init__(self, host, **kwargs):
def __init__(self, *args, **kwargs):
return super(HTTPSNullHandler, self).__init__()
class HTTPSHandler(logging.Handler):
class BaseHTTPSHandler(logging.Handler):
def __init__(self, fqdn=False, **kwargs):
super(HTTPSHandler, self).__init__()
super(BaseHTTPSHandler, self).__init__()
self.fqdn = fqdn
self.async = kwargs.get('async', True)
for fd in PARAM_NAMES:
# settings values take precedence over the input params
settings_name = PARAM_NAMES[fd]
settings_val = getattr(django_settings, settings_name, None)
if settings_val:
setattr(self, fd, settings_val)
elif fd in kwargs:
setattr(self, fd, kwargs[fd])
else:
setattr(self, fd, None)
self.session = FuturesSession()
setattr(self, fd, kwargs.get(fd, None))
if self.async:
self.session = FuturesSession()
else:
self.session = requests.Session()
self.add_auth_information()
@classmethod
def from_django_settings(cls, settings, *args, **kwargs):
for param, django_setting_name in PARAM_NAMES.items():
kwargs[param] = getattr(settings, django_setting_name, None)
return cls(*args, **kwargs)
def get_full_message(self, record):
if record.exc_info:
return '\n'.join(traceback.format_exception(*record.exc_info))
@ -85,7 +88,7 @@ class HTTPSHandler(logging.Handler):
self.session.headers.update(headers)
def get_http_host(self):
host = self.host
host = self.host or ''
if not host.startswith('http'):
host = 'http://%s' % self.host
if self.port != 80 and self.port is not None:
@ -113,14 +116,25 @@ class HTTPSHandler(logging.Handler):
if not logger_name.startswith('awx.analytics'):
# Tower log emission is only turned off by enablement setting
return False
return self.enabled_loggers is None or logger_name.split('.')[-1] not in self.enabled_loggers
return self.enabled_loggers is None or logger_name[len('awx.analytics.'):] not in self.enabled_loggers
def emit(self, record):
"""
Emit a log record. Returns a list of zero or more
``concurrent.futures.Future`` objects.
When ``self.async`` is True, the list will contain one
Future object for each HTTP request made. When ``self.async`` is
False, the list will be empty.
See:
https://docs.python.org/3/library/concurrent.futures.html#future-objects
http://pythonhosted.org/futures/
"""
if self.skip_log(record.name):
return
return []
try:
payload = self.format(record)
host = self.get_http_host()
# Special action for System Tracking, queue up multiple log messages
if self.indv_facts:
@ -129,18 +143,56 @@ class HTTPSHandler(logging.Handler):
module_name = payload_data['module_name']
if module_name in ['services', 'packages', 'files']:
facts_dict = payload_data.pop(module_name)
async_futures = []
for key in facts_dict:
fact_payload = copy(payload_data)
fact_payload.update(facts_dict[key])
self.session.post(host, **self.get_post_kwargs(fact_payload))
return
if self.async:
async_futures.append(self._send(fact_payload))
else:
self._send(fact_payload)
return async_futures
if self.async:
self.session.post(host, **self.get_post_kwargs(payload))
else:
requests.post(host, auth=requests.auth.HTTPBasicAuth(self.username, self.password), **self.get_post_kwargs(payload))
return [self._send(payload)]
self._send(payload)
return []
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
def _send(self, payload):
return self.session.post(self.get_http_host(),
**self.get_post_kwargs(payload))
def add_or_remove_logger(address, instance):
specific_logger = logging.getLogger(address)
for i, handler in enumerate(specific_logger.handlers):
if isinstance(handler, (HTTPSNullHandler, BaseHTTPSHandler)):
specific_logger.handlers[i] = instance or HTTPSNullHandler()
break
else:
if instance is not None:
specific_logger.handlers.append(instance)
def configure_external_logger(settings_module, async_flag=True, is_startup=True):
is_enabled = settings_module.LOG_AGGREGATOR_ENABLED
if is_startup and (not is_enabled):
# Pass-through if external logging not being used
return
instance = None
if is_enabled:
instance = BaseHTTPSHandler.from_django_settings(settings_module, async=async_flag)
instance.setFormatter(LogstashFormatter(settings_module=settings_module))
awx_logger_instance = instance
if is_enabled and 'awx' not in settings_module.LOG_AGGREGATOR_LOGGERS:
awx_logger_instance = None
add_or_remove_logger('awx.analytics', instance)
add_or_remove_logger('awx', awx_logger_instance)

68
awx/main/utils/reload.py Normal file
View File

@ -0,0 +1,68 @@
# Copyright (c) 2017 Ansible Tower by Red Hat
# All Rights Reserved.
# Python
import subprocess
import logging
# Django
from django.conf import settings
# Celery
from celery import current_app
logger = logging.getLogger('awx.main.utils.reload')
def _uwsgi_reload():
# http://uwsgi-docs.readthedocs.io/en/latest/MasterFIFO.html#available-commands
logger.warn('Initiating uWSGI chain reload of server')
TRIGGER_CHAIN_RELOAD = 'c'
with open(settings.UWSGI_FIFO_LOCATION, 'w') as awxfifo:
awxfifo.write(TRIGGER_CHAIN_RELOAD)
def _reset_celery_thread_pool():
# Send signal to restart thread pool
app = current_app._get_current_object()
app.control.broadcast('pool_restart', arguments={'reload': True},
destination=['celery@{}'.format(settings.CLUSTER_HOST_ID)], reply=False)
def _supervisor_service_restart(service_internal_names):
'''
Service internal name options:
- beat - celery - callback - channels - uwsgi - daphne
- fact - nginx
example use pattern of supervisorctl:
# supervisorctl restart tower-processes:receiver tower-processes:factcacher
'''
group_name = 'tower-processes'
args = ['supervisorctl']
if settings.DEBUG:
args.extend(['-c', '/supervisor.conf'])
programs = []
name_translation_dict = settings.SERVICE_NAME_DICT
for n in service_internal_names:
if n in name_translation_dict:
programs.append('{}:{}'.format(group_name, name_translation_dict[n]))
args.extend(['restart'])
args.extend(programs)
logger.debug('Issuing command to restart services, args={}'.format(args))
subprocess.Popen(args)
def restart_local_services(service_internal_names):
logger.warn('Restarting services {} on this node in response to user action'.format(service_internal_names))
if 'uwsgi' in service_internal_names:
_uwsgi_reload()
service_internal_names.remove('uwsgi')
restart_celery = False
if 'celery' in service_internal_names:
restart_celery = True
service_internal_names.remove('celery')
_supervisor_service_restart(service_internal_names)
if restart_celery:
# Celery restarted last because this probably includes current process
_reset_celery_thread_pool()

View File

@ -431,7 +431,8 @@ CELERY_ROUTES = {'awx.main.tasks.run_job': {'queue': 'jobs',
CELERYBEAT_SCHEDULE = {
'tower_scheduler': {
'task': 'awx.main.tasks.tower_periodic_scheduler',
'schedule': timedelta(seconds=30)
'schedule': timedelta(seconds=30),
'options': {'expires': 20,}
},
'admin_checks': {
'task': 'awx.main.tasks.run_administrative_checks',
@ -443,7 +444,8 @@ CELERYBEAT_SCHEDULE = {
},
'cluster_heartbeat': {
'task': 'awx.main.tasks.cluster_node_heartbeat',
'schedule': timedelta(seconds=60)
'schedule': timedelta(seconds=60),
'options': {'expires': 50,}
},
'purge_stdout_files': {
'task': 'awx.main.tasks.purge_old_stdout_files',
@ -451,11 +453,13 @@ CELERYBEAT_SCHEDULE = {
},
'task_manager': {
'task': 'awx.main.scheduler.tasks.run_task_manager',
'schedule': timedelta(seconds=20)
'schedule': timedelta(seconds=20),
'options': {'expires': 20,}
},
'task_fail_inconsistent_running_jobs': {
'task': 'awx.main.scheduler.tasks.run_fail_inconsistent_running_jobs',
'schedule': timedelta(seconds=30)
'schedule': timedelta(seconds=30),
'options': {'expires': 20,}
},
}
@ -893,16 +897,16 @@ LOGGING = {
'formatter': 'simple',
},
'null': {
'class': 'django.utils.log.NullHandler',
'class': 'logging.NullHandler',
},
'file': {
'class': 'django.utils.log.NullHandler',
'class': 'logging.NullHandler',
'formatter': 'simple',
},
'syslog': {
'level': 'WARNING',
'filters': ['require_debug_false'],
'class': 'django.utils.log.NullHandler',
'class': 'logging.NullHandler',
'formatter': 'simple',
},
'http_receiver': {

View File

@ -112,3 +112,15 @@ except ImportError:
CLUSTER_HOST_ID = socket.gethostname()
CELERY_ROUTES['awx.main.tasks.cluster_node_heartbeat'] = {'queue': CLUSTER_HOST_ID, 'routing_key': CLUSTER_HOST_ID}
# Supervisor service name dictionary used for programatic restart
SERVICE_NAME_DICT = {
"celery": "celeryd",
"callback": "receiver",
"runworker": "channels",
"uwsgi": "uwsgi",
"daphne": "daphne",
"fact": "factcacher",
"nginx": "nginx"}
# Used for sending commands in automatic restart
UWSGI_FIFO_LOCATION = '/awxfifo'

View File

@ -57,6 +57,18 @@ LOGGING['handlers']['fact_receiver']['filename'] = '/var/log/tower/fact_receiver
LOGGING['handlers']['system_tracking_migrations']['filename'] = '/var/log/tower/tower_system_tracking_migrations.log'
LOGGING['handlers']['rbac_migrations']['filename'] = '/var/log/tower/tower_rbac_migrations.log'
# Supervisor service name dictionary used for programatic restart
SERVICE_NAME_DICT = {
"beat": "awx-celeryd-beat",
"celery": "awx-celeryd",
"callback": "awx-callback-receiver",
"channels": "awx-channels-worker",
"uwsgi": "awx-uwsgi",
"daphne": "awx-daphne",
"fact": "awx-fact-cache-receiver"}
# Used for sending commands in automatic restart
UWSGI_FIFO_LOCATION = '/var/lib/awx/awxfifo'
# Store a snapshot of default settings at this point before loading any
# customizable config files.
DEFAULTS_SNAPSHOT = {}

View File

@ -23,6 +23,10 @@ from awx.main.models import AuthToken
class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
def process_view(self, request, callback, callback_args, callback_kwargs):
if request.path.startswith('/sso/login/'):
request.session['social_auth_last_backend'] = callback_kwargs['backend']
def process_request(self, request):
token_key = request.COOKIES.get('token', '')
token_key = urllib.quote(urllib.unquote(token_key).strip('"'))
@ -57,6 +61,7 @@ class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
if auth_token and request.user and request.user.is_authenticated():
request.session.pop('social_auth_error', None)
request.session.pop('social_auth_last_backend', None)
def process_exception(self, request, exception):
strategy = getattr(request, 'social_strategy', None)
@ -66,6 +71,12 @@ class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
if isinstance(exception, SocialAuthBaseException) or request.path.startswith('/sso/'):
backend = getattr(request, 'backend', None)
backend_name = getattr(backend, 'name', 'unknown-backend')
message = self.get_message(request, exception)
if request.session.get('social_auth_last_backend') != backend_name:
backend_name = request.session.get('social_auth_last_backend')
message = request.GET.get('error_description', message)
full_backend_name = backend_name
try:
idp_name = strategy.request_data()['RelayState']
@ -73,7 +84,6 @@ class SocialAuthMiddleware(SocialAuthExceptionMiddleware):
except KeyError:
pass
message = self.get_message(request, exception)
social_logger.error(message)
url = self.get_redirect_uri(request, exception)

View File

@ -921,7 +921,7 @@ input[type="checkbox"].checkbox-no-label {
/* Display list actions next to search widget */
.list-actions {
text-align: right;
margin-bottom: 20px;
margin-bottom: -34px;
.fa-lg {
vertical-align: -8%;
@ -1939,10 +1939,16 @@ tr td button i {
padding-right: 15px;
}
}
// lists.less uses 600px as the breakpoint, doing same for consistency
@media (max-width: 600px) {
.list-actions {
text-align: left;
margin-bottom: 20px;
}
}
.nvtooltip {
@ -2241,6 +2247,10 @@ html input[disabled] {
cursor: not-allowed;
}
.CodeMirror {
font-family: Monaco, Menlo, Consolas, "Courier New", monospace;
}
.CodeMirror--disabled .CodeMirror.cm-s-default,
.CodeMirror--disabled .CodeMirror-line {
background-color: #f6f6f6;

View File

@ -245,13 +245,13 @@
.Form-textArea{
border-radius: 5px;
color: @field-input-text;
background-color: @field-secondary-bg!important;
background-color: @field-secondary-bg;
width:100%!important;
}
.Form-textInput{
height: 30px;
background-color: @field-secondary-bg!important;
background-color: @field-secondary-bg;
border-radius: 5px;
border:1px solid @field-border;
color: @field-input-text;

View File

@ -153,10 +153,13 @@ table, tbody {
.List-actionHolder {
justify-content: flex-end;
display: flex;
// margin-bottom: 20px;
// float: right;
}
.List-actions {
display: flex;
margin-bottom: -32px;
}
.List-auxAction {
@ -275,6 +278,7 @@ table, tbody {
}
.List-noItems {
margin-top: 52px;
display: flex;
align-items: center;
justify-content: center;
@ -287,6 +291,9 @@ table, tbody {
text-transform: uppercase;
}
.modal-body > .List-noItems {
margin-top: 0px;
}
.List-editButton--selected {
background-color: @list-actn-bg-hov !important;
color: @list-actn-icn-hov;
@ -419,7 +426,51 @@ table, tbody {
flex: 1 0 auto;
margin-top: 12px;
}
.List-actions {
margin-bottom: 20px;
}
.List-well {
margin-top: 20px;
}
.List-action:not(.ng-hide) ~ .List-action:not(.ng-hide) {
margin-left: 0;
}
}
.InventoryManage-container, .modal-body {
.List-header {
flex-direction: column;
align-items: stretch;
}
.List-actionHolder {
justify-content: flex-start;
align-items: center;
flex: 1 0 auto;
margin-top: 12px;
}
.List-actions {
margin-bottom: 20px;
}
.List-well {
margin-top: 20px;
}
.List-action:not(.ng-hide) ~ .List-action:not(.ng-hide) {
margin-left: 0;
}
}
// Inventory Manage exceptions
.InventoryManage-container {
.List-actionHolder {
justify-content: flex-end;
margin-top: -52px;
}
.List-action button {
margin-left: 12px;
}
.SmartSearch-searchTermContainer {
width: 100%;
}
}

View File

@ -46,7 +46,7 @@
.ansi3 { font-weight: italic; }
.ansi4 { text-decoration: underline; }
.ansi9 { text-decoration: line-through; }
.ansi30 { color: @default-stdout-txt; }
.ansi30 { color: @default-data-txt; }
.ansi31 { color: @default-err; }
.ansi1.ansi31 {
color: @default-unreachable;

View File

@ -62,13 +62,13 @@ export default ['$rootScope', '$scope', 'GetBasePath', 'Rest', '$q', 'Wait', 'Pr
user.username;
}
if (item.isSelected) {
if (value.isSelected) {
if (item.type === 'user') {
item.name = buildName(item);
}
scope.allSelected.push(item);
} else {
scope.allSelected = _.remove(scope.allSelected, { id: item.id });
_.remove(scope.allSelected, { id: item.id });
}
});

View File

@ -44,7 +44,7 @@
</div>
</div>
<div id="AddPermissions-users" class="AddPermissions-list" ng-if="usersSelected">
<div id="AddPermissions-users" class="AddPermissions-list" ng-show="usersSelected">
<rbac-multiselect-list view="Users" all-selected="allSelected" dataset="usersDataset"></rbac-multiselect-list>
</div>
<div id="AddPermissions-teams" class="AddPermissions-list" ng-if="teamsSelected">
@ -62,7 +62,7 @@
<translate>Please assign roles to the selected users/teams</translate>
<div class="AddPermissions-keyToggle btn"
ng-class="{'is-active': showKeyPane}"
ng-click="toggleKeyPane()">
ng-click="toggleKeyPane()" translate>
Key
</div>
</div>
@ -104,13 +104,13 @@
<div class="buttons Form-buttons AddPermissions-buttons">
<button type="button"
class="btn btn-sm Form-cancelButton"
ng-click="closeModal()">
ng-click="closeModal()" translate>
Cancel
</button>
<button type="button"
class="btn btn-sm Form-saveButton"
ng-click="updatePermissions()"
ng-disabled="userRoleForm.$invalid || !allSelected || !allSelected.length">
ng-disabled="userRoleForm.$invalid || !allSelected || !allSelected.length" translate>
Save
</button>
</div>

View File

@ -127,7 +127,7 @@ function(rootScope, scope, $state, i18n, CreateSelect2, GetBasePath, Rest, $q, W
let resourceType = scope.currentTab(),
item = value.value;
if (item.isSelected) {
if (value.isSelected) {
scope.selected[resourceType][item.id] = item;
scope.selected[resourceType][item.id].roles = [];
aggregateKey(item, resourceType);

View File

@ -9,12 +9,6 @@
return {
name: 'users',
iterator: 'user',
defaultSearchParams: function(term){
return {or__username__icontains: term,
or__first_name__icontains: term,
or__last_name__icontains: term
};
},
title: false,
listTitleBadge: false,
multiSelect: true,

View File

@ -65,6 +65,7 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
description: list.fields.description
};
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
list.fields.name.ngHref = '#/templates/job_template/{{job_template.id}}';
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break;
@ -77,6 +78,7 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
description: list.fields.description
};
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
list.fields.name.ngHref = '#/templates/workflow_job_template/{{workflow_template.id}}';
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break;
case 'Users':
@ -119,10 +121,39 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
scope.$watch(list.name, function(){
_.forEach(scope[`${list.name}`], isSelected);
optionsRequestDataProcessing();
});
scope.$on(`${list.iterator}_options`, function(event, data){
scope.options = data.data.actions.GET;
optionsRequestDataProcessing();
});
// iterate over the list and add fields like type label, after the
// OPTIONS request returns, or the list is sorted/paginated/searched
function optionsRequestDataProcessing(){
if(scope.list.name === 'projects'){
if (scope[list.name] !== undefined) {
scope[list.name].forEach(function(item, item_idx) {
var itm = scope[list.name][item_idx];
// Set the item type label
if (list.fields.scm_type && scope.options &&
scope.options.hasOwnProperty('scm_type')) {
scope.options.scm_type.choices.forEach(function(choice) {
if (choice[0] === item.scm_type) {
itm.type_label = choice[1];
}
});
}
});
}
}
}
function isSelected(item){
if(_.find(scope.allSelected, {id: item.id})){
if(_.find(scope.allSelected, {id: item.id, type: item.type})){
item.isSelected = true;
}
return item;

View File

@ -382,8 +382,11 @@ var tower = angular.module('Tower', [
Authorization.restoreUserInfo(); //user must have hit browser refresh
}
if (next && (next.name !== "signIn" && next.name !== "signOut" && next.name !== "license")) {
// if not headed to /login or /logout, then check the license
CheckLicense.test(event);
if($rootScope.configReady === true){
// if not headed to /login or /logout, then check the license
CheckLicense.test(event);
}
}
}
activateTab();

View File

@ -60,9 +60,11 @@ export default [
}
var activeForm = function() {
if(!$scope.$parent[formTracker.currentFormName()].$dirty) {
authVm.activeAuthForm = authVm.dropdownValue;
formTracker.setCurrentAuth(authVm.activeAuthForm);
startCodeMirrors();
} else {
var msg = i18n._('You have unsaved changes. Would you like to proceed <strong>without</strong> saving?');
var title = i18n._('Warning: Unsaved Changes');
@ -115,28 +117,36 @@ export default [
var authForms = [{
formDef: configurationAzureForm,
id: 'auth-azure-form'
id: 'auth-azure-form',
name: 'azure'
}, {
formDef: configurationGithubForm,
id: 'auth-github-form'
id: 'auth-github-form',
name: 'github'
}, {
formDef: configurationGithubOrgForm,
id: 'auth-github-org-form'
id: 'auth-github-org-form',
name: 'github_org'
}, {
formDef: configurationGithubTeamForm,
id: 'auth-github-team-form'
id: 'auth-github-team-form',
name: 'github_team'
}, {
formDef: configurationGoogleForm,
id: 'auth-google-form'
id: 'auth-google-form',
name: 'google_oauth'
}, {
formDef: configurationLdapForm,
id: 'auth-ldap-form'
id: 'auth-ldap-form',
name: 'ldap'
}, {
formDef: configurationRadiusForm,
id: 'auth-radius-form'
id: 'auth-radius-form',
name: 'radius'
}, {
formDef: configurationSamlForm,
id: 'auth-saml-form'
id: 'auth-saml-form',
name: 'saml'
}, ];
var forms = _.pluck(authForms, 'formDef');
@ -161,9 +171,42 @@ export default [
form.buttons.save.disabled = $rootScope.user_is_system_auditor;
});
function startCodeMirrors(key){
var form = _.find(authForms, function(f){
return f.name === $scope.authVm.activeAuthForm;
});
if(!key){
// Attach codemirror to fields that need it
_.each(form.formDef.fields, function(field) {
// Codemirror balks at empty values so give it one
if($scope.$parent[field.name] === null && field.codeMirror) {
$scope.$parent[field.name] = '{}';
}
if(field.codeMirror) {
createIt(field.name);
}
});
}
else if(key){
createIt(key);
}
function createIt(name){
ParseTypeChange({
scope: $scope.$parent,
variable: name,
parse_variable: 'parseType',
field_id: form.formDef.name + '_' + name
});
$scope.parseTypeChange('parseType', name);
}
}
function addFieldInfo(form, key) {
_.extend(form.fields[key], {
awPopOver: $scope.$parent.configDataResolve[key].help_text,
awPopOver: ($scope.$parent.configDataResolve[key].defined_in_file) ?
null: $scope.$parent.configDataResolve[key].help_text,
label: $scope.$parent.configDataResolve[key].label,
name: key,
toggleSource: key,
@ -186,40 +229,23 @@ export default [
id: form.id,
mode: 'edit',
scope: $scope.$parent,
related: true
related: true,
noPanel: true
});
});
// Flag to avoid re-rendering and breaking Select2 dropdowns on tab switching
var dropdownRendered = false;
$scope.$on('populated', function() {
// Attach codemirror to fields that need it
_.each(authForms, function(form) {
_.each(form.formDef.fields, function(field) {
// Codemirror balks at empty values so give it one
if($scope.$parent[field.name] === null && field.codeMirror) {
$scope.$parent[field.name] = '{}';
}
if(field.codeMirror) {
ParseTypeChange({
scope: $scope.$parent,
variable: field.name,
parse_variable: 'parseType',
field_id: form.formDef.name + '_' + field.name,
readonly: true,
});
}
});
});
// Create Select2 fields
var opts = [];
function populateLDAPGroupType(flag){
if($scope.$parent.AUTH_LDAP_GROUP_TYPE !== null) {
opts.push({
id: $scope.$parent.AUTH_LDAP_GROUP_TYPE,
text: $scope.$parent.AUTH_LDAP_GROUP_TYPE
});
$scope.$parent.AUTH_LDAP_GROUP_TYPE = _.find($scope.$parent.AUTH_LDAP_GROUP_TYPE_options, { value: $scope.$parent.AUTH_LDAP_GROUP_TYPE });
}
if(flag !== undefined){
dropdownRendered = flag;
}
if(!dropdownRendered) {
@ -228,15 +254,21 @@ export default [
element: '#configuration_ldap_template_AUTH_LDAP_GROUP_TYPE',
multiple: false,
placeholder: i18n._('Select group types'),
opts: opts
});
// Fix for bug where adding selected opts causes form to be $dirty and triggering modal
// TODO Find better solution for this bug
$timeout(function(){
$scope.$parent.configuration_ldap_template_form.$setPristine();
}, 1000);
}
}
$scope.$on('AUTH_LDAP_GROUP_TYPE_populated', function(e, data, flag) {
populateLDAPGroupType(flag);
});
$scope.$on('codeMirror_populated', function(e, key) {
startCodeMirrors(key);
});
$scope.$on('populated', function() {
startCodeMirrors();
populateLDAPGroupType(false);
});
angular.extend(authVm, {

View File

@ -24,11 +24,15 @@ export default ['i18n', function(i18n) {
reset: 'SOCIAL_AUTH_SAML_SP_ENTITY_ID'
},
SOCIAL_AUTH_SAML_SP_PUBLIC_CERT: {
type: 'text',
type: 'textarea',
rows: 6,
elementClass: 'Form-monospace',
reset: 'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT'
},
SOCIAL_AUTH_SAML_SP_PRIVATE_KEY: {
type: 'sensitive',
type: 'textarea',
rows: 6,
elementClass: 'Form-monospace',
hasShowInputButton: true,
reset: 'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY'
},

View File

@ -147,6 +147,8 @@ textarea[disabled="disabled"] + div[id*="-container"]{
//Needed to show the not-allowed cursor over a Codemirror instance
.Form-formGroup--disabled {
cursor: not-allowed;
position: relative;
display: inline-block;
// Filepicker and toggle disabling
.Form-filePicker--pickerButton, .Form-filePicker--textBox,
@ -155,4 +157,46 @@ textarea[disabled="disabled"] + div[id*="-container"]{
cursor: not-allowed;
}
// Adding explanatory tooltips for disabled fields
// Borrows styling from .popover
.Form-tooltip--disabled {
visibility: hidden;
background-color: @default-interface-txt;
color: @default-bg;
text-align: center;
border-radius: 6px;
position: absolute;
z-index: 1;
width: 200px;
bottom: 110%;
left: 50%;
margin-left: -100px;
background-clip: padding-box;
border: 1px solid rgba(0,0,0,.2);
-webkit-box-shadow: 0 5px 10px rgba(0,0,0,.2);
box-shadow: 0 5px 10px rgba(0,0,0,.2);
white-space: normal;
padding: 9px 14px;
font-size: 12px;
font-weight: bold;
}
&:hover .Form-tooltip--disabled {
visibility: visible;
}
.Form-tooltip--disabled::after {
content: " ";
position: absolute;
top: 100%;
left: 50%;
margin-left: -11px;
border-width: 11px;
border-style: solid;
border-color: @default-interface-txt transparent transparent transparent;
}
}

View File

@ -7,7 +7,7 @@
export default [
'$scope', '$rootScope', '$state', '$stateParams', '$timeout', '$q', 'Alert', 'ClearScope',
'ConfigurationService', 'ConfigurationUtils', 'CreateDialog', 'CreateSelect2', 'i18n', 'ParseTypeChange', 'ProcessErrors', 'Store',
'Wait', 'configDataResolve',
'Wait', 'configDataResolve', 'ToJSON',
//Form definitions
'configurationAzureForm',
'configurationGithubForm',
@ -25,7 +25,7 @@ export default [
function(
$scope, $rootScope, $state, $stateParams, $timeout, $q, Alert, ClearScope,
ConfigurationService, ConfigurationUtils, CreateDialog, CreateSelect2, i18n, ParseTypeChange, ProcessErrors, Store,
Wait, configDataResolve,
Wait, configDataResolve, ToJSON,
//Form definitions
configurationAzureForm,
configurationGithubForm,
@ -71,7 +71,7 @@ export default [
// we want the options w/o a space, and
// the ConfigurationUtils.arrayToList()
// does a string.split(', ') w/ an extra space
// behind the comma.
// behind the comma.
if(key === "AD_HOC_COMMANDS"){
$scope[key] = data[key].toString();
}
@ -295,9 +295,20 @@ export default [
ConfigurationService.patchConfiguration(payload)
.then(function() {
$scope[key] = $scope.configDataResolve[key].default;
if(key === "AD_HOC_COMMANDS"){
$scope.AD_HOC_COMMANDS = $scope.AD_HOC_COMMANDS.toString();
$scope.$broadcast('adhoc_populated', null, false);
if($scope[key + '_field'].type === "select"){
// We need to re-instantiate the Select2 element
// after resetting the value. Example:
$scope.$broadcast(key+'_populated', null, false);
}
else if($scope[key + '_field'].reset === "CUSTOM_LOGO"){
$scope.$broadcast(key+'_reverted');
}
else if($scope[key + '_field'].type === "textarea" && _.isArray($scope.configDataResolve[key].default)){
$scope[key] = ConfigurationUtils.arrayToList($scope[key], key);
}
else if($scope[key + '_field'].hasOwnProperty('codeMirror')){
$scope[key] = '{}';
$scope.$broadcast('codeMirror_populated', key);
}
loginUpdate();
})
@ -353,7 +364,12 @@ export default [
payload[key] = _.map($scope[key], 'value').join(',');
}
} else {
payload[key] = $scope[key].value;
if(multiselectDropdowns.indexOf(key) !== -1) {
// Default AD_HOC_COMMANDS to an empty list
payload[key] = $scope[key].value || [];
} else {
payload[key] = $scope[key].value;
}
}
} else if($scope.configDataResolve[key].type === 'list' && $scope[key] !== null) {
// Parse lists
@ -363,7 +379,9 @@ export default [
if($scope[key] === '') {
payload[key] = {};
} else {
payload[key] = JSON.parse($scope[key]);
// payload[key] = JSON.parse($scope[key]);
payload[key] = ToJSON($scope.parseType,
$scope[key]);
}
}
else {
@ -431,6 +449,7 @@ export default [
.then(function() {
populateFromApi();
$scope[formTracker.currentFormName()].$setPristine();
$scope.$broadcast('CUSTOM_LOGO_reverted');
})
.catch(function(error) {
ProcessErrors($scope, error, status, formDefs[formTracker.getCurrent()],

View File

@ -49,7 +49,8 @@ export default [
function addFieldInfo(form, key) {
_.extend(form.fields[key], {
awPopOver: $scope.$parent.configDataResolve[key].help_text,
awPopOver: ($scope.$parent.configDataResolve[key].defined_in_file) ?
null: $scope.$parent.configDataResolve[key].help_text,
label: $scope.$parent.configDataResolve[key].label,
name: key,
toggleSource: key,
@ -67,7 +68,8 @@ export default [
id: 'configure-jobs-form',
mode: 'edit',
scope: $scope.$parent,
related: false
related: false,
noPanel: true
});
// Flag to avoid re-rendering and breaking Select2 dropdowns on tab switching
@ -75,6 +77,7 @@ export default [
function populateAdhocCommand(flag){
$scope.$parent.AD_HOC_COMMANDS = $scope.$parent.AD_HOC_COMMANDS.toString();
var ad_hoc_commands = $scope.$parent.AD_HOC_COMMANDS.split(',');
$scope.$parent.AD_HOC_COMMANDS = _.map(ad_hoc_commands, (item) => _.find($scope.$parent.AD_HOC_COMMANDS_options, { value: item }));
@ -92,12 +95,12 @@ export default [
}
}
$scope.$on('adhoc_populated', function(e, data, flag) {
$scope.$on('AD_HOC_COMMANDS_populated', function(e, data, flag) {
populateAdhocCommand(flag);
});
$scope.$on('populated', function(e, data, flag) {
populateAdhocCommand(flag);
$scope.$on('populated', function() {
populateAdhocCommand(false);
});
// Fix for bug where adding selected opts causes form to be $dirty and triggering modal

View File

@ -122,7 +122,8 @@ export default [
function addFieldInfo(form, key) {
_.extend(form.fields[key], {
awPopOver: $scope.$parent.configDataResolve[key].help_text,
awPopOver: ($scope.$parent.configDataResolve[key].defined_in_file) ?
null: $scope.$parent.configDataResolve[key].help_text,
label: $scope.$parent.configDataResolve[key].label,
name: key,
toggleSource: key,
@ -144,35 +145,40 @@ export default [
id: form.id,
mode: 'edit',
scope: $scope.$parent,
related: true
related: true,
noPanel: true
});
});
var dropdownRendered = false;
$scope.$on('populated', function() {
populateLogAggregator(false);
});
var opts = [];
$scope.$on('LOG_AGGREGATOR_TYPE_populated', function(e, data, flag) {
populateLogAggregator(flag);
});
function populateLogAggregator(flag){
if($scope.$parent.LOG_AGGREGATOR_TYPE !== null) {
_.each(ConfigurationUtils.listToArray($scope.$parent.LOG_AGGREGATOR_TYPE), function(type) {
opts.push({
id: type,
text: type
});
});
$scope.$parent.LOG_AGGREGATOR_TYPE = _.find($scope.$parent.LOG_AGGREGATOR_TYPE_options, { value: $scope.$parent.LOG_AGGREGATOR_TYPE });
}
if(flag !== undefined){
dropdownRendered = flag;
}
if(!dropdownRendered) {
dropdownRendered = true;
CreateSelect2({
element: '#configuration_logging_template_LOG_AGGREGATOR_TYPE',
multiple: true,
multiple: false,
placeholder: i18n._('Select types'),
opts: opts
});
$scope.$parent.configuration_logging_template_form.LOG_AGGREGATOR_TYPE.$setPristine();
}
});
}
// Fix for bug where adding selected opts causes form to be $dirty and triggering modal
// TODO Find better solution for this bug

View File

@ -23,7 +23,6 @@
type: 'select',
reset: 'LOG_AGGREGATOR_TYPE',
ngOptions: 'type.label for type in LOG_AGGREGATOR_TYPE_options track by type.value',
multiSelect: true
},
LOG_AGGREGATOR_USERNAME: {
type: 'text',

View File

@ -52,7 +52,8 @@
function addFieldInfo(form, key) {
_.extend(form.fields[key], {
awPopOver: $scope.$parent.configDataResolve[key].help_text,
awPopOver: ($scope.$parent.configDataResolve[key].defined_in_file) ?
null: $scope.$parent.configDataResolve[key].help_text,
label: $scope.$parent.configDataResolve[key].label,
name: key,
toggleSource: key,
@ -70,30 +71,38 @@
id: 'configure-ui-form',
mode: 'edit',
scope: $scope.$parent,
related: true
related: true,
noPanel: true
});
// Flag to avoid re-rendering and breaking Select2 dropdowns on tab switching
var dropdownRendered = false;
$scope.$on('populated', function(){
function populatePendoTrackingState(flag){
if($scope.$parent.PENDO_TRACKING_STATE !== null) {
$scope.$parent.PENDO_TRACKING_STATE = _.find($scope.$parent.PENDO_TRACKING_STATE_options, { value: $scope.$parent.PENDO_TRACKING_STATE });
}
if(flag !== undefined){
dropdownRendered = flag;
}
if(!dropdownRendered) {
dropdownRendered = true;
CreateSelect2({
element: '#configuration_ui_template_PENDO_TRACKING_STATE',
multiple: false,
placeholder: i18n._('Select commands'),
opts: [{
id: $scope.$parent.PENDO_TRACKING_STATE,
text: $scope.$parent.PENDO_TRACKING_STATE
}]
placeholder: i18n._('Select commands')
});
// Fix for bug where adding selected opts causes form to be $dirty and triggering modal
// TODO Find better solution for this bug
$timeout(function(){
$scope.$parent.configuration_ui_template_form.$setPristine();
}, 1000);
}
}
$scope.$on('PENDO_TRACKING_STATE_populated', function(e, data, flag) {
populatePendoTrackingState(flag);
});
$scope.$on('populated', function(){
populatePendoTrackingState(false);
});
angular.extend(uiVm, {

View File

@ -431,7 +431,7 @@ export default
awToolTip: '{{permissionsTooltip}}',
dataTipWatch: 'permissionsTooltip',
awToolTipTabEnabledInEditMode: true,
dataPlacement: 'top',
dataPlacement: 'right',
basePath: 'api/v1/credentials/{{$stateParams.credential_id}}/access_list/',
search: {
order_by: 'username'

View File

@ -24,6 +24,7 @@ export default
// form generator inspects the current state name to determine whether or not to set an active (.is-selected) class on a form tab
// this setting is optional on most forms, except where the form's edit state name is not parentStateName.edit
activeEditState: 'inventoryManage.editGroup',
detailsClick: "$state.go('inventoryManage.editGroup')",
well: false,
fields: {
name: {

View File

@ -12,9 +12,10 @@
export default
angular.module('HostFormDefinition', [])
.value('HostForm', {
.factory('HostForm', ['i18n', function(i18n) {
return {
addTitle: 'Create Host',
addTitle: i18n._('Create Host'),
editTitle: '{{ host.name }}',
name: 'host',
basePath: 'hosts',
@ -27,46 +28,54 @@ export default
class: 'Form-header-field',
ngClick: 'toggleHostEnabled(host)',
type: 'toggle',
awToolTip: "<p>Indicates if a host is available and should be included in running jobs.</p><p>For hosts that " +
"are part of an external inventory, this flag cannot be changed. It will be set by the inventory sync process.</p>",
dataTitle: 'Host Enabled',
awToolTip: "<p>" +
i18n._("Indicates if a host is available and should be included in running jobs.") +
"</p><p>" +
i18n._("For hosts that are part of an external" +
" inventory, this flag cannot be changed. It will be" +
" set by the inventory sync process.") +
"</p>",
dataTitle: i18n._('Host Enabled'),
ngDisabled: 'host.has_inventory_sources'
}
},
fields: {
name: {
label: 'Host Name',
label: i18n._('Host Name'),
type: 'text',
required: true,
awPopOver: "<p>Provide a host name, ip address, or ip address:port. Examples include:</p>" +
awPopOver: "<p>" +
i18n._("Provide a host name, ip address, or ip address:port. Examples include:") +
"</p>" +
"<blockquote>myserver.domain.com<br/>" +
"127.0.0.1<br />" +
"10.1.0.140:25<br />" +
"server.example.com:25" +
"</blockquote>",
dataTitle: 'Host Name',
dataTitle: i18n._('Host Name'),
dataPlacement: 'right',
dataContainer: 'body',
ngDisabled: '!(host.summary_fields.user_capabilities.edit || canAdd)'
},
description: {
label: 'Description',
label: i18n._('Description'),
ngDisabled: '!(host.summary_fields.user_capabilities.edit || canAdd)',
type: 'text'
},
variables: {
label: 'Variables',
label: i18n._('Variables'),
type: 'textarea',
rows: 6,
class: 'Form-formGroup--fullWidth',
"default": "---",
awPopOver: "<p>Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.</p>" +
awPopOver: "<p>" + i18n._("Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "</p>" +
"JSON:<br />\n" +
"<blockquote>{<br />&emsp;\"somevar\": \"somevalue\",<br />&emsp;\"password\": \"magic\"<br /> }</blockquote>\n" +
"YAML:<br />\n" +
"<blockquote>---<br />somevar: somevalue<br />password: magic<br /></blockquote>\n" +
'<p>View JSON examples at <a href="http://www.json.org" target="_blank">www.json.org</a></p>' +
'<p>View YAML examples at <a href="http://docs.ansible.com/YAMLSyntax.html" target="_blank">docs.ansible.com</a></p>',
dataTitle: 'Host Variables',
'<p>' + i18n.sprintf(i18n._('View JSON examples at %s'), '<a href="http://www.json.org" target="_blank">www.json.org</a>') + '</p>' +
'<p>' + i18n.sprintf(i18n._('View YAML examples at %s'), '<a href="http://docs.ansible.com/YAMLSyntax.html" target="_blank">docs.ansible.com</a>') + '</p>',
dataTitle: i18n._('Host Variables'),
dataPlacement: 'right',
dataContainer: 'body'
},
@ -92,4 +101,5 @@ export default
ngShow: '(host.summary_fields.user_capabilities.edit || canAdd)'
}
},
});
};
}]);

View File

@ -54,6 +54,7 @@ export default
related: {
users: {
name: 'users',
dataPlacement: 'top',
awToolTip: i18n._('Please save before adding users'),
basePath: 'api/v1/organizations/{{$stateParams.organization_id}}/access_list/',

View File

@ -65,6 +65,7 @@ export default
related: {
users: {
name: 'users',
dataPlacement: 'top',
awToolTip: i18n._('Please save before adding users'),
basePath: 'api/v1/teams/{{$stateParams.team_id}}/access_list/',

View File

@ -119,6 +119,7 @@ export default
related: {
organizations: {
name: 'organizations',
awToolTip: i18n._('Please save before assigning to organizations'),
basePath: 'api/v1/users/{{$stateParams.user_id}}/organizations',
emptyListText: i18n._('Please add user to an Organization.'),
@ -146,6 +147,7 @@ export default
//hideOnSuperuser: true // RBAC defunct
},
teams: {
name: 'teams',
awToolTip: i18n._('Please save before assigning to teams'),
basePath: 'api/v1/users/{{$stateParams.user_id}}/teams',
search: {

View File

@ -34,7 +34,7 @@ export default
label: i18n._('Type'),
type: 'radio_group',
ngShow: 'selectedTemplate && edgeFlags.showTypeOptions',
ngDisabled: '!canAddWorkflowJobTemplate',
ngDisabled: '!(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)',
options: [
{
label: i18n._('On&nbsp;Success'),
@ -70,7 +70,7 @@ export default
dataPlacement: 'right',
dataContainer: "body",
ngShow: "selectedTemplate.ask_credential_on_launch",
ngDisabled: '!canAddWorkflowJobTemplate',
ngDisabled: '!(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)',
awRequiredWhen: {
reqExpression: 'selectedTemplate && selectedTemplate.ask_credential_on_launch'
}
@ -90,7 +90,7 @@ export default
dataPlacement: 'right',
dataContainer: "body",
ngShow: "selectedTemplate.ask_inventory_on_launch",
ngDisabled: '!canAddWorkflowJobTemplate',
ngDisabled: '!(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)',
awRequiredWhen: {
reqExpression: 'selectedTemplate && selectedTemplate.ask_inventory_on_launch'
}
@ -111,7 +111,7 @@ export default
dataPlacement: 'right',
dataContainer: "body",
ngShow: "selectedTemplate.ask_job_type_on_launch",
ngDisabled: '!canAddWorkflowJobTemplate',
ngDisabled: '!(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)',
awRequiredWhen: {
reqExpression: 'selectedTemplate && selectedTemplate.ask_job_type_on_launch'
}
@ -128,7 +128,7 @@ export default
dataPlacement: 'right',
dataContainer: "body",
ngShow: "selectedTemplate.ask_limit_on_launch",
ngDisabled: '!canAddWorkflowJobTemplate'
ngDisabled: '!(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)'
},
job_tags: {
label: i18n._('Job Tags'),
@ -143,7 +143,7 @@ export default
dataPlacement: "right",
dataContainer: "body",
ngShow: "selectedTemplate.ask_tags_on_launch",
ngDisabled: '!canAddWorkflowJobTemplate'
ngDisabled: '!(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)'
},
skip_tags: {
label: i18n._('Skip Tags'),
@ -158,22 +158,22 @@ export default
dataPlacement: "right",
dataContainer: "body",
ngShow: "selectedTemplate.ask_skip_tags_on_launch",
ngDisabled: '!canAddWorkflowJobTemplate'
ngDisabled: '!(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)'
}
},
buttons: {
cancel: {
ngClick: 'cancelNodeForm()',
ngShow: 'canAddWorkflowJobTemplate'
ngShow: '(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)'
},
close: {
ngClick: 'cancelNodeForm()',
ngShow: '!canAddWorkflowJobTemplate'
ngShow: '!(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)'
},
select: {
ngClick: 'saveNodeForm()',
ngDisabled: "workflow_maker_form.$invalid || !selectedTemplate",
ngShow: 'canAddWorkflowJobTemplate'
ngShow: '(workflowJobTemplateObj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)'
}
}
};}])

View File

@ -104,7 +104,9 @@ export default
Rest.post(postData)
.success(function (data) {
Wait('stop');
$state.go('adHocJobStdout', {id: data.id});
if($location.path().replace(/^\//, '').split('/')[0] !== 'jobs') {
$state.go('adHocJobStdout', {id: data.id});
}
})
.error(function (data, status) {
ProcessErrors(scope, data, status, {

View File

@ -78,7 +78,7 @@ angular.module('GroupsHelper', [ 'RestServices', 'Utilities', listGenerator.name
scope.removeSourceReady = scope.$on('SourceReady', function(e, source) {
// Get the ID from the correct summary field
var update_id = (source.current_update) ? source.summary_fields.current_update.id : source.summary_fields.last_update.id;
var update_id = (source.summary_fields.current_update) ? source.summary_fields.current_update.id : source.summary_fields.last_update.id;
$state.go('inventorySyncStdout', {id: update_id});

View File

@ -46,6 +46,7 @@ function($compile, CreateDialog, Wait, ParseTypeChange) {
label: "Launch",
onClick: function() {
scope.$emit(callback);
$('#password-modal').dialog('close');
},
icon: "fa-check",
"class": "btn btn-primary",

View File

@ -155,8 +155,7 @@ angular.module('JobTemplatesHelper', ['Utilities'])
scope.can_edit = data.summary_fields.user_capabilities.edit;
if (scope.project === "" && scope.playbook === "") {
if (scope.job_type.value === "scan" && (!scope.project || scope.project === "") && (!scope.playbook || scope.playbook === "")) {
scope.resetProjectToDefault();
}

View File

@ -88,21 +88,17 @@ export default
// Set the item type label
if (list.fields.type) {
parent_scope.type_choices.every(function(choice) {
parent_scope.type_choices.forEach(function(choice) {
if (choice.value === item.type) {
itm.type_label = choice.label;
return false;
}
return true;
});
}
// Set the job status label
parent_scope.status_choices.every(function(status) {
parent_scope.status_choices.forEach(function(status) {
if (status.value === item.status) {
itm.status_label = status.label;
return false;
}
return true;
});
if (list.name === 'completed_jobs' || list.name === 'running_jobs') {

View File

@ -12,6 +12,9 @@
$state.go('^', null, {reload: true});
};
$scope.toggleHostEnabled = function(){
if ($scope.host.has_inventory_sources){
return;
}
$scope.host.enabled = !$scope.host.enabled;
};
$scope.toggleEnabled = function(){

View File

@ -43,6 +43,9 @@ export default ['$scope', '$state', '$stateParams', 'GetBasePath', 'DashboardHos
};
$scope.toggleHostEnabled = function(host) {
if (host.has_inventory_sources){
return;
}
DashboardHostService.setHostStatus(host, !host.enabled)
.then(function(res) {
var index = _.findIndex($scope.hosts, function(o) {

View File

@ -4,7 +4,7 @@
* All Rights Reserved
*************************************************/
export default function(){
export default ['i18n', function(i18n){
return {
editTitle: '{{host.name}}',
name: 'host',
@ -19,48 +19,55 @@ export default function(){
class: 'Form-header-field',
ngClick: 'toggleHostEnabled()',
type: 'toggle',
awToolTip: "<p>Indicates if a host is available and should be included in running jobs.</p><p>For hosts that " +
"are part of an external inventory, this flag cannot be changed. It will be set by the inventory sync process.</p>",
dataTitle: 'Host Enabled'
awToolTip: "<p>" +
i18n._("Indicates if a host is available and should be included in running jobs.") +
"</p><p>" +
i18n._("For hosts that are part of an external inventory, this" +
" flag cannot be changed. It will be set by the inventory" +
" sync process.") +
"</p>",
dataTitle: i18n._('Host Enabled'),
ngDisabled: 'host.has_inventory_sources'
}
},
fields: {
name: {
label: 'Host Name',
label: i18n._('Host Name'),
type: 'text',
value: '{{name}}',
awPopOver: "<p>Provide a host name, ip address, or ip address:port. Examples include:</p>" +
awPopOver: "<p>" +
i18n._("Provide a host name, ip address, or ip address:port. Examples include:") +
"</p>" +
"<blockquote>myserver.domain.com<br/>" +
"127.0.0.1<br />" +
"10.1.0.140:25<br />" +
"server.example.com:25" +
"</blockquote>",
dataTitle: 'Host Name',
dataTitle: i18n._('Host Name'),
dataPlacement: 'right',
dataContainer: 'body'
},
description: {
label: 'Description',
label: i18n._('Description'),
type: 'text',
},
variables: {
label: 'Variables',
label: i18n._('Variables'),
type: 'textarea',
rows: 6,
class: 'modal-input-xlarge Form-textArea Form-formGroup--fullWidth',
dataTitle: 'Host Variables',
dataTitle: i18n._('Host Variables'),
dataPlacement: 'right',
dataContainer: 'body',
default: '---',
awPopOver: "<p>Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.</p>" +
awPopOver: "<p>" + i18n._("Enter inventory variables using either JSON or YAML syntax. Use the radio button to toggle between the two.") + "</p>" +
"JSON:<br />\n" +
"<blockquote>{<br />&emsp;\"somevar\": \"somevalue\",<br />&emsp;\"password\": \"magic\"<br /> }</blockquote>\n" +
"YAML:<br />\n" +
"<blockquote>---<br />somevar: somevalue<br />password: magic<br /></blockquote>\n" +
'<p>View JSON examples at <a href="http://www.json.org" target="_blank">www.json.org</a></p>' +
'<p>View YAML examples at <a href="http://docs.ansible.com/YAMLSyntax.html" target="_blank">docs.ansible.com</a></p>',
'<p>' + i18n.sprintf(i18n._('View JSON examples at %s'), '<a href="http://www.json.org" target="_blank">www.json.org</a>') + '</p>' +
'<p>' + i18n.sprintf(i18n._('View YAML examples at %s'), '<a href="http://docs.ansible.com/YAMLSyntax.html" target="_blank">docs.ansible.com</a>') + '</p>',
}
},
buttons: {
@ -73,4 +80,4 @@ export default function(){
}
}
};
}
}];

View File

@ -10,8 +10,8 @@ export default [ 'i18n', function(i18n){
name: 'hosts',
iterator: 'host',
selectTitle: i18n._('Add Existing Hosts'),
editTitle: 'Hosts',
listTitle: 'Hosts',
editTitle: i18n._('Hosts'),
listTitle: i18n._('Hosts'),
index: false,
hover: true,
well: true,
@ -33,7 +33,7 @@ export default [ 'i18n', function(i18n){
},
name: {
key: true,
label: 'Name',
label: i18n._('Name'),
columnClass: 'col-lg-5 col-md-5 col-sm-5 col-xs-8 ellipsis List-staticColumnAdjacent',
ngClick: 'editHost(host.id)'
},
@ -52,6 +52,7 @@ export default [ 'i18n', function(i18n){
nosort: true,
awToolTip: "<p>" + i18n._("Indicates if a host is available and should be included in running jobs.") + "</p><p>" + i18n._("For hosts that are part of an external inventory, this flag cannot be changed. It will be set by the inventory sync process.") + "</p>",
dataTitle: i18n._('Host Enabled'),
ngDisabled: 'host.has_inventory_sources'
}
},

View File

@ -86,6 +86,18 @@
color: @default-err;
}
.DashboardList-status--failed{
color: @default-err;
margin-top: 10px;
margin-bottom: 10px;
padding: 0px;
margin-right: 5px;
}
.DashboardList-status--failed:before {
content: "\f06a";
}
.DashboardList-nameCell {
padding-left: 15px;
width: 100%;

View File

@ -30,7 +30,7 @@
</a>
</td>
<td class="DashboardList-activityCell">
<aw-smart-status jobs="template.recent_jobs"></aw-smart-status>
<aw-smart-status jobs="template.recent_jobs" template-type="template.type"></aw-smart-status>
</td>
<td class="List-actionsContainer">
<div class="List-actionButtonCell">

View File

@ -29,7 +29,7 @@ export default
// detailsUrl, status, name, time
scope.jobs = _.map(list, function(job){
return {
detailsUrl: job.url.replace("api/v1", "#"),
detailsUrl: job.type && job.type === 'workflow_job' ? job.url.replace("api/v1/workflow_jobs", "#/workflows") : job.url.replace("api/v1", "#"),
status: job.status,
name: job.name,
id: job.id,

Some files were not shown because too many files have changed in this diff Show More