Merge branch 'release_3.1.0' into devel

* release_3.1.0: (186 commits)
  check related credential for inventory source
  Fixed org jt's smart status
  Show the data that would have been a problem sending payload
  Implement optional message persistence on callback events
  changing the related tab on an edit of an organization to be for users, instead of users & teams
  fixing cancel function for workflow results
  Handle deleting root node while adding a child to that root node
  Skip sending task to process cache changes when migrating to database settings.
  only loop over project OPTIONS response if the response is valid
  Added the ability to pass in a maximum number of pages shown to pagination directive.  This is useful for narrow lists particularly in modals.
  fix a CTinT bug in static config parsing
  Recursively fetch workflow nodes when there is more than 1 page of nodes
  enforce a sane default OPT_NETWORK_TIMEOUT for LDAP connections
  Labels expects the iterator to be `template` not `job_template`
  Disable workflow editor save when currently adding a node
  Fixed up organizations/job templates list
  surface dry_run option for system jobs through the API
  fix bugs with system auditors and orphan JTs
  incorporating organization into the custom inv script option selection for a group
  catch and log LDAP auth plugin misconfigurations
  ...
This commit is contained in:
Matthew Jones 2017-02-08 14:01:39 -05:00
commit 986ef9e2eb
232 changed files with 13972 additions and 6749 deletions

View File

@ -45,7 +45,7 @@ ifeq ($(OFFICIAL),yes)
AW_REPO_URL ?= http://releases.ansible.com/ansible-tower
else
RELEASE ?= $(BUILD)
AW_REPO_URL ?= http://jenkins.testing.ansible.com/ansible-tower_nightlies_RTYUIOPOIUYTYU/$(GIT_BRANCH)
AW_REPO_URL ?= http://jenkins.testing.ansible.com/ansible-tower_nightlies_f8b8c5588b2505970227a7b0900ef69040ad5a00/$(GIT_BRANCH)
endif
# Allow AMI license customization
@ -402,7 +402,7 @@ uwsgi: collectstatic
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
uwsgi -b 32768 --socket :8050 --module=awx.wsgi:application --home=/venv/tower --chdir=/tower_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/var/lib/awx/awxfifo --lazy-apps
uwsgi -b 32768 --socket :8050 --module=awx.wsgi:application --home=/venv/tower --chdir=/tower_devel/ --vacuum --processes=5 --harakiri=120 --master --no-orphans --py-autoreload 1 --max-requests=1000 --stats /tmp/stats.socket --master-fifo=/awxfifo --lazy-apps
daphne:
@if [ "$(VENV_BASE)" ]; then \
@ -473,7 +473,7 @@ pylint: reports
check: flake8 pep8 # pyflakes pylint
TEST_DIRS ?= awx/main/tests
TEST_DIRS ?= awx/main/tests awx/conf/tests awx/sso/tests
# Run all API unit tests.
test:
@if [ "$(VENV_BASE)" ]; then \
@ -485,7 +485,7 @@ test_unit:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
py.test awx/main/tests/unit
py.test awx/main/tests/unit awx/conf/tests/unit awx/sso/tests/unit
# Run all API unit tests with coverage enabled.
test_coverage:
@ -690,6 +690,7 @@ rpm-build:
rpm-build/$(SDIST_TAR_FILE): rpm-build dist/$(SDIST_TAR_FILE)
cp packaging/rpm/$(NAME).spec rpm-build/
cp packaging/rpm/tower.te rpm-build/
cp packaging/rpm/tower.fc rpm-build/
cp packaging/rpm/$(NAME).sysconfig rpm-build/
cp packaging/remove_tower_source.py rpm-build/
cp packaging/bytecompile.sh rpm-build/
@ -892,11 +893,5 @@ clean-elk:
docker rm tools_elasticsearch_1
docker rm tools_kibana_1
mongo-debug-ui:
docker run -it --rm --name mongo-express --link tools_mongo_1:mongo -e ME_CONFIG_OPTIONS_EDITORTHEME=ambiance -e ME_CONFIG_BASICAUTH_USERNAME=admin -e ME_CONFIG_BASICAUTH_PASSWORD=password -p 8081:8081 knickers/mongo-express
mongo-container:
docker run -it --link tools_mongo_1:mongo --rm mongo sh -c 'exec mongo "$MONGO_PORT_27017_TCP_ADDR:$MONGO_PORT_27017_TCP_PORT/system_tracking_dev"'
psql-container:
docker run -it --link tools_postgres_1:postgres --rm postgres:9.4.1 sh -c 'exec psql -h "$$POSTGRES_PORT_5432_TCP_ADDR" -p "$$POSTGRES_PORT_5432_TCP_PORT" -U postgres'
docker run -it --net tools_default --rm postgres:9.4.1 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'

View File

@ -285,6 +285,10 @@ class ListAPIView(generics.ListAPIView, GenericAPIView):
if name.endswith('_set'):
continue
fields.append('{}__search'.format(name))
for relationship in self.model._meta.local_many_to_many:
if relationship.related_model._meta.app_label != 'main':
continue
fields.append('{}__search'.format(relationship.name))
return fields

View File

@ -32,6 +32,7 @@ class Metadata(metadata.SimpleMetadata):
'min_length', 'max_length',
'min_value', 'max_value',
'category', 'category_slug',
'defined_in_file'
]
for attr in text_attrs:
@ -156,6 +157,10 @@ class Metadata(metadata.SimpleMetadata):
# For PUT/POST methods, remove read-only fields.
if method in ('PUT', 'POST'):
# This value should always be False for PUT/POST, so don't
# show it (file-based read-only settings can't be updated)
meta.pop('defined_in_file', False)
if meta.pop('read_only', False):
actions[method].pop(field)
@ -187,6 +192,10 @@ class Metadata(metadata.SimpleMetadata):
if getattr(view, 'related_search_fields', None):
metadata['related_search_fields'] = view.related_search_fields
from rest_framework import generics
if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'):
metadata['max_page_size'] = view.paginator.max_page_size
return metadata

View File

@ -25,6 +25,7 @@ from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text
from django.utils.text import capfirst
from django.utils.timezone import now
from django.utils.functional import cached_property
# Django REST Framework
from rest_framework.exceptions import ValidationError
@ -330,13 +331,7 @@ class BaseSerializer(serializers.ModelSerializer):
roles = {}
for field in obj._meta.get_fields():
if type(field) is ImplicitRoleField:
role = getattr(obj, field.name)
#roles[field.name] = RoleSerializer(data=role).to_representation(role)
roles[field.name] = {
'id': role.id,
'name': role.name,
'description': role.get_description(reference_content_object=obj),
}
roles[field.name] = role_summary_fields_generator(obj, field.name)
if len(roles) > 0:
summary_fields['object_roles'] = roles
@ -980,7 +975,16 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
args=(obj.last_update.pk,))
return res
def to_representation(self, obj):
ret = super(ProjectSerializer, self).to_representation(obj)
if 'scm_revision' in ret and obj.scm_type == '':
ret['scm_revision'] = ''
return ret
def validate(self, attrs):
def get_field_from_model_or_attrs(fd):
return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
organization = None
if 'organization' in attrs:
organization = attrs['organization']
@ -991,6 +995,10 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
if not organization and not view.request.user.is_superuser:
# Only allow super users to create orgless projects
raise serializers.ValidationError(_('Organization is missing'))
elif get_field_from_model_or_attrs('scm_type') == '':
for fd in ('scm_update_on_launch', 'scm_delete_on_update', 'scm_clean'):
if get_field_from_model_or_attrs(fd):
raise serializers.ValidationError({fd: _('Update options must be set to false for manual projects.')})
return super(ProjectSerializer, self).validate(attrs)
@ -1717,11 +1725,11 @@ class CredentialSerializer(BaseSerializer):
owner_teams = reverse('api:credential_owner_teams_list', args=(obj.pk,)),
))
parents = obj.admin_role.parents.exclude(object_id__isnull=True)
if parents.count() > 0:
parents = [role for role in obj.admin_role.parents.all() if role.object_id is not None]
if parents:
res.update({parents[0].content_type.name:parents[0].content_object.get_absolute_url()})
elif obj.admin_role.members.count() > 0:
user = obj.admin_role.members.first()
elif len(obj.admin_role.members.all()) > 0:
user = obj.admin_role.members.all()[0]
res.update({'user': reverse('api:user_detail', args=(user.pk,))})
return res
@ -1739,7 +1747,7 @@ class CredentialSerializer(BaseSerializer):
'url': reverse('api:user_detail', args=(user.pk,)),
})
for parent in obj.admin_role.parents.exclude(object_id__isnull=True).all():
for parent in [role for role in obj.admin_role.parents.all() if role.object_id is not None]:
summary_dict['owners'].append({
'id': parent.content_object.pk,
'type': camelcase_to_underscore(parent.content_object.__class__.__name__),
@ -1825,11 +1833,15 @@ class OrganizationCredentialSerializerCreate(CredentialSerializerCreate):
class LabelsListMixin(object):
def _summary_field_labels(self, obj):
label_list = [{'id': x.id, 'name': x.name} for x in obj.labels.all().order_by('name')[:10]]
if len(label_list) < 10:
label_ct = len(label_list)
if hasattr(obj, '_prefetched_objects_cache') and obj.labels.prefetch_cache_name in obj._prefetched_objects_cache:
label_list = [{'id': x.id, 'name': x.name} for x in obj.labels.all()[:10]]
label_ct = len(obj.labels.all())
else:
label_ct = obj.labels.count()
label_list = [{'id': x.id, 'name': x.name} for x in obj.labels.all().order_by('name')[:10]]
if len(label_list) < 10:
label_ct = len(label_list)
else:
label_ct = obj.labels.count()
return {'count': label_ct, 'results': label_list}
def get_summary_fields(self, obj):
@ -1950,16 +1962,25 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
return res
def validate(self, attrs):
survey_enabled = attrs.get('survey_enabled', self.instance and self.instance.survey_enabled or False)
job_type = attrs.get('job_type', self.instance and self.instance.job_type or None)
inventory = attrs.get('inventory', self.instance and self.instance.inventory or None)
project = attrs.get('project', self.instance and self.instance.project or None)
def get_field_from_model_or_attrs(fd):
return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
survey_enabled = get_field_from_model_or_attrs('survey_enabled')
job_type = get_field_from_model_or_attrs('job_type')
inventory = get_field_from_model_or_attrs('inventory')
credential = get_field_from_model_or_attrs('credential')
project = get_field_from_model_or_attrs('project')
prompting_error_message = _("Must either set a default value or ask to prompt on launch.")
if job_type == "scan":
if inventory is None or attrs.get('ask_inventory_on_launch', False):
raise serializers.ValidationError({'inventory': _('Scan jobs must be assigned a fixed inventory.')})
elif project is None:
raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")})
elif credential is None and not get_field_from_model_or_attrs('ask_credential_on_launch'):
raise serializers.ValidationError({'credential': prompting_error_message})
elif inventory is None and not get_field_from_model_or_attrs('ask_inventory_on_launch'):
raise serializers.ValidationError({'inventory': prompting_error_message})
if survey_enabled and job_type == PERM_INVENTORY_SCAN:
raise serializers.ValidationError({'survey_enabled': _('Survey Enabled cannot be used with scan jobs.')})
@ -2959,6 +2980,23 @@ class ActivityStreamSerializer(BaseSerializer):
changes = serializers.SerializerMethodField()
object_association = serializers.SerializerMethodField()
@cached_property
def _local_summarizable_fk_fields(self):
summary_dict = copy.copy(SUMMARIZABLE_FK_FIELDS)
# Special requests
summary_dict['group'] = summary_dict['group'] + ('inventory_id',)
for key in summary_dict.keys():
if 'id' not in summary_dict[key]:
summary_dict[key] = summary_dict[key] + ('id',)
field_list = summary_dict.items()
# Needed related fields that are not in the default summary fields
field_list += [
('workflow_job_template_node', ('id', 'unified_job_template_id')),
('label', ('id', 'name', 'organization_id')),
('notification', ('id', 'status', 'notification_type', 'notification_template_id'))
]
return field_list
class Meta:
model = ActivityStream
fields = ('*', '-name', '-description', '-created', '-modified',
@ -2999,7 +3037,7 @@ class ActivityStreamSerializer(BaseSerializer):
rel = {}
if obj.actor is not None:
rel['actor'] = reverse('api:user_detail', args=(obj.actor.pk,))
for fk, __ in SUMMARIZABLE_FK_FIELDS.items():
for fk, __ in self._local_summarizable_fk_fields:
if not hasattr(obj, fk):
continue
allm2m = getattr(obj, fk).all()
@ -3021,7 +3059,7 @@ class ActivityStreamSerializer(BaseSerializer):
def get_summary_fields(self, obj):
summary_fields = OrderedDict()
for fk, related_fields in SUMMARIZABLE_FK_FIELDS.items():
for fk, related_fields in self._local_summarizable_fk_fields:
try:
if not hasattr(obj, fk):
continue
@ -3046,14 +3084,10 @@ class ActivityStreamSerializer(BaseSerializer):
summary_fields[get_type_for_model(unified_job_template)] = {'id': unified_job_template.id,
'name': unified_job_template.name}
thisItemDict = {}
if 'id' not in related_fields:
related_fields = related_fields + ('id',)
for field in related_fields:
fval = getattr(thisItem, field, None)
if fval is not None:
thisItemDict[field] = fval
if fk == 'group':
thisItemDict['inventory_id'] = getattr(thisItem, 'inventory_id', None)
if thisItemDict.get('id', None):
if thisItemDict.get('id', None) in [obj_dict.get('id', None) for obj_dict in summary_fields[fk]]:
continue

View File

@ -32,3 +32,6 @@ agent that originally obtained it.
Each request that uses the token for authentication will refresh its expiration
timestamp and keep it from expiring. A token only expires when it is not used
for the configured timeout interval (default 1800 seconds).
A DELETE request made with the token set will invalidate the token;
no further requests can be made with it.

View File

@ -1,4 +1,4 @@
POST requests to this resource should include the full specification for a Job Template Survey
POST requests to this resource should include the full specification for a {{ model_verbose_name|title }}'s Survey
Here is an example survey specification:
@ -30,7 +30,7 @@ Within each survey item `type` must be one of:
* multiselect: For survey questions where multiple items from a presented list can be selected
Each item must contain `question_name` and `question_description` fields that describe the survey question itself.
The `variable` elements of each survey items represents the key that will be given to the playbook when the job template
The `variable` element of each survey item represents the key that will be given to the playbook when the {{model_verbose_name}}
is launched. It will contain the value as a result of the survey.
Here is a more comprehensive example showing the various question types and their acceptable parameters:

View File

@ -8,16 +8,20 @@ on the host system via the `tower-manage` command.
For example on `cleanup_jobs` and `cleanup_activitystream`:
`{"days": 30}`
`{"extra_vars": {"days": 30}}`
Which will act on data older than 30 days.
For `cleanup_facts`:
`{"older_than": "4w", "granularity": "3d"}`
`{"extra_vars": {"older_than": "4w", "granularity": "3d"}}`
Which will reduce the granularity of scan data to one scan per 3 days when the data is older than 4w.
For the `cleanup_activitystream` and `cleanup_jobs` commands, providing
`"dry_run": true` inside `extra_vars` will show the items that would be
removed without actually deleting them.
Each individual system job task has its own default values, which are
applicable either when running it from the command line or launching its
system job template with empty `extra_vars`.

View File

@ -1,13 +1,13 @@
Copy a Workflow Job Template:
Make a GET request to this resource to determine if the current user has
permission to copy the workflow_job_template and whether any linked
permission to copy the {{model_verbose_name}} and whether any linked
templates or prompted fields will be ignored due to permissions problems.
The response will include the following fields:
* `can_copy`: Flag indicating whether the active user has permission to make
a copy of this workflow_job_template, provides same content as the
workflow_job_template detail view summary_fields.user_capabilities.copy
a copy of this {{model_verbose_name}}, provides same content as the
{{model_verbose_name}} detail view summary_fields.user_capabilities.copy
(boolean, read-only)
* `can_copy_without_user_input`: Flag indicating if the user should be
prompted for confirmation before the copy is executed (boolean, read-only)
@ -22,11 +22,11 @@ The response will include the following fields:
to use and will be missing in workflow nodes of the copy (array, read-only)
Make a POST request to this endpoint to save a copy of this
workflow_job_template. No POST data is accepted for this action.
{{model_verbose_name}}. No POST data is accepted for this action.
If successful, the response status code will be 201. The response body will
contain serialized data about the new workflow_job_template, which will be
similar to the original workflow_job_template, but with an additional `@`
contain serialized data about the new {{model_verbose_name}}, which will be
similar to the original {{model_verbose_name}}, but with an additional `@`
and a timestamp in the name.
All workflow nodes and connections in the original will also exist in the

View File

@ -3,11 +3,14 @@
# All Rights Reserved.
# Python
import os
import re
import cgi
import datetime
import dateutil
import time
import socket
import subprocess
import sys
import logging
from base64 import b64encode
@ -20,7 +23,7 @@ from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.core.exceptions import FieldError
from django.db.models import Q, Count
from django.db import IntegrityError, transaction
from django.db import IntegrityError, transaction, connection
from django.shortcuts import get_object_or_404
from django.utils.encoding import smart_text, force_text
from django.utils.safestring import mark_safe
@ -606,6 +609,15 @@ class AuthTokenView(APIView):
extra=dict(actor=request.data['username']))
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request):
if 'HTTP_AUTHORIZATION' in request.META:
token_match = re.match("Token\s(.+)", request.META['HTTP_AUTHORIZATION'])
if token_match:
filter_tokens = AuthToken.objects.filter(key=token_match.groups()[0])
if filter_tokens.exists():
filter_tokens[0].invalidate()
return Response(status=status.HTTP_204_NO_CONTENT)
class OrganizationCountsMixin(object):
@ -1071,7 +1083,7 @@ class ProjectTeamsList(ListAPIView):
return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in all_roles])
class ProjectSchedulesList(SubListCreateAttachDetachAPIView):
class ProjectSchedulesList(SubListCreateAPIView):
view_name = _("Project Schedules")
@ -1434,6 +1446,7 @@ class CredentialList(ListCreateAPIView):
model = Credential
serializer_class = CredentialSerializerCreate
capabilities_prefetch = ['admin', 'use']
class CredentialOwnerUsersList(SubListAPIView):
@ -1681,6 +1694,7 @@ class HostList(ListCreateAPIView):
always_allow_superuser = False
model = Host
serializer_class = HostSerializer
capabilities_prefetch = ['inventory.admin']
class HostDetail(RetrieveUpdateDestroyAPIView):
@ -2157,7 +2171,7 @@ class InventorySourceDetail(RetrieveUpdateAPIView):
return super(InventorySourceDetail, self).destroy(request, *args, **kwargs)
class InventorySourceSchedulesList(SubListCreateAttachDetachAPIView):
class InventorySourceSchedulesList(SubListCreateAPIView):
view_name = _("Inventory Source Schedules")
@ -2380,11 +2394,8 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
if request.user not in new_inventory.use_role:
raise PermissionDenied()
kv = prompted_fields
kv.update(passwords)
new_job = obj.create_unified_job(**kv)
result = new_job.signal_start(**kv)
new_job = obj.create_unified_job(**prompted_fields)
result = new_job.signal_start(**passwords)
if not result:
data = dict(passwords_needed_to_start=new_job.passwords_needed_to_start)
@ -2398,7 +2409,7 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
return Response(data, status=status.HTTP_201_CREATED)
class JobTemplateSchedulesList(SubListCreateAttachDetachAPIView):
class JobTemplateSchedulesList(SubListCreateAPIView):
view_name = _("Job Template Schedules")
@ -2554,6 +2565,9 @@ class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDeta
request.data['id'] = existing.id
del request.data['name']
del request.data['organization']
if Label.objects.filter(unifiedjobtemplate_labels=self.kwargs['pk']).count() > 100:
return Response(dict(msg=_('Maximum number of labels for {} reached.'.format(
self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST)
return super(JobTemplateLabelList, self).post(request, *args, **kwargs)
@ -2688,7 +2702,7 @@ class JobTemplateCallback(GenericAPIView):
return Response(data, status=status.HTTP_400_BAD_REQUEST)
else:
host = list(matching_hosts)[0]
if not job_template.can_start_without_user_input():
if not job_template.can_start_without_user_input(callback_extra_vars=extra_vars):
data = dict(msg=_('Cannot start automatically, user input required!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
limit = host.name
@ -2975,7 +2989,7 @@ class WorkflowJobTemplateLaunch(WorkflowsEnforcementMixin, RetrieveAPIView):
prompted_fields, ignored_fields = obj._accept_or_ignore_job_kwargs(**request.data)
new_job = obj.create_unified_job(**prompted_fields)
new_job.signal_start(**prompted_fields)
new_job.signal_start()
data = OrderedDict()
data['ignored_fields'] = ignored_fields
@ -3036,7 +3050,7 @@ class WorkflowJobTemplateJobsList(WorkflowsEnforcementMixin, SubListAPIView):
new_in_310 = True
class WorkflowJobTemplateSchedulesList(WorkflowsEnforcementMixin, SubListCreateAttachDetachAPIView):
class WorkflowJobTemplateSchedulesList(WorkflowsEnforcementMixin, SubListCreateAPIView):
view_name = _("Workflow Job Template Schedules")
@ -3108,7 +3122,7 @@ class WorkflowJobTemplateActivityStreamList(WorkflowsEnforcementMixin, ActivityS
self.check_parent_access(parent)
qs = self.request.user.get_queryset(self.model)
return qs.filter(Q(workflow_job_template=parent) |
Q(workflow_job_template_node__workflow_job_template=parent))
Q(workflow_job_template_node__workflow_job_template=parent)).distinct()
class WorkflowJobList(WorkflowsEnforcementMixin, ListCreateAPIView):
@ -3210,7 +3224,7 @@ class SystemJobTemplateLaunch(GenericAPIView):
return Response(data, status=status.HTTP_201_CREATED)
class SystemJobTemplateSchedulesList(SubListCreateAttachDetachAPIView):
class SystemJobTemplateSchedulesList(SubListCreateAPIView):
view_name = _("System Job Template Schedules")
@ -3406,6 +3420,11 @@ class BaseJobHostSummariesList(SubListAPIView):
relationship = 'job_host_summaries'
view_name = _('Job Host Summaries List')
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
return getattr(parent, self.relationship).select_related('job', 'job__job_template', 'host')
class HostJobHostSummariesList(BaseJobHostSummariesList):
@ -3475,6 +3494,13 @@ class HostJobEventsList(BaseJobEventsList):
parent_model = Host
def get_queryset(self):
parent_obj = self.get_parent_object()
self.check_parent_access(parent_obj)
qs = self.request.user.get_queryset(self.model).filter(
Q(host=parent_obj) | Q(hosts=parent_obj)).distinct()
return qs
class GroupJobEventsList(BaseJobEventsList):
@ -3757,6 +3783,12 @@ class UnifiedJobTemplateList(ListAPIView):
model = UnifiedJobTemplate
serializer_class = UnifiedJobTemplateSerializer
new_in_148 = True
capabilities_prefetch = [
'admin', 'execute',
{'copy': ['jobtemplate.project.use', 'jobtemplate.inventory.use', 'jobtemplate.credential.use',
'jobtemplate.cloud_credential.use', 'jobtemplate.network_credential.use',
'workflowjobtemplate.organization.admin']}
]
class UnifiedJobList(ListAPIView):
@ -3852,6 +3884,17 @@ class UnifiedJobStdout(RetrieveAPIView):
elif request.accepted_renderer.format == 'ansi':
return Response(unified_job.result_stdout_raw)
elif request.accepted_renderer.format in {'txt_download', 'ansi_download'}:
if not os.path.exists(unified_job.result_stdout_file):
write_fd = open(unified_job.result_stdout_file, 'w')
with connection.cursor() as cursor:
try:
cursor.copy_expert("copy (select stdout from main_jobevent where job_id={} order by start_line) to stdout".format(unified_job.id),
write_fd)
write_fd.close()
subprocess.Popen("sed -i 's/\\\\r\\\\n/\\n/g' {}".format(unified_job.result_stdout_file),
shell=True).wait()
except Exception as e:
return Response({"error": _("Error generating stdout download file: {}".format(e))})
try:
content_fd = open(unified_job.result_stdout_file, 'r')
if request.accepted_renderer.format == 'txt_download':

View File

@ -22,4 +22,3 @@ class ConfConfig(AppConfig):
if 'http_receiver' not in LOGGING_DICT['loggers']['awx']['handlers']:
LOGGING_DICT['loggers']['awx']['handlers'] += ['http_receiver']
configure_logging(settings.LOGGING_CONFIG, LOGGING_DICT)
# checks.register(SettingsWrapper._check_settings)

View File

@ -52,7 +52,7 @@ class URLField(CharField):
if url_parts.hostname and '.' not in url_parts.hostname:
netloc = '{}.local'.format(url_parts.hostname)
if url_parts.port:
netloc = '{}:{}'.format(netloc, port)
netloc = '{}:{}'.format(netloc, url_parts.port)
if url_parts.username:
if url_parts.password:
netloc = '{}:{}@{}' % (url_parts.username, url_parts.password, netloc)

View File

@ -18,9 +18,18 @@ __all__ = ['settings_registry']
class SettingsRegistry(object):
"""Registry of all API-configurable settings and categories."""
def __init__(self):
def __init__(self, settings=None):
"""
:param settings: a ``django.conf.LazySettings`` object used to lookup
file-based field values (e.g., ``local_settings.py``
and ``/etc/tower/conf.d/example.py``). If unspecified,
defaults to ``django.conf.settings``.
"""
if settings is None:
from django.conf import settings
self._registry = OrderedDict()
self._dependent_settings = {}
self.settings = settings
def register(self, setting, **kwargs):
if setting in self._registry:
@ -94,7 +103,6 @@ class SettingsRegistry(object):
return bool(self._registry.get(setting, {}).get('encrypted', False))
def get_setting_field(self, setting, mixin_class=None, for_user=False, **kwargs):
from django.conf import settings
from rest_framework.fields import empty
field_kwargs = {}
field_kwargs.update(self._registry[setting])
@ -108,6 +116,7 @@ class SettingsRegistry(object):
placeholder = field_kwargs.pop('placeholder', empty)
feature_required = field_kwargs.pop('feature_required', empty)
encrypted = bool(field_kwargs.pop('encrypted', False))
defined_in_file = bool(field_kwargs.pop('defined_in_file', False))
if getattr(field_kwargs.get('child', None), 'source', None) is not None:
field_kwargs['child'].source = None
field_instance = field_class(**field_kwargs)
@ -118,18 +127,25 @@ class SettingsRegistry(object):
field_instance.placeholder = placeholder
if feature_required is not empty:
field_instance.feature_required = feature_required
field_instance.defined_in_file = defined_in_file
if field_instance.defined_in_file:
field_instance.help_text = (
str(_('This value has been set manually in a settings file.')) +
'\n\n' +
str(field_instance.help_text)
)
field_instance.encrypted = encrypted
original_field_instance = field_instance
if field_class != original_field_class:
original_field_instance = original_field_class(**field_kwargs)
if category_slug == 'user' and for_user:
try:
field_instance.default = original_field_instance.to_representation(getattr(settings, setting))
field_instance.default = original_field_instance.to_representation(getattr(self.settings, setting))
except:
logger.warning('Unable to retrieve default value for user setting "%s".', setting, exc_info=True)
elif not field_instance.read_only or field_instance.default is empty:
elif not field_instance.read_only or field_instance.default is empty or field_instance.defined_in_file:
try:
field_instance.default = original_field_instance.to_representation(settings._awx_conf_settings._get_default(setting))
field_instance.default = original_field_instance.to_representation(self.settings._awx_conf_settings._get_default(setting))
except AttributeError:
pass
except:

View File

@ -1,4 +1,5 @@
# Python
from collections import namedtuple
import contextlib
import logging
import sys
@ -7,8 +8,7 @@ import time
# Django
from django.conf import settings, UserSettingsHolder
from django.core.cache import cache
from django.core import checks
from django.core.cache import cache as django_cache
from django.core.exceptions import ImproperlyConfigured
from django.db import ProgrammingError, OperationalError
@ -16,7 +16,7 @@ from django.db import ProgrammingError, OperationalError
from rest_framework.fields import empty, SkipField
# Tower
from awx.main.utils import decrypt_field
from awx.main.utils import encrypt_field, decrypt_field
from awx.conf import settings_registry
from awx.conf.models import Setting
@ -62,38 +62,118 @@ def _log_database_error():
pass
class EncryptedCacheProxy(object):
def __init__(self, cache, registry, encrypter=None, decrypter=None):
"""
This proxy wraps a Django cache backend and overwrites the
`get`/`set`/`set_many` methods to handle field encryption/decryption
for sensitive values.
:param cache: the Django cache backend to proxy to
:param registry: the settings registry instance used to determine if
a field is encrypted or not.
:param encrypter: a callable used to encrypt field values; defaults to
``awx.main.utils.encrypt_field``
:param decrypter: a callable used to decrypt field values; defaults to
``awx.main.utils.decrypt_field``
"""
# These values have to be stored via self.__dict__ in this way to get
# around the magic __setattr__ method on this class.
self.__dict__['cache'] = cache
self.__dict__['registry'] = registry
self.__dict__['encrypter'] = encrypter or encrypt_field
self.__dict__['decrypter'] = decrypter or decrypt_field
def get(self, key, **kwargs):
value = self.cache.get(key, **kwargs)
return self._handle_encryption(self.decrypter, key, value)
def set(self, key, value, **kwargs):
self.cache.set(
key,
self._handle_encryption(self.encrypter, key, value),
**kwargs
)
def set_many(self, data, **kwargs):
for key, value in data.items():
self.set(key, value, **kwargs)
def _handle_encryption(self, method, key, value):
TransientSetting = namedtuple('TransientSetting', ['pk', 'value'])
if value is not empty and self.registry.is_setting_encrypted(key):
# If the setting exists in the database, we'll use its primary key
# as part of the AES key when encrypting/decrypting
return method(
TransientSetting(
pk=getattr(self._get_setting_from_db(key), 'pk', None),
value=value
),
'value'
)
# If the field in question isn't an "encrypted" field, this function is
# a no-op; it just returns the provided value
return value
def _get_setting_from_db(self, key):
field = self.registry.get_setting_field(key)
if not field.read_only:
return Setting.objects.filter(key=key, user__isnull=True).order_by('pk').first()
def __getattr__(self, name):
return getattr(self.cache, name)
def __setattr__(self, name, value):
setattr(self.cache, name, value)
class SettingsWrapper(UserSettingsHolder):
@classmethod
def initialize(cls):
def initialize(cls, cache=None, registry=None):
"""
Used to initialize and wrap the Django settings context.
:param cache: the Django cache backend to use for caching setting
values. ``django.core.cache`` is used by default.
:param registry: the settings registry instance used. The global
``awx.conf.settings_registry`` is used by default.
"""
if not getattr(settings, '_awx_conf_settings', False):
settings_wrapper = cls(settings._wrapped)
settings_wrapper = cls(
settings._wrapped,
cache=cache or django_cache,
registry=registry or settings_registry
)
settings._wrapped = settings_wrapper
@classmethod
def _check_settings(cls, app_configs, **kwargs):
errors = []
# FIXME: Warn if database not available!
for setting in Setting.objects.filter(key__in=settings_registry.get_registered_settings(), user__isnull=True):
field = settings_registry.get_setting_field(setting.key)
try:
field.to_internal_value(setting.value)
except Exception as e:
errors.append(checks.Error(str(e)))
return errors
def __init__(self, default_settings, cache, registry):
"""
This constructor is generally not called directly, but by
``SettingsWrapper.initialize`` at app startup time when settings are
parsed.
"""
def __init__(self, default_settings):
# These values have to be stored via self.__dict__ in this way to get
# around the magic __setattr__ method on this class (which is used to
# store API-assigned settings in the database).
self.__dict__['default_settings'] = default_settings
self.__dict__['_awx_conf_settings'] = self
self.__dict__['_awx_conf_preload_expires'] = None
self.__dict__['_awx_conf_preload_lock'] = threading.RLock()
self.__dict__['_awx_conf_init_readonly'] = False
self.__dict__['cache'] = EncryptedCacheProxy(cache, registry)
self.__dict__['registry'] = registry
def _get_supported_settings(self):
return settings_registry.get_registered_settings()
return self.registry.get_registered_settings()
def _get_writeable_settings(self):
return settings_registry.get_registered_settings(read_only=False)
return self.registry.get_registered_settings(read_only=False)
def _get_cache_value(self, value):
if value is None:
@ -123,12 +203,13 @@ class SettingsWrapper(UserSettingsHolder):
except AttributeError:
file_default = None
if file_default != init_default and file_default is not None:
logger.warning('Setting %s has been marked read-only!', key)
settings_registry._registry[key]['read_only'] = True
logger.debug('Setting %s has been marked read-only!', key)
self.registry._registry[key]['read_only'] = True
self.registry._registry[key]['defined_in_file'] = True
self.__dict__['_awx_conf_init_readonly'] = True
# If local preload timer has expired, check to see if another process
# has already preloaded the cache and skip preloading if so.
if cache.get('_awx_conf_preload_expires', empty) is not empty:
if self.cache.get('_awx_conf_preload_expires', default=empty) is not empty:
return
# Initialize all database-configurable settings with a marker value so
# to indicate from the cache that the setting is not configured without
@ -138,7 +219,7 @@ class SettingsWrapper(UserSettingsHolder):
for setting in Setting.objects.filter(key__in=settings_to_cache.keys(), user__isnull=True).order_by('pk'):
if settings_to_cache[setting.key] != SETTING_CACHE_NOTSET:
continue
if settings_registry.is_setting_encrypted(setting.key):
if self.registry.is_setting_encrypted(setting.key):
value = decrypt_field(setting, 'value')
else:
value = setting.value
@ -148,7 +229,7 @@ class SettingsWrapper(UserSettingsHolder):
for key, value in settings_to_cache.items():
if value != SETTING_CACHE_NOTSET:
continue
field = settings_registry.get_setting_field(key)
field = self.registry.get_setting_field(key)
try:
settings_to_cache[key] = self._get_cache_value(field.get_default())
except SkipField:
@ -157,13 +238,13 @@ class SettingsWrapper(UserSettingsHolder):
settings_to_cache = dict([(Setting.get_cache_key(k), v) for k, v in settings_to_cache.items()])
settings_to_cache['_awx_conf_preload_expires'] = self._awx_conf_preload_expires
logger.debug('cache set_many(%r, %r)', settings_to_cache, SETTING_CACHE_TIMEOUT)
cache.set_many(settings_to_cache, SETTING_CACHE_TIMEOUT)
self.cache.set_many(settings_to_cache, timeout=SETTING_CACHE_TIMEOUT)
def _get_local(self, name):
self._preload_cache()
cache_key = Setting.get_cache_key(name)
try:
cache_value = cache.get(cache_key, empty)
cache_value = self.cache.get(cache_key, default=empty)
except ValueError:
cache_value = empty
logger.debug('cache get(%r, %r) -> %r', cache_key, empty, cache_value)
@ -177,7 +258,7 @@ class SettingsWrapper(UserSettingsHolder):
value = {}
else:
value = cache_value
field = settings_registry.get_setting_field(name)
field = self.registry.get_setting_field(name)
if value is empty:
setting = None
if not field.read_only:
@ -198,8 +279,10 @@ class SettingsWrapper(UserSettingsHolder):
if value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:
value = SETTING_CACHE_NOTSET
if cache_value != value:
logger.debug('cache set(%r, %r, %r)', cache_key, self._get_cache_value(value), SETTING_CACHE_TIMEOUT)
cache.set(cache_key, self._get_cache_value(value), SETTING_CACHE_TIMEOUT)
logger.debug('cache set(%r, %r, %r)', cache_key,
self._get_cache_value(value),
SETTING_CACHE_TIMEOUT)
self.cache.set(cache_key, self._get_cache_value(value), timeout=SETTING_CACHE_TIMEOUT)
if value == SETTING_CACHE_NOTSET and not SETTING_CACHE_DEFAULTS:
try:
value = field.get_default()
@ -214,7 +297,9 @@ class SettingsWrapper(UserSettingsHolder):
else:
return field.run_validation(value)
except:
logger.warning('The current value "%r" for setting "%s" is invalid.', value, name, exc_info=True)
logger.warning(
'The current value "%r" for setting "%s" is invalid.',
value, name, exc_info=True)
return empty
def _get_default(self, name):
@ -234,7 +319,7 @@ class SettingsWrapper(UserSettingsHolder):
return self._get_default(name)
def _set_local(self, name, value):
field = settings_registry.get_setting_field(name)
field = self.registry.get_setting_field(name)
if field.read_only:
logger.warning('Attempt to set read only setting "%s".', name)
raise ImproperlyConfigured('Setting "%s" is read only.'.format(name))
@ -244,7 +329,8 @@ class SettingsWrapper(UserSettingsHolder):
setting_value = field.run_validation(data)
db_value = field.to_representation(setting_value)
except Exception as e:
logger.exception('Unable to assign value "%r" to setting "%s".', value, name, exc_info=True)
logger.exception('Unable to assign value "%r" to setting "%s".',
value, name, exc_info=True)
raise e
setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
@ -264,7 +350,7 @@ class SettingsWrapper(UserSettingsHolder):
setattr(self.default_settings, name, value)
def _del_local(self, name):
field = settings_registry.get_setting_field(name)
field = self.registry.get_setting_field(name)
if field.read_only:
logger.warning('Attempt to delete read only setting "%s".', name)
raise ImproperlyConfigured('Setting "%s" is read only.'.format(name))
@ -282,7 +368,8 @@ class SettingsWrapper(UserSettingsHolder):
def __dir__(self):
keys = []
with _log_database_error():
for setting in Setting.objects.filter(key__in=self._get_supported_settings(), user__isnull=True):
for setting in Setting.objects.filter(
key__in=self._get_supported_settings(), user__isnull=True):
# Skip returning settings that have been overridden but are
# considered to be "not set".
if setting.value is None and SETTING_CACHE_NOTSET == SETTING_CACHE_NONE:

View File

@ -1,5 +1,6 @@
# Python
import logging
import sys
# Django
from django.conf import settings
@ -32,7 +33,8 @@ def handle_setting_change(key, for_delete=False):
cache_keys = set([Setting.get_cache_key(k) for k in setting_keys])
logger.debug('sending signals to delete cache keys(%r)', cache_keys)
cache.delete_many(cache_keys)
process_cache_changes.delay(list(cache_keys))
if 'migrate_to_database_settings' not in sys.argv:
process_cache_changes.delay(list(cache_keys))
# Send setting_changed signal with new value for each setting.
for setting_key in setting_keys:

View File

@ -0,0 +1,2 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.

View File

@ -0,0 +1,311 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from uuid import uuid4
from django.conf import LazySettings
from django.core.cache.backends.locmem import LocMemCache
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
from rest_framework.fields import empty
import pytest
from awx.conf import fields
from awx.conf.settings import SettingsWrapper
from awx.conf.registry import SettingsRegistry
@pytest.fixture()
def reg(request):
    """
    This fixture initializes an awx settings registry object and passes it as
    an argument into the test function.
    """
    # Use a uniquely-named local-memory cache so state never leaks between tests.
    cache = LocMemCache(str(uuid4()), {}) # make a new random cache each time
    settings = LazySettings()
    registry = SettingsRegistry(settings)
    # @pytest.mark.defined_in_file can be used to mark specific setting values
    # as "defined in a settings file". This is analogous to manually
    # specifying a setting on the filesystem (e.g., in a local_settings.py in
    # development, or in /etc/tower/conf.d/<something>.py)
    defaults = request.node.get_marker('defined_in_file')
    if defaults:
        settings.configure(**defaults.kwargs)
    # NOTE: configure() must run before wrapping; SettingsWrapper replaces the
    # lazy object's inner settings and consults the registry/cache from then on.
    settings._wrapped = SettingsWrapper(settings._wrapped,
                                        cache,
                                        registry)
    return registry
def test_simple_setting_registration(reg):
    """Registering a setting makes it appear in the registered list."""
    key = 'AWX_SOME_SETTING_ENABLED'
    assert reg.get_registered_settings() == []
    reg.register(key, category_slug='system', category=_('System'),
                 field_class=fields.BooleanField)
    assert reg.get_registered_settings() == [key]
def test_simple_setting_unregistration(reg):
    """A setting disappears from the registry once unregistered."""
    key = 'AWX_SOME_SETTING_ENABLED'
    reg.register(key, category_slug='system', category=_('System'),
                 field_class=fields.BooleanField)
    assert reg.get_registered_settings() == [key]
    reg.unregister(key)
    assert reg.get_registered_settings() == []
def test_duplicate_setting_registration(reg):
    """Registering the same setting twice raises ImproperlyConfigured."""
    with pytest.raises(ImproperlyConfigured):
        for _attempt in (1, 2):
            reg.register(
                'AWX_SOME_SETTING_ENABLED',
                category=_('System'),
                category_slug='system',
                field_class=fields.BooleanField,
            )
def test_field_class_required_for_registration(reg):
    """Registration without a ``field_class`` raises ImproperlyConfigured."""
    with pytest.raises(ImproperlyConfigured):
        reg.register('AWX_SOME_SETTING_ENABLED')
def test_get_registered_settings_by_slug(reg):
    """Registered settings can be filtered by their category slug."""
    key = 'AWX_SOME_SETTING_ENABLED'
    reg.register(key, category_slug='system', category=_('System'),
                 field_class=fields.BooleanField)
    assert reg.get_registered_settings(category_slug='system') == [key]
    assert reg.get_registered_settings(category_slug='other') == []
def test_get_registered_read_only_settings(reg):
    """``read_only`` filtering splits writable from read-only settings."""
    writable, readonly = 'AWX_SOME_SETTING_ENABLED', 'AWX_SOME_READ_ONLY'
    common = dict(field_class=fields.BooleanField, category=_('System'),
                  category_slug='system')
    reg.register(writable, **common)
    reg.register(readonly, read_only=True, **common)
    assert reg.get_registered_settings(read_only=True) == [readonly]
    assert reg.get_registered_settings(read_only=False) == [writable]
    # With no filter, both are returned in registration order.
    assert reg.get_registered_settings() == [writable, readonly]
def test_get_registered_settings_with_required_features(reg):
    """Feature-gated settings only show up when their feature is enabled."""
    key = 'AWX_SOME_SETTING_ENABLED'
    reg.register(key, feature_required='superpowers',
                 category_slug='system', category=_('System'),
                 field_class=fields.BooleanField)
    assert reg.get_registered_settings(features_enabled=[]) == []
    assert reg.get_registered_settings(features_enabled=['superpowers']) == [key]
def test_get_dependent_settings(reg):
    """Settings declared via ``depends_on`` are reported as dependents."""
    parent, child = 'AWX_SOME_SETTING_ENABLED', 'AWX_SOME_DEPENDENT_SETTING'
    common = dict(field_class=fields.BooleanField, category=_('System'),
                  category_slug='system')
    reg.register(parent, **common)
    reg.register(child, depends_on=[parent], **common)
    assert reg.get_dependent_settings(parent) == set([child])
def test_get_registered_categories(reg):
    """Each registered category slug maps to its display name (plus built-ins)."""
    reg.register('AWX_SOME_SETTING_ENABLED',
                 field_class=fields.BooleanField,
                 category=_('System'), category_slug='system')
    reg.register('AWX_SOME_OTHER_SETTING_ENABLED',
                 field_class=fields.BooleanField,
                 category=_('OtherSystem'), category_slug='other-system')
    expected = {
        'all': _('All'),
        'changed': _('Changed'),
        'system': _('System'),
        'other-system': _('OtherSystem'),
    }
    assert reg.get_registered_categories() == expected
def test_get_registered_categories_with_required_features(reg):
    """Categories stay hidden unless their settings' required features are on."""
    reg.register('AWX_SOME_SETTING_ENABLED',
                 field_class=fields.BooleanField,
                 category=_('System'), category_slug='system',
                 feature_required='superpowers')
    reg.register('AWX_SOME_OTHER_SETTING_ENABLED',
                 field_class=fields.BooleanField,
                 category=_('OtherSystem'), category_slug='other-system',
                 feature_required='sortapowers')
    # 'all' and 'changed' are always present regardless of features.
    base = {'all': _('All'), 'changed': _('Changed')}
    assert reg.get_registered_categories(features_enabled=[]) == base
    assert reg.get_registered_categories(features_enabled=['superpowers']) == \
        dict(base, system=_('System'))
    assert reg.get_registered_categories(features_enabled=['sortapowers']) == \
        dict(base, **{'other-system': _('OtherSystem')})
    assert reg.get_registered_categories(
        features_enabled=['superpowers', 'sortapowers']
    ) == dict(base, **{'system': _('System'), 'other-system': _('OtherSystem')})
def test_is_setting_encrypted(reg):
    """Only settings registered with ``encrypted=True`` report as encrypted."""
    common = dict(field_class=fields.CharField, category=_('System'),
                  category_slug='system')
    reg.register('AWX_SOME_SETTING_ENABLED', **common)
    reg.register('AWX_SOME_ENCRYPTED_SETTING', encrypted=True, **common)
    assert reg.is_setting_encrypted('AWX_SOME_SETTING_ENABLED') is False
    assert reg.is_setting_encrypted('AWX_SOME_ENCRYPTED_SETTING') is True
def test_simple_field(reg):
    """Registration kwargs are copied onto the generated field instance."""
    reg.register(
        'AWX_SOME_SETTING',
        feature_required='superpowers',
        placeholder='Example Value',
        category_slug='system',
        category=_('System'),
        field_class=fields.CharField,
    )
    field = reg.get_setting_field('AWX_SOME_SETTING')
    assert isinstance(field, fields.CharField)
    assert (field.category, field.category_slug) == (_('System'), 'system')
    assert field.default is empty
    assert field.placeholder == 'Example Value'
    assert field.feature_required == 'superpowers'
def test_field_with_custom_attribute(reg):
    """Keyword overrides passed to get_setting_field win over registration."""
    reg.register('AWX_SOME_SETTING_ENABLED',
                 field_class=fields.BooleanField,
                 category_slug='system')
    overridden = reg.get_setting_field('AWX_SOME_SETTING_ENABLED',
                                       category_slug='other-system')
    assert overridden.category_slug == 'other-system'
def test_field_with_custom_mixin(reg):
    """A ``mixin_class`` is woven into the generated field's type."""
    class GreatMixin(object):
        def is_great(self):
            return True
    reg.register('AWX_SOME_SETTING_ENABLED',
                 field_class=fields.BooleanField,
                 category_slug='system')
    mixed = reg.get_setting_field('AWX_SOME_SETTING_ENABLED',
                                  mixin_class=GreatMixin)
    assert isinstance(mixed, fields.BooleanField)
    assert isinstance(mixed, GreatMixin)
    assert mixed.is_great() is True
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings(reg):
    """A value defined in a settings file becomes the field's default."""
    reg.register('AWX_SOME_SETTING', field_class=fields.CharField,
                 category=_('System'), category_slug='system')
    assert reg.get_setting_field('AWX_SOME_SETTING').default == 'DEFAULT'
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_default_value_from_settings_with_custom_representation(reg):
    """File-provided defaults pass through the field's ``to_representation``."""
    class LowercaseCharField(fields.CharField):
        def to_representation(self, value):
            return value.lower()
    reg.register('AWX_SOME_SETTING', field_class=LowercaseCharField,
                 category=_('System'), category_slug='system')
    assert reg.get_setting_field('AWX_SOME_SETTING').default == 'default'

View File

@ -0,0 +1,406 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from contextlib import contextmanager
from uuid import uuid4
import time
from django.conf import LazySettings
from django.core.cache.backends.locmem import LocMemCache
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
from rest_framework import fields
import pytest
from awx.conf import models
from awx.conf.settings import SettingsWrapper, EncryptedCacheProxy, SETTING_CACHE_NOTSET
from awx.conf.registry import SettingsRegistry
from awx.main.utils import encrypt_field, decrypt_field
@contextmanager
def apply_patches(_patches):
    """
    Start every mock patcher in ``_patches`` on entry and guarantee each one
    is stopped on exit.

    :param _patches: iterable of objects with ``start()``/``stop()`` methods
        (e.g. ``mock.patch`` instances).

    The stop calls run in a ``finally`` block so that a test body that raises
    does not leak patched state into subsequent tests (the original version
    skipped ``stop()`` on exception).
    """
    for patcher in _patches:
        patcher.start()
    try:
        yield
    finally:
        for patcher in _patches:
            patcher.stop()
@pytest.fixture()
def settings(request):
    """
    This fixture initializes a Django settings object that wraps our
    `awx.conf.settings.SettingsWrapper` and passes it as an argument into the
    test function.
    This mimics the work done by `awx.conf.settings.SettingsWrapper.initialize`
    on `django.conf.settings`.
    """
    # Use a uniquely-named local-memory cache so state never leaks between tests.
    cache = LocMemCache(str(uuid4()), {}) # make a new random cache each time
    settings = LazySettings()
    registry = SettingsRegistry(settings)
    # @pytest.mark.defined_in_file can be used to mark specific setting values
    # as "defined in a settings file". This is analogous to manually
    # specifying a setting on the filesystem (e.g., in a local_settings.py in
    # development, or in /etc/tower/conf.d/<something>.py)
    in_file_marker = request.node.get_marker('defined_in_file')
    defaults = in_file_marker.kwargs if in_file_marker else {}
    # NOTE(review): SettingsWrapper appears to expect DEFAULTS_SNAPSHOT to
    # exist on the wrapped settings — presumably for detecting file-defined
    # defaults; confirm against awx.conf.settings.
    defaults['DEFAULTS_SNAPSHOT'] = {}
    settings.configure(**defaults)
    # configure() must run before wrapping; the wrapper replaces the lazy
    # object's inner settings with one that consults the registry/cache.
    settings._wrapped = SettingsWrapper(settings._wrapped,
                                        cache,
                                        registry)
    return settings
@pytest.mark.defined_in_file(DEBUG=True)
def test_unregistered_setting(settings):
    """Native Django settings are not stored in the DB and are not cached."""
    assert settings.DEBUG is True
    assert settings.cache.get('DEBUG') is None
def test_read_only_setting(settings):
    """A read-only registered setting exposes its default and stays read-only."""
    settings.registry.register(
        'AWX_READ_ONLY',
        read_only=True,
        default='NO-EDITS',
        category_slug='system',
        category=_('System'),
        field_class=fields.CharField,
    )
    assert settings.AWX_READ_ONLY == 'NO-EDITS'
    registered = settings.registry.get_registered_settings
    assert registered(read_only=False) == []
    assert registered(read_only=True) == ['AWX_READ_ONLY']
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
@pytest.mark.parametrize('read_only', [True, False])
def test_setting_defined_in_file(settings, read_only):
    """File-defined settings are forced read-only regardless of registration."""
    extra = {'read_only': True} if read_only else {}
    settings.registry.register(
        'AWX_SOME_SETTING',
        field_class=fields.CharField,
        category=_('System'),
        category_slug='system',
        **extra
    )
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    registered = settings.registry.get_registered_settings
    assert registered(read_only=False) == []
    assert registered(read_only=True) == ['AWX_SOME_SETTING']
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_empty_default(settings):
    """A file-defined value overrides an empty registered default."""
    settings.registry.register('AWX_SOME_SETTING', default='',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    registered = settings.registry.get_registered_settings
    assert registered(read_only=False) == []
    assert registered(read_only=True) == ['AWX_SOME_SETTING']
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_defined_in_file_with_specific_default(settings):
    """A file-defined value overrides a concrete registered default."""
    settings.registry.register('AWX_SOME_SETTING', default=123,
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    registered = settings.registry.get_registered_settings
    assert registered(read_only=False) == []
    assert registered(read_only=True) == ['AWX_SOME_SETTING']
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_defaults_are_cached(settings):
    """Accessing a read-only (file-defined) setting stores it in the cache."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_cache_respects_timeout(settings):
    """The cache is only preloaded once per SETTING_CACHE_TIMEOUT window."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    first_expiration = settings.cache.get('_awx_conf_preload_expires')
    assert first_expiration > time.time()
    # A second access inside the window must not bump the expiration marker.
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    assert settings.cache.get('_awx_conf_preload_expires') == first_expiration
def test_default_setting(settings, mocker):
    """Settings that specify a default are inserted into the cache."""
    settings.registry.register('AWX_SOME_SETTING', default='DEFAULT',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    # Simulate an empty database: no Setting rows exist for this key.
    no_db_rows = mocker.Mock(**{'order_by.return_value': []})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=no_db_rows):
        assert settings.AWX_SOME_SETTING == 'DEFAULT'
        assert settings.cache.get('AWX_SOME_SETTING') == 'DEFAULT'
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_setting_is_from_setting_file(settings, mocker):
    """A file-defined setting's field reports ``defined_in_file`` as True."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    field = settings.registry.get_setting_field('AWX_SOME_SETTING')
    assert field.defined_in_file is True
def test_setting_is_not_from_setting_file(settings, mocker):
    """A setting backed only by a registered default is not file-defined."""
    settings.registry.register('AWX_SOME_SETTING', default='DEFAULT',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    # Simulate an empty database: no Setting rows exist for this key.
    no_db_rows = mocker.Mock(**{'order_by.return_value': []})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=no_db_rows):
        assert settings.AWX_SOME_SETTING == 'DEFAULT'
        field = settings.registry.get_setting_field('AWX_SOME_SETTING')
        assert field.defined_in_file is False
def test_empty_setting(settings, mocker):
    """Settings with no default and no defined value raise AttributeError."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    # Simulate a database query that yields nothing and has no first row.
    empty_queryset = mocker.Mock(**{
        'order_by.return_value': mocker.Mock(**{
            '__iter__': lambda self: iter([]),
            'first.return_value': None
        }),
    })
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=empty_queryset):
        with pytest.raises(AttributeError):
            settings.AWX_SOME_SETTING
        # The "not set" marker is cached so the DB isn't re-queried each access.
        assert settings.cache.get('AWX_SOME_SETTING') == SETTING_CACHE_NOTSET
def test_setting_from_db(settings, mocker):
    """A database row takes precedence over the registered default."""
    settings.registry.register('AWX_SOME_SETTING', default='DEFAULT',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    db_row = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    queryset = mocker.Mock(**{
        'order_by.return_value': mocker.Mock(**{
            '__iter__': lambda self: iter([db_row]),
            'first.return_value': db_row
        }),
    })
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=queryset):
        assert settings.AWX_SOME_SETTING == 'FROM_DB'
        assert settings.cache.get('AWX_SOME_SETTING') == 'FROM_DB'
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_assignment(settings):
    """Assigning to a read-only setting raises and leaves it unchanged."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    with pytest.raises(ImproperlyConfigured):
        settings.AWX_SOME_SETTING = 'CHANGED'
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
def test_db_setting_create(settings, mocker):
    """First assignment creates a Setting row in the database."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    # No existing row: filter(...).order_by(...).first() returns None.
    no_existing_row = mocker.Mock(**{'order_by.return_value.first.return_value': None})
    with apply_patches([
        mocker.patch('awx.conf.models.Setting.objects.filter',
                     return_value=no_existing_row),
        mocker.patch('awx.conf.models.Setting.objects.create', mocker.Mock())
    ]):
        settings.AWX_SOME_SETTING = 'NEW-VALUE'
        models.Setting.objects.create.assert_called_with(
            key='AWX_SOME_SETTING',
            user=None,
            value='NEW-VALUE'
        )
def test_db_setting_update(settings, mocker):
    """Assignment updates the value of an existing Setting row."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    existing_row = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    queryset = mocker.Mock(**{
        'order_by.return_value.first.return_value': existing_row
    })
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=queryset):
        settings.AWX_SOME_SETTING = 'NEW-VALUE'
        assert existing_row.value == 'NEW-VALUE'
        existing_row.save.assert_called_with(update_fields=['value'])
def test_db_setting_deletion(settings, mocker):
    """Deleting a settings attribute deletes the backing database rows."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    existing_row = mocker.Mock(key='AWX_SOME_SETTING', value='FROM_DB')
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=[existing_row]):
        del settings.AWX_SOME_SETTING
        assert existing_row.delete.call_count == 1
@pytest.mark.defined_in_file(AWX_SOME_SETTING='DEFAULT')
def test_read_only_setting_deletion(settings):
    """Deleting a read-only setting raises and leaves it unchanged."""
    settings.registry.register('AWX_SOME_SETTING',
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
    with pytest.raises(ImproperlyConfigured):
        del settings.AWX_SOME_SETTING
    assert settings.AWX_SOME_SETTING == 'DEFAULT'
def test_settings_use_an_encrypted_cache(settings):
    """The wrapper's cache is an EncryptedCacheProxy with the awx crypters."""
    settings.registry.register('AWX_ENCRYPTED', encrypted=True,
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)
    assert isinstance(settings.cache, EncryptedCacheProxy)
    assert settings.cache.__dict__['encrypter'] == encrypt_field
    assert settings.cache.__dict__['decrypter'] == decrypt_field
def test_sensitive_cache_data_is_encrypted(settings, mocker):
    """Fields marked ``encrypted`` are stored in the cache with encryption."""
    settings.registry.register('AWX_ENCRYPTED', encrypted=True,
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)

    def rot13(obj, attribute):
        # The crypter receives a TransientSetting whose pk must match the
        # database row used for key derivation.
        assert obj.pk == 123
        return getattr(obj, attribute).encode('rot13')

    backing_cache = LocMemCache(str(uuid4()), {})
    proxy = EncryptedCacheProxy(backing_cache, settings.registry,
                                encrypter=rot13, decrypter=rot13)
    # Insert the setting value into the database; the encryption process will
    # use its primary key as part of the encryption key.
    db_row = mocker.Mock(pk=123, key='AWX_ENCRYPTED', value='SECRET!')
    queryset = mocker.Mock(**{
        'order_by.return_value': mocker.Mock(**{
            '__iter__': lambda self: iter([db_row]),
            'first.return_value': db_row
        }),
    })
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=queryset):
        proxy.set('AWX_ENCRYPTED', 'SECRET!')
        assert proxy.get('AWX_ENCRYPTED') == 'SECRET!'
        assert backing_cache.get('AWX_ENCRYPTED') == 'FRPERG!'
def test_readonly_sensitive_cache_data_is_encrypted(settings):
    """Read-only ``encrypted`` fields are cached encrypted (with no DB pk)."""
    settings.registry.register('AWX_ENCRYPTED', encrypted=True, read_only=True,
                               category_slug='system', category=_('System'),
                               field_class=fields.CharField)

    def rot13(obj, attribute):
        # Read-only settings never exist in the database, so pk is None.
        assert obj.pk is None
        return getattr(obj, attribute).encode('rot13')

    backing_cache = LocMemCache(str(uuid4()), {})
    proxy = EncryptedCacheProxy(backing_cache, settings.registry,
                                encrypter=rot13, decrypter=rot13)
    proxy.set('AWX_ENCRYPTED', 'SECRET!')
    assert proxy.get('AWX_ENCRYPTED') == 'SECRET!'
    assert backing_cache.get('AWX_ENCRYPTED') == 'FRPERG!'

View File

@ -71,7 +71,10 @@ def terminate_ssh_control_masters():
# Terminate then kill control master processes. Workaround older
# version of psutil that may not have wait_procs implemented.
for proc in ssh_cm_procs:
proc.terminate()
try:
proc.terminate()
except psutil.NoSuchProcess:
continue
procs_gone, procs_alive = psutil.wait_procs(ssh_cm_procs, timeout=5)
for proc in procs_alive:
proc.kill()

View File

@ -182,7 +182,7 @@ class BaseCallbackModule(CallbackBase):
def v2_playbook_on_include(self, included_file):
event_data = dict(
included_file=included_file,
included_file=included_file._filename if included_file is not None else None,
)
with self.capture_event_data('playbook_on_include', **event_data):
super(BaseCallbackModule, self).v2_playbook_on_include(included_file)

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -8,7 +8,7 @@ import logging
# Django
from django.conf import settings
from django.db.models import Q
from django.db.models import Q, Prefetch
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
@ -25,7 +25,7 @@ from awx.main.task_engine import TaskEnhancer
from awx.conf.license import LicenseForbids
__all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_errors',
'user_accessible_objects',
'user_accessible_objects', 'consumer_access',
'user_admin_role', 'StateConflict',]
PERMISSION_TYPES = [
@ -164,6 +164,17 @@ def check_superuser(func):
return wrapper
def consumer_access(group_name):
'''
consumer_access returns the proper Access class based on group_name
for a channels consumer.
'''
class_map = {'job_events': JobAccess,
'workflow_events': WorkflowJobAccess,
'ad_hoc_command_events': AdHocCommandAccess}
return class_map.get(group_name)
class BaseAccess(object):
'''
Base class for checking user access to a given model. Subclasses should
@ -625,7 +636,7 @@ class HostAccess(BaseAccess):
raise PermissionDenied(_('Unable to change inventory on a host.'))
# Prevent renaming a host that might exceed license count
if 'name' in data:
if data and 'name' in data:
self.check_license(add_host_name=data['name'])
# Checks for admin or change permission on inventory, controls whether
@ -744,7 +755,10 @@ class InventorySourceAccess(BaseAccess):
def can_change(self, obj, data):
# Checks for admin or change permission on group.
if obj and obj.group:
return self.user.can_access(Group, 'change', obj.group, None)
return (
self.user.can_access(Group, 'change', obj.group, None) and
self.check_related('credential', Credential, data, obj=obj, role_field='use_role')
)
# Can't change inventory sources attached to only the inventory, since
# these are created automatically from the management command.
else:
@ -817,7 +831,11 @@ class CredentialAccess(BaseAccess):
permitted to see.
"""
qs = self.model.accessible_objects(self.user, 'read_role')
return qs.select_related('created_by', 'modified_by').all()
qs = qs.select_related('created_by', 'modified_by')
qs = qs.prefetch_related(
'admin_role', 'use_role', 'read_role',
'admin_role__parents', 'admin_role__members')
return qs
@check_superuser
def can_read(self, obj):
@ -1033,10 +1051,6 @@ class JobTemplateAccess(BaseAccess):
return qs.select_related('created_by', 'modified_by', 'inventory', 'project',
'credential', 'cloud_credential', 'next_schedule').all()
@check_superuser
def can_read(self, obj):
return self.user in obj.read_role
def can_add(self, data):
'''
a user can create a job template if they are a superuser, an org admin
@ -1357,7 +1371,7 @@ class SystemJobTemplateAccess(BaseAccess):
model = SystemJobTemplate
@check_superuser
def can_start(self, obj):
def can_start(self, obj, validate_license=True):
'''Only a superuser can start a job from a SystemJobTemplate'''
return False
@ -1549,7 +1563,7 @@ class WorkflowJobTemplateAccess(BaseAccess):
missing_credentials = []
missing_inventories = []
qs = obj.workflow_job_template_nodes
qs.select_related('unified_job_template', 'inventory', 'credential')
qs = qs.prefetch_related('unified_job_template', 'inventory__use_role', 'credential__use_role')
for node in qs.all():
node_errors = {}
if node.inventory and self.user not in node.inventory.use_role:
@ -1853,6 +1867,7 @@ class UnifiedJobTemplateAccess(BaseAccess):
qs = qs.prefetch_related(
'last_job',
'current_job',
Prefetch('labels', queryset=Label.objects.all().order_by('name'))
)
# WISH - sure would be nice if the following worked, but it does not.
@ -1900,6 +1915,7 @@ class UnifiedJobAccess(BaseAccess):
'modified_by',
'unified_job_node__workflow_job',
'unified_job_template',
Prefetch('labels', queryset=Label.objects.all().order_by('name'))
)
# WISH - sure would be nice if the following worked, but it does not.
@ -2109,7 +2125,7 @@ class ActivityStreamAccess(BaseAccess):
'job_template', 'job', 'ad_hoc_command',
'notification_template', 'notification', 'label', 'role', 'actor',
'schedule', 'custom_inventory_script', 'unified_job_template',
'workflow_job_template', 'workflow_job')
'workflow_job_template', 'workflow_job', 'workflow_job_template_node')
if self.user.is_superuser or self.user.is_system_auditor:
return qs.all()

View File

@ -263,6 +263,7 @@ register(
help_text=_('Username for external log aggregator (if required).'),
category=_('Logging'),
category_slug='logging',
required=False,
)
register(
'LOG_AGGREGATOR_PASSWORD',
@ -273,6 +274,7 @@ register(
help_text=_('Password or authentication token for external log aggregator (if required).'),
category=_('Logging'),
category_slug='logging',
required=False,
)
register(
'LOG_AGGREGATOR_LOGGERS',

View File

@ -1,12 +1,14 @@
import json
import urlparse
import logging
import urllib
from channels import Group
from channels.sessions import channel_session
from channels.handler import AsgiRequest
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder
from awx.main.models.organization import AuthToken
@ -19,31 +21,24 @@ def discard_groups(message):
Group(group).discard(message.reply_channel)
def validate_token(token):
try:
auth_token = AuthToken.objects.get(key=token)
if not auth_token.in_valid_tokens:
return None
except AuthToken.DoesNotExist:
return None
return auth_token
def user_from_token(auth_token):
try:
return User.objects.get(pk=auth_token.user_id)
except User.DoesNotExist:
return None
@channel_session
def ws_connect(message):
token = None
qs = urlparse.parse_qs(message['query_string'])
if 'token' in qs:
if len(qs['token']) > 0:
token = qs['token'].pop()
message.channel_session['token'] = token
connect_text = {'accept':False, 'user':None}
message.content['method'] = 'FAKE'
request = AsgiRequest(message)
token = request.COOKIES.get('token', None)
if token is not None:
token = urllib.unquote(token).strip('"')
try:
auth_token = AuthToken.objects.get(key=token)
if auth_token.in_valid_tokens:
message.channel_session['user_id'] = auth_token.user_id
connect_text['accept'] = True
connect_text['user'] = auth_token.user_id
except AuthToken.DoesNotExist:
logger.error("auth_token provided was invalid.")
message.reply_channel.send({"text": json.dumps(connect_text)})
@channel_session
@ -53,20 +48,15 @@ def ws_disconnect(message):
@channel_session
def ws_receive(message):
token = message.channel_session.get('token')
from awx.main.access import consumer_access
auth_token = validate_token(token)
if auth_token is None:
logger.error("Authentication Failure validating user")
message.reply_channel.send({"text": json.dumps({"error": "invalid auth token"})})
return None
user = user_from_token(auth_token)
if user is None:
logger.error("No valid user corresponding to submitted auth_token")
user_id = message.channel_session.get('user_id', None)
if user_id is None:
logger.error("No valid user found for websocket.")
message.reply_channel.send({"text": json.dumps({"error": "no valid user"})})
return None
user = User.objects.get(pk=user_id)
raw_data = message.content['text']
data = json.loads(raw_data)
@ -78,6 +68,12 @@ def ws_receive(message):
if type(v) is list:
for oid in v:
name = '{}-{}'.format(group_name, oid)
access_cls = consumer_access(group_name)
if access_cls is not None:
user_access = access_cls(user)
if not user_access.get_queryset().filter(pk=oid).exists():
message.reply_channel.send({"text": json.dumps({"error": "access denied to channel {0} for resource id {1}".format(group_name, oid)})})
continue
current_groups.append(name)
Group(name).add(message.reply_channel)
else:
@ -87,4 +83,7 @@ def ws_receive(message):
def emit_channel_notification(group, payload):
Group(group).send({"text": json.dumps(payload, cls=DjangoJSONEncoder)})
try:
Group(group).send({"text": json.dumps(payload, cls=DjangoJSONEncoder)})
except ValueError:
logger.error("Invalid payload emitting channel {} on topic: {}".format(group, payload))

View File

@ -56,8 +56,8 @@ class Command(NoArgsCommand):
#jobs_qs = jobs_qs.filter(created__lte=self.cutoff)
skipped, deleted = 0, 0
for job in Job.objects.all():
job_display = '"%s" (started %s, %d host summaries, %d events)' % \
(unicode(job), unicode(job.created),
job_display = '"%s" (%d host summaries, %d events)' % \
(unicode(job),
job.job_host_summaries.count(), job.job_events.count())
if job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@ -78,8 +78,8 @@ class Command(NoArgsCommand):
def cleanup_ad_hoc_commands(self):
skipped, deleted = 0, 0
for ad_hoc_command in AdHocCommand.objects.all():
ad_hoc_command_display = '"%s" (started %s, %d events)' % \
(unicode(ad_hoc_command), unicode(ad_hoc_command.created),
ad_hoc_command_display = '"%s" (%d events)' % \
(unicode(ad_hoc_command),
ad_hoc_command.ad_hoc_command_events.count())
if ad_hoc_command.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
@ -100,7 +100,7 @@ class Command(NoArgsCommand):
def cleanup_project_updates(self):
skipped, deleted = 0, 0
for pu in ProjectUpdate.objects.all():
pu_display = '"%s" (started %s)' % (unicode(pu), unicode(pu.created))
pu_display = '"%s" (type %s)' % (unicode(pu), unicode(pu.launch_type))
if pu.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s project update %s', action_text, pu.status, pu_display)
@ -124,7 +124,7 @@ class Command(NoArgsCommand):
def cleanup_inventory_updates(self):
skipped, deleted = 0, 0
for iu in InventoryUpdate.objects.all():
iu_display = '"%s" (started %s)' % (unicode(iu), unicode(iu.created))
iu_display = '"%s" (source %s)' % (unicode(iu), unicode(iu.source))
if iu.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s inventory update %s', action_text, iu.status, iu_display)
@ -148,7 +148,7 @@ class Command(NoArgsCommand):
def cleanup_management_jobs(self):
skipped, deleted = 0, 0
for sj in SystemJob.objects.all():
sj_display = '"%s" (started %s)' % (unicode(sj), unicode(sj.created))
sj_display = '"%s" (type %s)' % (unicode(sj), unicode(sj.job_type))
if sj.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'
self.logger.debug('%s %s system_job %s', action_text, sj.status, sj_display)
@ -178,8 +178,8 @@ class Command(NoArgsCommand):
def cleanup_workflow_jobs(self):
skipped, deleted = 0, 0
for workflow_job in WorkflowJob.objects.all():
workflow_job_display = '"{}" (started {}, {} nodes)'.format(
unicode(workflow_job), unicode(workflow_job.created),
workflow_job_display = '"{}" ({} nodes)'.format(
unicode(workflow_job),
workflow_job.workflow_nodes.count())
if workflow_job.status in ('pending', 'waiting', 'running'):
action_text = 'would skip' if self.dry_run else 'skipping'

View File

@ -8,6 +8,7 @@ from uuid import UUID
from multiprocessing import Process
from multiprocessing import Queue as MPQueue
from Queue import Empty as QueueEmpty
from Queue import Full as QueueFull
from kombu import Connection, Exchange, Queue
from kombu.mixins import ConsumerMixin
@ -79,18 +80,22 @@ class CallbackBrokerWorker(ConsumerMixin):
def write_queue_worker(self, preferred_queue, body):
queue_order = sorted(range(settings.JOB_EVENT_WORKERS), cmp=lambda x, y: -1 if x==preferred_queue else 0)
write_attempt_order = []
for queue_actual in queue_order:
try:
worker_actual = self.worker_queues[queue_actual]
worker_actual[1].put(body, block=True, timeout=5)
worker_actual[0] += 1
return queue_actual
except QueueFull:
pass
except Exception:
import traceback
tb = traceback.format_exc()
logger.warn("Could not write to queue %s" % preferred_queue)
logger.warn("Detail: {}".format(tb))
continue
write_attempt_order.append(preferred_queue)
logger.warn("Could not write payload to any queue, attempted order: {}".format(write_attempt_order))
return None
def callback_worker(self, queue_actual, idx):

View File

@ -126,4 +126,5 @@ activity_stream_registrar.connect(Notification)
activity_stream_registrar.connect(Label)
activity_stream_registrar.connect(User)
activity_stream_registrar.connect(WorkflowJobTemplate)
activity_stream_registrar.connect(WorkflowJobTemplateNode)
activity_stream_registrar.connect(WorkflowJob)

View File

@ -31,7 +31,6 @@ from awx.main.utils import (
ignore_inventory_computed_fields,
parse_yaml_or_json,
)
from awx.main.redact import PlainTextCleaner
from awx.main.fields import ImplicitRoleField
from awx.main.models.mixins import ResourceMixin, SurveyJobTemplateMixin, SurveyJobMixin
from awx.main.models.base import PERM_INVENTORY_SCAN
@ -296,18 +295,27 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
def get_absolute_url(self):
return reverse('api:job_template_detail', args=(self.pk,))
def can_start_without_user_input(self):
def can_start_without_user_input(self, callback_extra_vars=None):
'''
Return whether job template can be used to start a new job without
requiring any user input.
'''
variables_needed = False
if callback_extra_vars:
extra_vars_dict = parse_yaml_or_json(callback_extra_vars)
for var in self.variables_needed_to_start:
if var not in extra_vars_dict:
variables_needed = True
break
elif self.variables_needed_to_start:
variables_needed = True
prompting_needed = False
for value in self._ask_for_vars_dict().values():
if value:
prompting_needed = True
return (not prompting_needed and
not self.passwords_needed_to_start and
not self.variables_needed_to_start)
not variables_needed)
def _ask_for_vars_dict(self):
return dict(
@ -601,25 +609,6 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
return "$hidden due to Ansible no_log flag$"
return artifacts
def _survey_search_and_replace(self, content):
# Use job template survey spec to identify password fields.
# Then lookup password fields in extra_vars and save the values
jt = self.job_template
if jt and jt.survey_enabled and 'spec' in jt.survey_spec:
# Use password vars to find in extra_vars
for key in jt.survey_password_variables():
if key in self.extra_vars_dict:
content = PlainTextCleaner.remove_sensitive(content, self.extra_vars_dict[key])
return content
def _result_stdout_raw_limited(self, *args, **kwargs):
buff, start, end, abs_end = super(Job, self)._result_stdout_raw_limited(*args, **kwargs)
return self._survey_search_and_replace(buff), start, end, abs_end
def _result_stdout_raw(self, *args, **kwargs):
content = super(Job, self)._result_stdout_raw(*args, **kwargs)
return self._survey_search_and_replace(content)
# Job Credential required
@property
def can_start(self):

View File

@ -127,14 +127,15 @@ class SurveyJobTemplateMixin(models.Model):
# Overwrite with job template extra vars with survey default vars
if self.survey_enabled and 'spec' in self.survey_spec:
for survey_element in self.survey_spec.get("spec", []):
default = survey_element['default']
variable_key = survey_element['variable']
default = survey_element.get('default')
variable_key = survey_element.get('variable')
if survey_element.get('type') == 'password':
if variable_key in kwargs_extra_vars:
if variable_key in kwargs_extra_vars and default:
kw_value = kwargs_extra_vars[variable_key]
if kw_value.startswith('$encrypted$') and kw_value != default:
kwargs_extra_vars[variable_key] = default
extra_vars[variable_key] = default
if default is not None:
extra_vars[variable_key] = default
# Overwrite job template extra vars with explicit job extra vars
# and add on job extra vars

View File

@ -473,7 +473,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin):
def _update_parent_instance(self):
parent_instance = self._get_parent_instance()
if parent_instance:
if parent_instance and self.job_type == 'check':
update_fields = self._update_parent_instance_no_save(parent_instance)
if self.status in ('successful', 'failed', 'error', 'canceled'):
if not self.failed and parent_instance.scm_delete_on_next_update:

View File

@ -10,9 +10,9 @@ import re
# Django
from django.db import models, transaction, connection
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.translation import ugettext_lazy as _
# AWX
@ -25,6 +25,7 @@ __all__ = [
'get_roles_on_resource',
'ROLE_SINGLETON_SYSTEM_ADMINISTRATOR',
'ROLE_SINGLETON_SYSTEM_AUDITOR',
'role_summary_fields_generator'
]
logger = logging.getLogger('awx.main.models.rbac')
@ -33,29 +34,29 @@ ROLE_SINGLETON_SYSTEM_ADMINISTRATOR='system_administrator'
ROLE_SINGLETON_SYSTEM_AUDITOR='system_auditor'
role_names = {
'system_administrator' : 'System Administrator',
'system_auditor' : 'System Auditor',
'adhoc_role' : 'Ad Hoc',
'admin_role' : 'Admin',
'auditor_role' : 'Auditor',
'execute_role' : 'Execute',
'member_role' : 'Member',
'read_role' : 'Read',
'update_role' : 'Update',
'use_role' : 'Use',
'system_administrator' : _('System Administrator'),
'system_auditor' : _('System Auditor'),
'adhoc_role' : _('Ad Hoc'),
'admin_role' : _('Admin'),
'auditor_role' : _('Auditor'),
'execute_role' : _('Execute'),
'member_role' : _('Member'),
'read_role' : _('Read'),
'update_role' : _('Update'),
'use_role' : _('Use'),
}
role_descriptions = {
'system_administrator' : 'Can manage all aspects of the system',
'system_auditor' : 'Can view all settings on the system',
'adhoc_role' : 'May run ad hoc commands on an inventory',
'admin_role' : 'Can manage all aspects of the %s',
'auditor_role' : 'Can view all settings for the %s',
'execute_role' : 'May run the %s',
'member_role' : 'User is a member of the %s',
'read_role' : 'May view settings for the %s',
'update_role' : 'May update project or inventory or group using the configured source update system',
'use_role' : 'Can use the %s in a job template',
'system_administrator' : _('Can manage all aspects of the system'),
'system_auditor' : _('Can view all settings on the system'),
'adhoc_role' : _('May run ad hoc commands on an inventory'),
'admin_role' : _('Can manage all aspects of the %s'),
'auditor_role' : _('Can view all settings for the %s'),
'execute_role' : _('May run the %s'),
'member_role' : _('User is a member of the %s'),
'read_role' : _('May view settings for the %s'),
'update_role' : _('May update project or inventory or group using the configured source update system'),
'use_role' : _('Can use the %s in a job template'),
}
@ -165,13 +166,11 @@ class Role(models.Model):
global role_names
return role_names[self.role_field]
def get_description(self, reference_content_object=None):
@property
def description(self):
global role_descriptions
description = role_descriptions[self.role_field]
if reference_content_object:
content_type = ContentType.objects.get_for_model(reference_content_object)
else:
content_type = self.content_type
content_type = self.content_type
if '%s' in description and content_type:
model = content_type.model_class()
model_name = re.sub(r'([a-z])([A-Z])', r'\1 \2', model.__name__).lower()
@ -179,8 +178,6 @@ class Role(models.Model):
return description
description = property(get_description)
@staticmethod
def rebuild_role_ancestor_list(additions, removals):
'''
@ -474,3 +471,20 @@ def get_roles_on_resource(resource, accessor):
object_id=resource.id
).values_list('role_field', flat=True).distinct()
]
def role_summary_fields_generator(content_object, role_field):
global role_descriptions
global role_names
summary = {}
description = role_descriptions[role_field]
content_type = ContentType.objects.get_for_model(content_object)
if '%s' in description and content_type:
model = content_object.__class__
model_name = re.sub(r'([a-z])([A-Z])', r'\1 \2', model.__name__).lower()
description = description % model_name
summary['description'] = description
summary['name'] = role_names[role_field]
summary['id'] = getattr(content_object, '{}_id'.format(role_field))
return summary

View File

@ -168,6 +168,12 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
else:
return super(UnifiedJobTemplate, self).unique_error_message(model_class, unique_check)
@classmethod
def invalid_user_capabilities_prefetch_models(cls):
if cls != UnifiedJobTemplate:
return []
return ['project', 'inventorysource', 'systemjobtemplate']
@classmethod
def accessible_pk_qs(cls, accessor, role_field):
'''
@ -175,6 +181,9 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
Does not return inventory sources or system JTs, these should
be handled inside of get_queryset where it is utilized.
'''
# do not use this if in a subclass
if cls != UnifiedJobTemplate:
return super(UnifiedJobTemplate, cls).accessible_pk_qs(accessor, role_field)
ujt_names = [c.__name__.lower() for c in cls.__subclasses__()
if c.__name__.lower() not in ['inventorysource', 'systemjobtemplate']]
subclass_content_types = list(ContentType.objects.filter(

View File

@ -6,6 +6,7 @@
# Django
from django.db import models
from django.conf import settings
from django.core.urlresolvers import reverse
#from django import settings as tower_settings
@ -27,6 +28,7 @@ from awx.main.utils import parse_yaml_or_json
from awx.main.fields import JSONField
from copy import copy
from urlparse import urljoin
__all__ = ['WorkflowJobTemplate', 'WorkflowJob', 'WorkflowJobOptions', 'WorkflowJobNode', 'WorkflowJobTemplateNode',]
@ -468,6 +470,9 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
def get_absolute_url(self):
return reverse('api:workflow_job_detail', args=(self.pk,))
def get_ui_url(self):
return urljoin(settings.TOWER_URL_BASE, '/#/workflows/{}'.format(self.pk))
def notification_data(self):
result = super(WorkflowJob, self).notification_data()
str_arr = ['Workflow job summary:', '']

View File

@ -43,6 +43,7 @@ class CallbackQueueDispatcher(object):
compression='bzip2',
exchange=self.exchange,
declare=[self.exchange],
delivery_mode="persistent" if settings.PERSISTENT_CALLBACK_MESSAGES else "transient",
routing_key=self.connection_queue)
return
except Exception, e:

View File

@ -58,4 +58,6 @@ class PlainTextCleaner(object):
@staticmethod
def remove_sensitive(cleartext, sensitive):
if sensitive == '':
return cleartext
return re.sub(r'%s' % re.escape(sensitive), '$encrypted$', cleartext)

View File

@ -123,12 +123,15 @@ class TaskManager():
spawn_node.save()
if job._resources_sufficient_for_launch():
can_start = job.signal_start(**kv)
if not can_start:
job.job_explanation = _("Job spawned from workflow could not start because it "
"was not in the right state or required manual credentials")
else:
can_start = False
job.job_explanation = _("Job spawned from workflow could not start because it "
"was missing a related resource such as project or inventory")
if not can_start:
job.status = 'failed'
job.job_explanation = _("Job spawned from workflow could not start because it "
"was not in the right state or required manual credentials")
job.save(update_fields=['status', 'job_explanation'])
connection.on_commit(lambda: job.websocket_emit_status('failed'))

View File

@ -230,7 +230,7 @@ class InventorySourceDict(PartialModelDict):
class SystemJobDict(PartialModelDict):
FIELDS = (
'id', 'created', 'status',
'id', 'created', 'status', 'celery_task_id',
)
model = SystemJob
@ -271,5 +271,5 @@ class WorkflowJobDict(PartialModelDict):
return 'workflow_job'
def task_impact(self):
return 10
return 0

View File

@ -53,13 +53,13 @@ from awx.main.queue import CallbackQueueDispatcher
from awx.main.task_engine import TaskEnhancer
from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field, update_scm_url,
check_proot_installed, build_proot_temp_dir, wrap_args_with_proot,
get_system_task_capacity, OutputEventFilter)
get_system_task_capacity, OutputEventFilter, parse_yaml_or_json)
from awx.main.consumers import emit_channel_notification
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
'RunAdHocCommand', 'handle_work_error',
'handle_work_success', 'update_inventory_computed_fields',
'send_notifications', 'run_administrative_checks']
'send_notifications', 'run_administrative_checks', 'purge_old_stdout_files']
HIDDEN_PASSWORD = '**********'
@ -193,6 +193,15 @@ def cleanup_authtokens(self):
AuthToken.objects.filter(expires__lt=now()).delete()
@task(bind=True)
def purge_old_stdout_files(self):
nowtime = time.time()
for f in os.listdir(settings.JOBOUTPUT_ROOT):
if os.path.getctime(os.path.join(settings.JOBOUTPUT_ROOT,f)) < nowtime - settings.LOCAL_STDOUT_EXPIRE_TIME:
os.unlink(os.path.join(settings.JOBOUTPUT_ROOT,f))
logger.info("Removing {}".format(os.path.join(settings.JOBOUTPUT_ROOT,f)))
@task(bind=True)
def cluster_node_heartbeat(self):
logger.debug("Cluster node heartbeat task.")
@ -225,7 +234,7 @@ def tower_periodic_scheduler(self):
logger.warn("Cache timeout is in the future, bypassing schedule for template %s" % str(template.id))
continue
new_unified_job = template.create_unified_job(launch_type='scheduled', schedule=schedule)
can_start = new_unified_job.signal_start(extra_vars=schedule.extra_data)
can_start = new_unified_job.signal_start(extra_vars=parse_yaml_or_json(schedule.extra_data))
if not can_start:
new_unified_job.status = 'failed'
new_unified_job.job_explanation = "Scheduled job could not start because it was not in the right state or required manual credentials"
@ -721,7 +730,7 @@ class BaseTask(Task):
stdout_handle = self.get_stdout_handle(instance)
if self.should_use_proot(instance, **kwargs):
if not check_proot_installed():
raise RuntimeError('proot is not installed')
raise RuntimeError('bubblewrap is not installed')
kwargs['proot_temp_dir'] = build_proot_temp_dir()
args = wrap_args_with_proot(args, cwd, **kwargs)
safe_args = wrap_args_with_proot(safe_args, cwd, **kwargs)
@ -874,7 +883,7 @@ class RunJob(BaseTask):
cp_dir = os.path.join(kwargs['private_data_dir'], 'cp')
if not os.path.exists(cp_dir):
os.mkdir(cp_dir, 0700)
env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, 'ansible-ssh-%%h-%%p-%%r')
env['ANSIBLE_SSH_CONTROL_PATH'] = os.path.join(cp_dir, '%%h%%p%%r')
# Allow the inventory script to include host variables inline via ['_meta']['hostvars'].
env['INVENTORY_HOSTVARS'] = str(True)
@ -1314,6 +1323,15 @@ class RunProjectUpdate(BaseTask):
'''
return kwargs.get('private_data_files', {}).get('scm_credential', '')
def get_stdout_handle(self, instance):
stdout_handle = super(RunProjectUpdate, self).get_stdout_handle(instance)
def raw_callback(data):
instance_actual = ProjectUpdate.objects.get(pk=instance.pk)
instance_actual.result_stdout_text += data
instance_actual.save()
return OutputEventFilter(stdout_handle, raw_callback=raw_callback)
def post_run_hook(self, instance, status, **kwargs):
if instance.job_type == 'check' and status not in ('failed', 'canceled',):
p = instance.project
@ -1349,7 +1367,7 @@ class RunInventoryUpdate(BaseTask):
project_name=credential.project)
if credential.domain not in (None, ''):
openstack_auth['domain_name'] = credential.domain
private_state = str(inventory_update.source_vars_dict.get('private', 'true'))
private_state = inventory_update.source_vars_dict.get('private', True)
# Retrieve cache path from inventory update vars if available,
# otherwise create a temporary cache path only for this update.
cache = inventory_update.source_vars_dict.get('cache', {})
@ -1600,7 +1618,6 @@ class RunInventoryUpdate(BaseTask):
if inventory_update.overwrite_vars:
args.append('--overwrite-vars')
args.append('--source')
# If this is a cloud-based inventory (e.g. from AWS, Rackspace, etc.)
# then we need to set some extra flags based on settings in
# Tower.
@ -1656,22 +1673,41 @@ class RunInventoryUpdate(BaseTask):
os.chmod(path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
args.append(runpath)
args.append("--custom")
# try:
# shutil.rmtree(runpath, True)
# except OSError:
# pass
self.custom_dir_path.append(runpath)
verbosity = getattr(settings, 'INVENTORY_UPDATE_VERBOSITY', 1)
args.append('-v%d' % verbosity)
if settings.DEBUG:
args.append('--traceback')
return args
def get_stdout_handle(self, instance):
stdout_handle = super(RunInventoryUpdate, self).get_stdout_handle(instance)
def raw_callback(data):
instance_actual = InventoryUpdate.objects.get(pk=instance.pk)
instance_actual.result_stdout_text += data
instance_actual.save()
return OutputEventFilter(stdout_handle, raw_callback=raw_callback)
def build_cwd(self, inventory_update, **kwargs):
return self.get_path_to('..', 'plugins', 'inventory')
def get_idle_timeout(self):
return getattr(settings, 'INVENTORY_UPDATE_IDLE_TIMEOUT', None)
def pre_run_hook(self, instance, **kwargs):
self.custom_dir_path = []
def post_run_hook(self, instance, status, **kwargs):
print("In post run hook")
if self.custom_dir_path:
for p in self.custom_dir_path:
try:
shutil.rmtree(p, True)
except OSError:
pass
class RunAdHocCommand(BaseTask):
'''
@ -1878,6 +1914,8 @@ class RunSystemJob(BaseTask):
json_vars = json.loads(system_job.extra_vars)
if 'days' in json_vars and system_job.job_type != 'cleanup_facts':
args.extend(['--days', str(json_vars.get('days', 60))])
if 'dry_run' in json_vars and json_vars['dry_run'] and system_job.job_type != 'cleanup_facts':
args.extend(['--dry-run'])
if system_job.job_type == 'cleanup_jobs':
args.extend(['--jobs', '--project-updates', '--inventory-updates',
'--management-jobs', '--ad-hoc-commands', '--workflow-jobs',
@ -1891,6 +1929,15 @@ class RunSystemJob(BaseTask):
logger.error("Failed to parse system job: " + str(e))
return args
def get_stdout_handle(self, instance):
stdout_handle = super(RunSystemJob, self).get_stdout_handle(instance)
def raw_callback(data):
instance_actual = SystemJob.objects.get(pk=instance.pk)
instance_actual.result_stdout_text += data
instance_actual.save()
return OutputEventFilter(stdout_handle, raw_callback=raw_callback)
def build_env(self, instance, **kwargs):
env = super(RunSystemJob, self).build_env(instance,
**kwargs)

View File

@ -83,17 +83,19 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, ad
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
runtime_data, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ({'extra_vars':{}},)
# Check that job is serialized correctly
job_id = response.data['job']
assert job_id == 968
# If job is created with no arguments, it will inherit JT attributes
mock_job.signal_start.assert_called_once_with(extra_vars={})
mock_job.signal_start.assert_called_once()
# Check that response tells us what things were ignored
assert 'job_launch_var' in response.data['ignored_fields']['extra_vars']
@ -112,15 +114,17 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admi
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
runtime_data, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == (runtime_data,)
job_id = response.data['job']
assert job_id == 968
mock_job.signal_start.assert_called_once_with(**runtime_data)
mock_job.signal_start.assert_called_once()
@pytest.mark.django_db
@ -130,12 +134,14 @@ def test_job_accept_null_tags(job_template_prompts, post, admin_user, mocker):
mock_job = mocker.MagicMock(spec=Job, id=968)
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
post(reverse('api:job_template_launch', args=[job_template.pk]),
{'job_tags': '', 'skip_tags': ''}, admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ({'job_tags':'', 'skip_tags':''},)
mock_job.signal_start.assert_called_once_with(job_tags='', skip_tags='')
mock_job.signal_start.assert_called_once()
@pytest.mark.django_db
@ -154,14 +160,16 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
runtime_data, rando, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == (runtime_data,)
job_id = response.data['job']
assert job_id == 968
mock_job.signal_start.assert_called_once_with(**runtime_data)
mock_job.signal_start.assert_called_once()
@pytest.mark.django_db
@ -321,15 +329,18 @@ def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job
with mocker.patch('awx.main.access.BaseAccess.check_license'):
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job):
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
response = post(
reverse('api:job_template_launch', args=[job_template.pk]),
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
admin_user, expect=201)
assert JobTemplate.create_unified_job.called
assert JobTemplate.create_unified_job.call_args == ({'extra_vars':{'survey_var': 4}},)
job_id = response.data['job']
assert job_id == 968
# Check that the survey variable is accepted and the job variable isn't
mock_job.signal_start.assert_called_once_with(extra_vars={"survey_var": 4})
mock_job.signal_start.assert_called_once()

View File

@ -3,8 +3,7 @@ import pytest
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from awx.main.models.jobs import JobTemplate
from awx.main.models import Role, Group
from awx.main.models import Role, Group, UnifiedJobTemplate, JobTemplate
from awx.main.access import (
access_registry,
get_user_capabilities
@ -283,6 +282,25 @@ def test_prefetch_jt_capabilities(job_template, rando):
assert qs[0].capabilities_cache == {'edit': False, 'start': True}
@pytest.mark.django_db
def test_prefetch_ujt_job_template_capabilities(alice, bob, job_template):
    """Capability prefetch on the UnifiedJobTemplate list reflects each user's roles."""
    job_template.execute_role.members.add(alice)
    # alice holds execute on the JT; bob holds nothing.
    expectations = [
        (alice, {'edit': False, 'start': True}),
        (bob, {'edit': False, 'start': False}),
    ]
    for user, expected_cache in expectations:
        ujt_qs = UnifiedJobTemplate.objects.all()
        cache_list_capabilities(ujt_qs, ['admin', 'execute'], UnifiedJobTemplate, user)
        assert ujt_qs[0].capabilities_cache == expected_cache
@pytest.mark.django_db
def test_prefetch_ujt_project_capabilities(alice, project):
    """A project update role yields an empty capabilities cache on the UJT list."""
    project.update_role.members.add(alice)
    ujt_qs = UnifiedJobTemplate.objects.all()
    cache_list_capabilities(ujt_qs, ['admin', 'execute'], UnifiedJobTemplate, alice)
    assert ujt_qs[0].capabilities_cache == {}
@pytest.mark.django_db
def test_prefetch_group_capabilities(group, rando):
group.inventory.adhoc_role.members.add(rando)

View File

@ -8,6 +8,7 @@ from awx.main.models import (
)
from awx.main.access import (
InventoryAccess,
InventorySourceAccess,
HostAccess,
InventoryUpdateAccess,
CustomInventoryScriptAccess
@ -271,4 +272,8 @@ def test_host_access(organization, inventory, group, user, group_factory):
assert inventory_admin_access.can_read(host) is False
@pytest.mark.django_db
def test_inventory_source_credential_check(rando, inventory_source, credential):
    # An inventory admin may not attach a credential they hold no role on.
    inventory_source.group.inventory.admin_role.members.add(rando)
    source_access = InventorySourceAccess(rando)
    assert not source_access.can_change(inventory_source, {'credential': credential})

View File

@ -226,6 +226,14 @@ def test_job_template_access_org_admin(jt_objects, rando):
assert access.can_delete(jt_objects.job_template)
@pytest.mark.django_db
def test_orphan_JT_readable_by_system_auditor(job_template, system_auditor):
    # System auditors retain read access even when the JT has lost its project.
    assert system_auditor.is_system_auditor
    assert job_template.project is None
    auditor_access = JobTemplateAccess(system_auditor)
    assert auditor_access.can_read(job_template)
@pytest.mark.django_db
@pytest.mark.job_permissions
def test_job_template_creator_access(project, rando, post):

View File

@ -110,7 +110,7 @@ class TestJobTemplateSerializerGetSummaryFields():
view.request = request
serializer.context['view'] = view
with mocker.patch("awx.main.models.rbac.Role.get_description", return_value='Can eat pie'):
with mocker.patch("awx.api.serializers.role_summary_fields_generator", return_value='Can eat pie'):
with mocker.patch("awx.main.access.JobTemplateAccess.can_change", return_value='foobar'):
with mocker.patch("awx.main.access.JobTemplateAccess.can_add", return_value='foo'):
response = serializer.get_summary_fields(jt_obj)

View File

@ -1,9 +1,12 @@
import mock
import pytest
from collections import namedtuple
from awx.api.views import (
ApiV1RootView,
JobTemplateLabelList,
JobTemplateSurveySpec,
)
@ -65,3 +68,16 @@ class TestJobTemplateLabelList:
super(JobTemplateLabelList, view).unattach(mock_request, None, None)
assert mixin_unattach.called_with(mock_request, None, None)
class TestJobTemplateSurveySpec(object):

    @mock.patch('awx.api.views.feature_enabled', lambda feature: True)
    def test_get_password_type(self, mocker, mock_response_new):
        """Password-type survey defaults are masked as $encrypted$ in GET responses."""
        JobTemplate = namedtuple('JobTemplate', 'survey_spec')
        stub_jt = JobTemplate(
            survey_spec={'spec': [{'type': 'password', 'default': 'my_default'}]})
        with mocker.patch.object(JobTemplateSurveySpec, 'get_object', return_value=stub_jt):
            spec_view = JobTemplateSurveySpec()
            response = spec_view.get(mocker.MagicMock())
            assert response == mock_response_new
            # Wish there were a better way to do this! Inspect the mocked
            # Response call args to confirm the default never leaks.
            assert response.call_args[0][1]['spec'][0]['default'] == '$encrypted$'

View File

@ -0,0 +1,33 @@
from awx.conf.models import Setting
from awx.main.utils import common
def test_encrypt_field():
    # A pk-bearing Setting produces a pk-dependent ciphertext and round-trips.
    setting = Setting(pk=123, value='ANSIBLE')
    assert common.encrypt_field(setting, 'value') == '$encrypted$AES$Ey83gcmMuBBT1OEq2lepnw=='
    assert common.decrypt_field(setting, 'value') == 'ANSIBLE'
def test_encrypt_field_without_pk():
    # Without a pk the ciphertext differs from the pk-keyed case but still round-trips.
    setting = Setting(value='ANSIBLE')
    assert common.encrypt_field(setting, 'value') == '$encrypted$AES$8uIzEoGyY6QJwoTWbMFGhw=='
    assert common.decrypt_field(setting, 'value') == 'ANSIBLE'
def test_encrypt_subfield():
    # subfield= targets a single key inside a dict-valued setting.
    setting = Setting(value={'name': 'ANSIBLE'})
    assert common.encrypt_field(setting, 'value', subfield='name') == '$encrypted$AES$8uIzEoGyY6QJwoTWbMFGhw=='
    assert common.decrypt_field(setting, 'value', subfield='name') == 'ANSIBLE'
def test_encrypt_field_with_ask():
    # The literal 'ASK' sentinel passes through unencrypted when ask=True.
    assert common.encrypt_field(Setting(value='ASK'), 'value', ask=True) == 'ASK'
def test_encrypt_field_with_empty_value():
    # Falsey values are returned as-is rather than encrypted.
    assert common.encrypt_field(Setting(value=None), 'value') is None

View File

@ -84,3 +84,34 @@ def test_job_template_survey_variable_validation(job_template_factory):
}
obj.survey_enabled = True
assert obj.survey_variable_validation({"a": 5}) == ["Value 5 for 'a' expected to be a string."]
def test_job_template_survey_mixin(job_template_factory):
    """A '$encrypted$' launch var is replaced by the survey spec's default."""
    factory_objects = job_template_factory(
        'survey_mixin_test',
        organization='org1',
        inventory='inventory1',
        credential='cred1',
        persisted=False,
    )
    jt = factory_objects.job_template
    jt.survey_enabled = True
    jt.survey_spec = {'spec': [
        {'default': 'my_default', 'type': 'password', 'variable': 'my_variable'},
    ]}
    job_kwargs = jt._update_unified_job_kwargs(extra_vars={'my_variable': '$encrypted$'})
    assert job_kwargs['extra_vars'] == '{"my_variable": "my_default"}'
def test_job_template_survey_mixin_length(job_template_factory):
    """Replacement works with a multi-entry spec where one entry lacks a default."""
    factory_objects = job_template_factory(
        'survey_mixin_test',
        organization='org1',
        inventory='inventory1',
        credential='cred1',
        persisted=False,
    )
    jt = factory_objects.job_template
    jt.survey_enabled = True
    jt.survey_spec = {'spec': [
        {'default': 'my_default', 'type': 'password', 'variable': 'my_variable'},
        {'type': 'password', 'variable': 'my_other_variable'},
    ]}
    job_kwargs = jt._update_unified_job_kwargs(extra_vars={'my_variable': '$encrypted$'})
    assert job_kwargs['extra_vars'] == '{"my_variable": "my_default"}'

View File

@ -46,13 +46,13 @@ class TestLabelFilterMocked:
def test_is_candidate_for_detach(self, mocker, jt_count, j_count, expected):
mock_job_qs = mocker.MagicMock()
mock_job_qs.count = mocker.MagicMock(return_value=j_count)
UnifiedJob.objects = mocker.MagicMock()
UnifiedJob.objects.filter = mocker.MagicMock(return_value=mock_job_qs)
mocker.patch.object(UnifiedJob, 'objects', mocker.MagicMock(
filter=mocker.MagicMock(return_value=mock_job_qs)))
mock_jt_qs = mocker.MagicMock()
mock_jt_qs.count = mocker.MagicMock(return_value=jt_count)
UnifiedJobTemplate.objects = mocker.MagicMock()
UnifiedJobTemplate.objects.filter = mocker.MagicMock(return_value=mock_jt_qs)
mocker.patch.object(UnifiedJobTemplate, 'objects', mocker.MagicMock(
filter=mocker.MagicMock(return_value=mock_jt_qs)))
label = Label(id=37)
ret = label.is_candidate_for_detach()

View File

@ -20,11 +20,9 @@ def job(mocker):
return ret
@pytest.mark.survey
def test_job_survey_password_redaction():
"""Tests the Job model's funciton to redact passwords from
extra_vars - used when displaying job information"""
job = Job(
@pytest.fixture
def job_with_survey():
return Job(
name="test-job-with-passwords",
extra_vars=json.dumps({
'submitter_email': 'foobar@redhat.com',
@ -33,7 +31,13 @@ def test_job_survey_password_redaction():
survey_passwords={
'secret_key': '$encrypted$',
'SSN': '$encrypted$'})
assert json.loads(job.display_extra_vars()) == {
@pytest.mark.survey
def test_job_survey_password_redaction(job_with_survey):
    """Tests the Job model's function to redact passwords from
    extra_vars - used when displaying job information"""
    displayed = json.loads(job_with_survey.display_extra_vars())
    assert displayed == {
        'submitter_email': 'foobar@redhat.com',
        'secret_key': '$encrypted$',
        'SSN': '$encrypted$'}

View File

@ -1,15 +1,14 @@
import pytest
from contextlib import contextmanager
import pytest
import yaml
from awx.main.models import (
UnifiedJob,
Notification,
)
from awx.main.tasks import (
send_notifications,
run_administrative_checks,
)
from awx.main import tasks
from awx.main.task_engine import TaskEnhancer
@ -22,12 +21,12 @@ def apply_patches(_patches):
def test_send_notifications_not_list():
with pytest.raises(TypeError):
send_notifications(None)
tasks.send_notifications(None)
def test_send_notifications_job_id(mocker):
with mocker.patch('awx.main.models.UnifiedJob.objects.get'):
send_notifications([], job_id=1)
tasks.send_notifications([], job_id=1)
assert UnifiedJob.objects.get.called
assert UnifiedJob.objects.get.called_with(id=1)
@ -42,7 +41,7 @@ def test_send_notifications_list(mocker):
patches.append(mocker.patch('awx.main.models.Notification.objects.filter', return_value=mock_notifications))
with apply_patches(patches):
send_notifications([1,2], job_id=1)
tasks.send_notifications([1,2], job_id=1)
assert Notification.objects.filter.call_count == 1
assert mock_notifications[0].status == "successful"
assert mock_notifications[0].save.called
@ -64,9 +63,64 @@ def test_run_admin_checks_usage(mocker, current_instances, call_count):
patches.append(mocker.patch('awx.main.tasks.send_mail', wraps=mock_sm))
with apply_patches(patches):
run_administrative_checks()
tasks.run_administrative_checks()
assert mock_sm.called
if call_count == 2:
assert '90%' in mock_sm.call_args_list[0][0][0]
else:
assert 'expire' in mock_sm.call_args_list[0][0][0]
def test_openstack_client_config_generation(mocker):
    """build_private_data for an openstack source emits a clouds.yaml whose
    auth block mirrors the credential fields, with private=True by default."""
    update = tasks.RunInventoryUpdate()
    inventory_update = mocker.Mock(**{
        'source': 'openstack',
        'credential.host': 'https://keystone.openstack.example.org',
        'credential.username': 'demo',
        'credential.password': 'secrete',
        'credential.domain': None,
        'credential.project': 'demo-project',
        'source_vars_dict': {}
    })
    cloud_config = update.build_private_data(inventory_update)
    # safe_load instead of bare yaml.load: plain yaml.load can construct
    # arbitrary Python objects; the generated config is only mappings/scalars,
    # which safe_load parses identically.
    cloud_credential = yaml.safe_load(cloud_config['cloud_credential'])
    assert cloud_credential['clouds'] == {
        'devstack': {
            'auth': {
                'auth_url': 'https://keystone.openstack.example.org',
                'password': 'secrete',
                'project_name': 'demo-project',
                'username': 'demo'
            },
            'private': True
        }
    }
@pytest.mark.parametrize("source,expected", [
    (False, False), (True, True)
])
def test_openstack_client_config_generation_with_private_source_vars(mocker, source, expected):
    """An explicit 'private' key in source_vars is carried through to the
    generated openstack cloud config unchanged."""
    update = tasks.RunInventoryUpdate()
    inventory_update = mocker.Mock(**{
        'source': 'openstack',
        'credential.host': 'https://keystone.openstack.example.org',
        'credential.username': 'demo',
        'credential.password': 'secrete',
        'credential.domain': None,
        'credential.project': 'demo-project',
        'source_vars_dict': {'private': source}
    })
    cloud_config = update.build_private_data(inventory_update)
    # safe_load avoids yaml.load's arbitrary-object construction; the config
    # contains only plain mappings/scalars so parsing is identical.
    cloud_credential = yaml.safe_load(cloud_config['cloud_credential'])
    assert cloud_credential['clouds'] == {
        'devstack': {
            'auth': {
                'auth_url': 'https://keystone.openstack.example.org',
                'password': 'secrete',
                'project_name': 'demo-project',
                'username': 'demo'
            },
            'private': expected
        }
    }

View File

@ -163,22 +163,24 @@ def get_awx_version():
return __version__
def get_encryption_key_for_pk(pk, field_name):
def get_encryption_key(field_name, pk=None):
'''
Generate key for encrypted password based on instance pk and field name.
Generate key for encrypted password based on field name,
``settings.SECRET_KEY``, and instance pk (if available).
:param pk: (optional) the primary key of the ``awx.conf.model.Setting``;
can be omitted in situations where you're encrypting a setting
that is not database-persistent (like a read-only setting)
'''
from django.conf import settings
h = hashlib.sha1()
h.update(settings.SECRET_KEY)
h.update(str(pk))
if pk is not None:
h.update(str(pk))
h.update(field_name)
return h.digest()[:16]
def get_encryption_key(instance, field_name):
return get_encryption_key_for_pk(instance.pk, field_name)
def encrypt_field(instance, field_name, ask=False, subfield=None):
'''
Return content of the given instance and field name encrypted.
@ -189,7 +191,7 @@ def encrypt_field(instance, field_name, ask=False, subfield=None):
if not value or value.startswith('$encrypted$') or (ask and value == 'ASK'):
return value
value = smart_str(value)
key = get_encryption_key(instance, field_name)
key = get_encryption_key(field_name, getattr(instance, 'pk', None))
cipher = AES.new(key, AES.MODE_ECB)
while len(value) % cipher.block_size != 0:
value += '\x00'
@ -217,13 +219,13 @@ def decrypt_field(instance, field_name, subfield=None):
value = value[subfield]
if not value or not value.startswith('$encrypted$'):
return value
key = get_encryption_key(instance, field_name)
key = get_encryption_key(field_name, getattr(instance, 'pk', None))
return decrypt_value(key, value)
def decrypt_field_value(pk, field_name, value):
key = get_encryption_key_for_pk(pk, field_name)
key = get_encryption_key(field_name, pk)
return decrypt_value(key, value)
@ -517,6 +519,10 @@ def cache_list_capabilities(page, prefetch_list, model, user):
for obj in page:
obj.capabilities_cache = {}
skip_models = []
if hasattr(model, 'invalid_user_capabilities_prefetch_models'):
skip_models = model.invalid_user_capabilities_prefetch_models()
for prefetch_entry in prefetch_list:
display_method = None
@ -530,19 +536,20 @@ def cache_list_capabilities(page, prefetch_list, model, user):
paths = [paths]
# Build the query for accessible_objects according the user & role(s)
qs_obj = None
filter_args = []
for role_path in paths:
if '.' in role_path:
res_path = '__'.join(role_path.split('.')[:-1])
role_type = role_path.split('.')[-1]
if qs_obj is None:
qs_obj = model.objects
parent_model = model._meta.get_field(res_path).related_model
kwargs = {'%s__in' % res_path: parent_model.accessible_objects(user, '%s_role' % role_type)}
qs_obj = qs_obj.filter(Q(**kwargs) | Q(**{'%s__isnull' % res_path: True}))
parent_model = model
for subpath in role_path.split('.')[:-1]:
parent_model = parent_model._meta.get_field(subpath).related_model
filter_args.append(Q(
Q(**{'%s__pk__in' % res_path: parent_model.accessible_pk_qs(user, '%s_role' % role_type)}) |
Q(**{'%s__isnull' % res_path: True})))
else:
role_type = role_path
qs_obj = model.accessible_objects(user, '%s_role' % role_type)
filter_args.append(Q(**{'pk__in': model.accessible_pk_qs(user, '%s_role' % role_type)}))
if display_method is None:
# Role name translation to UI names for methods
@ -553,10 +560,13 @@ def cache_list_capabilities(page, prefetch_list, model, user):
display_method = 'start'
# Union that query with the list of items on page
ids_with_role = set(qs_obj.filter(pk__in=page_ids).values_list('pk', flat=True))
filter_args.append(Q(pk__in=page_ids))
ids_with_role = set(model.objects.filter(*filter_args).values_list('pk', flat=True))
# Save data item-by-item
for obj in page:
if skip_models and obj.__class__.__name__.lower() in skip_models:
continue
obj.capabilities_cache[display_method] = False
if obj.pk in ids_with_role:
obj.capabilities_cache[display_method] = True
@ -766,9 +776,10 @@ class OutputEventFilter(object):
EVENT_DATA_RE = re.compile(r'\x1b\[K((?:[A-Za-z0-9+/=]+\x1b\[\d+D)+)\x1b\[K')
def __init__(self, fileobj=None, event_callback=None):
def __init__(self, fileobj=None, event_callback=None, raw_callback=None):
self._fileobj = fileobj
self._event_callback = event_callback
self._raw_callback = raw_callback
self._counter = 1
self._start_line = 0
self._buffer = ''
@ -781,6 +792,8 @@ class OutputEventFilter(object):
if self._fileobj:
self._fileobj.write(data)
self._buffer += data
if self._raw_callback:
self._raw_callback(data)
while True:
match = self.EVENT_DATA_RE.search(self._buffer)
if not match:
@ -813,7 +826,7 @@ class OutputEventFilter(object):
for stdout_chunk in stdout_chunks:
event_data['counter'] = self._counter
self._counter += 1
event_data['stdout'] = stdout_chunk
event_data['stdout'] = stdout_chunk[:-2] if len(stdout_chunk) > 2 else ""
n_lines = stdout_chunk.count('\n')
event_data['start_line'] = self._start_line
event_data['end_line'] = self._start_line + n_lines

View File

@ -163,8 +163,15 @@ MAX_EVENT_RES_DATA = 700000
# Note: This setting may be overridden by database settings.
EVENT_STDOUT_MAX_BYTES_DISPLAY = 1024
# The amount of time before a stdout file is expired and removed locally
# Note that this can be recreated if the stdout is downloaded
LOCAL_STDOUT_EXPIRE_TIME = 2592000
# The number of processes spawned by the callback receiver to process job
# events into the database
JOB_EVENT_WORKERS = 4
# The maximum size of the job event worker queue before requests are blocked
JOB_EVENT_MAX_QUEUE_SIZE = 10000
# Disallow sending session cookies over insecure connections
@ -297,6 +304,7 @@ AUTH_LDAP_SERVER_URI = None
# Note: This setting may be overridden by database settings.
AUTH_LDAP_CONNECTION_OPTIONS = {
ldap.OPT_REFERRALS: 0,
ldap.OPT_NETWORK_TIMEOUT: 30
}
# Radius server settings (default to empty string to skip using Radius auth).
@ -416,6 +424,8 @@ CELERY_ROUTES = {'awx.main.tasks.run_job': {'queue': 'jobs',
'awx.main.scheduler.tasks.run_job_complete': {'queue': 'scheduler',
'routing_key': 'scheduler.job.complete'},
'awx.main.tasks.cluster_node_heartbeat': {'queue': 'default',
'routing_key': 'cluster.heartbeat'},
'awx.main.tasks.purge_old_stdout_files': {'queue': 'default',
'routing_key': 'cluster.heartbeat'}}
CELERYBEAT_SCHEDULE = {
@ -435,6 +445,10 @@ CELERYBEAT_SCHEDULE = {
'task': 'awx.main.tasks.cluster_node_heartbeat',
'schedule': timedelta(seconds=60)
},
'purge_stdout_files': {
'task': 'awx.main.tasks.purge_old_stdout_files',
'schedule': timedelta(days=7)
},
'task_manager': {
'task': 'awx.main.scheduler.tasks.run_task_manager',
'schedule': timedelta(seconds=20)
@ -821,6 +835,7 @@ ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC = False
# Internal API URL for use by inventory scripts and callback plugin.
INTERNAL_API_URL = 'http://127.0.0.1:%s' % DEVSERVER_DEFAULT_PORT
PERSISTENT_CALLBACK_MESSAGES = True
USE_CALLBACK_QUEUE = True
CALLBACK_QUEUE = "callback_tasks"
FACT_QUEUE = "facts"

View File

@ -5,11 +5,14 @@
import logging
import uuid
import ldap
# Django
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.conf import settings as django_settings
from django.core.signals import setting_changed
from django.core.exceptions import ImproperlyConfigured
# django-auth-ldap
from django_auth_ldap.backend import LDAPSettings as BaseLDAPSettings
@ -37,6 +40,16 @@ class LDAPSettings(BaseLDAPSettings):
'TEAM_MAP': {},
}.items())
def __init__(self, prefix='AUTH_LDAP_', defaults={}):
super(LDAPSettings, self).__init__(prefix, defaults)
# If a DB-backed setting is specified that wipes out the
# OPT_NETWORK_TIMEOUT, fall back to a sane default
if ldap.OPT_NETWORK_TIMEOUT not in getattr(self, 'CONNECTION_OPTIONS', {}):
options = getattr(self, 'CONNECTION_OPTIONS', {})
options[ldap.OPT_NETWORK_TIMEOUT] = 30
self.CONNECTION_OPTIONS = options
class LDAPBackend(BaseLDAPBackend):
'''
@ -75,7 +88,11 @@ class LDAPBackend(BaseLDAPBackend):
if not feature_enabled('ldap'):
logger.error("Unable to authenticate, license does not support LDAP authentication")
return None
return super(LDAPBackend, self).authenticate(username, password)
try:
return super(LDAPBackend, self).authenticate(username, password)
except ImproperlyConfigured:
logger.error("Unable to authenticate, LDAP is improperly configured")
return None
def get_user(self, user_id):
if not self.settings.SERVER_URI:

View File

@ -228,7 +228,7 @@ register(
register(
'AUTH_LDAP_CONNECTION_OPTIONS',
field_class=fields.LDAPConnectionOptionsField,
default={'OPT_REFERRALS': 0},
default={'OPT_REFERRALS': 0, 'OPT_NETWORK_TIMEOUT': 30},
label=_('LDAP Connection Options'),
help_text=_('Additional options to set for the LDAP connection. LDAP '
'referrals are disabled by default (to prevent certain LDAP '
@ -240,6 +240,7 @@ register(
category_slug='ldap',
placeholder=collections.OrderedDict([
('OPT_REFERRALS', 0),
('OPT_NETWORK_TIMEOUT', 30)
]),
feature_required='ldap',
)
@ -270,7 +271,7 @@ register(
field_class=fields.LDAPDNWithUserField,
allow_blank=True,
allow_null=True,
default='',
default=None,
label=_('LDAP User DN Template'),
help_text=_('Alternative to user search, if user DNs are all of the same '
'format. This approach will be more efficient for user lookups than '
@ -340,7 +341,7 @@ register(
field_class=fields.LDAPDNField,
allow_blank=True,
allow_null=True,
default='',
default=None,
label=_('LDAP Require Group'),
help_text=_('Group DN required to login. If specified, user must be a member '
'of this group to login via LDAP. If not set, everyone in LDAP '
@ -357,7 +358,7 @@ register(
field_class=fields.LDAPDNField,
allow_blank=True,
allow_null=True,
default='',
default=None,
label=_('LDAP Deny Group'),
help_text=_('Group DN denied from login. If specified, user will not be '
'allowed to login if a member of this group. Only one deny group '

View File

View File

View File

@ -0,0 +1,24 @@
from django.test.utils import override_settings
import ldap
import pytest
from awx.sso.backends import LDAPSettings
@override_settings(AUTH_LDAP_CONNECTION_OPTIONS={ldap.OPT_NETWORK_TIMEOUT: 60})
@pytest.mark.django_db
def test_ldap_with_custom_timeout():
    # An explicitly configured network timeout must be preserved as-is.
    ldap_settings = LDAPSettings()
    assert ldap_settings.CONNECTION_OPTIONS == {ldap.OPT_NETWORK_TIMEOUT: 60}
@override_settings(AUTH_LDAP_CONNECTION_OPTIONS={ldap.OPT_REFERRALS: 0})
@pytest.mark.django_db
def test_ldap_with_missing_timeout():
    # When the configured options omit a network timeout, the 30s default is injected.
    ldap_settings = LDAPSettings()
    assert ldap_settings.CONNECTION_OPTIONS == {
        ldap.OPT_REFERRALS: 0,
        ldap.OPT_NETWORK_TIMEOUT: 30,
    }

View File

@ -0,0 +1,21 @@
import ldap
from awx.sso.backends import LDAPSettings
def test_ldap_default_settings(mocker):
    # With no DB-backed settings present, the map defaults are empty dicts.
    empty_queryset = mocker.Mock(**{'order_by.return_value': []})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=empty_queryset):
        ldap_settings = LDAPSettings()
        assert ldap_settings.ORGANIZATION_MAP == {}
        assert ldap_settings.TEAM_MAP == {}
def test_ldap_default_network_timeout(mocker):
    # Absent any override, connection options fall back to the built-in defaults.
    empty_queryset = mocker.Mock(**{'order_by.return_value': []})
    with mocker.patch('awx.conf.models.Setting.objects.filter', return_value=empty_queryset):
        assert LDAPSettings().CONNECTION_OPTIONS == {
            ldap.OPT_REFERRALS: 0,
            ldap.OPT_NETWORK_TIMEOUT: 30,
        }

View File

@ -1675,6 +1675,7 @@ tr td button i {
.alert {
padding: 10px;
margin: 0;
word-wrap: break-word;
}
.alert-danger {
background-color: @default-bg;
@ -2239,3 +2240,22 @@ button[disabled],
html input[disabled] {
cursor: not-allowed;
}
.CodeMirror--disabled .CodeMirror.cm-s-default,
.CodeMirror--disabled .CodeMirror-line {
background-color: #f6f6f6;
}
.CodeMirror--disabled .CodeMirror-gutter.CodeMirror-lint-markers,
.CodeMirror--disabled .CodeMirror-gutter.CodeMirror-linenumbers {
background-color: #ebebeb;
color: @b7grey;
}
.CodeMirror--disabled .CodeMirror-lines {
cursor: default;
}
.CodeMirror--disabled .CodeMirror-cursors {
display: none;
}

View File

@ -167,7 +167,7 @@ table, tbody {
}
.List-auxActionStream {
width: 175px;
width: 200px;
}
.List-action:not(.ng-hide) ~ .List-action:not(.ng-hide) {

View File

@ -1,12 +1,13 @@
import {templateUrl} from '../shared/template-url/template-url.factory';
import controller from './about.controller';
import { N_ } from '../i18n';
export default {
name: 'setup.about',
route: '/about',
controller: controller,
ncyBreadcrumb: {
label: "ABOUT"
label: N_("ABOUT")
},
onExit: function(){
// hacky way to handle user browsing away via URL bar

View File

@ -23,10 +23,10 @@
<span class="AddPermissions-directionNumber">
1
</span>
<div ng-hide='withoutTeamPermissions'>
<div ng-hide='withoutTeamPermissions' translate>
Please select Users / Teams from the lists below.
</div>
<div ng-show='withoutTeamPermissions'>
<div ng-show='withoutTeamPermissions' translate>
Please select Users from the list below.
</div>
</div>
@ -34,12 +34,12 @@
<div class="Form-tabHolder" ng-hide='withoutTeamPermissions'>
<div id="users_tab" class="Form-tab"
ng-click="toggleFormTabs('users')"
ng-class="{'is-selected': usersSelected }">
ng-class="{'is-selected': usersSelected }" translate>
Users
</div>
<div id="teams_tab" class="Form-tab"
ng-click="toggleFormTabs('teams')"
ng-class="{'is-selected': teamsSelected }">
ng-class="{'is-selected': teamsSelected }" translate>
Teams
</div>
</div>
@ -59,7 +59,7 @@
<span class="AddPermissions-directionNumber">
2
</span>
Please assign roles to the selected users/teams
<translate>Please assign roles to the selected users/teams</translate>
<div class="AddPermissions-keyToggle btn"
ng-class="{'is-active': showKeyPane}"
ng-click="toggleKeyPane()">

View File

@ -26,7 +26,7 @@
<span class="AddPermissions-directionNumber">
1
</span>
<div>
<div translate>
Please select resources from the lists below.
</div>
</div>
@ -34,30 +34,30 @@
<div class="Form-tabHolder">
<div class="Form-tab"
ng-click="selectTab('job_templates')"
ng-class="{'is-selected': tab.job_templates }">
ng-class="{'is-selected': tab.job_templates }" translate>
Job Templates
</div>
<div class="Form-tab"
ng-click="selectTab('workflow_templates')"
ng-class="{'is-selected': tab.workflow_templates}"
>
translate>
Workflow Templates
</div>
<div class="Form-tab"
ng-click="selectTab('projects')"
ng-class="{'is-selected': tab.projects }">
ng-class="{'is-selected': tab.projects }" translate>
Projects
</div>
<div class="Form-tab"
ng-click="selectTab('inventories')"
ng-class="{'is-selected': tab.inventories}"
>
translate>
Inventories
</div>
<div class="Form-tab"
ng-click="selectTab('credentials')"
ng-class="{'is-selected': tab.credentials}"
>
translate>
Credentials
</div>
</div>
@ -86,10 +86,10 @@
<span class="AddPermissions-directionNumber">
2
</span>
Please assign roles to the selected resources
<translate>Please assign roles to the selected resources</translate>
<div class="AddPermissions-keyToggle btn"
ng-class="{'is-active': showKeyPane}"
ng-click="toggleKeyPane()">
ng-click="toggleKeyPane()" translate>
Key
</div>
</div>
@ -97,34 +97,34 @@
<div class="Form-tab"
ng-click="selectTab('job_templates')"
ng-class="{'is-selected': tab.job_templates }"
ng-show="showSection2Tab('job_templates')">
ng-show="showSection2Tab('job_templates')" translate>
Job Templates
</div>
<div class="Form-tab"
ng-click="selectTab('workflow_templates')"
ng-class="{'is-selected': tab.workflow_templates }"
ng-show="showSection2Tab('workflow_templates')">
ng-show="showSection2Tab('workflow_templates')" translate>
Workflow Templates
</div>
<div class="Form-tab"
ng-click="selectTab('projects')"
ng-class="{'is-selected': tab.projects}"
ng-show="showSection2Tab('projects')"
>
translate>
Projects
</div>
<div class="Form-tab"
ng-click="selectTab('inventories')"
ng-class="{'is-selected': tab.inventories}"
ng-show="showSection2Tab('inventories')"
>
translate>
Inventories
</div>
<div class="Form-tab"
ng-click="selectTab('credentials')"
ng-class="{'is-selected': tab.credentials}"
ng-show="showSection2Tab('credentials')"
>
translate>
Credentials
</div>
</div>
@ -170,13 +170,13 @@
<div class="buttons Form-buttons AddPermissions-buttons">
<button type="button"
class="btn btn-sm Form-cancelButton"
ng-click="closeModal()">
ng-click="closeModal()" translate>
Cancel
</button>
<button type="button"
class="btn btn-sm Form-saveButton"
ng-click="saveForm()"
ng-disabled="!saveEnabled()">
ng-disabled="!saveEnabled()" translate>
Save
</button>
</div>

View File

@ -7,10 +7,12 @@
import roleList from './rbac-role-column/roleList.directive';
import addRbacResource from './add-rbac-resource/main';
import addRbacUserTeam from './add-rbac-user-team/main';
import permissionsList from './permissions-list.controller';
export default
angular.module('RbacModule', [
addRbacResource.name,
addRbacUserTeam.name
])
.controller('PermissionsList', permissionsList)
.directive('roleList', roleList);

View File

@ -0,0 +1,82 @@
/*************************************************
* Copyright (c) 2016 Ansible, Inc.
*
* All Rights Reserved
*************************************************/
export default ['$scope', 'ListDefinition', 'Dataset', 'Wait', 'Rest', 'ProcessErrors', 'Prompt', '$state',
    function($scope, list, Dataset, Wait, Rest, ProcessErrors, Prompt, $state) {

        // Seed the scope with the list definition and the paginated dataset.
        $scope.list = list;
        $scope[`${list.iterator}_dataset`] = Dataset.data;
        $scope[`${list.iterator}s`] = $scope[`${list.iterator}_dataset`].results;

        // Prompt for confirmation, then disassociate the user from the role.
        $scope.deletePermissionFromUser = function(userId, userName, roleName, roleType, url) {
            var performRemoval = function() {
                $('#prompt-modal').modal('hide');
                Wait('start');
                Rest.setUrl(url);
                Rest.post({ "disassociate": true, "id": userId })
                    .success(function() {
                        Wait('stop');
                        $state.go('.', null, {reload: true});
                    })
                    .error(function(data, status) {
                        ProcessErrors($scope, data, status, null, {
                            hdr: 'Error!',
                            msg: 'Could not disassociate user from role. Call to ' + url + ' failed. DELETE returned status: ' + status
                        });
                    });
            };

            Prompt({
                hdr: `Remove role`,
                body: `
                    <div class="Prompt-bodyQuery">
                        Confirm the removal of the ${roleType}
                        <span class="Prompt-emphasis"> ${roleName} </span>
                        role associated with ${userName}.
                    </div>
                `,
                action: performRemoval,
                actionText: 'REMOVE'
            });
        };

        // Prompt for confirmation, then disassociate the team from the role.
        $scope.deletePermissionFromTeam = function(teamId, teamName, roleName, roleType, url) {
            var performRemoval = function() {
                $('#prompt-modal').modal('hide');
                Wait('start');
                Rest.setUrl(url);
                Rest.post({ "disassociate": true, "id": teamId })
                    .success(function() {
                        Wait('stop');
                        $state.go('.', null, {reload: true});
                    })
                    .error(function(data, status) {
                        ProcessErrors($scope, data, status, null, {
                            hdr: 'Error!',
                            msg: 'Could not disassociate team from role. Call to ' + url + ' failed. DELETE returned status: ' + status
                        });
                    });
            };

            Prompt({
                hdr: `Remove role`,
                body: `
                    <div class="Prompt-bodyQuery">
                        Confirm the removal of the ${roleType}
                        <span class="Prompt-emphasis"> ${roleName} </span>
                        role associated with the ${teamName} team.
                    </div>
                `,
                action: performRemoval,
                actionText: 'REMOVE'
            });
        };
    }
];

View File

@ -5,7 +5,7 @@
*************************************************/
export default function() {
export default ['i18n', function(i18n) {
return {
searchSize: 'col-lg-12 col-md-12 col-sm-12 col-xs-12',
name: 'teams',
@ -15,19 +15,20 @@
multiSelectExtended: true,
index: false,
hover: true,
emptyListText : 'No Teams exist',
emptyListText : i18n._('No Teams exist'),
fields: {
name: {
key: true,
label: 'name'
label: i18n._('name')
},
organization: {
label: 'organization',
label: i18n._('organization'),
ngBind: 'team.summary_fields.organization.name',
sourceModel: 'organization',
sourceField: 'name'
sourceField: 'name',
searchable: true
}
}
};
}
}];

View File

@ -5,7 +5,7 @@
*************************************************/
export default function() {
export default ['i18n', function(i18n) {
return {
name: 'users',
iterator: 'user',
@ -21,22 +21,22 @@
multiSelectExtended: true,
index: false,
hover: true,
emptyListText : 'No Users exist',
emptyListText : i18n._('No Users exist'),
fields: {
first_name: {
label: 'First Name',
label: i18n._('First Name'),
columnClass: 'col-md-3 col-sm-3 hidden-xs'
},
last_name: {
label: 'Last Name',
label: i18n._('Last Name'),
columnClass: 'col-md-3 col-sm-3 hidden-xs'
},
username: {
key: true,
label: 'Username',
label: i18n._('Username'),
columnClass: 'col-md-5 col-sm-5 col-xs-11'
},
},
};
}
}];

View File

@ -59,6 +59,7 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
case 'JobTemplates':
list.name = 'job_templates';
list.iterator = 'job_template';
list.basePath = 'job_templates';
list.fields = {
name: list.fields.name,
description: list.fields.description

View File

@ -8,7 +8,8 @@
export default
[
'CreateSelect2',
function(CreateSelect2) {
'i18n',
function(CreateSelect2, i18n) {
return {
restrict: 'E',
scope: {
@ -21,7 +22,7 @@ export default
CreateSelect2({
element: '.roleSelect2',
multiple: true,
placeholder: 'Select roles'
placeholder: i18n._('Select roles')
});
}
};

View File

@ -9,8 +9,16 @@
<div class="RoleList-tag"
ng-class="{'RoleList-tag--deletable': entry.explicit && entry.user_capabilities.unattach,
'RoleList-tag--team': entry.team_id}"
aw-tool-tip='<div>Organization: {{ entry.team_organization_name | sanitize }}</div><div>Team: {{entry.team_name | sanitize}}</div>' aw-tip-placement='bottom'>
aw-tool-tip='<div>Organization: {{ entry.team_organization_name | sanitize }}</div><div>Team: {{entry.team_name | sanitize}}</div>' aw-tip-placement='bottom'
ng-if="entry.team_id">
<span class="RoleList-name">{{ entry.name }}</span>
<i ng-show='entry.team_id' class="fa fa-users"></i>
</div>
<div class="RoleList-tag"
ng-class="{'RoleList-tag--deletable': entry.explicit && entry.user_capabilities.unattach,
'RoleList-tag--team': entry.team_id}"
ng-if="!entry.team_id">
<span class="RoleList-name">{{ entry.name }}</span>
</div>
</div>

View File

@ -4,6 +4,8 @@
* All Rights Reserved
*************************************************/
import { N_ } from '../i18n';
export default {
name: 'activityStream',
route: '/activity_stream?target&id',
@ -22,7 +24,7 @@ export default {
}
},
ncyBreadcrumb: {
label: "ACTIVITY STREAM"
label: N_("ACTIVITY STREAM")
},
onExit: function() {
$('#stream-detail-modal').modal('hide');

View File

@ -9,15 +9,15 @@
</div>
<div class="Modal-body" id="detail-body">
<div ng-show="user" class="StreamDetail-inlineRow">
<div class="StreamDetail-rowTitle StreamDetail-inlineRowTitle">INITIATED BY</div>
<div class="StreamDetail-rowTitle StreamDetail-inlineRowTitle" translate>INITIATED BY</div>
<div class="StreamDetail-inlineRowData" ng-bind="user"></div>
</div>
<div ng-show="operation" class="StreamDetail-inlineRow">
<div class="StreamDetail-rowTitle StreamDetail-inlineRowTitle">ACTION</div>
<div class="StreamDetail-rowTitle StreamDetail-inlineRowTitle" translate>ACTION</div>
<div class="StreamDetail-inlineRowData StreamDetail-actions" ng-bind-html="operation"></div>
</div>
<div ng-show="changes">
<div class="StreamDetail-rowTitle StreamDetail-changesRowTitle">CHANGES</div>
<div class="StreamDetail-rowTitle StreamDetail-changesRowTitle" translate>CHANGES</div>
<pre class="StreamDetail-changes">{{ changes | json : spacing}}</pre>
</div>
</div>

View File

@ -4,7 +4,7 @@
* All Rights Reserved
*************************************************/
export default ['templateUrl', function(templateUrl) {
export default ['templateUrl', 'i18n', function(templateUrl, i18n) {
return {
restrict: 'E',
scope: true,
@ -12,21 +12,27 @@ export default ['templateUrl', function(templateUrl) {
templateUrl: templateUrl('activity-stream/streamDropdownNav/stream-dropdown-nav'),
controller: ['$scope', '$state', '$stateParams','CreateSelect2', function($scope, $state, $stateParams, CreateSelect2) {
$scope.streamTarget = ($state.params && $state.params.target) ? $state.params.target : 'dashboard';
if($state.params && $state.params.target) {
$scope.streamTarget = ($state.params.target === 'job_template' || $state.params.target === 'workflow_job_template') ? 'template' : $state.params.target;
}
else {
$scope.streamTarget = 'dashboard';
}
$scope.options = [
{label: 'All Activity', value: 'dashboard'},
{label: 'Credentials', value: 'credential'},
{label: 'Hosts', value: 'host'},
{label: 'Inventories', value: 'inventory'},
{label: 'Inventory Scripts', value: 'inventory_script'},
{label: 'Jobs', value: 'job'},
{label: 'Organizations', value: 'organization'},
{label: 'Projects', value: 'project'},
{label: 'Schedules', value: 'schedule'},
{label: 'Teams', value: 'team'},
{label: 'Templates', value: 'template'},
{label: 'Users', value: 'user'}
{label: i18n._('All Activity'), value: 'dashboard'},
{label: i18n._('Credentials'), value: 'credential'},
{label: i18n._('Hosts'), value: 'host'},
{label: i18n._('Inventories'), value: 'inventory'},
{label: i18n._('Inventory Scripts'), value: 'custom_inventory_script'},
{label: i18n._('Jobs'), value: 'job'},
{label: i18n._('Notification Templates'), value: 'notification_template'},
{label: i18n._('Organizations'), value: 'organization'},
{label: i18n._('Projects'), value: 'project'},
{label: i18n._('Schedules'), value: 'schedule'},
{label: i18n._('Teams'), value: 'team'},
{label: i18n._('Templates'), value: 'template'},
{label: i18n._('Users'), value: 'user'}
];
CreateSelect2({
@ -40,12 +46,14 @@ export default ['templateUrl', function(templateUrl) {
$state.go('activityStream', {target: null, activity_search: {page_size:"20", order_by: '-timestamp'}});
}
else {
let search = _.merge($stateParams.activity_search, {
let search = {
or__object1__in: $scope.streamTarget && $scope.streamTarget === 'template' ? 'job_template,workflow_job_template' : $scope.streamTarget,
or__object2__in: $scope.streamTarget && $scope.streamTarget === 'template' ? 'job_template,workflow_job_template' : $scope.streamTarget
});
or__object2__in: $scope.streamTarget && $scope.streamTarget === 'template' ? 'job_template,workflow_job_template' : $scope.streamTarget,
page_size: '20',
order_by: '-timestamp'
};
// Attach the taget to the query parameters
$state.go('activityStream', {target: $scope.streamTarget, activity_search: search});
$state.go('activityStream', {target: $scope.streamTarget, id: null, activity_search: search});
}
};

View File

@ -44,7 +44,6 @@ import './filters';
import { Home } from './controllers/Home';
import { SocketsController } from './controllers/Sockets';
import { CredentialsAdd, CredentialsEdit, CredentialsList } from './controllers/Credentials';
import { JobsListController } from './controllers/Jobs';
import portalMode from './portal-mode/main';
import systemTracking from './system-tracking/main';
import inventories from './inventories/main';
@ -70,6 +69,7 @@ import activityStream from './activity-stream/main';
import standardOut from './standard-out/main';
import Templates from './templates/main';
import credentials from './credentials/main';
import jobs from './jobs/main';
import { ProjectsList, ProjectsAdd, ProjectsEdit } from './controllers/Projects';
import { UsersList, UsersAdd, UsersEdit } from './controllers/Users';
import { TeamsList, TeamsAdd, TeamsEdit } from './controllers/Teams';
@ -99,6 +99,8 @@ var tower = angular.module('Tower', [
require('angular-tz-extensions'),
require('lr-infinite-scroll'),
require('ng-toast'),
'gettext',
'I18N',
uiRouter,
'ui.router.state.events',
@ -132,6 +134,7 @@ var tower = angular.module('Tower', [
portalMode.name,
config.name,
credentials.name,
jobs.name,
//'templates',
'Utilities',
'OrganizationFormDefinition',
@ -201,8 +204,6 @@ var tower = angular.module('Tower', [
scheduler.name,
'ApiModelHelper',
'ActivityStreamHelper',
'gettext',
'I18N',
'WorkflowFormDefinition',
'InventorySourcesListDefinition',
'WorkflowMakerFormDefinition'
@ -290,6 +291,9 @@ var tower = angular.module('Tower', [
"jobs": ["status_changed"]
}
}
},
ncyBreadcrumb: {
label: N_('PROJECTS')
}
})
});
@ -371,12 +375,12 @@ var tower = angular.module('Tower', [
'CheckLicense', '$location', 'Authorization', 'LoadBasePaths', 'Timer',
'ClearScope', 'LoadConfig', 'Store', 'pendoService', 'Prompt', 'Rest',
'Wait', 'ProcessErrors', '$state', 'GetBasePath', 'ConfigService',
'FeaturesService', '$filter', 'SocketService', 'I18NInit',
'FeaturesService', '$filter', 'SocketService',
function($stateExtender, $q, $compile, $cookieStore, $rootScope, $log, $stateParams,
CheckLicense, $location, Authorization, LoadBasePaths, Timer,
ClearScope, LoadConfig, Store, pendoService, Prompt, Rest, Wait,
ProcessErrors, $state, GetBasePath, ConfigService, FeaturesService,
$filter, SocketService, I18NInit) {
$filter, SocketService) {
$rootScope.$state = $state;
$rootScope.$state.matches = function(stateName) {
@ -388,7 +392,6 @@ var tower = angular.module('Tower', [
$log.debug(`$state.defaultErrorHandler: ${error}`);
});
I18NInit();
$stateExtender.addState({
name: 'dashboard',
url: '/home',
@ -420,53 +423,6 @@ var tower = angular.module('Tower', [
}
});
$stateExtender.addState({
searchPrefix: 'job',
name: 'jobs',
url: '/jobs',
ncyBreadcrumb: {
label: N_("JOBS")
},
params: {
job_search: {
value: {
not__launch_type: 'sync',
order_by: '-finished'
},
squash: ''
}
},
data: {
socket: {
"groups": {
"jobs": ["status_changed"],
"schedules": ["changed"]
}
}
},
resolve: {
Dataset: ['AllJobsList', 'QuerySet', '$stateParams', 'GetBasePath', (list, qs, $stateParams, GetBasePath) => {
let path = GetBasePath(list.basePath) || GetBasePath(list.name);
return qs.search(path, $stateParams[`${list.iterator}_search`]);
}]
},
views: {
'@': {
templateUrl: urlPrefix + 'partials/jobs.html',
},
'list@jobs': {
templateProvider: function(AllJobsList, generateList) {
let html = generateList.build({
list: AllJobsList,
mode: 'edit'
});
return html;
},
controller: JobsListController
}
}
});
$stateExtender.addState({
name: 'userCredentials',
url: '/users/:user_id/credentials',
@ -498,70 +454,6 @@ var tower = angular.module('Tower', [
}
});
$rootScope.deletePermissionFromUser = function(userId, userName, roleName, roleType, url) {
var action = function() {
$('#prompt-modal').modal('hide');
Wait('start');
Rest.setUrl(url);
Rest.post({ "disassociate": true, "id": userId })
.success(function() {
Wait('stop');
$rootScope.$broadcast("refreshList", "permission");
})
.error(function(data, status) {
ProcessErrors($rootScope, data, status, null, {
hdr: 'Error!',
msg: 'Could not disassociate user from role. Call to ' + url + ' failed. DELETE returned status: ' + status
});
});
};
Prompt({
hdr: `Remove role`,
body: `
<div class="Prompt-bodyQuery">
Confirm the removal of the ${roleType}
<span class="Prompt-emphasis"> ${roleName} </span>
role associated with ${userName}.
</div>
`,
action: action,
actionText: 'REMOVE'
});
};
$rootScope.deletePermissionFromTeam = function(teamId, teamName, roleName, roleType, url) {
var action = function() {
$('#prompt-modal').modal('hide');
Wait('start');
Rest.setUrl(url);
Rest.post({ "disassociate": true, "id": teamId })
.success(function() {
Wait('stop');
$rootScope.$broadcast("refreshList", "role");
})
.error(function(data, status) {
ProcessErrors($rootScope, data, status, null, {
hdr: 'Error!',
msg: 'Could not disassociate team from role. Call to ' + url + ' failed. DELETE returned status: ' + status
});
});
};
Prompt({
hdr: `Remove role`,
body: `
<div class="Prompt-bodyQuery">
Confirm the removal of the ${roleType}
<span class="Prompt-emphasis"> ${roleName} </span>
role associated with the ${teamName} team.
</div>
`,
action: action,
actionText: 'REMOVE'
});
};
function activateTab() {
// Make the correct tab active
var base = $location.path().replace(/^\//, '').split('/')[0];

View File

@ -73,6 +73,10 @@
vertical-align: bottom;
}
.BreadCrumb-invItem {
max-width: 400px;
}
.BreadCrumb-item + .BreadCrumb-item:before {
content: "/";
padding: 0 5px;

View File

@ -1,6 +1,6 @@
export default
['templateUrl', '$state', 'FeaturesService', 'ProcessErrors','$rootScope', 'Store', 'Empty', '$window', 'BreadCrumbService',
function(templateUrl, $state, FeaturesService, ProcessErrors, $rootScope, Store, Empty, $window, BreadCrumbService) {
['templateUrl', '$state', 'FeaturesService', 'ProcessErrors','$rootScope', 'Store', 'Empty', '$window', 'BreadCrumbService', 'i18n',
function(templateUrl, $state, FeaturesService, ProcessErrors, $rootScope, Store, Empty, $window, BreadCrumbService, i18n) {
return {
restrict: 'E',
templateUrl: templateUrl('bread-crumb/bread-crumb'),
@ -41,9 +41,10 @@ export default
if(streamConfig && streamConfig.activityStream) {
if(streamConfig.activityStreamTarget) {
stateGoParams.target = streamConfig.activityStreamTarget;
let isTemplateTarget = _.contains(['template', 'job_template', 'workflow_job_template'], streamConfig.activityStreamTarget);
stateGoParams.activity_search = {
or__object1__in: streamConfig.activityStreamTarget === 'template' ? 'job_template,workflow_job_template' : streamConfig.activityStreamTarget,
or__object2__in: streamConfig.activityStreamTarget === 'template' ? 'job_template,workflow_job_template' : streamConfig.activityStreamTarget,
or__object1__in: isTemplateTarget ? 'job_template,workflow_job_template' : streamConfig.activityStreamTarget,
or__object2__in: isTemplateTarget ? 'job_template,workflow_job_template' : streamConfig.activityStreamTarget,
order_by: '-timestamp',
page_size: '20',
};
@ -60,6 +61,10 @@ export default
if(streamConfig.activityStreamId) {
stateGoParams.id = $state.params[streamConfig.activityStreamId];
}
if(stateGoParams.target === "custom_inventory_script"){
stateGoParams.activity_search[streamConfig.activityStreamTarget] = $state.params.inventory_script_id;
stateGoParams.id = $state.params.inventory_script_id;
}
}
originalRoute = $state.current;
@ -103,7 +108,7 @@ export default
if(features){
scope.loadingLicense = false;
scope.activityStreamActive = (toState.name === 'activityStream') ? true : false;
scope.activityStreamTooltip = (toState.name === 'activityStream') ? 'Hide Activity Stream' : 'View Activity Stream';
scope.activityStreamTooltip = (toState.name === 'activityStream') ? i18n._('Hide Activity Stream') : i18n._('View Activity Stream');
scope.showActivityStreamButton = (FeaturesService.featureEnabled('activity_streams') || toState.name ==='activityStream') ? true : false;
}
}

View File

@ -174,6 +174,7 @@ export default [
ngDisabled: $rootScope.user_is_system_auditor,
disabled: $scope.$parent.configDataResolve[key].disabled || null,
readonly: $scope.$parent.configDataResolve[key].readonly || null,
definedInFile: $scope.$parent.configDataResolve[key].defined_in_file || null
});
}

View File

@ -1,6 +1,6 @@
<div class="tab-pane Configuration-container" id="configuration_edit">
<div class="Form-nav--dropdownContainer">
<div class="Form-nav--dropdownLabel">Sub Category</div>
<div class="Form-nav--dropdownLabel" translate>Sub Category</div>
<div class="Form-nav--dropdown">
<select
id="configure-dropdown-nav"

View File

@ -85,6 +85,15 @@ input#filePickerText {
background-color: #fff;
}
.Form-filePicker--selectedFile {
margin: 12px 0;
}
.Form-filePicker--thumbnail {
max-height: 40px;
max-width: 40px;
}
// Messagebar for system auditor role notifications
.Section-messageBar {
width: 120%;

View File

@ -1,21 +1,21 @@
<div class="Section-messageBar" ng-if="vm.show_auditor_bar">
<i class="fa fa-warning"></i>
System auditors have read-only permissions in this section.
<span translate>System auditors have read-only permissions in this section.</span>
<button class="Section-messageBar--close" ng-click="vm.closeMessageBar()"><i class="fa fa-times-circle"></i></button>
</div>
<div class="tab-pane" id="configuration-panel">
<div ng-cloak id="htmlTemplate" class="Panel">
<div class="Form-header">
<div class="Form-title">Configure Tower</div>
<div class="Form-title" translate>Configure Tower</div>
</div>
<div class="row Form-tabRow">
<div class="col-lg-12">
<div class="Form-tabHolder">
<div class="Form-tab" ng-click="vm.activeTabCheck('auth')" ng-class="{'is-selected': vm.activeTab === 'auth' }">Authentication</div>
<div class="Form-tab" ng-click="vm.activeTabCheck('jobs')" ng-class="{'is-selected': vm.activeTab === 'jobs' }">Jobs</div>
<div class="Form-tab" ng-click="vm.activeTabCheck('system')" ng-class="{'is-selected': vm.activeTab === 'system' }">System</div>
<div class="Form-tab" ng-click="vm.activeTabCheck('ui')" ng-class="{'is-selected': vm.activeTab === 'ui' }">User Interface</div>
<div class="Form-tab" ng-click="vm.activeTabCheck('auth')" ng-class="{'is-selected': vm.activeTab === 'auth' }" translate>Authentication</div>
<div class="Form-tab" ng-click="vm.activeTabCheck('jobs')" ng-class="{'is-selected': vm.activeTab === 'jobs' }" translate>Jobs</div>
<div class="Form-tab" ng-click="vm.activeTabCheck('system')" ng-class="{'is-selected': vm.activeTab === 'system' }" translate>System</div>
<div class="Form-tab" ng-click="vm.activeTabCheck('ui')" ng-class="{'is-selected': vm.activeTab === 'ui' }" translate>User Interface</div>
</div>
</div>
</div>

View File

@ -6,6 +6,7 @@
import {templateUrl} from '../shared/template-url/template-url.factory';
import ConfigurationController from './configuration.controller';
import { N_ } from '../i18n';
// Import form controllers
import ConfigurationAuthController from './auth-form/configuration-auth.controller';
@ -26,7 +27,7 @@
},
ncyBreadcrumb: {
parent: 'setup',
label: "Edit Configuration"
label: N_("Edit Configuration")
},
controller: ConfigurationController,
resolve: {

View File

@ -59,6 +59,7 @@ export default [
ngDisabled: $rootScope.user_is_system_auditor,
disabled: $scope.$parent.configDataResolve[key].disabled || null,
readonly: $scope.$parent.configDataResolve[key].readonly || null,
definedInFile: $scope.$parent.configDataResolve[key].defined_in_file || null
});
}

View File

@ -17,10 +17,6 @@
reset: 'AD_HOC_COMMANDS',
multiSelect: true
},
STDOUT_MAX_BYTES_DISPLAY: {
type: 'number',
reset: 'STDOUT_MAX_BYTES_DISPLAY'
},
AWX_PROOT_BASE_PATH: {
type: 'text',
reset: 'AWX_PROOT_BASE_PATH',

View File

@ -133,6 +133,7 @@ export default [
ngDisabled: $rootScope.user_is_system_auditor,
disabled: $scope.$parent.configDataResolve[key].disabled || null,
readonly: $scope.$parent.configDataResolve[key].readonly || null,
definedInFile: $scope.$parent.configDataResolve[key].defined_in_file || null
});
}

View File

@ -62,6 +62,7 @@
ngDisabled: $rootScope.user_is_system_auditor,
disabled: $scope.$parent.configDataResolve[key].disabled || null,
readonly: $scope.$parent.configDataResolve[key].readonly || null,
definedInFile: $scope.$parent.configDataResolve[key].defined_in_file || null
});
}

View File

@ -13,7 +13,8 @@
export function CredentialsList($scope, $rootScope, $location, $log,
$stateParams, Rest, Alert, CredentialList, Prompt, ClearScope,
ProcessErrors, GetBasePath, Wait, $state, $filter, rbacUiControlService, Dataset) {
ProcessErrors, GetBasePath, Wait, $state, $filter, rbacUiControlService, Dataset,
i18n) {
ClearScope();
@ -42,27 +43,29 @@ export function CredentialsList($scope, $rootScope, $location, $log,
});
$scope.$watchCollection(`${$scope.list.name}`, function() {
optionsRequestDataProcessing();
}
);
optionsRequestDataProcessing();
});
// iterate over the list and add fields like type label, after the
// OPTIONS request returns, or the list is sorted/paginated/searched
function optionsRequestDataProcessing(){
$scope[list.name].forEach(function(item, item_idx) {
var itm = $scope[list.name][item_idx];
if ($scope[list.name] !== undefined) {
$scope[list.name].forEach(function(item, item_idx) {
var itm = $scope[list.name][item_idx];
// Set the item type label
if (list.fields.kind && $scope.options &&
$scope.options.hasOwnProperty('kind')) {
$scope.options.kind.choices.every(function(choice) {
if (choice[0] === item.kind) {
itm.kind_label = choice[1];
return false;
}
return true;
});
}
});
// Set the item type label
if (list.fields.kind && $scope.options &&
$scope.options.hasOwnProperty('kind')) {
$scope.options.kind.choices.every(function(choice) {
if (choice[0] === item.kind) {
itm.kind_label = choice[1];
return false;
}
return true;
});
}
});
}
}
$scope.addCredential = function() {
@ -97,24 +100,24 @@ export function CredentialsList($scope, $rootScope, $location, $log,
};
Prompt({
hdr: 'Delete',
body: '<div class="Prompt-bodyQuery">Are you sure you want to delete the credential below?</div><div class="Prompt-bodyTarget">' + $filter('sanitize')(name) + '</div>',
hdr: i18n._('Delete'),
body: '<div class="Prompt-bodyQuery">' + i18n._('Are you sure you want to delete the credential below?') + '</div><div class="Prompt-bodyTarget">' + $filter('sanitize')(name) + '</div>',
action: action,
actionText: 'DELETE'
actionText: i18n._('DELETE')
});
};
}
CredentialsList.$inject = ['$scope', '$rootScope', '$location', '$log',
'$stateParams', 'Rest', 'Alert', 'CredentialList', 'Prompt', 'ClearScope',
'ProcessErrors', 'GetBasePath', 'Wait', '$state', '$filter', 'rbacUiControlService', 'Dataset'
'ProcessErrors', 'GetBasePath', 'Wait', '$state', '$filter', 'rbacUiControlService', 'Dataset', 'i18n'
];
export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
$stateParams, CredentialForm, GenerateForm, Rest, Alert, ProcessErrors,
ClearScope, GetBasePath, GetChoices, Empty, KindChange, BecomeMethodChange,
OwnerChange, FormSave, $state, CreateSelect2) {
OwnerChange, FormSave, $state, CreateSelect2, i18n) {
ClearScope();
// Inject dynamic view
@ -154,7 +157,7 @@ export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
GenerateForm.applyDefaults(form, $scope);
$scope.keyEntered = false;
$scope.permissionsTooltip = 'Please save before assigning permissions';
$scope.permissionsTooltip = i18n._('Please save before assigning permissions');
// determine if the currently logged-in user may share this credential
// previous commentary said: "$rootScope.current_user isn't available because a call to the config endpoint hasn't finished resolving yet"
@ -281,7 +284,7 @@ export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
CredentialsAdd.$inject = ['$scope', '$rootScope', '$compile', '$location',
'$log', '$stateParams', 'CredentialForm', 'GenerateForm', 'Rest', 'Alert',
'ProcessErrors', 'ClearScope', 'GetBasePath', 'GetChoices', 'Empty', 'KindChange', 'BecomeMethodChange',
'OwnerChange', 'FormSave', '$state', 'CreateSelect2'
'OwnerChange', 'FormSave', '$state', 'CreateSelect2', 'i18n'
];
export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
@ -571,10 +574,10 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
};
Prompt({
hdr: 'Delete',
body: '<div class="Prompt-bodyQuery">Are you sure you want to remove the ' + title + ' below from ' + $scope.name + '?</div><div class="Prompt-bodyTarget">' + name + '</div>',
hdr: i18n._('Delete'),
body: '<div class="Prompt-bodyQuery">' + i18n.sprintf(i18n._('Are you sure you want to remove the %s below from %s?'), title, $scope.name) + '</div><div class="Prompt-bodyTarget">' + name + '</div>',
action: action,
actionText: 'DELETE'
actionText: i18n._('DELETE')
});
};

View File

@ -47,7 +47,7 @@ export function Home($scope, $compile, $stateParams, $rootScope, $location, $log
ProcessErrors($scope, data, status, null, { hdr: 'Error!', msg: 'Failed to get dashboard jobs list: ' + status });
});
Rest.setUrl(GetBasePath("job_templates") + "?order_by=-last_job_run&page_size=5&last_job_run__isnull=false");
Rest.setUrl(GetBasePath("unified_job_templates") + "?order_by=-last_job_run&page_size=5&last_job_run__isnull=false&type=workflow_job_template,job_template");
Rest.get()
.success(function (data) {
$scope.dashboardJobTemplatesListData = data.results;
@ -123,7 +123,7 @@ export function Home($scope, $compile, $stateParams, $rootScope, $location, $log
.error(function (data, status) {
ProcessErrors($scope, data, status, null, { hdr: 'Error!', msg: 'Failed to get dashboard jobs list: ' + status });
});
Rest.setUrl(GetBasePath("job_templates") + "?order_by=-last_job_run&page_size=5&last_job_run__isnull=false");
Rest.setUrl(GetBasePath("unified_job_templates") + "?order_by=-last_job_run&page_size=5&last_job_run__isnull=false&type=workflow_job_template,job_template");
Rest.get()
.success(function (data) {
data = data.results;

View File

@ -51,24 +51,26 @@ export function ProjectsList($scope, $rootScope, $location, $log, $stateParams,
// iterate over the list and add fields like type label, after the
// OPTIONS request returns, or the list is sorted/paginated/searched
function optionsRequestDataProcessing(){
$scope[list.name].forEach(function(item, item_idx) {
var itm = $scope[list.name][item_idx];
if ($scope[list.name] !== undefined) {
$scope[list.name].forEach(function(item, item_idx) {
var itm = $scope[list.name][item_idx];
// Set the item type label
if (list.fields.scm_type && $scope.options &&
$scope.options.hasOwnProperty('scm_type')) {
$scope.options.scm_type.choices.every(function(choice) {
if (choice[0] === item.scm_type) {
itm.type_label = choice[1];
return false;
}
return true;
});
}
// Set the item type label
if (list.fields.scm_type && $scope.options &&
$scope.options.hasOwnProperty('scm_type')) {
$scope.options.scm_type.choices.every(function(choice) {
if (choice[0] === item.scm_type) {
itm.type_label = choice[1];
return false;
}
return true;
});
}
buildTooltips(itm);
buildTooltips(itm);
});
});
}
}
function buildTooltips(project) {

View File

@ -9,6 +9,7 @@ import form from './dashboard-hosts.form';
import listController from './dashboard-hosts-list.controller';
import editController from './dashboard-hosts-edit.controller';
import service from './dashboard-hosts.service';
import { N_ } from '../../i18n';
export default
angular.module('dashboardHosts', [])
@ -51,7 +52,7 @@ angular.module('dashboardHosts', [])
},
ncyBreadcrumb: {
parent: 'dashboard',
label: "HOSTS"
label: N_("HOSTS")
},
})
});

View File

@ -3,7 +3,8 @@ export default
[ 'InitiatePlaybookRun',
'templateUrl',
'$state',
function JobTemplatesList(InitiatePlaybookRun, templateUrl, $state) {
'Alert',
function JobTemplatesList(InitiatePlaybookRun, templateUrl, $state, Alert) {
return {
restrict: 'E',
link: link,
@ -27,12 +28,13 @@ export default
function createList(list) {
// smartStatus?, launchUrl, editUrl, name
scope.job_templates = _.map(list, function(job_template){ return {
recent_jobs: job_template.summary_fields.recent_jobs,
launch_url: job_template.url,
edit_url: job_template.url.replace('api/v1', '#'),
name: job_template.name,
id: job_template.id
scope.templates = _.map(list, function(template){ return {
recent_jobs: template.summary_fields.recent_jobs,
launch_url: template.url,
edit_url: template.url.replace('api/v1', '#'),
name: template.name,
id: template.id,
type: template.type
}; });
scope.snapRows = (list.length < 4);
@ -42,12 +44,34 @@ export default
return (status === "successful");
};
scope.launchJobTemplate = function(jobTemplateId){
InitiatePlaybookRun({ scope: scope, id: jobTemplateId, job_type: 'job_template' });
scope.launchTemplate = function(template){
if(template) {
if(template.type && (template.type === 'Job Template' || template.type === 'job_template')) {
InitiatePlaybookRun({ scope: scope, id: template.id, job_type: 'job_template' });
}
else if(template.type && (template.type === 'Workflow Job Template' || template.type === 'workflow_job_template')) {
InitiatePlaybookRun({ scope: scope, id: template.id, job_type: 'workflow_job_template' });
}
else {
// Something went wrong - Let the user know that we're unable to launch because we don't know
// what type of job template this is
Alert('Error: Unable to determine template type', 'We were unable to determine this template\'s type while launching.');
}
}
else {
Alert('Error: Unable to launch template', 'Template parameter is missing');
}
};
scope.editJobTemplate = function (jobTemplateId) {
$state.go('templates.editJobTemplate', {job_template_id: jobTemplateId});
scope.editTemplate = function (template) {
if(template) {
if(template.type && (template.type === 'Job Template' || template.type === 'job_template')) {
$state.go('templates.editJobTemplate', {job_template_id: template.id});
}
else if(template.type && (template.type === 'Workflow Job Template' || template.type === 'workflow_job_template')) {
$state.go('templates.editWorkflowJobTemplate', {workflow_job_template_id: template.id});
}
}
};
}
}];

View File

@ -1,7 +1,7 @@
<div class="DashboardList" ng-hide="noJobTemplates">
<div class="DashboardList-header">
<h3 class="DashboardList-headerText">
<translate>RECENTLY USED JOB TEMPLATES</translate>
<translate>RECENTLY USED TEMPLATES</translate>
</h3>
<a href="/#/templates" class="DashboardList-viewAll">
<translate>VIEW ALL</translate>
@ -23,21 +23,21 @@
<tr class="List-tableRow"
ng-class-odd="'List-tableRow--oddRow'"
ng-class-even="'List-tableRow--evenRow'"
ng-repeat = "job_template in job_templates">
ng-repeat = "template in templates">
<td class="DashboardList-nameCell">
<a href="#/templates/{{ job_template.id }}" class="DashboardList-nameContainer">
{{ job_template.name }}
<a ng-href="#/templates/{{template.type}}/{{template.id}}" class="DashboardList-nameContainer">
{{ template.name }}
</a>
</td>
<td class="DashboardList-activityCell">
<aw-smart-status jobs="job_template.recent_jobs"></aw-smart-status>
<aw-smart-status jobs="template.recent_jobs"></aw-smart-status>
</td>
<td class="List-actionsContainer">
<div class="List-actionButtonCell">
<button class="List-actionButton" ng-click="launchJobTemplate(job_template.id)">
<button class="List-actionButton" ng-click="launchTemplate(template)">
<i class="icon-launch"></i>
</button>
<button class="List-actionButton" ng-click="editJobTemplate(job_template.id)">
<button class="List-actionButton" ng-click="editTemplate(template)">
<i class="fa fa-pencil"></i>
</button>
</div>
@ -53,7 +53,8 @@
</h3>
</div>
<div class="DashboardList-container">
<p class="DashboardList-noJobs">No job templates were recently used.<br />
You can create a job template <a href="#/templates/add_job_template">here</a>.</p>
<p class="DashboardList-noJobs"><translate>No job templates were recently used.</translate><br />
<!-- TODO: Seems $sce.trustAsHtml() does not work here. -->
<translate>You can create a job template <a href="#/templates/add_job_template">here</a>.</translate></p>
</div>
</div>

View File

@ -18,10 +18,11 @@
export default
angular.module('ActivityDetailDefinition', [])
.value('ActivityDetailForm', {
.factory('ActivityDetailForm', ['i18n', function(i18n) {
return {
name: 'activity',
editTitle: 'Activity Detail',
editTitle: i18n._('Activity Detail'),
well: false,
'class': 'horizontal-narrow',
formFieldSize: 'col-lg-10',
@ -29,17 +30,17 @@ export default
fields: {
user: {
label: "Initiated by",
label: i18n._("Initiated by"),
type: 'text',
readonly: true
},
operation: {
label: 'Action',
label: i18n._('Action'),
type: 'text',
readonly: true
},
changes: {
label: 'Changes',
label: i18n._('Changes'),
type: 'textarea',
class: 'Form-textAreaLabel',
ngHide: "!changes || changes =='' || changes == 'null'",
@ -47,4 +48,4 @@ export default
}
}
}); //Form
};}]); //Form

View File

@ -424,6 +424,7 @@ export default
related: {
permissions: {
name: 'permissions',
disabled: '(organization === undefined ? true : false)',
// Do not transition the state if organization is undefined
ngClick: `(organization === undefined ? true : false)||$state.go('credentials.edit.permissions')`,
@ -460,25 +461,16 @@ export default
role: {
label: i18n._('Role'),
type: 'role',
noSort: true,
nosort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4'
},
team_roles: {
label: i18n._('Team Roles'),
type: 'team_roles',
noSort: true,
nosort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4'
}
}
}
},
relatedSets: function(urls) {
return {
permissions: {
iterator: 'permission',
url: urls.access_list,
}
};
}
};}]);

View File

@ -12,73 +12,74 @@
export default
angular.module('EventsViewerFormDefinition', [])
.value('EventsViewerForm', {
.factory('EventsViewerForm', ['i18n', function(i18n) {
return {
fields: {
host_name: {
label: 'Host',
section: 'Event'
label: i18n._('Host'),
section: i18n._('Event')
},
status: {
label: 'Status',
section: 'Event'
labellabel: i18n._('Status'),
section: i18n._('Event')
},
id: {
label: 'ID',
section: 'Event'
labellabel: i18n._('ID'),
section: i18n._('Event')
},
created: {
label: 'Created On',
section: 'Event'
labellabel: i18n._('Created On'),
section: i18n._('Event')
},
role: {
label: 'Role',
section: 'Event'
labellabel: i18n._('Role'),
section: i18n._('Event')
},
play: {
label: 'Play',
labellabel: i18n._('Play'),
type: 'text',
section: 'Event'
section: i18n._('Event')
},
task: {
label: 'Task',
section: 'Event'
labellabel: i18n._('Task'),
section: i18n._('Event')
},
item: {
label: 'Item',
section: 'Event'
labellabel: i18n._('Item'),
section: i18n._('Event')
},
module_name: {
label: 'Module',
section: 'Event'
labellabel: i18n._('Module'),
section: i18n._('Event')
},
module_args: {
label: 'Arguments',
section: 'Event'
labellabel: i18n._('Arguments'),
section: i18n._('Event')
},
rc: {
label: 'Return Code',
section: 'Results'
labellabel: i18n._('Return Code'),
section: i18n._('Results')
},
msg: {
label: 'Message',
section: 'Results'
labellabel: i18n._('Message'),
section: i18n._('Results')
},
results: {
label: 'Results',
section: 'Results'
labellabel: i18n._('Results'),
section: i18n._('Results')
},
start: {
label: 'Start',
section: 'Timing'
labellabel: i18n._('Start'),
section: i18n._('Timing')
},
end: {
label: 'End',
section: 'Timing'
labellabel: i18n._('End'),
section: i18n._('Timing')
},
delta: {
label: 'Elapsed',
section: 'Timing'
labellabel: i18n._('Elapsed'),
section: i18n._('Timing')
}
}
});
};}]);

View File

@ -305,6 +305,7 @@ export default
label: "Cache Timeout <span class=\"small-text\"> (seconds)</span>",
id: 'source-cache-timeout',
type: 'number',
ngDisabled: '!(group_obj.summary_fields.user_capabilities.edit || canAdd)',
integer: true,
min: 0,
ngShow: "source && source.value !== '' && update_on_launch",
@ -339,14 +340,6 @@ export default
"notifications": {
include: "NotificationsList"
}
},
relatedSets: function() {
return {
notifications: {
iterator: 'notification',
url: 'api/v1/notification_templates/'
}
};
}
})

View File

@ -88,11 +88,12 @@ angular.module('InventoryFormDefinition', [])
},
related: {
permissions: {
name: 'permissions',
awToolTip: i18n._('Please save before assigning permissions'),
dataPlacement: 'top',
basePath: 'api/v1/inventories/{{$stateParams.inventory_id}}/access_list/',
type: 'collection',
title: 'Permissions',
title: i18n._('Permissions'),
iterator: 'permission',
index: false,
open: false,
@ -119,66 +120,17 @@ angular.module('InventoryFormDefinition', [])
role: {
label: i18n._('Role'),
type: 'role',
noSort: true,
nosort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4',
},
team_roles: {
label: i18n._('Team Roles'),
type: 'team_roles',
noSort: true,
nosort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4',
}
}
}
},
relatedSets: function() {
return {
permissions: {
awToolTip: i18n._('Please save before assigning permissions'),
dataPlacement: 'top',
basePath: 'inventories/:id/access_list/',
type: 'collection',
title: i18n._('Permissions'),
iterator: 'permission',
index: false,
open: false,
searchType: 'select',
actions: {
add: {
ngClick: "addPermission",
label: i18n._('Add'),
awToolTip: i18n._('Add a permission'),
actionClass: 'btn List-buttonSubmit',
buttonContent: '&#43; ' + i18n._('ADD'),
ngShow: '(inventory_obj.summary_fields.user_capabilities.edit || canAdd)'
}
},
fields: {
username: {
key: true,
label: i18n._('User'),
linkBase: 'users',
class: 'col-lg-3 col-md-3 col-sm-3 col-xs-4'
},
role: {
label: i18n._('Role'),
type: 'role',
noSort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4',
noSearch: true
},
team_roles: {
label: i18n._('Team Roles'),
type: 'team_roles',
noSort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4',
noSearch: true
}
}
}
};
}
};}]);

View File

@ -89,6 +89,7 @@ export default
dataContainer: "body",
subCheckbox: {
variable: 'ask_inventory_on_launch',
ngChange: 'job_template_form.inventory_name.$validate()',
ngShow: "!job_type.value || job_type.value !== 'scan'",
text: i18n._('Prompt on launch')
},
@ -121,7 +122,7 @@ export default
label: i18n._('Playbook'),
type:'select',
ngOptions: 'book for book in playbook_options track by book',
ngDisabled: "(job_type.value === 'scan' && project_name === 'Default') || !(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)",
ngDisabled: "(job_type.value === 'scan' && project_name === 'Default') || !(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate) || disablePlaybookBecausePermissionDenied",
id: 'playbook-select',
awRequiredWhen: {
reqExpression: "playbookrequired",
@ -158,7 +159,8 @@ export default
dataContainer: "body",
subCheckbox: {
variable: 'ask_credential_on_launch',
text: i18n._('Prompt on launch')
text: i18n._('Prompt on launch'),
ngChange: 'job_template_form.credential_name.$validate()',
},
ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)'
},
@ -214,7 +216,7 @@ export default
dataTitle: i18n._('Forks'),
dataPlacement: 'right',
dataContainer: "body",
ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' // TODO: get working
ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)'
},
limit: {
label: i18n._('Limit'),
@ -396,7 +398,7 @@ export default
},
save: {
ngClick: 'formSave()', //$scope.function to call on click, optional
ngDisabled: "job_templates_form.$invalid",//true //Disable when $pristine or $invalid, optional and when can_edit = false, for permission reasons
ngDisabled: "job_template_form.$invalid",//true //Disable when $pristine or $invalid, optional and when can_edit = false, for permission reasons
ngShow: '(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)'
}
},
@ -406,6 +408,7 @@ export default
include: "CompletedJobsList"
},
permissions: {
name: 'permissions',
awToolTip: i18n._('Please save before assigning permissions'),
dataPlacement: 'top',
basePath: 'api/v1/job_templates/{{$stateParams.job_template_id}}/access_list/',
@ -439,13 +442,13 @@ export default
role: {
label: 'Role',
type: 'role',
noSort: true,
nosort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4',
},
team_roles: {
label: 'Team Roles',
type: 'team_roles',
noSort: true,
nosort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4',
}
}
@ -479,23 +482,6 @@ export default
label: i18n._('Edit Survey'),
class: 'Form-primaryButton'
}
},
relatedSets: function(urls) {
return {
completed_jobs: {
iterator: 'completed_job',
url: urls.jobs + '?or__status=successful&or__status=failed&or__status=error&or__status=canceled'
},
permissions: {
iterator: 'permission',
url: urls.access_list
},
notifications: {
iterator: 'notification',
url: '/api/v1/notification_templates/'
}
};
}
};}])

View File

@ -12,49 +12,50 @@
export default
angular.module('LogViewerStatusDefinition', [])
.value('LogViewerStatusForm', {
.factory('LogViewerStatusForm', ['i18n', function(i18n) {
return {
name: 'status',
well: false,
fields: {
"name": {
label: "Name",
label: i18n._("Name"),
type: "text",
readonly: true,
},
"status": {
label: "Status",
label: i18n._("Status"),
type: "text",
readonly: true
},
"license_error": {
label: "License Error",
label: i18n._("License Error"),
type: "text",
readonly: true
},
"started": {
label: "Started",
label: i18n._("Started"),
type: "date",
"filter": "longDate",
readonly: true
},
"finished": {
label: "Finished",
label: i18n._("Finished"),
type: "date",
"filter": "longDate",
readonly: true
},
"elapsed": {
label: "Elapsed",
label: i18n._("Elapsed"),
type: "text",
readonly: true
},
"launch_type": {
label: "Launch Type",
label: i18n._("Launch Type"),
type: "text",
readonly: true
}
}
});
};}]);

View File

@ -53,24 +53,23 @@ export default
},
related: {
permissions: {
awToolTip: i18n._('Please save before assigning permissions'),
users: {
dataPlacement: 'top',
awToolTip: i18n._('Please save before adding users'),
basePath: 'api/v1/organizations/{{$stateParams.organization_id}}/access_list/',
search: {
order_by: 'username'
},
dataPlacement: 'top',
type: 'collection',
title: i18n._('Permissions'),
iterator: 'permission',
title: i18n._('Users'),
iterator: 'user',
index: false,
open: false,
searchType: 'select',
actions: {
add: {
ngClick: "$state.go('.add')",
label: i18n._('Add'),
awToolTip: i18n._('Add a permission'),
awToolTip: i18n._('Add Users to this organization.'),
actionClass: 'btn List-buttonSubmit',
buttonContent: '&#43; ' + i18n._('ADD'),
ngShow: '(organization_obj.summary_fields.user_capabilities.edit || canAdd)'
@ -87,14 +86,8 @@ export default
role: {
label: i18n._('Role'),
type: 'role',
noSort: true,
nosort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4'
},
team_roles: {
label: i18n._('Team Roles'),
type: 'team_roles',
noSort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4'
}
}
},
@ -103,18 +96,6 @@ export default
}
},
relatedSets: function(urls) {
return {
permissions: {
iterator: 'permission',
url: urls.access_list
},
notifications: {
iterator: 'notification',
url: '/api/v1/notification_templates/'
}
};
}
};}])

Some files were not shown because too many files have changed in this diff Show More