Merge branch 'release_3.1.0' into multi_worker_callbacks

Author: Matthew Jones (committed by GitHub)
Date: 2017-01-18 20:39:15 -05:00
178 changed files with 25728 additions and 761 deletions

.gitignore vendored
View File

@@ -106,7 +106,6 @@ reports
*.log.[0-9]
*.results
local/
-*.mo
# AWX python libs populated by requirements.txt
awx/lib/.deps_built

View File

@@ -428,7 +428,7 @@ celeryd:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
-$(PYTHON) manage.py celeryd -l DEBUG -B --autoreload --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default,scheduler,broadcast_all,$(COMPOSE_HOST)
+$(PYTHON) manage.py celeryd -l DEBUG -B --autoreload --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default,scheduler,broadcast_all,$(COMPOSE_HOST) -n celery@$(COMPOSE_HOST)
#$(PYTHON) manage.py celery multi show projects jobs default -l DEBUG -Q:projects projects -Q:jobs jobs -Q:default default -c:projects 1 -c:jobs 3 -c:default 3 -Ofair -B --schedule=$(CELERY_SCHEDULE_FILE)
# Run to start the zeromq callback receiver
@@ -559,9 +559,12 @@ messages:
fi; \
$(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
-# generate l10n .json .mo
-languages: $(UI_DEPS_FLAG_FILE) check-po
+# generate l10n .json
+ui-languages: $(UI_DEPS_FLAG_FILE) check-po
$(NPM_BIN) --prefix awx/ui run languages
+# generate l10n .mo
+api-languages:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
@@ -592,8 +595,7 @@ ui-devel: $(UI_DEPS_FLAG_FILE)
ui-release: $(UI_RELEASE_FLAG_FILE)
-# todo: include languages target when .po deliverables are added to source control
-$(UI_RELEASE_FLAG_FILE): $(UI_DEPS_FLAG_FILE)
+$(UI_RELEASE_FLAG_FILE): ui-languages $(UI_DEPS_FLAG_FILE)
$(NPM_BIN) --prefix awx/ui run build-release
touch $(UI_RELEASE_FLAG_FILE)

View File

@@ -13,7 +13,7 @@ from django.utils.translation import ugettext_lazy as _
from rest_framework import exceptions
from rest_framework import metadata
from rest_framework import serializers
-from rest_framework.relations import RelatedField
+from rest_framework.relations import RelatedField, ManyRelatedField
from rest_framework.request import clone_request
# Ansible Tower
@@ -75,7 +75,7 @@ class Metadata(metadata.SimpleMetadata):
elif getattr(field, 'fields', None):
field_info['children'] = self.get_serializer_info(field)
-if hasattr(field, 'choices') and not isinstance(field, RelatedField):
+if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
field_info['choices'] = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]
# Indicate if a field is write-only.
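The reordered guard matters because DRF's `RelatedField` and `ManyRelatedField` expose a `.choices` property that enumerates their queryset, so testing `hasattr(field, 'choices')` first would pull every related object into the OPTIONS metadata. A minimal sketch of the new ordering, with a hypothetical helper name that is not part of the codebase:

```python
from rest_framework.relations import RelatedField, ManyRelatedField


def choices_for_metadata(field):
    # Related fields build `.choices` by iterating their queryset, so rule them
    # out before touching the potentially expensive choices attribute.
    if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
        return [(value, name) for value, name in field.choices.items()]
    return None
```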

View File

@@ -16,6 +16,7 @@ class Pagination(pagination.PageNumberPagination):
if not self.page.has_next():
return None
url = self.request and self.request.get_full_path() or ''
+url = url.encode('utf-8')
page_number = self.page.next_page_number()
return replace_query_param(url, self.page_query_param, page_number)
@@ -23,5 +24,6 @@ class Pagination(pagination.PageNumberPagination):
if not self.page.has_previous():
return None
url = self.request and self.request.get_full_path() or ''
+url = url.encode('utf-8')
page_number = self.page.previous_page_number()
return replace_query_param(url, self.page_query_param, page_number)
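Encoding the URL before handing it to `replace_query_param` is presumably a Python 2 unicode fix: a request path containing non-ASCII characters would otherwise risk a `UnicodeEncodeError` when the query string is rebuilt. An illustrative sketch with an invented path:

```python
# Sketch only: demonstrates the byte-encoding step under Python 2 semantics.
from rest_framework.utils.urls import replace_query_param

url = u'/api/v1/inventories/?name=\u00fcber'   # unicode path, as from get_full_path()
url = url.encode('utf-8')                      # mirrors the change above
next_url = replace_query_param(url, 'page', 2)
```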

View File

@@ -251,6 +251,7 @@ class BaseSerializer(serializers.ModelSerializer):
'inventory_update': _('Inventory Sync'),
'system_job': _('Management Job'),
'workflow_job': _('Workflow Job'),
+'workflow_job_template': _('Workflow Template'),
}
choices = []
for t in self.get_types():
@@ -1619,7 +1620,8 @@ class ResourceAccessListElementSerializer(UserSerializer):
'name': role.name,
'description': role.description,
'team_id': team_role.object_id,
-'team_name': team_role.content_object.name
+'team_name': team_role.content_object.name,
+'team_organization_name': team_role.content_object.organization.name,
}
if role.content_type is not None:
role_dict['resource_name'] = role.content_object.name
@@ -2369,7 +2371,7 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):
if view and view.request:
request_method = view.request.method
if request_method in ['PATCH']:
-obj = view.get_object()
+obj = self.instance
char_prompts = copy.copy(obj.char_prompts)
char_prompts.update(self.extract_char_prompts(data))
else:
@@ -2708,18 +2710,15 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
variables_needed_to_start = serializers.ReadOnlyField()
survey_enabled = serializers.SerializerMethodField()
extra_vars = VerbatimField(required=False, write_only=True)
-warnings = serializers.SerializerMethodField()
workflow_job_template_data = serializers.SerializerMethodField()

class Meta:
model = WorkflowJobTemplate
-fields = ('can_start_without_user_input', 'extra_vars', 'warnings',
+fields = ('can_start_without_user_input', 'extra_vars',
'survey_enabled', 'variables_needed_to_start',
+'node_templates_missing', 'node_prompts_rejected',
'workflow_job_template_data')

-def get_warnings(self, obj):
-return obj.get_warnings()

def get_survey_enabled(self, obj):
if obj:
return obj.survey_enabled and 'spec' in obj.survey_spec
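With `warnings` gone, the launch endpoint now reports workflow readiness through explicit, list-valued keys. A hypothetical GET payload after this change (field names from the serializer above, values invented):

```python
launch_info = {
    'can_start_without_user_input': False,
    'survey_enabled': False,
    'variables_needed_to_start': [],
    'node_templates_missing': [42],        # pks of nodes with no unified_job_template
    'node_prompts_rejected': [43, 44],     # pks of nodes whose prompts are not accepted
    'workflow_job_template_data': {'id': 5, 'name': 'example-wfjt'},
}
```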

View File

@@ -13,7 +13,7 @@ Which will act on data older than 30 days.
For `cleanup_facts`:
-`{"older_than": "4w", `granularity`: "3d"}`
+`{"older_than": "4w", "granularity": "3d"}`
Which will reduce the granularity of scan data to one scan per 3 days when the data is older than 4w.
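For reference, the same parameters can be passed when driving the management command directly; this sketch assumes only the `--older_than`/`--granularity` options shown later in this diff:

```python
from django.core.management import call_command

# Reduce fact granularity to one scan per 3 days for data older than 4 weeks.
call_command('cleanup_facts', older_than='4w', granularity='3d')
```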

View File

@@ -851,6 +851,7 @@ class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = Organization
relationship = 'notification_templates_any'
+new_in_300 = True

class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
@@ -859,6 +860,7 @@ class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = Organization
relationship = 'notification_templates_error'
+new_in_300 = True

class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
@@ -867,6 +869,7 @@ class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = Organization
relationship = 'notification_templates_success'
+new_in_300 = True

class OrganizationAccessList(ResourceAccessList):
@@ -921,6 +924,7 @@ class TeamRolesList(SubListCreateAttachDetachAPIView):
metadata_class = RoleMetadata
parent_model = Team
relationship='member_role.children'
+new_in_300 = True

def get_queryset(self):
team = get_object_or_404(Team, pk=self.kwargs['pk'])
@@ -1103,6 +1107,7 @@ class ProjectNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = Project
relationship = 'notification_templates_any'
+new_in_300 = True

class ProjectNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
@@ -1111,6 +1116,7 @@ class ProjectNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = Project
relationship = 'notification_templates_error'
+new_in_300 = True

class ProjectNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
@@ -1119,6 +1125,7 @@ class ProjectNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = Project
relationship = 'notification_templates_success'
+new_in_300 = True

class ProjectUpdatesList(SubListAPIView):
@@ -1156,6 +1163,7 @@ class ProjectUpdateList(ListAPIView):
model = ProjectUpdate
serializer_class = ProjectUpdateListSerializer
+new_in_13 = True

class ProjectUpdateDetail(RetrieveDestroyAPIView):
@@ -1196,6 +1204,7 @@ class ProjectUpdateNotificationsList(SubListAPIView):
serializer_class = NotificationSerializer
parent_model = ProjectUpdate
relationship = 'notifications'
+new_in_300 = True

class ProjectAccessList(ResourceAccessList):
@@ -1271,6 +1280,7 @@ class UserRolesList(SubListCreateAttachDetachAPIView):
parent_model = User
relationship='roles'
permission_classes = (IsAuthenticated,)
+new_in_300 = True

def get_queryset(self):
u = get_object_or_404(User, pk=self.kwargs['pk'])
@@ -1543,12 +1553,14 @@ class InventoryScriptList(ListCreateAPIView):
model = CustomInventoryScript
serializer_class = CustomInventoryScriptSerializer
+new_in_210 = True

class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
model = CustomInventoryScript
serializer_class = CustomInventoryScriptSerializer
+new_in_210 = True

def destroy(self, request, *args, **kwargs):
instance = self.get_object()
@@ -2170,6 +2182,7 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = InventorySource
relationship = 'notification_templates_any'
+new_in_300 = True

def post(self, request, *args, **kwargs):
parent = self.get_parent_object()
@@ -2281,6 +2294,7 @@ class InventoryUpdateNotificationsList(SubListAPIView):
serializer_class = NotificationSerializer
parent_model = InventoryUpdate
relationship = 'notifications'
+new_in_300 = True

class JobTemplateList(ListCreateAPIView):
@@ -2316,7 +2330,10 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
always_allow_superuser = False

def update_raw_data(self, data):
-obj = self.get_object()
+try:
+    obj = self.get_object()
+except PermissionDenied:
+    return data
extra_vars = data.pop('extra_vars', None) or {}
if obj:
for p in obj.passwords_needed_to_start:
@@ -2396,6 +2413,7 @@ class JobTemplateSurveySpec(GenericAPIView):
model = JobTemplate
parent_model = JobTemplate
serializer_class = EmptySerializer
+new_in_210 = True

def get(self, request, *args, **kwargs):
obj = self.get_object()
@@ -2462,6 +2480,7 @@ class WorkflowJobTemplateSurveySpec(WorkflowsEnforcementMixin, JobTemplateSurveySpec):
model = WorkflowJobTemplate
parent_model = WorkflowJobTemplate
+new_in_310 = True

class JobTemplateActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
@@ -2479,6 +2498,7 @@ class JobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = JobTemplate
relationship = 'notification_templates_any'
+new_in_300 = True

class JobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
@@ -2487,6 +2507,7 @@ class JobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = JobTemplate
relationship = 'notification_templates_error'
+new_in_300 = True

class JobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
@@ -2495,6 +2516,7 @@ class JobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = JobTemplate
relationship = 'notification_templates_success'
+new_in_300 = True

class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView):
@@ -2884,10 +2906,16 @@ class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, GenericAPIView):
def get(self, request, *args, **kwargs):
obj = self.get_object()
-data = {}
-copy_TF, messages = request.user.can_access_with_errors(self.model, 'copy', obj)
-data['can_copy'] = copy_TF
-data['warnings'] = messages
+can_copy, messages = request.user.can_access_with_errors(self.model, 'copy', obj)
+data = OrderedDict([
+    ('can_copy', can_copy), ('can_copy_without_user_input', can_copy),
+    ('templates_unable_to_copy', [] if can_copy else ['all']),
+    ('credentials_unable_to_copy', [] if can_copy else ['all']),
+    ('inventories_unable_to_copy', [] if can_copy else ['all'])
+])
+if messages and can_copy:
+    data['can_copy_without_user_input'] = False
+    data.update(messages)
return Response(data)

def post(self, request, *args, **kwargs):
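A hypothetical GET response from the copy endpoint after this change, for a user who may copy the workflow but lacks use permission on one prompted credential (keys from the OrderedDict above, values invented):

```python
copy_check = {
    'can_copy': True,
    'can_copy_without_user_input': False,        # user input needed for the flagged items
    'templates_unable_to_copy': [],
    'credentials_unable_to_copy': ['vault-cred'],
    'inventories_unable_to_copy': [],
}
```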
@@ -2918,7 +2946,10 @@ class WorkflowJobTemplateLaunch(WorkflowsEnforcementMixin, RetrieveAPIView):
always_allow_superuser = False

def update_raw_data(self, data):
-obj = self.get_object()
+try:
+    obj = self.get_object()
+except PermissionDenied:
+    return data
extra_vars = data.pop('extra_vars', None) or {}
if obj:
for v in obj.variables_needed_to_start:
@@ -2953,6 +2984,14 @@ class WorkflowJobRelaunch(WorkflowsEnforcementMixin, GenericAPIView):
model = WorkflowJob
serializer_class = EmptySerializer
is_job_start = True
+new_in_310 = True
+
+def check_object_permissions(self, request, obj):
+    if request.method == 'POST' and obj:
+        relaunch_perm, messages = request.user.can_access_with_errors(self.model, 'start', obj)
+        if not relaunch_perm and 'workflow_job_template' in messages:
+            self.permission_denied(request, message=messages['workflow_job_template'])
+    return super(WorkflowJobRelaunch, self).check_object_permissions(request, obj)

def get(self, request, *args, **kwargs):
return Response({})
@@ -3129,6 +3168,7 @@ class SystemJobTemplateList(ListAPIView):
model = SystemJobTemplate
serializer_class = SystemJobTemplateSerializer
+new_in_210 = True

def get(self, request, *args, **kwargs):
if not request.user.is_superuser and not request.user.is_system_auditor:
@@ -3140,6 +3180,7 @@ class SystemJobTemplateDetail(RetrieveAPIView):
model = SystemJobTemplate
serializer_class = SystemJobTemplateSerializer
+new_in_210 = True

class SystemJobTemplateLaunch(GenericAPIView):
@@ -3147,6 +3188,7 @@ class SystemJobTemplateLaunch(GenericAPIView):
model = SystemJobTemplate
serializer_class = EmptySerializer
is_job_start = True
+new_in_210 = True

def get(self, request, *args, **kwargs):
return Response({})
@@ -3154,8 +3196,8 @@ class SystemJobTemplateLaunch(GenericAPIView):
def post(self, request, *args, **kwargs):
obj = self.get_object()
-new_job = obj.create_unified_job(**request.data)
-new_job.signal_start(**request.data)
+new_job = obj.create_unified_job(extra_vars=request.data.get('extra_vars', {}))
+new_job.signal_start()
data = dict(system_job=new_job.id)
return Response(data, status=status.HTTP_201_CREATED)
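The launch POST now honors only `extra_vars` from the request body instead of splatting arbitrary request keys into job creation and start. A sketch of the narrowed contract (the function name and request body are invented for illustration):

```python
def launch_system_job(system_job_template, request_data):
    # Only `extra_vars` is forwarded; any other keys in the body are ignored.
    new_job = system_job_template.create_unified_job(
        extra_vars=request_data.get('extra_vars', {}))
    new_job.signal_start()
    return new_job
```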
@@ -3169,6 +3211,7 @@ class SystemJobTemplateSchedulesList(SubListCreateAttachDetachAPIView):
parent_model = SystemJobTemplate
relationship = 'schedules'
parent_key = 'unified_job_template'
+new_in_210 = True

class SystemJobTemplateJobsList(SubListAPIView):
@@ -3178,6 +3221,7 @@ class SystemJobTemplateJobsList(SubListAPIView):
parent_model = SystemJobTemplate
relationship = 'jobs'
parent_key = 'system_job_template'
+new_in_210 = True

class SystemJobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
@@ -3186,6 +3230,7 @@ class SystemJobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = SystemJobTemplate
relationship = 'notification_templates_any'
+new_in_300 = True

class SystemJobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
@@ -3194,6 +3239,7 @@ class SystemJobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = SystemJobTemplate
relationship = 'notification_templates_error'
+new_in_300 = True

class SystemJobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
@@ -3202,6 +3248,7 @@ class SystemJobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
serializer_class = NotificationTemplateSerializer
parent_model = SystemJobTemplate
relationship = 'notification_templates_success'
+new_in_300 = True

class JobList(ListCreateAPIView):
@@ -3239,10 +3286,12 @@ class JobLabelList(SubListAPIView):
parent_model = Job
relationship = 'labels'
parent_key = 'job'
+new_in_300 = True

class WorkflowJobLabelList(WorkflowsEnforcementMixin, JobLabelList):
parent_model = WorkflowJob
+new_in_310 = True

class JobActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
@@ -3337,6 +3386,7 @@ class JobNotificationsList(SubListAPIView):
serializer_class = NotificationSerializer
parent_model = Job
relationship = 'notifications'
+new_in_300 = True

class BaseJobHostSummariesList(SubListAPIView):
@@ -3847,12 +3897,14 @@ class AdHocCommandNotificationsList(SubListAPIView):
serializer_class = NotificationSerializer
parent_model = AdHocCommand
relationship = 'notifications'
+new_in_300 = True

class SystemJobList(ListCreateAPIView):
model = SystemJob
serializer_class = SystemJobListSerializer
+new_in_210 = True

def get(self, request, *args, **kwargs):
if not request.user.is_superuser and not request.user.is_system_auditor:
@@ -3864,6 +3916,7 @@ class SystemJobDetail(RetrieveDestroyAPIView):
model = SystemJob
serializer_class = SystemJobSerializer
+new_in_210 = True

class SystemJobCancel(RetrieveAPIView):
@@ -3871,6 +3924,7 @@ class SystemJobCancel(RetrieveAPIView):
model = SystemJob
serializer_class = SystemJobCancelSerializer
is_job_cancel = True
+new_in_210 = True

def post(self, request, *args, **kwargs):
obj = self.get_object()
@@ -3887,6 +3941,7 @@ class SystemJobNotificationsList(SubListAPIView):
serializer_class = NotificationSerializer
parent_model = SystemJob
relationship = 'notifications'
+new_in_300 = True

class UnifiedJobTemplateList(ListAPIView):
@@ -4009,6 +4064,7 @@ class UnifiedJobStdout(RetrieveAPIView):
class ProjectUpdateStdout(UnifiedJobStdout):
model = ProjectUpdate
+new_in_13 = True

class InventoryUpdateStdout(UnifiedJobStdout):
@@ -4079,6 +4135,7 @@ class NotificationTemplateNotificationList(SubListAPIView):
parent_model = NotificationTemplate
relationship = 'notifications'
parent_key = 'notification_template'
+new_in_300 = True

class NotificationList(ListAPIView):

View File

@@ -16,7 +16,10 @@ class ConfConfig(AppConfig):
from .settings import SettingsWrapper
SettingsWrapper.initialize()
if settings.LOG_AGGREGATOR_ENABLED:
-LOGGING = settings.LOGGING
-LOGGING['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler'
-configure_logging(settings.LOGGING_CONFIG, LOGGING)
+LOGGING_DICT = settings.LOGGING
+LOGGING_DICT['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler'
+if 'awx' in settings.LOG_AGGREGATOR_LOGGERS:
+    if 'http_receiver' not in LOGGING_DICT['loggers']['awx']['handlers']:
+        LOGGING_DICT['loggers']['awx']['handlers'] += ['http_receiver']
+configure_logging(settings.LOGGING_CONFIG, LOGGING_DICT)
# checks.register(SettingsWrapper._check_settings)

View File

@@ -13,7 +13,7 @@ import awx.main.signals
from awx.conf import settings_registry
from awx.conf.models import Setting
from awx.conf.serializers import SettingSerializer
-from awx.main.tasks import clear_cache_keys
+from awx.main.tasks import process_cache_changes

logger = logging.getLogger('awx.conf.signals')
@@ -32,7 +32,7 @@ def handle_setting_change(key, for_delete=False):
cache_keys = set([Setting.get_cache_key(k) for k in setting_keys])
logger.debug('sending signals to delete cache keys(%r)', cache_keys)
cache.delete_many(cache_keys)
-clear_cache_keys.delay(list(cache_keys))
+process_cache_changes.delay(list(cache_keys))
# Send setting_changed signal with new value for each setting.
for setting_key in setting_keys:

View File

@@ -181,7 +181,7 @@ class EventContext(object):
event_data['res'] = {}
event_dict = dict(event=event, event_data=event_data)
for key in event_data.keys():
-if key in ('job_id', 'ad_hoc_command_id', 'uuid', 'parent_uuid', 'created', 'artifact_data'):
+if key in ('job_id', 'ad_hoc_command_id', 'uuid', 'parent_uuid', 'created',):
event_dict[key] = event_data.pop(key)
elif key in ('verbosity', 'pid'):
event_dict[key] = event_data[key]

View File

@@ -19,8 +19,6 @@ from __future__ import (absolute_import, division, print_function)
# Python
import contextlib
-import copy
-import re
import sys
import uuid
@@ -77,45 +75,11 @@ class BaseCallbackModule(CallbackBase):
super(BaseCallbackModule, self).__init__()
self.task_uuids = set()

-def censor_result(self, res, no_log=False):
-if not isinstance(res, dict):
-if no_log:
-return "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
-return res
-if res.get('_ansible_no_log', no_log):
-new_res = {}
-for k in self.CENSOR_FIELD_WHITELIST:
-if k in res:
-new_res[k] = res[k]
-if k == 'cmd' and k in res:
-if isinstance(res['cmd'], list):
-res['cmd'] = ' '.join(res['cmd'])
-if re.search(r'\s', res['cmd']):
-new_res['cmd'] = re.sub(r'^(([^\s\\]|\\\s)+).*$',
-r'\1 <censored>',
-res['cmd'])
-new_res['censored'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
-res = new_res
-if 'results' in res:
-if isinstance(res['results'], list):
-for i in xrange(len(res['results'])):
-res['results'][i] = self.censor_result(res['results'][i], res.get('_ansible_no_log', no_log))
-elif res.get('_ansible_no_log', False):
-res['results'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
-return res

@contextlib.contextmanager
def capture_event_data(self, event, **event_data):
event_data.setdefault('uuid', str(uuid.uuid4()))
-if 'res' in event_data:
-event_data['res'] = self.censor_result(copy.copy(event_data['res']))
-res = event_data.get('res', None)
-if res and isinstance(res, dict):
-if 'artifact_data' in res:
-event_data['artifact_data'] = res['artifact_data']
if event not in self.EVENTS_WITHOUT_TASK:
task = event_data.pop('task', None)
else:
@@ -262,7 +226,7 @@ class BaseCallbackModule(CallbackBase):
if task_uuid in self.task_uuids:
# FIXME: When this task UUID repeats, it means the play is using the
# free strategy, so different hosts may be running different tasks
# within a play.
return
self.task_uuids.add(task_uuid)
self.set_task(task)
@@ -319,6 +283,9 @@ class BaseCallbackModule(CallbackBase):
with self.capture_event_data('playbook_on_notify', **event_data):
super(BaseCallbackModule, self).v2_playbook_on_notify(result, handler)
+'''
+ansible_stats is, retroactively, added in 2.2
+'''
def v2_playbook_on_stats(self, stats):
self.clear_play()
# FIXME: Add count of plays/tasks.
@@ -329,7 +296,9 @@ class BaseCallbackModule(CallbackBase):
ok=stats.ok,
processed=stats.processed,
skipped=stats.skipped,
+artifact_data=stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
)
with self.capture_event_data('playbook_on_stats', **event_data):
super(BaseCallbackModule, self).v2_playbook_on_stats(stats)

Binary file not shown.

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because it is too large

Binary file not shown.

File diff suppressed because it is too large

View File

@@ -353,7 +353,7 @@ class BaseAccess(object):
# Shortcuts in certain cases by deferring to earlier property
if display_method == 'schedule':
-user_capabilities['schedule'] = user_capabilities['edit']
+user_capabilities['schedule'] = user_capabilities['start']
continue
elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob)):
user_capabilities['delete'] = user_capabilities['edit']
@@ -363,27 +363,30 @@
continue

# Compute permission
-data = {}
-access_method = getattr(self, "can_%s" % method)
-if method in ['change']: # 3 args
-user_capabilities[display_method] = access_method(obj, data)
-elif method in ['delete', 'run_ad_hoc_commands', 'copy']:
-user_capabilities[display_method] = access_method(obj)
-elif method in ['start']:
-user_capabilities[display_method] = access_method(obj, validate_license=False)
-elif method in ['add']: # 2 args with data
-user_capabilities[display_method] = access_method(data)
-elif method in ['attach', 'unattach']: # parent/sub-object call
-if type(parent_obj) == Team:
-relationship = 'parents'
-parent_obj = parent_obj.member_role
-else:
-relationship = 'members'
-user_capabilities[display_method] = access_method(
-obj, parent_obj, relationship, skip_sub_obj_read_check=True, data=data)
+user_capabilities[display_method] = self.get_method_capability(method, obj, parent_obj)

return user_capabilities

+def get_method_capability(self, method, obj, parent_obj):
+    if method in ['change']: # 3 args
+        return self.can_change(obj, {})
+    elif method in ['delete', 'run_ad_hoc_commands', 'copy']:
+        access_method = getattr(self, "can_%s" % method)
+        return access_method(obj)
+    elif method in ['start']:
+        return self.can_start(obj, validate_license=False)
+    elif method in ['add']: # 2 args with data
+        return self.can_add({})
+    elif method in ['attach', 'unattach']: # parent/sub-object call
+        access_method = getattr(self, "can_%s" % method)
+        if type(parent_obj) == Team:
+            relationship = 'parents'
+            parent_obj = parent_obj.member_role
+        else:
+            relationship = 'members'
+        return access_method(obj, parent_obj, relationship, skip_sub_obj_read_check=True, data={})
+    return False

class UserAccess(BaseAccess):
'''
@@ -982,8 +985,6 @@ class ProjectUpdateAccess(BaseAccess):
@check_superuser
def can_cancel(self, obj):
-if not obj.can_cancel:
-return False
if self.user == obj.created_by:
return True
# Project updates cascade delete with project, admin role descends from org admin
@@ -1040,7 +1041,7 @@ class JobTemplateAccess(BaseAccess):
Project.accessible_objects(self.user, 'use_role').exists() or
Inventory.accessible_objects(self.user, 'use_role').exists())

-# if reference_obj is provided, determine if it can be coppied
+# if reference_obj is provided, determine if it can be copied
reference_obj = data.get('reference_obj', None)

if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:
@@ -1392,7 +1393,8 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
qs = self.model.objects.filter(
workflow_job_template__in=WorkflowJobTemplate.accessible_objects(
self.user, 'read_role'))
-qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')
+qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes',
+                         'unified_job_template')
return qs

def can_use_prompted_resources(self, data):
@@ -1478,8 +1480,14 @@ class WorkflowJobNodeAccess(BaseAccess):
qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')
return qs

+@check_superuser
def can_add(self, data):
-return False
+if data is None:  # Hide direct creation in API browser
+    return False
+return (
+    self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role') and
+    self.check_related('credential', Credential, data, role_field='use_role') and
+    self.check_related('inventory', Inventory, data, role_field='use_role'))

def can_change(self, obj, data):
return False
@@ -1528,22 +1536,28 @@ class WorkflowJobTemplateAccess(BaseAccess):
def can_copy(self, obj):
if self.save_messages:
-wfjt_errors = {}
+missing_ujt = []
+missing_credentials = []
+missing_inventories = []
qs = obj.workflow_job_template_nodes
qs.select_related('unified_job_template', 'inventory', 'credential')
for node in qs.all():
node_errors = {}
if node.inventory and self.user not in node.inventory.use_role:
-node_errors['inventory'] = 'Prompted inventory %s can not be coppied.' % node.inventory.name
+missing_inventories.append(node.inventory.name)
if node.credential and self.user not in node.credential.use_role:
-node_errors['credential'] = 'Prompted credential %s can not be coppied.' % node.credential.name
+missing_credentials.append(node.credential.name)
ujt = node.unified_job_template
if ujt and not self.user.can_access(UnifiedJobTemplate, 'start', ujt, validate_license=False):
-node_errors['unified_job_template'] = (
-'Prompted %s %s can not be coppied.' % (ujt._meta.verbose_name_raw, ujt.name))
+missing_ujt.append(ujt.name)
if node_errors:
wfjt_errors[node.id] = node_errors
-self.messages.update(wfjt_errors)
+if missing_ujt:
+    self.messages['templates_unable_to_copy'] = missing_ujt
+if missing_credentials:
+    self.messages['credentials_unable_to_copy'] = missing_credentials
+if missing_inventories:
+    self.messages['inventories_unable_to_copy'] = missing_inventories
return self.check_related('organization', Organization, {'reference_obj': obj}, mandatory=True)
@@ -1611,11 +1625,19 @@ class WorkflowJobAccess(BaseAccess):
def can_change(self, obj, data):
return False

+@check_superuser
def can_delete(self, obj):
-if obj.workflow_job_template is None:
-    # only superusers can delete orphaned workflow jobs
-    return self.user.is_superuser
-return self.user in obj.workflow_job_template.admin_role
+return (obj.workflow_job_template and
+        obj.workflow_job_template.organization and
+        self.user in obj.workflow_job_template.organization.admin_role)

+def get_method_capability(self, method, obj, parent_obj):
+    if method == 'start':
+        # Return simplistic permission, will perform detailed check on POST
+        if not obj.workflow_job_template:
+            return self.user.is_superuser
+        return self.user in obj.workflow_job_template.execute_role
+    return super(WorkflowJobAccess, self).get_method_capability(method, obj, parent_obj)

def can_start(self, obj, validate_license=True):
if validate_license:
@@ -1624,7 +1646,29 @@ class WorkflowJobAccess(BaseAccess):
if self.user.is_superuser:
return True

-return (obj.workflow_job_template and self.user in obj.workflow_job_template.execute_role)
+wfjt = obj.workflow_job_template
+# only superusers can relaunch orphans
+if not wfjt:
+    return False
+# execute permission to WFJT is mandatory for any relaunch
+if self.user not in wfjt.execute_role:
+    return False
+# user's WFJT access doesn't guarantee permission to launch, introspect nodes
+return self.can_recreate(obj)
+
+def can_recreate(self, obj):
+    node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credential', 'unified_job_template')
+    node_access = WorkflowJobNodeAccess(user=self.user)
+    wj_add_perm = True
+    for node in node_qs:
+        if not node_access.can_add({'reference_obj': node}):
+            wj_add_perm = False
+    if not wj_add_perm and self.save_messages:
+        self.messages['workflow_job_template'] = _('You do not have permission to the workflow job '
+                                                   'resources required for relaunch.')
+    return wj_add_perm

def can_cancel(self, obj):
if not obj.can_cancel:
@@ -1912,11 +1956,17 @@ class ScheduleAccess(BaseAccess):
@check_superuser
def can_add(self, data):
-return self.check_related('unified_job_template', UnifiedJobTemplate, data, mandatory=True)
+return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)

@check_superuser
def can_change(self, obj, data):
-return self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, mandatory=True)
+if self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, mandatory=True):
+    return True
+# Users with execute role can modify the schedules they created
+return (
+    obj.created_by == self.user and
+    self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, role_field='execute_role', mandatory=True))

def can_delete(self, obj):
return self.can_change(obj, {})

View File

@@ -96,12 +96,12 @@ class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--older_than',
dest='older_than',
-default=None,
-help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y).'),
+default='30d',
+help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 30d.'),
make_option('--granularity',
dest='granularity',
-default=None,
-help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y).'),
+default='1w',
+help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 1w.'),
make_option('--module',
dest='module',
default=None,

View File

@@ -12,7 +12,7 @@ from django.db import transaction
from django.utils.timezone import now

# AWX
-from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob
+from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob, WorkflowJob

class Command(NoArgsCommand):
@@ -30,19 +30,22 @@ class Command(NoArgsCommand):
'be removed)'),
make_option('--jobs', dest='only_jobs', action='store_true',
default=False,
-help='Only remove jobs'),
+help='Remove jobs'),
make_option('--ad-hoc-commands', dest='only_ad_hoc_commands',
action='store_true', default=False,
-help='Only remove ad hoc commands'),
+help='Remove ad hoc commands'),
make_option('--project-updates', dest='only_project_updates',
action='store_true', default=False,
-help='Only remove project updates'),
+help='Remove project updates'),
make_option('--inventory-updates', dest='only_inventory_updates',
action='store_true', default=False,
-help='Only remove inventory updates'),
+help='Remove inventory updates'),
make_option('--management-jobs', default=False,
action='store_true', dest='only_management_jobs',
-help='Only remove management jobs')
+help='Remove management jobs'),
+make_option('--workflow-jobs', default=False,
+            action='store_true', dest='only_workflow_jobs',
+            help='Remove workflow jobs')
)

def cleanup_jobs(self):
@@ -169,6 +172,28 @@ class Command(NoArgsCommand):
self.logger.addHandler(handler)
self.logger.propagate = False

+def cleanup_workflow_jobs(self):
+    skipped, deleted = 0, 0
+    for workflow_job in WorkflowJob.objects.all():
+        workflow_job_display = '"{}" (started {}, {} nodes)'.format(
+            unicode(workflow_job), unicode(workflow_job.created),
+            workflow_job.workflow_nodes.count())
+        if workflow_job.status in ('pending', 'waiting', 'running'):
+            action_text = 'would skip' if self.dry_run else 'skipping'
+            self.logger.debug('%s %s job %s', action_text, workflow_job.status, workflow_job_display)
+            skipped += 1
+        elif workflow_job.created >= self.cutoff:
+            action_text = 'would skip' if self.dry_run else 'skipping'
+            self.logger.debug('%s %s', action_text, workflow_job_display)
+            skipped += 1
+        else:
+            action_text = 'would delete' if self.dry_run else 'deleting'
+            self.logger.info('%s %s', action_text, workflow_job_display)
+            if not self.dry_run:
+                workflow_job.delete()
+            deleted += 1
+    return skipped, deleted

@transaction.atomic
def handle_noargs(self, **options):
self.verbosity = int(options.get('verbosity', 1))
@@ -179,7 +204,7 @@ class Command(NoArgsCommand):
self.cutoff = now() - datetime.timedelta(days=self.days)
except OverflowError:
raise CommandError('--days specified is too large. Try something less than 99999 (about 270 years).')
-model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates', 'management_jobs')
+model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates', 'management_jobs', 'workflow_jobs')
models_to_cleanup = set()
for m in model_names:
if options.get('only_%s' % m, False):
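Equivalent invocation from Python, assuming the `--workflow-jobs` flag (dest `only_workflow_jobs`) added above and the pre-existing `--days` option referenced in the error message:

```python
from django.core.management import call_command

# Delete workflow jobs older than 90 days, leaving other job types untouched.
call_command('cleanup_jobs', days=90, only_workflow_jobs=True)
```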

View File

@@ -64,7 +64,7 @@ class MemObject(object):
all_vars = {}
files_found = 0
for suffix in ('', '.yml', '.yaml', '.json'):
-path = ''.join([base_path, suffix])
+path = ''.join([base_path, suffix]).encode("utf-8")
if not os.path.exists(path):
continue
if not os.path.isfile(path):
@@ -462,7 +462,7 @@ class ExecutableJsonLoader(BaseLoader):
# to set their variables
for k,v in self.all_group.all_hosts.iteritems():
if 'hostvars' not in _meta:
-data = self.command_to_json([self.source, '--host', k])
+data = self.command_to_json([self.source, '--host', k.encode("utf-8")])
else:
data = _meta['hostvars'].get(k, {})
if isinstance(data, dict):

View File

@@ -20,7 +20,7 @@ from django.core.urlresolvers import reverse
# AWX
from awx.main.models.base import * # noqa
from awx.main.models.unified_jobs import * # noqa
-from awx.main.models.notifications import JobNotificationMixin
+from awx.main.models.notifications import JobNotificationMixin, NotificationTemplate
from awx.main.fields import JSONField

logger = logging.getLogger('awx.main.models.ad_hoc_commands')
@@ -157,18 +157,20 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
@property
def notification_templates(self):
-all_inventory_sources = set()
+all_orgs = set()
for h in self.hosts.all():
-for invsrc in h.inventory_sources.all():
-all_inventory_sources.add(invsrc)
+all_orgs.add(h.inventory.organization)
active_templates = dict(error=set(),
success=set(),
any=set())
-for invsrc in all_inventory_sources:
-notifications_dict = invsrc.notification_templates
-for notification_type in active_templates.keys():
-for templ in notifications_dict[notification_type]:
-active_templates[notification_type].add(templ)
+base_notification_templates = NotificationTemplate.objects
+for org in all_orgs:
+for templ in base_notification_templates.filter(organization_notification_templates_for_errors=org):
+active_templates['error'].add(templ)
+for templ in base_notification_templates.filter(organization_notification_templates_for_success=org):
+active_templates['success'].add(templ)
+for templ in base_notification_templates.filter(organization_notification_templates_for_any=org):
+active_templates['any'].add(templ)
active_templates['error'] = list(active_templates['error'])
active_templates['any'] = list(active_templates['any'])
active_templates['success'] = list(active_templates['success'])

View File

@@ -1002,9 +1002,8 @@ class InventorySourceOptions(BaseModel):
if r not in valid_regions and r not in invalid_regions:
invalid_regions.append(r)
if invalid_regions:
-raise ValidationError(_('Invalid %(source)s region%(plural)s: %(region)s') % {
-'source': self.source, 'plural': '' if len(invalid_regions) == 1 else 's',
-'region': ', '.join(invalid_regions)})
+raise ValidationError(_('Invalid %(source)s region: %(region)s') % {
+'source': self.source, 'region': ', '.join(invalid_regions)})
return ','.join(regions)

source_vars_dict = VarsDictProperty('source_vars')
@@ -1028,9 +1027,8 @@ class InventorySourceOptions(BaseModel):
if instance_filter_name not in self.INSTANCE_FILTER_NAMES:
invalid_filters.append(instance_filter)
if invalid_filters:
-raise ValidationError(_('Invalid filter expression%(plural)s: %(filter)s') %
-{'plural': '' if len(invalid_filters) == 1 else 's',
-'filter': ', '.join(invalid_filters)})
+raise ValidationError(_('Invalid filter expression: %(filter)s') %
+{'filter': ', '.join(invalid_filters)})
return instance_filters

def clean_group_by(self):
@@ -1047,9 +1045,8 @@ class InventorySourceOptions(BaseModel):
if c not in valid_choices and c not in invalid_choices:
invalid_choices.append(c)
if invalid_choices:
-raise ValidationError(_('Invalid group by choice%(plural)s: %(choice)s') %
-{'plural': '' if len(invalid_choices) == 1 else 's',
-'choice': ', '.join(invalid_choices)})
+raise ValidationError(_('Invalid group by choice: %(choice)s') %
+{'choice': ', '.join(invalid_choices)})
return ','.join(choices)

View File

@@ -606,6 +606,11 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
evars.update(extra_vars)
self.update_fields(extra_vars=json.dumps(evars))

+def _resources_sufficient_for_launch(self):
+    if self.job_type == PERM_INVENTORY_SCAN:
+        return self.inventory_id is not None
+    return not (self.inventory_id is None or self.project_id is None)

def display_artifacts(self):
'''
Hides artifacts if they are marked as no_log type artifacts.
@@ -1175,7 +1180,6 @@ class JobEvent(CreatedModifiedModel):
# Save UUID and parent UUID for determining parent-child relationship.
job_event_uuid = kwargs.get('uuid', None)
parent_event_uuid = kwargs.get('parent_uuid', None)
-artifact_dict = kwargs.get('artifact_data', None)

# Sanity check: Don't honor keys that we don't recognize.
valid_keys = {'job_id', 'event', 'event_data', 'playbook', 'play',
@@ -1185,6 +1189,11 @@ class JobEvent(CreatedModifiedModel):
if key not in valid_keys:
kwargs.pop(key)

+event_data = kwargs.get('event_data', None)
+artifact_dict = None
+if event_data:
+    artifact_dict = event_data.pop('artifact_data', None)

# Try to find a parent event based on UUID.
if parent_event_uuid:
cache_key = '{}_{}'.format(kwargs['job_id'], parent_event_uuid)
@@ -1208,12 +1217,21 @@ class JobEvent(CreatedModifiedModel):
# Save artifact data to parent job (if provided). # Save artifact data to parent job (if provided).
if artifact_dict: if artifact_dict:
event_data = kwargs.get('event_data', None)
if event_data and isinstance(event_data, dict): if event_data and isinstance(event_data, dict):
res = event_data.get('res', None) # Note: Core has not added support for marking artifacts as
if res and isinstance(res, dict): # sensitive yet. Going forward, core will not use
if res.get('_ansible_no_log', False): # _ansible_no_log to denote sensitive set_stats calls.
artifact_dict['_ansible_no_log'] = True # Instead, they plan to add a flag outside of the traditional
# no_log mechanism. no_log will not work for this feature,
# in core, because sensitive data is scrubbed before sending
# data to the callback. The playbook_on_stats is the callback
# in which the set_stats data is used.
# Again, the sensitive artifact feature has not yet landed in
# core. The below is how we mark artifacts payload as
# sensitive
# artifact_dict['_ansible_no_log'] = True
#
parent_job = Job.objects.filter(pk=kwargs['job_id']).first() parent_job = Job.objects.filter(pk=kwargs['job_id']).first()
if parent_job and parent_job.artifacts != artifact_dict: if parent_job and parent_job.artifacts != artifact_dict:
parent_job.artifacts = artifact_dict parent_job.artifacts = artifact_dict
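The jobs.py hunks above move artifact_data out of the top-level event kwargs and into event_data, and defer the no_log marking until core grows a dedicated flag. A minimal standalone sketch of the resulting flow, with a plain object standing in for the Job model (field and method names follow the diff but are illustrative, not the real ORM):

    # Stand-in Job, not the AWX model; only the fields used below.
    class Job(object):
        def __init__(self, pk):
            self.pk = pk
            self.artifacts = {}

        def save(self, update_fields=None):
            pass  # placeholder for the ORM save

    def record_event(parent_job, **kwargs):
        event_data = kwargs.get('event_data', None)
        artifact_dict = None
        if event_data:
            # artifact_data now lives inside event_data, not the top-level kwargs
            artifact_dict = event_data.pop('artifact_data', None)
        if artifact_dict and parent_job.artifacts != artifact_dict:
            parent_job.artifacts = artifact_dict
            parent_job.save(update_fields=['artifacts'])

    job = Job(pk=1)
    record_event(job, event='playbook_on_stats',
                 event_data={'artifact_data': {'build_id': 42}})
    print(job.artifacts)  # {'build_id': 42}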

View File

@@ -561,6 +561,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
"Override in child classes, None value indicates this is not configurable" "Override in child classes, None value indicates this is not configurable"
return None return None
def _resources_sufficient_for_launch(self):
return True
def __unicode__(self): def __unicode__(self):
return u'%s-%s-%s' % (self.created, self.id, self.status) return u'%s-%s-%s' % (self.created, self.id, self.status)

View File

@@ -134,7 +134,7 @@ class WorkflowNodeBase(CreatedModifiedModel):
scan_errors = ujt_obj._extra_job_type_errors(accepted_fields) scan_errors = ujt_obj._extra_job_type_errors(accepted_fields)
ignored_dict.update(scan_errors) ignored_dict.update(scan_errors)
for fd in ['inventory', 'credential']: for fd in ['inventory', 'credential']:
if getattr(ujt_obj, fd) is None and not (ask_for_vars_dict.get(fd, False) and fd in prompts_dict): if getattr(ujt_obj, "{}_id".format(fd)) is None and not (ask_for_vars_dict.get(fd, False) and fd in prompts_dict):
missing_dict[fd] = 'Job Template does not have this field and workflow node does not provide it' missing_dict[fd] = 'Job Template does not have this field and workflow node does not provide it'
data = {} data = {}
@@ -418,18 +418,22 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
def can_start_without_user_input(self): def can_start_without_user_input(self):
'''Return whether WFJT can be launched without survey passwords.''' '''Return whether WFJT can be launched without survey passwords.'''
return not bool(self.variables_needed_to_start) return not bool(
self.variables_needed_to_start or
self.node_templates_missing() or
self.node_prompts_rejected())
def get_warnings(self): def node_templates_missing(self):
warning_data = {} return [node.pk for node in self.workflow_job_template_nodes.filter(
for node in self.workflow_job_template_nodes.all(): unified_job_template__isnull=True).all()]
if node.unified_job_template is None:
warning_data[node.pk] = 'Node is missing a linked unified_job_template' def node_prompts_rejected(self):
continue node_list = []
for node in self.workflow_job_template_nodes.select_related('unified_job_template').all():
node_prompts_warnings = node.get_prompts_warnings() node_prompts_warnings = node.get_prompts_warnings()
if node_prompts_warnings: if node_prompts_warnings:
warning_data[node.pk] = node_prompts_warnings node_list.append(node.pk)
return warning_data return node_list
def user_copy(self, user): def user_copy(self, user):
new_wfjt = self.copy_unified_jt() new_wfjt = self.copy_unified_jt()
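The workflow.py hunk above extends the launch gate: besides survey variables, it now also checks for nodes that lack a unified_job_template and nodes whose prompts are rejected. A rough standalone sketch of that gate, with plain lists standing in for the node querysets (class names are illustrative):

    # Illustrative stand-ins for WorkflowJobTemplate and its nodes.
    class Node(object):
        def __init__(self, pk, unified_job_template=None, prompt_warnings=None):
            self.pk = pk
            self.unified_job_template = unified_job_template
            self.prompt_warnings = prompt_warnings or {}

        def get_prompts_warnings(self):
            return self.prompt_warnings

    class Workflow(object):
        def __init__(self, nodes, variables_needed_to_start=()):
            self.nodes = nodes
            self.variables_needed_to_start = list(variables_needed_to_start)

        def node_templates_missing(self):
            return [n.pk for n in self.nodes if n.unified_job_template is None]

        def node_prompts_rejected(self):
            return [n.pk for n in self.nodes if n.get_prompts_warnings()]

        def can_start_without_user_input(self):
            return not bool(
                self.variables_needed_to_start or
                self.node_templates_missing() or
                self.node_prompts_rejected())

    wf = Workflow([Node(1, unified_job_template='jt'), Node(2)])
    print(wf.can_start_without_user_input())  # False: node 2 has no template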

View File

@@ -10,6 +10,7 @@ from sets import Set
from django.conf import settings from django.conf import settings
from django.db import transaction, connection from django.db import transaction, connection
from django.db.utils import DatabaseError from django.db.utils import DatabaseError
from django.utils.translation import ugettext_lazy as _
# AWX # AWX
from awx.main.models import * # noqa from awx.main.models import * # noqa
@@ -114,14 +115,20 @@ class TaskManager():
dag = WorkflowDAG(workflow_job) dag = WorkflowDAG(workflow_job)
spawn_nodes = dag.bfs_nodes_to_run() spawn_nodes = dag.bfs_nodes_to_run()
for spawn_node in spawn_nodes: for spawn_node in spawn_nodes:
if spawn_node.unified_job_template is None:
continue
kv = spawn_node.get_job_kwargs() kv = spawn_node.get_job_kwargs()
job = spawn_node.unified_job_template.create_unified_job(**kv) job = spawn_node.unified_job_template.create_unified_job(**kv)
spawn_node.job = job spawn_node.job = job
spawn_node.save() spawn_node.save()
can_start = job.signal_start(**kv) if job._resources_sufficient_for_launch():
can_start = job.signal_start(**kv)
else:
can_start = False
if not can_start: if not can_start:
job.status = 'failed' job.status = 'failed'
job.job_explanation = "Workflow job could not start because it was not in the right state or required manual credentials" job.job_explanation = _("Job spawned from workflow could not start because it "
"was not in the right state or required manual credentials")
job.save(update_fields=['status', 'job_explanation']) job.save(update_fields=['status', 'job_explanation'])
connection.on_commit(lambda: job.websocket_emit_status('failed')) connection.on_commit(lambda: job.websocket_emit_status('failed'))
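The task_manager.py hunk above skips workflow nodes with no template and refuses to signal jobs whose resources are insufficient. A hedged sketch of that spawn loop with plain dicts and objects in place of the ORM (signal_start and the status handling are simplified):

    # Simplified spawn loop; SpawnedJob stands in for the unified job model.
    class SpawnedJob(object):
        def __init__(self, sufficient=True, can_signal=True):
            self._sufficient = sufficient
            self._can_signal = can_signal
            self.status = 'new'
            self.job_explanation = ''

        def _resources_sufficient_for_launch(self):
            return self._sufficient

        def signal_start(self, **kwargs):
            return self._can_signal

    def process_spawn_nodes(spawn_nodes):
        for spawn_node in spawn_nodes:
            if spawn_node['unified_job_template'] is None:
                continue  # nothing to launch for this node
            job = spawn_node['create_job']()
            if job._resources_sufficient_for_launch():
                can_start = job.signal_start()
            else:
                can_start = False
            if not can_start:
                job.status = 'failed'
                job.job_explanation = ("Job spawned from workflow could not start because it "
                                       "was not in the right state or required manual credentials")
            yield job

    nodes = [{'unified_job_template': 'jt',
              'create_job': lambda: SpawnedJob(sufficient=False)}]
    for j in process_spawn_nodes(nodes):
        print(j.status, '-', j.job_explanation)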

View File

@@ -67,6 +67,8 @@ class WorkflowDAG(SimpleDAG):
obj = n['node_object'] obj = n['node_object']
job = obj.job job = obj.job
if obj.unified_job_template is None:
continue
if not job: if not job:
return False return False
# Job is about to run or is running. Hold our horses and wait for # Job is about to run or is running. Hold our horses and wait for

View File

@@ -34,11 +34,13 @@ def run_job_complete(job_id):
@task @task
def run_task_manager(): def run_task_manager():
logger.debug("Running Tower task manager.")
TaskManager().schedule() TaskManager().schedule()
@task @task
def run_fail_inconsistent_running_jobs(): def run_fail_inconsistent_running_jobs():
logger.debug("Running task to fail inconsistent running jobs.")
with transaction.atomic(): with transaction.atomic():
# Lock # Lock
try: try:

View File

@@ -32,7 +32,8 @@ import pexpect
# Celery # Celery
from celery import Task, task from celery import Task, task
from celery.signals import celeryd_init from celery.signals import celeryd_init, worker_process_init
from celery import current_app
# Django # Django
from django.conf import settings from django.conf import settings
@@ -75,7 +76,8 @@ logger = logging.getLogger('awx.main.tasks')
def celery_startup(conf=None, **kwargs): def celery_startup(conf=None, **kwargs):
# Re-init all schedules # Re-init all schedules
# NOTE: Rework this during the Rampart work # NOTE: Rework this during the Rampart work
logger.info("Syncing Tower Schedules") startup_logger = logging.getLogger('awx.main.tasks')
startup_logger.info("Syncing Tower Schedules")
for sch in Schedule.objects.all(): for sch in Schedule.objects.all():
try: try:
sch.update_computed_fields() sch.update_computed_fields()
@@ -84,7 +86,28 @@ def celery_startup(conf=None, **kwargs):
logger.error("Failed to rebuild schedule {}: {}".format(sch, e)) logger.error("Failed to rebuild schedule {}: {}".format(sch, e))
def uwsgi_reload(): def _setup_tower_logger():
global logger
from django.utils.log import configure_logging
LOGGING_DICT = settings.LOGGING
if settings.LOG_AGGREGATOR_ENABLED:
LOGGING_DICT['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler'
LOGGING_DICT['handlers']['http_receiver']['async'] = False
if 'awx' in settings.LOG_AGGREGATOR_LOGGERS:
if 'http_receiver' not in LOGGING_DICT['loggers']['awx']['handlers']:
LOGGING_DICT['loggers']['awx']['handlers'] += ['http_receiver']
configure_logging(settings.LOGGING_CONFIG, LOGGING_DICT)
logger = logging.getLogger('awx.main.tasks')
@worker_process_init.connect
def task_set_logger_pre_run(*args, **kwargs):
if settings.LOG_AGGREGATOR_ENABLED:
_setup_tower_logger()
logger.debug('Custom Tower logger configured for worker process.')
def _uwsgi_reload():
# http://uwsgi-docs.readthedocs.io/en/latest/MasterFIFO.html#available-commands # http://uwsgi-docs.readthedocs.io/en/latest/MasterFIFO.html#available-commands
logger.warn('Initiating uWSGI chain reload of server') logger.warn('Initiating uWSGI chain reload of server')
TRIGGER_CHAIN_RELOAD = 'c' TRIGGER_CHAIN_RELOAD = 'c'
@@ -92,14 +115,28 @@ def uwsgi_reload():
awxfifo.write(TRIGGER_CHAIN_RELOAD) awxfifo.write(TRIGGER_CHAIN_RELOAD)
@task(queue='broadcast_all') def _reset_celery_logging():
def clear_cache_keys(cache_keys): # Worker logger reloaded, now send signal to restart pool
set_of_keys = set([key for key in cache_keys]) app = current_app._get_current_object()
app.control.broadcast('pool_restart', arguments={'reload': True},
destination=['celery@{}'.format(settings.CLUSTER_HOST_ID)], reply=False)
def _clear_cache_keys(set_of_keys):
logger.debug('cache delete_many(%r)', set_of_keys) logger.debug('cache delete_many(%r)', set_of_keys)
cache.delete_many(set_of_keys) cache.delete_many(set_of_keys)
@task(queue='broadcast_all')
def process_cache_changes(cache_keys):
logger.warn('Processing cache changes, task args: {0.args!r} kwargs: {0.kwargs!r}'.format(
process_cache_changes.request))
set_of_keys = set([key for key in cache_keys])
_clear_cache_keys(set_of_keys)
for setting_key in set_of_keys: for setting_key in set_of_keys:
if setting_key.startswith('LOG_AGGREGATOR_'): if setting_key.startswith('LOG_AGGREGATOR_'):
uwsgi_reload() _uwsgi_reload()
_reset_celery_logging()
break break
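The tasks.py hunk above hooks a per-worker logger rebuild onto Celery's worker_process_init signal and broadcasts a pool_restart when a LOG_AGGREGATOR_* setting changes (which relies on CELERYD_POOL_RESTARTS = True, added to settings further down). A hedged sketch of the signal side of that pattern; the settings object is a plain stand-in and the handler wiring is simplified relative to the real configure_logging call:

    import logging

    from celery.signals import worker_process_init

    class settings(object):  # stand-in for django.conf.settings
        LOG_AGGREGATOR_ENABLED = True

    logger = logging.getLogger('awx.main.tasks')

    def _setup_tower_logger():
        # The real code rebuilds settings.LOGGING (switching http_receiver to a
        # synchronous HTTPSHandler) and re-runs django.utils.log.configure_logging;
        # here we just attach a stream handler to show the per-process rebuild.
        global logger
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter('%(name)s %(levelname)s %(message)s'))
        logger = logging.getLogger('awx.main.tasks')
        logger.addHandler(handler)
        logger.setLevel(logging.DEBUG)

    @worker_process_init.connect
    def task_set_logger_pre_run(*args, **kwargs):
        if settings.LOG_AGGREGATOR_ENABLED:
            _setup_tower_logger()
            logger.debug('Custom Tower logger configured for worker process.')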
@@ -129,6 +166,7 @@ def send_notifications(notification_list, job_id=None):
@task(bind=True, queue='default') @task(bind=True, queue='default')
def run_administrative_checks(self): def run_administrative_checks(self):
logger.warn("Running administrative checks.")
if not settings.TOWER_ADMIN_ALERTS: if not settings.TOWER_ADMIN_ALERTS:
return return
validation_info = TaskEnhancer().validate_enhancements() validation_info = TaskEnhancer().validate_enhancements()
@@ -150,11 +188,13 @@ def run_administrative_checks(self):
@task(bind=True, queue='default') @task(bind=True, queue='default')
def cleanup_authtokens(self): def cleanup_authtokens(self):
logger.warn("Cleaning up expired authtokens.")
AuthToken.objects.filter(expires__lt=now()).delete() AuthToken.objects.filter(expires__lt=now()).delete()
@task(bind=True) @task(bind=True)
def cluster_node_heartbeat(self): def cluster_node_heartbeat(self):
logger.debug("Cluster node heartbeat task.")
inst = Instance.objects.filter(hostname=settings.CLUSTER_HOST_ID) inst = Instance.objects.filter(hostname=settings.CLUSTER_HOST_ID)
if inst.exists(): if inst.exists():
inst = inst[0] inst = inst[0]
@@ -1832,7 +1872,7 @@ class RunSystemJob(BaseTask):
if 'days' in json_vars and system_job.job_type != 'cleanup_facts': if 'days' in json_vars and system_job.job_type != 'cleanup_facts':
args.extend(['--days', str(json_vars.get('days', 60))]) args.extend(['--days', str(json_vars.get('days', 60))])
if system_job.job_type == 'cleanup_jobs': if system_job.job_type == 'cleanup_jobs':
args.extend(['--jobs', '--project-updates', '--inventory-updates', '--management-jobs', '--ad-hoc-commands']) args.extend(['--jobs', '--project-updates', '--inventory-updates', '--management-jobs', '--ad-hoc-commands', '--workflow-jobs'])
if system_job.job_type == 'cleanup_facts': if system_job.job_type == 'cleanup_facts':
if 'older_than' in json_vars: if 'older_than' in json_vars:
args.extend(['--older_than', str(json_vars['older_than'])]) args.extend(['--older_than', str(json_vars['older_than'])])
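The RunSystemJob hunk above adds --workflow-jobs to the argument list built for the cleanup_jobs management command. A small standalone sketch of that argument construction from the job's extra_vars; the leading command name and the defaults are assumptions for illustration:

    import json

    def build_cleanup_args(job_type, extra_vars):
        json_vars = json.loads(extra_vars or '{}')
        args = [job_type]  # assumed to name the management command
        if 'days' in json_vars and job_type != 'cleanup_facts':
            args.extend(['--days', str(json_vars.get('days', 60))])
        if job_type == 'cleanup_jobs':
            args.extend(['--jobs', '--project-updates', '--inventory-updates',
                         '--management-jobs', '--ad-hoc-commands', '--workflow-jobs'])
        return args

    print(build_cleanup_args('cleanup_jobs', '{"days": 30}'))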

View File

@@ -65,6 +65,17 @@ def test_edit_sensitive_fields(patch, job_template_factory, alice, grant_project
}, alice, expect=expect) }, alice, expect=expect)
@pytest.mark.django_db
def test_reject_dict_extra_vars_patch(patch, job_template_factory, admin_user):
# Expect a string for extra_vars; raise 400 in this case, which would
# otherwise have been saved incorrectly
jt = job_template_factory(
'jt', organization='org1', project='prj', inventory='inv', credential='cred'
).job_template
patch(reverse('api:job_template_detail', args=(jt.id,)),
{'extra_vars': {'foo': 5}}, admin_user, expect=400)
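The new test above pins down that extra_vars must arrive as a JSON/YAML string rather than a nested object. A tiny illustration of the payload difference (the accepted/rejected outcome is taken from the test; no HTTP client is involved here):

    import json

    accepted = {'extra_vars': json.dumps({'foo': 5})}   # '{"foo": 5}' -> saved
    rejected = {'extra_vars': {'foo': 5}}               # raw dict     -> HTTP 400

    print(type(accepted['extra_vars']).__name__)  # str
    print(type(rejected['extra_vars']).__name__)  # dict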
@pytest.mark.django_db @pytest.mark.django_db
def test_edit_playbook(patch, job_template_factory, alice): def test_edit_playbook(patch, job_template_factory, alice):
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred') objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')

View File

@@ -1,3 +1,5 @@
# -*- coding: utf-8 -*-
import mock # noqa import mock # noqa
import pytest import pytest
@@ -22,6 +24,84 @@ def team_project_list(organization_factory):
return objects return objects
@pytest.mark.django_db
def test_user_project_paged_list(get, organization_factory):
'Test project listing that spans multiple pages'
# 3 total projects, 1 per page, 3 pages
objects = organization_factory(
'org1',
projects=['project-%s' % i for i in range(3)],
users=['alice'],
roles=['project-%s.admin_role:alice' % i for i in range(3)],
)
# first page has first project and no previous page
pk = objects.users.alice.pk
url = reverse('api:user_projects_list', args=(pk,))
results = get(url, objects.users.alice, QUERY_STRING='page_size=1').data
assert results['count'] == 3
assert len(results['results']) == 1
assert results['previous'] is None
assert results['next'] == (
'/api/v1/users/%s/projects/?page=2&page_size=1' % pk
)
# second page has one more, a previous and next page
results = get(url, objects.users.alice,
QUERY_STRING='page=2&page_size=1').data
assert len(results['results']) == 1
assert results['previous'] == (
'/api/v1/users/%s/projects/?page=1&page_size=1' % pk
)
assert results['next'] == (
'/api/v1/users/%s/projects/?page=3&page_size=1' % pk
)
# third page has last project and a previous page
results = get(url, objects.users.alice,
QUERY_STRING='page=3&page_size=1').data
assert len(results['results']) == 1
assert results['previous'] == (
'/api/v1/users/%s/projects/?page=2&page_size=1' % pk
)
assert results['next'] is None
@pytest.mark.django_db
def test_user_project_paged_list_with_unicode(get, organization_factory):
'Test project listing that contains unicode chars in the next/prev links'
# Create 2 projects that contain a "cloud" unicode character, make sure we
# can search it and properly generate next/previous page links
objects = organization_factory(
'org1',
projects=['project-☁-1','project-☁-2'],
users=['alice'],
roles=['project-☁-1.admin_role:alice','project-☁-2.admin_role:alice'],
)
pk = objects.users.alice.pk
url = reverse('api:user_projects_list', args=(pk,))
# first on first page, next page link contains unicode char
results = get(url, objects.users.alice,
QUERY_STRING='page_size=1&search=%E2%98%81').data
assert results['count'] == 2
assert len(results['results']) == 1
assert results['next'] == (
'/api/v1/users/%s/projects/?page=2&page_size=1&search=%%E2%%98%%81' % pk # noqa
)
# second project on second page, previous page link contains unicode char
results = get(url, objects.users.alice,
QUERY_STRING='page=2&page_size=1&search=%E2%98%81').data
assert results['count'] == 2
assert len(results['results']) == 1
assert results['previous'] == (
'/api/v1/users/%s/projects/?page=1&page_size=1&search=%%E2%%98%%81' % pk # noqa
)
@pytest.mark.django_db @pytest.mark.django_db
def test_user_project_list(get, organization_factory): def test_user_project_list(get, organization_factory):
'List of projects a user has access to, filtered by projects you can also see' 'List of projects a user has access to, filtered by projects you can also see'
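The paging tests above assert that next/previous links keep page, page_size, and a percent-encoded unicode search term. A sketch of how such a link can be reconstructed with the standard library; the path comes from the test assertions and the helper name is made up:

    try:
        from urllib.parse import urlencode   # Python 3
    except ImportError:
        from urllib import urlencode         # Python 2

    def next_link(user_pk, page, page_size, search):
        # Encoding to UTF-8 first keeps the cloud character (\u2601) intact on both
        # Python 2 and 3; urlencode then percent-encodes it as %E2%98%81.
        query = urlencode([('page', page), ('page_size', page_size),
                           ('search', search.encode('utf-8'))])
        return '/api/v1/users/%s/projects/?%s' % (user_pk, query)

    print(next_link(1, 2, 1, u'\u2601'))
    # /api/v1/users/1/projects/?page=2&page_size=1&search=%E2%98%81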

View File

@@ -259,22 +259,37 @@ def test_associate_label(label, user, job_template):
@pytest.mark.django_db @pytest.mark.django_db
def test_move_schedule_to_JT_no_access(job_template, rando): class TestJobTemplateSchedules:
schedule = Schedule.objects.create(
unified_job_template=job_template, rrule = 'DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1'
rrule='DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1') rrule2 = 'DTSTART:20151117T050000Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1'
job_template.admin_role.members.add(rando)
jt2 = JobTemplate.objects.create(name="other-jt") @pytest.fixture
access = ScheduleAccess(rando) def jt2(self):
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk)) return JobTemplate.objects.create(name="other-jt")
def test_move_schedule_to_JT_no_access(self, job_template, rando, jt2):
schedule = Schedule.objects.create(unified_job_template=job_template, rrule=self.rrule)
job_template.admin_role.members.add(rando)
access = ScheduleAccess(rando)
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk))
@pytest.mark.django_db def test_move_schedule_from_JT_no_access(self, job_template, rando, jt2):
def test_move_schedule_from_JT_no_access(job_template, rando): schedule = Schedule.objects.create(unified_job_template=job_template, rrule=self.rrule)
schedule = Schedule.objects.create( jt2.admin_role.members.add(rando)
unified_job_template=job_template, access = ScheduleAccess(rando)
rrule='DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1') assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk))
jt2 = JobTemplate.objects.create(name="other-jt")
jt2.admin_role.members.add(rando)
access = ScheduleAccess(rando) def test_can_create_schedule_with_execute(self, job_template, rando):
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk)) job_template.execute_role.members.add(rando)
access = ScheduleAccess(rando)
assert access.can_add({'unified_job_template': job_template})
def test_can_modify_ones_own_schedule(self, job_template, rando):
job_template.execute_role.members.add(rando)
schedule = Schedule.objects.create(unified_job_template=job_template, rrule=self.rrule, created_by=rando)
access = ScheduleAccess(rando)
assert access.can_change(schedule, {'rrule': self.rrule2})

View File

@@ -86,11 +86,15 @@ class TestWorkflowJobTemplateNodeAccess:
@pytest.mark.django_db @pytest.mark.django_db
class TestWorkflowJobAccess: class TestWorkflowJobAccess:
def test_wfjt_admin_delete(self, wfjt, workflow_job, rando): def test_org_admin_can_delete_workflow_job(self, workflow_job, org_admin):
wfjt.admin_role.members.add(rando) access = WorkflowJobAccess(org_admin)
access = WorkflowJobAccess(rando)
assert access.can_delete(workflow_job) assert access.can_delete(workflow_job)
def test_wfjt_admin_can_delete_workflow_job(self, workflow_job, rando):
workflow_job.workflow_job_template.admin_role.members.add(rando)
access = WorkflowJobAccess(rando)
assert not access.can_delete(workflow_job)
def test_cancel_your_own_job(self, wfjt, workflow_job, rando): def test_cancel_your_own_job(self, wfjt, workflow_job, rando):
wfjt.execute_role.members.add(rando) wfjt.execute_role.members.add(rando)
workflow_job.created_by = rando workflow_job.created_by = rando
@@ -120,13 +124,11 @@ class TestWorkflowJobAccess:
access = WorkflowJobTemplateAccess(rando, save_messages=True) access = WorkflowJobTemplateAccess(rando, save_messages=True)
assert not access.can_copy(wfjt) assert not access.can_copy(wfjt)
warnings = access.messages warnings = access.messages
assert 1 in warnings assert 'inventories_unable_to_copy' in warnings
assert 'inventory' in warnings[1]
def test_workflow_copy_warnings_jt(self, wfjt, rando, job_template): def test_workflow_copy_warnings_jt(self, wfjt, rando, job_template):
wfjt.workflow_job_template_nodes.create(unified_job_template=job_template) wfjt.workflow_job_template_nodes.create(unified_job_template=job_template)
access = WorkflowJobTemplateAccess(rando, save_messages=True) access = WorkflowJobTemplateAccess(rando, save_messages=True)
assert not access.can_copy(wfjt) assert not access.can_copy(wfjt)
warnings = access.messages warnings = access.messages
assert 1 in warnings assert 'templates_unable_to_copy' in warnings
assert 'unified_job_template' in warnings[1]

View File

@@ -125,6 +125,7 @@ class TestWorkflowJobTemplateNodeSerializerCharPrompts():
serializer = WorkflowJobTemplateNodeSerializer() serializer = WorkflowJobTemplateNodeSerializer()
node = WorkflowJobTemplateNode(pk=1) node = WorkflowJobTemplateNode(pk=1)
node.char_prompts = {'limit': 'webservers'} node.char_prompts = {'limit': 'webservers'}
serializer.instance = node
view = FakeView(node) view = FakeView(node)
view.request = FakeRequest() view.request = FakeRequest()
view.request.method = "PATCH" view.request.method = "PATCH"

View File

@@ -5,7 +5,7 @@ import pytest
# AWX # AWX
from awx.main.scheduler.dag_simple import SimpleDAG from awx.main.scheduler.dag_simple import SimpleDAG
from awx.main.scheduler.dag_workflow import WorkflowDAG from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.main.models import Job from awx.main.models import Job, JobTemplate
from awx.main.models.workflow import WorkflowJobNode from awx.main.models.workflow import WorkflowJobNode
@@ -72,6 +72,7 @@ def factory_node():
if status: if status:
j = Job(status=status) j = Job(status=status)
wfn.job = j wfn.job = j
wfn.unified_job_template = JobTemplate(name='JT{}'.format(id))
return wfn return wfn
return fn return fn

View File

@@ -30,6 +30,7 @@ PARAM_NAMES = {
'password': 'LOG_AGGREGATOR_PASSWORD', 'password': 'LOG_AGGREGATOR_PASSWORD',
'enabled_loggers': 'LOG_AGGREGATOR_LOGGERS', 'enabled_loggers': 'LOG_AGGREGATOR_LOGGERS',
'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS', 'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS',
'enabled_flag': 'LOG_AGGREGATOR_ENABLED',
} }
@@ -48,6 +49,7 @@ class HTTPSHandler(logging.Handler):
def __init__(self, fqdn=False, **kwargs): def __init__(self, fqdn=False, **kwargs):
super(HTTPSHandler, self).__init__() super(HTTPSHandler, self).__init__()
self.fqdn = fqdn self.fqdn = fqdn
self.async = kwargs.get('async', True)
for fd in PARAM_NAMES: for fd in PARAM_NAMES:
# settings values take precedence over the input params # settings values take precedence over the input params
settings_name = PARAM_NAMES[fd] settings_name = PARAM_NAMES[fd]
@@ -100,11 +102,21 @@ class HTTPSHandler(logging.Handler):
payload_str = json.dumps(payload_input) payload_str = json.dumps(payload_input)
else: else:
payload_str = payload_input payload_str = payload_input
return dict(data=payload_str, background_callback=unused_callback) if self.async:
return dict(data=payload_str, background_callback=unused_callback)
else:
return dict(data=payload_str)
def skip_log(self, logger_name):
if self.host == '' or (not self.enabled_flag):
return True
if not logger_name.startswith('awx.analytics'):
# Tower log emission is only turned off by enablement setting
return False
return self.enabled_loggers is None or logger_name.split('.')[-1] not in self.enabled_loggers
def emit(self, record): def emit(self, record):
if (self.host == '' or self.enabled_loggers is None or if self.skip_log(record.name):
record.name.split('.')[-1] not in self.enabled_loggers):
return return
try: try:
payload = self.format(record) payload = self.format(record)
@@ -123,7 +135,10 @@ class HTTPSHandler(logging.Handler):
self.session.post(host, **self.get_post_kwargs(fact_payload)) self.session.post(host, **self.get_post_kwargs(fact_payload))
return return
self.session.post(host, **self.get_post_kwargs(payload)) if self.async:
self.session.post(host, **self.get_post_kwargs(payload))
else:
requests.post(host, auth=requests.auth.HTTPBasicAuth(self.username, self.password), **self.get_post_kwargs(payload))
except (KeyboardInterrupt, SystemExit): except (KeyboardInterrupt, SystemExit):
raise raise
except: except:
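The handlers.py hunk above factors the emit guard into skip_log: nothing is sent when the aggregator host is empty or logging is disabled, ordinary awx.* records then always pass, and awx.analytics.* records pass only if their leaf name is in the enabled loggers list. The same predicate restated standalone:

    def skip_log(logger_name, host, enabled_flag, enabled_loggers):
        if host == '' or not enabled_flag:
            return True
        if not logger_name.startswith('awx.analytics'):
            # Ordinary Tower logs are gated only by the enablement setting
            return False
        return enabled_loggers is None or logger_name.split('.')[-1] not in enabled_loggers

    print(skip_log('awx.main.tasks', 'logs.example.com', True, ['job_events']))                  # False
    print(skip_log('awx.analytics.activity_stream', 'logs.example.com', True, ['job_events']))   # True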

View File

@@ -185,8 +185,9 @@ def vars_validate_or_raise(vars_str):
except ValueError: except ValueError:
pass pass
try: try:
yaml.safe_load(vars_str) r = yaml.safe_load(vars_str)
return vars_str if not (isinstance(r, basestring) and r.startswith('OrderedDict(')):
return vars_str
except yaml.YAMLError: except yaml.YAMLError:
pass pass
raise RestValidationError(_('Must be valid JSON or YAML.')) raise RestValidationError(_('Must be valid JSON or YAML.'))
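The change above guards against a Python dict repr slipping through: yaml.safe_load happily returns "OrderedDict([...])" as a plain scalar string, so the parsed value is checked before the original string is accepted. A hedged Python 3 restatement (the original is Python 2 and raises a DRF ValidationError; a plain ValueError stands in here):

    import json
    import yaml

    def vars_validate_or_raise(vars_str):
        try:
            json.loads(vars_str)
            return vars_str
        except ValueError:
            pass
        try:
            r = yaml.safe_load(vars_str)
            # Reject the repr of an OrderedDict, which YAML parses as a harmless string
            if not (isinstance(r, str) and r.startswith('OrderedDict(')):
                return vars_str
        except yaml.YAMLError:
            pass
        raise ValueError('Must be valid JSON or YAML.')

    print(vars_validate_or_raise('foo: bar'))                 # accepted YAML
    try:
        vars_validate_or_raise("OrderedDict([('foo', 5)])")   # rejected repr
    except ValueError as e:
        print(e)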

View File

@@ -115,6 +115,12 @@
chdir: "{{project_path|quote}}/roles" chdir: "{{project_path|quote}}/roles"
when: doesRequirementsExist.stat.exists and scm_full_checkout|bool when: doesRequirementsExist.stat.exists and scm_full_checkout|bool
# format provided by ansible is ["Revision: 12345", "URL: ..."]
- name: parse subversion version string properly
set_fact:
scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}"
when: scm_type == 'svn'
- name: Repository Version - name: Repository Version
debug: msg="Repository Version {{ scm_version }}" debug: msg="Repository Version {{ scm_version }}"
when: scm_version is defined when: scm_version is defined
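The playbook task above strips subversion's multi-line version output down to the numeric revision with regex_replace. The same transformation expressed in Python, using a sample string in the format noted by the task's comment:

    import re

    scm_version = "Revision: 12345\nURL: https://svn.example.com/repo/trunk"
    revision = re.sub(r'^.*Revision: ([0-9]+).*$', r'\1', scm_version, flags=re.DOTALL)
    print(revision)  # 12345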

View File

@@ -73,7 +73,7 @@ DATABASES = {
# timezone as the operating system. # timezone as the operating system.
# If running in a Windows environment this must be set to the same as your # If running in a Windows environment this must be set to the same as your
# system time zone. # system time zone.
TIME_ZONE = 'America/New_York' TIME_ZONE = None
# Language code for this installation. All choices can be found here: # Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html # http://www.i18nguy.com/unicode/language-identifiers.html
@@ -154,7 +154,7 @@ STDOUT_MAX_BYTES_DISPLAY = 1048576
# Returned in the header on event api lists as a recommendation to the UI # Returned in the header on event api lists as a recommendation to the UI
# on how many events to display before truncating/hiding # on how many events to display before truncating/hiding
RECOMMENDED_MAX_EVENTS_DISPLAY_HEADER = 10000 RECOMMENDED_MAX_EVENTS_DISPLAY_HEADER = 4000
# The maximum size of the ansible callback event's res data structure # The maximum size of the ansible callback event's res data structure
# beyond this limit and the value will be removed # beyond this limit and the value will be removed
@@ -167,6 +167,15 @@ JOB_EVENT_WORKERS = 4
JOB_EVENT_MAX_QUEUE_SIZE = 5000 JOB_EVENT_MAX_QUEUE_SIZE = 5000
# Disallow sending session cookies over insecure connections
SESSION_COOKIE_SECURE = True
# Disallow sending csrf cookies over insecure connections
CSRF_COOKIE_SECURE = True
# Limit CSRF cookies to browser sessions
CSRF_COOKIE_AGE = None
TEMPLATE_CONTEXT_PROCESSORS = ( # NOQA TEMPLATE_CONTEXT_PROCESSORS = ( # NOQA
'django.contrib.auth.context_processors.auth', 'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug', 'django.core.context_processors.debug',
@@ -380,6 +389,7 @@ CELERY_ACCEPT_CONTENT = ['json']
CELERY_TRACK_STARTED = True CELERY_TRACK_STARTED = True
CELERYD_TASK_TIME_LIMIT = None CELERYD_TASK_TIME_LIMIT = None
CELERYD_TASK_SOFT_TIME_LIMIT = None CELERYD_TASK_SOFT_TIME_LIMIT = None
CELERYD_POOL_RESTARTS = True
CELERYBEAT_SCHEDULER = 'celery.beat.PersistentScheduler' CELERYBEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
CELERYBEAT_MAX_LOOP_INTERVAL = 60 CELERYBEAT_MAX_LOOP_INTERVAL = 60
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend' CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
@@ -882,7 +892,7 @@ LOGGING = {
}, },
'http_receiver': { 'http_receiver': {
'class': 'awx.main.utils.handlers.HTTPSNullHandler', 'class': 'awx.main.utils.handlers.HTTPSNullHandler',
'level': 'INFO', 'level': 'DEBUG',
'formatter': 'json', 'formatter': 'json',
'host': '', 'host': '',
}, },
@@ -981,7 +991,7 @@ LOGGING = {
'handlers': ['callback_receiver'], 'handlers': ['callback_receiver'],
}, },
'awx.main.tasks': { 'awx.main.tasks': {
'handlers': ['task_system'] 'handlers': ['task_system'],
}, },
'awx.main.scheduler': { 'awx.main.scheduler': {
'handlers': ['task_system'], 'handlers': ['task_system'],
@@ -1009,18 +1019,6 @@ LOGGING = {
'level': 'INFO', 'level': 'INFO',
'propagate': False 'propagate': False
}, },
'awx.analytics.job_events': {
'handlers': ['null'],
'level': 'INFO'
},
'awx.analytics.activity_stream': {
'handlers': ['null'],
'level': 'INFO'
},
'awx.analytics.system_tracking': {
'handlers': ['null'],
'level': 'INFO'
},
'django_auth_ldap': { 'django_auth_ldap': {
'handlers': ['console', 'file', 'tower_warnings'], 'handlers': ['console', 'file', 'tower_warnings'],
'level': 'DEBUG', 'level': 'DEBUG',

View File

@@ -24,11 +24,11 @@ ALLOWED_HOSTS = ['*']
mimetypes.add_type("image/svg+xml", ".svg", True) mimetypes.add_type("image/svg+xml", ".svg", True)
mimetypes.add_type("image/svg+xml", ".svgz", True) mimetypes.add_type("image/svg+xml", ".svgz", True)
MONGO_HOST = '127.0.0.1' # Disallow sending session cookies over insecure connections
MONGO_PORT = 27017 SESSION_COOKIE_SECURE = False
MONGO_USERNAME = None
MONGO_PASSWORD = None # Disallow sending csrf cookies over insecure connections
MONGO_DB = 'system_tracking_dev' CSRF_COOKIE_SECURE = False
# Override django.template.loaders.cached.Loader in defaults.py # Override django.template.loaders.cached.Loader in defaults.py
TEMPLATE_LOADERS = ( TEMPLATE_LOADERS = (

View File

@@ -114,7 +114,7 @@ SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
# timezone as the operating system. # timezone as the operating system.
# If running in a Windows environment this must be set to the same as your # If running in a Windows environment this must be set to the same as your
# system time zone. # system time zone.
TIME_ZONE = 'America/New_York' TIME_ZONE = None
# Language code for this installation. All choices can be found here: # Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html # http://www.i18nguy.com/unicode/language-identifiers.html

View File

@@ -71,7 +71,7 @@ SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
# timezone as the operating system. # timezone as the operating system.
# If running in a Windows environment this must be set to the same as your # If running in a Windows environment this must be set to the same as your
# system time zone. # system time zone.
TIME_ZONE = 'America/New_York' TIME_ZONE = None
# Language code for this installation. All choices can be found here: # Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html # http://www.i18nguy.com/unicode/language-identifiers.html

View File

@@ -299,7 +299,10 @@ class LDAPGroupTypeField(fields.ChoiceField):
data = super(LDAPGroupTypeField, self).to_internal_value(data) data = super(LDAPGroupTypeField, self).to_internal_value(data)
if not data: if not data:
return None return None
return getattr(django_auth_ldap.config, data)() if data.endswith('MemberDNGroupType'):
return getattr(django_auth_ldap.config, data)(member_attr='member')
else:
return getattr(django_auth_ldap.config, data)()
class LDAPUserFlagsField(fields.DictField): class LDAPUserFlagsField(fields.DictField):
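The conf.py hunk above constructs MemberDN-based LDAP group types with an explicit member attribute. A short sketch of that branch against django_auth_ldap (assumes the library and python-ldap are importable; 'member' is the attribute used in the diff):

    import django_auth_ldap.config

    def build_group_type(name):
        # MemberDN-style group types need to know which attribute lists members;
        # other group types are constructed with their defaults.
        if name.endswith('MemberDNGroupType'):
            return getattr(django_auth_ldap.config, name)(member_attr='member')
        return getattr(django_auth_ldap.config, name)()

    print(type(build_group_type('MemberDNGroupType')).__name__)  # MemberDNGroupType
    print(type(build_group_type('PosixGroupType')).__name__)     # PosixGroupType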

View File

@@ -52,7 +52,7 @@
<div class="col-sm-6"> <div class="col-sm-6">
</div> </div>
<div class="col-sm-6 footer-copyright"> <div class="col-sm-6 footer-copyright">
Copyright &copy; 2016 <a href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc. All Rights Reserved. Copyright &copy; 2017 <a href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc. All Rights Reserved.
</div> </div>
</div> </div>
</div> </div>

View File

@@ -729,18 +729,6 @@ legend {
.navigation { .navigation {
margin: 15px 0 15px 0; margin: 15px 0 15px 0;
} }
.modal-body {
.alert {
padding: 0;
border: none;
margin: 0;
}
.alert-danger {
background-color: @default-bg;
border: none;
color: @default-interface-txt;
}
}
.footer-navigation { .footer-navigation {
margin: 10px 0 10px 0; margin: 10px 0 10px 0;
@@ -1638,17 +1626,19 @@ tr td button i {
} }
/* overrides to TB modal */ /* overrides to TB modal */
.modal-content {
padding: 20px;
}
.modal-header { .modal-header {
color: @default-interface-txt; color: @default-interface-txt;
margin: .1em 0;
white-space: nowrap; white-space: nowrap;
width: 90%; width: 90%;
overflow: hidden; overflow: hidden;
text-overflow: ellipsis; text-overflow: ellipsis;
width: 100%; width: 100%;
border: none; border: none;
padding: 12px 14px 0 12px; padding: 0;
} }
.modal { .modal {
@@ -1677,8 +1667,18 @@ tr td button i {
} }
.modal-body { .modal-body {
padding: 20px 14px 7px 14px;
min-height: 120px; min-height: 120px;
padding: 20px 0;
.alert {
padding: 10px;
margin: 0;
}
.alert-danger {
background-color: @default-bg;
border: none;
color: @default-interface-txt;
}
} }
#prompt-modal .modal-body { #prompt-modal .modal-body {
@@ -1690,15 +1690,15 @@ tr td button i {
} }
.modal-footer { .modal-footer {
padding: .3em 1em .5em .4em; padding: 0;
border: none; border: none;
margin-top: 0;
.btn.btn-primary { .btn.btn-primary {
text-transform: uppercase; text-transform: uppercase;
background-color: @default-succ; background-color: @default-succ;
border-color: @default-succ; border-color: @default-succ;
padding: 5px 15px; padding: 5px 15px;
margin: .5em .4em .5em 0;
cursor: pointer; cursor: pointer;
&:hover { &:hover {
@@ -1720,8 +1720,7 @@ tr td button i {
/* PW progress bar */ /* PW progress bar */
.pw-progress { .pw-progress { margin-top: 10px;
margin-top: 10px;
li { li {
line-height: normal; line-height: normal;
@@ -2219,10 +2218,6 @@ a:hover {
font-family: 'Open Sans'; font-family: 'Open Sans';
} }
.modal-body .alert {
padding: 10px;
}
.WorkflowBadge{ .WorkflowBadge{
background-color: @b7grey; background-color: @b7grey;
border-radius: 10px; border-radius: 10px;

View File

@@ -44,11 +44,10 @@
color: @list-header-txt; color: @list-header-txt;
font-size: 14px; font-size: 14px;
font-weight: bold; font-weight: bold;
padding-bottom: 25px;
min-height: 45px;
word-break: break-all; word-break: break-all;
max-width: 90%; max-width: 90%;
word-wrap: break-word; word-wrap: break-word;
margin-bottom: 20px;
} }
.Form-secondaryTitle{ .Form-secondaryTitle{

View File

@@ -11,19 +11,19 @@
<!-- Don't indent this properly, you'll break the cow --> <!-- Don't indent this properly, you'll break the cow -->
<pre class="About-cowsay--code"> <pre class="About-cowsay--code">
________________ ________________
/ Tower {{version_str}} \\ / Tower {{version_str}} \
\\<span>{{version}}</span>/ \<span>{{version}}</span>/
---------------- ----------------
\\ ^__^ \ ^__^
\\ (oo)\\_______ \ (oo)\_______
(__) A )\\/\\ (__) A )\/\
||----w | ||----w |
|| || || ||
</pre> </pre>
</div> </div>
<div class="About-modal--footer"> <div class="About-modal--footer">
<img class="About-brand--redhat img-responsive" src="/static/assets/tower-logo-login.svg" /> <img class="About-brand--redhat img-responsive" src="/static/assets/tower-logo-login.svg" />
<p class="text-right">Copyright &copy; 2016 Red Hat, Inc. <br> <p class="text-right">Copyright &copy; 2017 Red Hat, Inc. <br>
Visit <a href="http://www.ansible.com/" target="_blank">Ansible.com</a> for more information.<br> Visit <a href="http://www.ansible.com/" target="_blank">Ansible.com</a> for more information.<br>
</div> </div>
</div> </div>

View File

@@ -15,7 +15,8 @@ export default ['templateUrl', '$state',
usersDataset: '=', usersDataset: '=',
teamsDataset: '=', teamsDataset: '=',
resourceData: '=', resourceData: '=',
withoutTeamPermissions: '@' withoutTeamPermissions: '@',
title: '@'
}, },
controller: controller, controller: controller,
templateUrl: templateUrl('access/add-rbac-resource/rbac-resource'), templateUrl: templateUrl('access/add-rbac-resource/rbac-resource'),

View File

@@ -6,9 +6,9 @@
<div class="List-header"> <div class="List-header">
<div class="List-title"> <div class="List-title">
<div class="List-titleText ng-binding"> <div class="List-titleText ng-binding">
{{ object.name }} {{ object.name || object.username }}
<div class="List-titleLockup"></div> <div class="List-titleLockup"></div>
Add Permissions {{ title }}
</div> </div>
</div> </div>
<div class="Form-exitHolder"> <div class="Form-exitHolder">

View File

@@ -42,8 +42,6 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
list.listTitleBadge = false; list.listTitleBadge = false;
// @issue - fix field.columnClass values for this view
switch(scope.resourceType){ switch(scope.resourceType){
case 'projects': case 'projects':
@@ -51,6 +49,8 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
name: list.fields.name, name: list.fields.name,
scm_type: list.fields.scm_type scm_type: list.fields.scm_type
}; };
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
list.fields.scm_type.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
case 'inventories': case 'inventories':
@@ -58,6 +58,8 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
name: list.fields.name, name: list.fields.name,
organization: list.fields.organization organization: list.fields.organization
}; };
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
list.fields.organization.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
case 'job_templates': case 'job_templates':
@@ -67,6 +69,8 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
name: list.fields.name, name: list.fields.name,
description: list.fields.description description: list.fields.description
}; };
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
case 'workflow_templates': case 'workflow_templates':
@@ -77,12 +81,16 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
name: list.fields.name, name: list.fields.name,
description: list.fields.description description: list.fields.description
}; };
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
case 'credentials': case 'credentials':
list.fields = { list.fields = {
name: list.fields.name, name: list.fields.name,
description: list.fields.description description: list.fields.description
}; };
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
} }
list.fields = _.each(list.fields, (field) => field.nosort = true); list.fields = _.each(list.fields, (field) => field.nosort = true);

View File

@@ -11,7 +11,8 @@ export default ['templateUrl',
return { return {
restrict: 'E', restrict: 'E',
scope: { scope: {
resolve: "=" resolve: "=",
title: "@",
}, },
controller: controller, controller: controller,
templateUrl: templateUrl('access/add-rbac-user-team/rbac-user-team'), templateUrl: templateUrl('access/add-rbac-user-team/rbac-user-team'),

View File

@@ -7,9 +7,9 @@
<div class="List-header"> <div class="List-header">
<div class="List-title"> <div class="List-title">
<div class="List-titleText ng-binding"> <div class="List-titleText ng-binding">
{{ owner.name }} {{ owner.name || owner.username }}
<div class="List-titleLockup"></div> <div class="List-titleLockup"></div>
Add Permissions {{ title }}
</div> </div>
</div> </div>
<div class="Form-exitHolder"> <div class="Form-exitHolder">

View File

@@ -51,6 +51,10 @@
padding-top: 20px; padding-top: 20px;
} }
.AddPermissions-list {
margin-bottom: 20px;
}
.AddPermissions-list .List-searchRow { .AddPermissions-list .List-searchRow {
height: 0px; height: 0px;
} }

View File

@@ -34,7 +34,7 @@
username: { username: {
key: true, key: true,
label: 'Username', label: 'Username',
columnClass: 'col-md-3 col-sm-3 col-xs-9' columnClass: 'col-md-5 col-sm-5 col-xs-11'
}, },
}, },

View File

@@ -43,6 +43,8 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
name: list.fields.name, name: list.fields.name,
scm_type: list.fields.scm_type scm_type: list.fields.scm_type
}; };
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
list.fields.scm_type.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
case 'Inventories': case 'Inventories':
@@ -50,6 +52,8 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
name: list.fields.name, name: list.fields.name,
organization: list.fields.organization organization: list.fields.organization
}; };
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
list.fields.organization.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
case 'JobTemplates': case 'JobTemplates':
@@ -59,6 +63,8 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
name: list.fields.name, name: list.fields.name,
description: list.fields.description description: list.fields.description
}; };
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
case 'WorkflowTemplates': case 'WorkflowTemplates':
@@ -69,6 +75,8 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
name: list.fields.name, name: list.fields.name,
description: list.fields.description description: list.fields.description
}; };
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
case 'Users': case 'Users':
list.fields = { list.fields = {
@@ -76,12 +84,25 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
first_name: list.fields.first_name, first_name: list.fields.first_name,
last_name: list.fields.last_name last_name: list.fields.last_name
}; };
list.fields.username.columnClass = 'col-md-5 col-sm-5 col-xs-11';
list.fields.first_name.columnClass = 'col-md-3 col-sm-3 hidden-xs';
list.fields.last_name.columnClass = 'col-md-3 col-sm-3 hidden-xs';
break;
case 'Teams':
list.fields = {
name: list.fields.name,
organization: list.fields.organization,
};
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
list.fields.organization.columnClass = 'col-md-5 col-sm-5 hidden-xs';
break; break;
default: default:
list.fields = { list.fields = {
name: list.fields.name, name: list.fields.name,
description: list.fields.description description: list.fields.description
}; };
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
} }
list_html = generateList.build({ list_html = generateList.build({

View File

@@ -9,7 +9,7 @@
<div class="RoleList-tag" <div class="RoleList-tag"
ng-class="{'RoleList-tag--deletable': entry.explicit && entry.user_capabilities.unattach, ng-class="{'RoleList-tag--deletable': entry.explicit && entry.user_capabilities.unattach,
'RoleList-tag--team': entry.team_id}" 'RoleList-tag--team': entry.team_id}"
aw-tool-tip='{{entry.team_name | sanitize}}' aw-tip-placement='bottom'> aw-tool-tip='<div>Organization: {{ entry.team_organization_name | sanitize }}</div><div>Team: {{entry.team_name | sanitize}}</div>' aw-tip-placement='bottom'>
<span class="RoleList-name">{{ entry.name }}</span> <span class="RoleList-name">{{ entry.name }}</span>
<i ng-show='entry.team_id' class="fa fa-users"></i> <i ng-show='entry.team_id' class="fa fa-users"></i>
</div> </div>

View File

@@ -12,6 +12,7 @@
function activityStreamController($scope, $state, subTitle, Stream, GetTargetTitle, list, Dataset) { function activityStreamController($scope, $state, subTitle, Stream, GetTargetTitle, list, Dataset) {
init(); init();
initOmitSmartTags();
function init() { function init() {
// search init // search init
@@ -33,6 +34,20 @@ function activityStreamController($scope, $state, subTitle, Stream, GetTargetTit
}); });
} }
// Specification of smart-tags omission from the UI is done in the route/state init.
// A limitation is that this specification is static and the key to be omitted from
// the smart-tags must be known at that time.
// In the case of the activity stream, we want to dynamically omit the resource for which
// we are displaying the activity stream, e.g. 'project', 'credential', etc.
function initOmitSmartTags() {
let defaults, route = _.find($state.$current.path, (step) => {
return step.params.hasOwnProperty('activity_search');
});
if (route && $state.params.target !== undefined) {
defaults = route.params.activity_search.config.value;
defaults[$state.params.target] = null;
}
}
} }
export default ['$scope', '$state', 'subTitle', 'Stream', 'GetTargetTitle', 'StreamList', 'Dataset', activityStreamController]; export default ['$scope', '$state', 'subTitle', 'Stream', 'GetTargetTitle', 'StreamList', 'Dataset', activityStreamController];

View File

@@ -35,5 +35,6 @@
margin-bottom: 0; margin-bottom: 0;
max-height: 200px; max-height: 200px;
overflow: scroll; overflow: scroll;
overflow-x: auto;
color: @as-detail-changes-txt; color: @as-detail-changes-txt;
} }

View File

@@ -22,7 +22,7 @@
</div> </div>
</div> </div>
<div class="Modal-footer"> <div class="Modal-footer">
<a href="#" data-target="#stream-detail-modal" data-dismiss="modal" id="action_cancel_btn" class="btn btn-primary StreamDetail-actionButton">OK</a> <a href="#" data-target="#stream-detail-modal" data-dismiss="modal" id="action_cancel_btn" class="btn btn-default StreamDetail-actionButton">OK</a>
</div> </div>
</div> </div>
</div> </div>

View File

@@ -47,6 +47,9 @@ export default
order_by: '-timestamp', order_by: '-timestamp',
page_size: '20', page_size: '20',
}; };
if (streamConfig.activityStreamTarget && streamConfig.activityStreamId) {
stateGoParams.activity_search[streamConfig.activityStreamTarget] = $state.params[streamConfig.activityStreamId];
}
} }
else { else {
stateGoParams.activity_search = { stateGoParams.activity_search = {

View File

@@ -21,7 +21,7 @@ export default
}); });
}); });
// Remove the clone from the dom // Remove the clone from the dom
$breadcrumbClone.remove();console.log(availableWidth); $breadcrumbClone.remove();
if(expandedBreadcrumbWidth > availableWidth) { if(expandedBreadcrumbWidth > availableWidth) {
let widthToTrim = expandedBreadcrumbWidth - availableWidth; let widthToTrim = expandedBreadcrumbWidth - availableWidth;
// Sort the crumbs from biggest to smallest // Sort the crumbs from biggest to smallest

View File

@@ -16,7 +16,8 @@
reset: 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY' reset: 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY'
}, },
SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: { SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: {
type: 'text', type: 'sensitive',
hasShowInputButton: true,
reset: 'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET' reset: 'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET'
}, },
SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: { SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: {
@@ -38,8 +39,8 @@
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -16,7 +16,8 @@ export default ['i18n', function(i18n) {
reset: 'SOCIAL_AUTH_GITHUB_ORG_KEY' reset: 'SOCIAL_AUTH_GITHUB_ORG_KEY'
}, },
SOCIAL_AUTH_GITHUB_ORG_SECRET: { SOCIAL_AUTH_GITHUB_ORG_SECRET: {
type: 'text', type: 'sensitive',
hasShowInputButton: true,
reset: 'SOCIAL_AUTH_GITHUB_ORG_SECRET' reset: 'SOCIAL_AUTH_GITHUB_ORG_SECRET'
}, },
SOCIAL_AUTH_GITHUB_ORG_NAME: { SOCIAL_AUTH_GITHUB_ORG_NAME: {
@@ -28,8 +29,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -16,7 +16,8 @@ export default ['i18n', function(i18n) {
reset: 'SOCIAL_AUTH_GITHUB_TEAM_KEY' reset: 'SOCIAL_AUTH_GITHUB_TEAM_KEY'
}, },
SOCIAL_AUTH_GITHUB_TEAM_SECRET: { SOCIAL_AUTH_GITHUB_TEAM_SECRET: {
type: 'text', type: 'sensitive',
hasShowInputButton: true,
reset: 'SOCIAL_AUTH_GITHUB_TEAM_SECRET' reset: 'SOCIAL_AUTH_GITHUB_TEAM_SECRET'
}, },
SOCIAL_AUTH_GITHUB_TEAM_ID: { SOCIAL_AUTH_GITHUB_TEAM_ID: {
@@ -28,8 +29,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -16,7 +16,8 @@ export default ['i18n', function(i18n) {
reset: 'SOCIAL_AUTH_GITHUB_KEY' reset: 'SOCIAL_AUTH_GITHUB_KEY'
}, },
SOCIAL_AUTH_GITHUB_SECRET: { SOCIAL_AUTH_GITHUB_SECRET: {
type: 'text', type: 'sensitive',
hasShowInputButton: true,
reset: 'SOCIAL_AUTH_GITHUB_SECRET' reset: 'SOCIAL_AUTH_GITHUB_SECRET'
} }
}, },
@@ -24,8 +25,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -16,7 +16,8 @@ export default ['i18n', function(i18n) {
reset: 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY' reset: 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY'
}, },
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: { SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: {
type: 'text', type: 'sensitive',
hasShowInputButton: true,
reset: 'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET' reset: 'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET'
}, },
SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: { SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: {
@@ -36,8 +37,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -21,7 +21,8 @@ export default ['i18n', function(i18n) {
reset: 'AUTH_LDAP_BIND_DN' reset: 'AUTH_LDAP_BIND_DN'
}, },
AUTH_LDAP_BIND_PASSWORD: { AUTH_LDAP_BIND_PASSWORD: {
type: 'password' type: 'sensitive',
hasShowInputButton: true,
}, },
AUTH_LDAP_USER_SEARCH: { AUTH_LDAP_USER_SEARCH: {
type: 'textarea', type: 'textarea',
@@ -84,8 +85,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -21,7 +21,8 @@ export default ['i18n', function(i18n) {
reset: 'RADIUS_PORT' reset: 'RADIUS_PORT'
}, },
RADIUS_SECRET: { RADIUS_SECRET: {
type: 'text', type: 'sensitive',
hasShowInputButton: true,
reset: 'RADIUS_SECRET' reset: 'RADIUS_SECRET'
} }
}, },
@@ -29,8 +30,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -20,7 +20,8 @@ export default ['i18n', function(i18n) {
reset: 'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT' reset: 'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT'
}, },
SOCIAL_AUTH_SAML_SP_PRIVATE_KEY: { SOCIAL_AUTH_SAML_SP_PRIVATE_KEY: {
type: 'text', type: 'sensitive',
hasShowInputButton: true,
reset: 'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY' reset: 'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY'
}, },
SOCIAL_AUTH_SAML_ORG_INFO: { SOCIAL_AUTH_SAML_ORG_INFO: {
@@ -56,8 +57,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -12,6 +12,19 @@
float: right float: right
} }
.Form-resetAll {
border: none;
padding: 0;
background-color: @white;
margin-right: auto;
color: @default-link;
font-size: 12px;
&:hover {
color: @default-link-hov;
}
}
.Form-tab { .Form-tab {
min-width: 77px; min-width: 77px;
} }

View File

@@ -64,8 +64,8 @@
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -22,8 +22,8 @@
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -30,7 +30,8 @@
reset: 'LOG_AGGREGATOR_USERNAME' reset: 'LOG_AGGREGATOR_USERNAME'
}, },
LOG_AGGREGATOR_PASSWORD: { LOG_AGGREGATOR_PASSWORD: {
type: 'text', type: 'sensitive',
hasShowInputButton: true,
reset: 'LOG_AGGREGATOR_PASSWORD' reset: 'LOG_AGGREGATOR_PASSWORD'
}, },
LOG_AGGREGATOR_LOGGERS: { LOG_AGGREGATOR_LOGGERS: {
@@ -48,8 +49,8 @@
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -26,8 +26,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -32,8 +32,8 @@ export default ['i18n', function(i18n) {
buttons: { buttons: {
reset: { reset: {
ngClick: 'vm.resetAllConfirm()', ngClick: 'vm.resetAllConfirm()',
label: i18n._('Reset All'), label: i18n._('Revert all to default'),
class: 'Form-button--left Form-cancelButton' class: 'Form-resetAll'
}, },
cancel: { cancel: {
ngClick: 'vm.formCancel()', ngClick: 'vm.formCancel()',

View File

@@ -113,7 +113,7 @@ CredentialsList.$inject = ['$scope', '$rootScope', '$location', '$log',
export function CredentialsAdd($scope, $rootScope, $compile, $location, $log, export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
$stateParams, CredentialForm, GenerateForm, Rest, Alert, ProcessErrors, $stateParams, CredentialForm, GenerateForm, Rest, Alert, ProcessErrors,
ClearScope, GetBasePath, GetChoices, Empty, KindChange, ClearScope, GetBasePath, GetChoices, Empty, KindChange, BecomeMethodChange,
OwnerChange, FormSave, $state, CreateSelect2) { OwnerChange, FormSave, $state, CreateSelect2) {
ClearScope(); ClearScope();
@@ -221,6 +221,10 @@ export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
KindChange({ scope: $scope, form: form, reset: true }); KindChange({ scope: $scope, form: form, reset: true });
}; };
$scope.becomeMethodChange = function() {
BecomeMethodChange({ scope: $scope });
};
// Save // Save
$scope.formSave = function() { $scope.formSave = function() {
if ($scope[form.name + '_form'].$valid) { if ($scope[form.name + '_form'].$valid) {
@@ -276,14 +280,14 @@ export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
CredentialsAdd.$inject = ['$scope', '$rootScope', '$compile', '$location', CredentialsAdd.$inject = ['$scope', '$rootScope', '$compile', '$location',
'$log', '$stateParams', 'CredentialForm', 'GenerateForm', 'Rest', 'Alert', '$log', '$stateParams', 'CredentialForm', 'GenerateForm', 'Rest', 'Alert',
'ProcessErrors', 'ClearScope', 'GetBasePath', 'GetChoices', 'Empty', 'KindChange', 'ProcessErrors', 'ClearScope', 'GetBasePath', 'GetChoices', 'Empty', 'KindChange', 'BecomeMethodChange',
'OwnerChange', 'FormSave', '$state', 'CreateSelect2' 'OwnerChange', 'FormSave', '$state', 'CreateSelect2'
]; ];
export function CredentialsEdit($scope, $rootScope, $compile, $location, $log, export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
$stateParams, CredentialForm, Rest, Alert, ProcessErrors, ClearScope, Prompt, $stateParams, CredentialForm, Rest, Alert, ProcessErrors, ClearScope, Prompt,
GetBasePath, GetChoices, KindChange, Empty, OwnerChange, FormSave, Wait, GetBasePath, GetChoices, KindChange, BecomeMethodChange, Empty, OwnerChange, FormSave, Wait,
$state, CreateSelect2, Authorization) { $state, CreateSelect2, Authorization, i18n) {
ClearScope(); ClearScope();
@@ -336,19 +340,15 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
}); });
} }
// if the credential is assigned to an organization, allow permission delegation $scope.$watch('organization', function(val) {
// do NOT use $scope.organization in a view directive to determine if a credential is associated with an org if (val === undefined) {
// @todo why not? ^ and what is this type check for a number doing - should this be a type check for undefined? $scope.permissionsTooltip = i18n._('Credentials are only shared within an organization. Assign credentials to an organization to delegate credential permissions. The organization cannot be edited after credentials are assigned.');
$scope.disablePermissionAssignment = typeof($scope.organization) === 'number' ? false : true; } else {
if ($scope.disablePermissionAssignment) { $scope.permissionsTooltip = '';
$scope.permissionsTooltip = 'Credentials are only shared within an organization. Assign credentials to an organization to delegate credential permissions. The organization cannot be edited after credentials are assigned.'; }
}
setAskCheckboxes();
KindChange({
scope: $scope,
form: form,
reset: false
}); });
setAskCheckboxes();
OwnerChange({ scope: $scope }); OwnerChange({ scope: $scope });
$scope.$watch("ssh_key_data", function(val) { $scope.$watch("ssh_key_data", function(val) {
if (val === "" || val === null || val === undefined) { if (val === "" || val === null || val === undefined) {
@@ -453,6 +453,13 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
break; break;
} }
} }
KindChange({
scope: $scope,
form: form,
reset: false
});
master.kind = $scope.kind; master.kind = $scope.kind;
CreateSelect2({ CreateSelect2({
@@ -518,6 +525,10 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
KindChange({ scope: $scope, form: form, reset: true }); KindChange({ scope: $scope, form: form, reset: true });
}; };
$scope.becomeMethodChange = function() {
BecomeMethodChange({ scope: $scope });
};
$scope.formCancel = function() { $scope.formCancel = function() {
$state.transitionTo('credentials'); $state.transitionTo('credentials');
}; };
@@ -612,6 +623,6 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
CredentialsEdit.$inject = ['$scope', '$rootScope', '$compile', '$location', CredentialsEdit.$inject = ['$scope', '$rootScope', '$compile', '$location',
'$log', '$stateParams', 'CredentialForm', 'Rest', 'Alert', '$log', '$stateParams', 'CredentialForm', 'Rest', 'Alert',
'ProcessErrors', 'ClearScope', 'Prompt', 'GetBasePath', 'GetChoices', 'ProcessErrors', 'ClearScope', 'Prompt', 'GetBasePath', 'GetChoices',
'KindChange', 'Empty', 'OwnerChange', 'KindChange', 'BecomeMethodChange', 'Empty', 'OwnerChange',
'FormSave', 'Wait', '$state', 'CreateSelect2', 'Authorization' 'FormSave', 'Wait', '$state', 'CreateSelect2', 'Authorization', 'i18n',
]; ];
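The one-time typeof check on $scope.organization gives way to a $watch, so the permissions tooltip follows the field as it changes rather than being computed once at load. A minimal sketch of that pattern, with a hypothetical hint string standing in for the shipped i18n text:

    // sketch only: re-evaluates whenever the bound organization changes
    var permissionsHint = 'Assign the credential to an organization to delegate permissions.';
    $scope.$watch('organization', function(val) {
        $scope.permissionsTooltip = (val === undefined) ? permissionsHint : '';
    });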

View File

@@ -0,0 +1,36 @@
/** @define OwnerList */
@import "./client/src/shared/branding/colors.default.less";
.OwnerList {
display: flex;
flex-wrap: wrap;
align-items: flex-start;
}
.OwnerList-seeBase {
display: flex;
max-width: 100%;
color: @default-link;
text-transform: uppercase;
padding: 2px 15px;
cursor: pointer;
border-radius: 5px;
font-size: 11px;
}
.OwnerList-seeBase:hover {
color: @default-link-hov;
}
.OwnerList-seeLess {
.OwnerList-seeBase;
}
.OwnerList-seeMore {
.OwnerList-seeBase;
}
.OwnerList-Container {
margin-right: 5px;
}

View File

@@ -1,5 +1,12 @@
<div ng-repeat="owner in owners_list"> <div class="OwnerList" ng-init="ownersLimit = 5; ownersLimitConst = 5; ">
<a ng-if="owner.type === 'organization'" ui-sref="organizations.edit({ organization_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a> <div class="OwnerList-Container" ng-repeat="owner in owners_list | limitTo:ownersLimit">
<a ng-if="owner.type === 'user'" ui-sref="users.edit({ user_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a> <a ng-if="owner.type === 'organization'" ui-sref="organizations.edit({ organization_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
<a ng-if="owner.type === 'team'" ui-sref="teams.edit({ team_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a> <a ng-if="owner.type === 'user'" ui-sref="users.edit({ user_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
</div> <a ng-if="owner.type === 'team'" ui-sref="teams.edit({ team_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
</div>
<div class="OwnerList-seeMore" ng-show="owners_list.length > ownersLimitConst && ownersLimit == ownersLimitConst"
ng-click="ownersLimit = owners_list.length">View More</div>
<div class="OwnerList-seeLess" ng-show="owners_list.length > ownersLimitConst && ownersLimit != ownersLimitConst"
ng-click="ownersLimit = ownersLimitConst">View Less</div>
</div>

View File

@@ -32,34 +32,21 @@
} }
.DashboardList-viewAll { .DashboardList-viewAll {
color: @btn-txt; font-size: 11px;
background-color: @btn-bg;
font-size: 12px;
border: 1px solid @default-icon-hov;
border-radius: 5px;
margin-right: 15px; margin-right: 15px;
margin-top: 10px; margin-top: 13px;
margin-bottom: 10px; margin-bottom: 10px;
padding-left: 10px; padding-left: 10px;
padding-right: 10px; padding-right: 10px;
padding-bottom: 5px; padding-bottom: 5px;
padding-top: 5px; padding-top: 5px;
transition: background-color 0.2s;
}
.DashboardList-viewAll:hover {
color: @btn-txt;
background-color: @btn-bg-hov;
}
.DashboardList-viewAll:focus {
color: @btn-txt;
} }
.DashboardList-container { .DashboardList-container {
flex: 1; flex: 1;
width: 100%; width: 100%;
padding: 20px; padding: 20px;
padding-top: 0;
} }
.DashboardList-tableHeader--name { .DashboardList-tableHeader--name {

View File

@@ -1,3 +1,3 @@
<footer class='Footer'> <footer class='Footer'>
<div class="Footer-copyright" ng-class="{'is-loggedOut' : !current_user || !current_user.username}">Copyright &copy 2016 <a class="Footer-link" href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc.</div> <div class="Footer-copyright" ng-class="{'is-loggedOut' : !current_user || !current_user.username}">Copyright &copy 2017 <a class="Footer-link" href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc.</div>
</footer> </footer>

View File

@@ -288,7 +288,8 @@ export default
dataPlacement: 'right', dataPlacement: 'right',
dataContainer: "body", dataContainer: "body",
subForm: 'credentialSubForm', subForm: 'credentialSubForm',
ngDisabled: '!(credential_obj.summary_fields.user_capabilities.edit || canAdd)' ngDisabled: '!(credential_obj.summary_fields.user_capabilities.edit || canAdd)',
ngChange: 'becomeMethodChange()',
}, },
"become_username": { "become_username": {
labelBind: 'becomeUsernameLabel', labelBind: 'becomeUsernameLabel',
@@ -420,9 +421,12 @@ export default
related: { related: {
permissions: { permissions: {
disabled: 'disablePermissionAssignment', disabled: '(organization === undefined ? true : false)',
// Do not transition the state if organization is undefined
ngClick: `(organization === undefined ? true : false)||$state.go('credentials.edit.permissions')`,
awToolTip: '{{permissionsTooltip}}', awToolTip: '{{permissionsTooltip}}',
dataTipWatch: 'permissionsTooltip', dataTipWatch: 'permissionsTooltip',
awToolTipTabEnabledInEditMode: true,
dataPlacement: 'top', dataPlacement: 'top',
basePath: 'api/v1/credentials/{{$stateParams.credential_id}}/access_list/', basePath: 'api/v1/credentials/{{$stateParams.credential_id}}/access_list/',
search: { search: {
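The permissions tab guard now lives in the form definition itself: disabled and ngClick evaluate the same expression, and because ngClick is a short-circuit OR, $state.go() only runs when the guard is falsy. A compact illustration of the expression pattern, using a hypothetical flag name rather than the shipped form:

    // sketch only: when 'locked' is truthy the || short-circuits and the
    // state transition never fires; the same flag also drives 'disabled'
    permissions: {
        disabled: 'locked',
        ngClick: "locked || $state.go('credentials.edit.permissions')"
    }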

View File

@@ -43,7 +43,7 @@ export default
label: 'Variables', label: 'Variables',
type: 'textarea', type: 'textarea',
class: 'Form-textAreaLabel Form-formGroup--fullWidth', class: 'Form-textAreaLabel Form-formGroup--fullWidth',
rows: 12, rows: 6,
'default': '---', 'default': '---',
dataTitle: 'Group Variables', dataTitle: 'Group Variables',
dataPlacement: 'right', dataPlacement: 'right',
@@ -69,6 +69,11 @@ export default
ngModel: 'source' ngModel: 'source'
}, },
credential: { credential: {
// initializes a default value for this search param
// search params with default values set will not generate user-interactable search tags
search: {
kind: null
},
label: 'Cloud Credential', label: 'Cloud Credential',
type: 'lookup', type: 'lookup',
list: 'CredentialList', list: 'CredentialList',
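As the comment notes, a search parameter given a default value is applied to the lookup's query without surfacing as a removable search tag in the UI. A minimal sketch of a lookup field using that convention (hypothetical field, mirroring the page_size defaults used elsewhere in these forms):

    // sketch only: defaults under 'search' shape the lookup's request but
    // never appear as user-removable search tags
    cloud_credential: {
        label: 'Cloud Credential',
        type: 'lookup',
        list: 'CredentialList',
        search: {
            page_size: '5'
        }
    }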

View File

@@ -78,7 +78,7 @@ angular.module('InventoryFormDefinition', ['ScanJobsListDefinition'])
}, },
close: { close: {
ngClick: 'formCancel()', ngClick: 'formCancel()',
ngHide: '(inventory_obj.summary_fields.user_capabilities.edit || canAdd)' ngShow: '!(inventory_obj.summary_fields.user_capabilities.edit || canAdd)'
}, },
save: { save: {
ngClick: 'formSave()', ngClick: 'formSave()',
@@ -103,7 +103,7 @@ angular.module('InventoryFormDefinition', ['ScanJobsListDefinition'])
add: { add: {
label: i18n._('Add'), label: i18n._('Add'),
ngClick: "$state.go('.add')", ngClick: "$state.go('.add')",
awToolTip: 'Add a permission', awToolTip: i18n._('Add a permission'),
actionClass: 'btn List-buttonSubmit', actionClass: 'btn List-buttonSubmit',
buttonContent: '&#43; ADD', buttonContent: '&#43; ADD',
ngShow: '(inventory_obj.summary_fields.user_capabilities.edit || canAdd)' ngShow: '(inventory_obj.summary_fields.user_capabilities.edit || canAdd)'

View File

@@ -68,7 +68,7 @@ export default
searchType: 'select', searchType: 'select',
actions: { actions: {
add: { add: {
ngClick: "addPermission", ngClick: "$state.go('.add')",
label: i18n._('Add'), label: i18n._('Add'),
awToolTip: i18n._('Add a permission'), awToolTip: i18n._('Add a permission'),
actionClass: 'btn List-buttonSubmit', actionClass: 'btn List-buttonSubmit',

View File

@@ -121,6 +121,7 @@ export default
organizations: { organizations: {
awToolTip: i18n._('Please save before assigning to organizations'), awToolTip: i18n._('Please save before assigning to organizations'),
basePath: 'api/v1/users/{{$stateParams.user_id}}/organizations', basePath: 'api/v1/users/{{$stateParams.user_id}}/organizations',
emptyListText: i18n._('Please add user to an Organization.'),
search: { search: {
page_size: '10' page_size: '10'
}, },

View File

@@ -122,7 +122,7 @@ export default
add: { add: {
ngClick: "$state.go('.add')", ngClick: "$state.go('.add')",
label: i18n._('Add'), label: i18n._('Add'),
awToolTip: 'Add a permission', awToolTip: i18n._('Add a permission'),
actionClass: 'btn List-buttonSubmit', actionClass: 'btn List-buttonSubmit',
buttonContent: '&#43; '+ i18n._('ADD'), buttonContent: '&#43; '+ i18n._('ADD'),
ngShow: '(workflow_job_template_obj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)' ngShow: '(workflow_job_template_obj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)'

View File

@@ -74,6 +74,7 @@ angular.module('CredentialsHelper', ['Utilities'])
scope.projectPopOver = "<p>" + i18n._("The project value") + "</p>"; scope.projectPopOver = "<p>" + i18n._("The project value") + "</p>";
scope.hostPopOver = "<p>" + i18n._("The host value") + "</p>"; scope.hostPopOver = "<p>" + i18n._("The host value") + "</p>";
scope.ssh_key_data_api_error = ''; scope.ssh_key_data_api_error = '';
if (!Empty(scope.kind)) { if (!Empty(scope.kind)) {
// Apply kind specific settings // Apply kind specific settings
switch (scope.kind.value) { switch (scope.kind.value) {
@@ -204,6 +205,111 @@ angular.module('CredentialsHelper', ['Utilities'])
} }
]) ])
.factory('BecomeMethodChange', ['Empty', 'i18n',
function (Empty, i18n) {
return function (params) {
var scope = params.scope;
if (!Empty(scope.kind)) {
// Apply kind specific settings
switch (scope.kind.value) {
case 'aws':
scope.aws_required = true;
break;
case 'rax':
scope.rackspace_required = true;
scope.username_required = true;
break;
case 'ssh':
scope.usernameLabel = i18n._('Username'); // formerly 'SSH Username'
scope.becomeUsernameLabel = i18n._('Privilege Escalation Username');
scope.becomePasswordLabel = i18n._('Privilege Escalation Password');
break;
case 'scm':
scope.sshKeyDataLabel = i18n._('SCM Private Key');
scope.passwordLabel = i18n._('Password');
break;
case 'gce':
scope.usernameLabel = i18n._('Service Account Email Address');
scope.sshKeyDataLabel = i18n._('RSA Private Key');
scope.email_required = true;
scope.key_required = true;
scope.key_description = i18n._('Paste the contents of the PEM file associated with the service account email.');
scope.projectLabel = i18n._("Project");
scope.project_required = false;
scope.projectPopOver = "<p>" + i18n._("The Project ID is the " +
"GCE assigned identification. It is constructed as " +
"two words followed by a three digit number. Such " +
"as: ") + "</p><p>adjective-noun-000</p>";
break;
case 'azure':
scope.sshKeyDataLabel = i18n._('Management Certificate');
scope.subscription_required = true;
scope.key_required = true;
scope.key_description = i18n._("Paste the contents of the PEM file that corresponds to the certificate you uploaded in the Microsoft Azure console.");
break;
case 'azure_rm':
scope.usernameLabel = i18n._("Username");
scope.subscription_required = true;
scope.passwordLabel = i18n._('Password');
scope.azure_rm_required = true;
break;
case 'vmware':
scope.username_required = true;
scope.host_required = true;
scope.password_required = true;
scope.hostLabel = "vCenter Host";
scope.passwordLabel = i18n._('Password');
scope.hostPopOver = i18n._("Enter the hostname or IP address which corresponds to your VMware vCenter.");
break;
case 'openstack':
scope.hostLabel = i18n._("Host (Authentication URL)");
scope.projectLabel = i18n._("Project (Tenant Name)");
scope.domainLabel = i18n._("Domain Name");
scope.password_required = true;
scope.project_required = true;
scope.host_required = true;
scope.username_required = true;
scope.projectPopOver = "<p>" + i18n._("This is the tenant name. " +
" This value is usually the same " +
" as the username.") + "</p>";
scope.hostPopOver = "<p>" + i18n._("The host to authenticate with.") +
"<br />" + i18n.sprintf(i18n._("For example, %s"), "https://openstack.business.com/v2.0/");
break;
case 'satellite6':
scope.username_required = true;
scope.password_required = true;
scope.passwordLabel = i18n._('Password');
scope.host_required = true;
scope.hostLabel = i18n._("Satellite 6 URL");
scope.hostPopOver = i18n.sprintf(i18n._("Enter the URL which corresponds to your %s" +
"Red Hat Satellite 6 server. %s" +
"For example, %s"), "<br />", "<br />", "https://satellite.example.org");
break;
case 'cloudforms':
scope.username_required = true;
scope.password_required = true;
scope.passwordLabel = i18n._('Password');
scope.host_required = true;
scope.hostLabel = i18n._("CloudForms URL");
scope.hostPopOver = i18n.sprintf(i18n._("Enter the URL for the virtual machine which %s" +
"corresponds to your CloudForm instance. %s" +
"For example, %s"), "<br />", "<br />", "https://cloudforms.example.org");
break;
case 'net':
scope.username_required = true;
scope.password_required = false;
scope.passwordLabel = i18n._('Password');
scope.sshKeyDataLabel = i18n._('SSH Key');
break;
}
}
};
}
])
.factory('OwnerChange', [ .factory('OwnerChange', [
function () { function () {

View File

@@ -233,7 +233,7 @@ export default
hdr: hdr, hdr: hdr,
body: (action_label === 'cancel' || job.status === 'new') ? cancelBody : deleteBody, body: (action_label === 'cancel' || job.status === 'new') ? cancelBody : deleteBody,
action: action, action: action,
actionText: (action_label === 'cancel' || job.status === 'new') ? "YES" : "DELETE" actionText: (action_label === 'cancel' || job.status === 'new') ? "OK" : "DELETE"
}); });
}); });

View File

@@ -24,10 +24,7 @@ export default
var langUrl = langInfo.replace('-', '_'); var langUrl = langInfo.replace('-', '_');
//gettextCatalog.debug = true; //gettextCatalog.debug = true;
gettextCatalog.setCurrentLanguage(langInfo); gettextCatalog.setCurrentLanguage(langInfo);
// TODO: the line below is commented out temporarily until gettextCatalog.loadRemote('/static/languages/' + langUrl + '.json');
// the .po files are received from the i18n team, in order to avoid
// 404 file not found console errors in dev
// gettextCatalog.loadRemote('/static/languages/' + langUrl + '.json');
}; };
}]) }])
.factory('i18n', ['gettextCatalog', .factory('i18n', ['gettextCatalog',

View File

@@ -11,10 +11,16 @@
*/ */
function InventoriesAdd($scope, $rootScope, $compile, $location, $log, function InventoriesAdd($scope, $rootScope, $compile, $location, $log,
$stateParams, GenerateForm, InventoryForm, Rest, Alert, ProcessErrors, $stateParams, GenerateForm, InventoryForm, rbacUiControlService, Rest, Alert, ProcessErrors,
ClearScope, GetBasePath, ParseTypeChange, Wait, ToJSON, ClearScope, GetBasePath, ParseTypeChange, Wait, ToJSON,
$state) { $state) {
$scope.canAdd = false;
rbacUiControlService.canAdd(GetBasePath('inventory'))
.then(function(canAdd) {
$scope.canAdd = canAdd;
});
Rest.setUrl(GetBasePath('inventory')); Rest.setUrl(GetBasePath('inventory'));
Rest.options() Rest.options()
.success(function(data) { .success(function(data) {
@@ -91,7 +97,7 @@ function InventoriesAdd($scope, $rootScope, $compile, $location, $log,
} }
export default ['$scope', '$rootScope', '$compile', '$location', export default ['$scope', '$rootScope', '$compile', '$location',
'$log', '$stateParams', 'GenerateForm', 'InventoryForm', 'Rest', 'Alert', '$log', '$stateParams', 'GenerateForm', 'InventoryForm', 'rbacUiControlService', 'Rest', 'Alert',
'ProcessErrors', 'ClearScope', 'GetBasePath', 'ParseTypeChange', 'ProcessErrors', 'ClearScope', 'GetBasePath', 'ParseTypeChange',
'Wait', 'ToJSON', '$state', InventoriesAdd 'Wait', 'ToJSON', '$state', InventoriesAdd
]; ];

View File

@@ -32,7 +32,7 @@ function InventoriesEdit($scope, $rootScope, $compile, $location,
form.formFieldSize = null; form.formFieldSize = null;
$scope.inventory_id = inventory_id; $scope.inventory_id = inventory_id;
$scope.$watch('invnentory_obj.summary_fields.user_capabilities.edit', function(val) { $scope.$watch('inventory_obj.summary_fields.user_capabilities.edit', function(val) {
if (val === false) { if (val === false) {
$scope.canAdd = false; $scope.canAdd = false;
} }

View File

@@ -66,7 +66,19 @@ angular.module('inventory', [
], ],
ParentObject: ['groupData', function(groupData) { ParentObject: ['groupData', function(groupData) {
return groupData; return groupData;
}] }],
UnifiedJobsOptions: ['Rest', 'GetBasePath', '$stateParams', '$q',
function(Rest, GetBasePath, $stateParams, $q) {
Rest.setUrl(GetBasePath('unified_jobs'));
var val = $q.defer();
Rest.options()
.then(function(data) {
val.resolve(data.data);
}, function(data) {
val.reject(data);
});
return val.promise;
}]
}, },
views: { views: {
// clear form template when views render in this substate // clear form template when views render in this substate
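The new UnifiedJobsOptions resolve wraps Rest.options() in an explicit deferred; assuming Rest.options() returns a standard $http-style promise (as its .then() usage above suggests), an equivalent, slightly leaner sketch chains it directly:

    // sketch only: same resolved value (data.data) without the manual deferred
    UnifiedJobsOptions: ['Rest', 'GetBasePath', function(Rest, GetBasePath) {
        Rest.setUrl(GetBasePath('unified_jobs'));
        return Rest.options().then(function(data) {
            return data.data;
        });
    }]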

View File

@@ -11,7 +11,7 @@
$scope.item = group; $scope.item = group;
$scope.submitMode = $stateParams.groups === undefined ? 'move' : 'copy'; $scope.submitMode = $stateParams.groups === undefined ? 'move' : 'copy';
$scope['toggle_'+ list.iterator] = function(id){ $scope.toggle_row = function(id){
// toggle off anything else currently selected // toggle off anything else currently selected
_.forEach($scope.groups, (item) => {return item.id === id ? item.checked = 1 : item.checked = null;}); _.forEach($scope.groups, (item) => {return item.id === id ? item.checked = 1 : item.checked = null;});
// yoink the currently selected thing // yoink the currently selected thing
@@ -60,9 +60,6 @@
}; };
function init(){ function init(){
var url = GetBasePath('inventory') + $stateParams.inventory_id + '/groups/';
url += $stateParams.group ? '?not__id__in=' + group.id + ',' + _.last($stateParams.group) : '?not__id=' + group.id;
list.basePath = url;
$scope.atRootLevel = $stateParams.group ? false : true; $scope.atRootLevel = $stateParams.group ? false : true;
// search init // search init

View File

@@ -8,10 +8,10 @@
['$scope', '$state', '$stateParams', 'generateList', 'HostManageService', 'GetBasePath', 'CopyMoveGroupList', 'host', 'Dataset', ['$scope', '$state', '$stateParams', 'generateList', 'HostManageService', 'GetBasePath', 'CopyMoveGroupList', 'host', 'Dataset',
function($scope, $state, $stateParams, GenerateList, HostManageService, GetBasePath, CopyMoveGroupList, host, Dataset){ function($scope, $state, $stateParams, GenerateList, HostManageService, GetBasePath, CopyMoveGroupList, host, Dataset){
var list = CopyMoveGroupList; var list = CopyMoveGroupList;
$scope.item = host; $scope.item = host;
$scope.submitMode = 'copy'; $scope.submitMode = 'copy';
$scope['toggle_'+ list.iterator] = function(id){ $scope.toggle_row = function(id){
// toggle off anything else currently selected // toggle off anything else currently selected
_.forEach($scope.groups, (item) => {return item.id === id ? item.checked = 1 : item.checked = null;}); _.forEach($scope.groups, (item) => {return item.id === id ? item.checked = 1 : item.checked = null;});
// yoink the currently selected thing // yoink the currently selected thing

View File

@@ -30,8 +30,8 @@ var copyMoveGroupRoute = {
resolve: { resolve: {
Dataset: ['CopyMoveGroupList', 'QuerySet', '$stateParams', 'GetBasePath', 'group', Dataset: ['CopyMoveGroupList', 'QuerySet', '$stateParams', 'GetBasePath', 'group',
function(list, qs, $stateParams, GetBasePath, group) { function(list, qs, $stateParams, GetBasePath, group) {
$stateParams.copy_search.not__id__in = ($stateParams.group.length > 0 ? group.id + ',' + _.last($stateParams.group) : group.id); $stateParams.copy_search.not__id__in = ($stateParams.group && $stateParams.group.length > 0 ? group.id + ',' + _.last($stateParams.group) : group.id.toString());
let path = GetBasePath(list.name); let path = GetBasePath('inventory') + $stateParams.inventory_id + '/groups/';
return qs.search(path, $stateParams.copy_search); return qs.search(path, $stateParams.copy_search);
} }
], ],
@@ -66,7 +66,7 @@ var copyMoveHostRoute = {
resolve: { resolve: {
Dataset: ['CopyMoveGroupList', 'QuerySet', '$stateParams', 'GetBasePath', Dataset: ['CopyMoveGroupList', 'QuerySet', '$stateParams', 'GetBasePath',
function(list, qs, $stateParams, GetBasePath) { function(list, qs, $stateParams, GetBasePath) {
let path = GetBasePath(list.name); let path = GetBasePath('inventory') + $stateParams.inventory_id + '/hosts/';
return qs.search(path, $stateParams.copy_search); return qs.search(path, $stateParams.copy_search);
} }
], ],
@@ -80,7 +80,9 @@ var copyMoveHostRoute = {
controller: CopyMoveHostsController, controller: CopyMoveHostsController,
}, },
'copyMoveList@inventoryManage.copyMoveHost': { 'copyMoveList@inventoryManage.copyMoveHost': {
templateProvider: function(CopyMoveGroupList, generateList) { templateProvider: function(CopyMoveGroupList, generateList, $stateParams, GetBasePath) {
let list = CopyMoveGroupList;
list.basePath = GetBasePath('inventory') + $stateParams.inventory_id + '/hosts/';
let html = generateList.build({ let html = generateList.build({
list: CopyMoveGroupList, list: CopyMoveGroupList,
mode: 'lookup', mode: 'lookup',
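The reworked not__id__in guard in the group route's Dataset resolve matters when the copy/move state is entered from the inventory root, where $stateParams.group is undefined and the old expression would throw on .length. A small sketch of the two cases with hypothetical ids:

    // sketch only: what the guarded expression produces
    //   nested group selected: group.id = 42, $stateParams.group = [7, 12] -> "42,12"
    //   root level:            group.id = 42, $stateParams.group undefined -> "42"
    var exclude = ($stateParams.group && $stateParams.group.length > 0) ?
        group.id + ',' + _.last($stateParams.group) :
        group.id.toString();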

Some files were not shown because too many files have changed in this diff