Mirror of https://github.com/ansible/awx.git
Merge branch 'release_3.1.0' into multi_worker_callbacks

Commit 7ca496b6ef

.gitignore (vendored)
@@ -106,7 +106,6 @@ reports
*.log.[0-9]
*.results
local/
*.mo

# AWX python libs populated by requirements.txt
awx/lib/.deps_built
Makefile
@@ -428,7 +428,7 @@ celeryd:
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/tower/bin/activate; \
	fi; \
	$(PYTHON) manage.py celeryd -l DEBUG -B --autoreload --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default,scheduler,broadcast_all,$(COMPOSE_HOST)
	$(PYTHON) manage.py celeryd -l DEBUG -B --autoreload --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default,scheduler,broadcast_all,$(COMPOSE_HOST) -n celery@$(COMPOSE_HOST)
	#$(PYTHON) manage.py celery multi show projects jobs default -l DEBUG -Q:projects projects -Q:jobs jobs -Q:default default -c:projects 1 -c:jobs 3 -c:default 3 -Ofair -B --schedule=$(CELERY_SCHEDULE_FILE)

# Run to start the zeromq callback receiver
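The new `-n celery@$(COMPOSE_HOST)` flag above pins the worker's node name. A minimal sketch (not AWX code) of why a predictable name matters, assuming a configured Celery app and broker: the process_cache_changes task later in this diff broadcasts a pool_restart to exactly that destination.

# Hedged sketch: broadcast a control command to one named worker.
# The broker URL and host name are placeholders; the broadcast call mirrors
# _reset_celery_logging() further down in this diff.
from celery import Celery

app = Celery(broker='amqp://localhost//')  # hypothetical broker URL

app.control.broadcast(
    'pool_restart',
    arguments={'reload': True},
    destination=['celery@my-host'],  # matches the -n celery@$(COMPOSE_HOST) node name
    reply=False,
)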
@@ -559,9 +559,12 @@ messages:
	fi; \
	$(PYTHON) manage.py makemessages -l $(LANG) --keep-pot

# generate l10n .json .mo
languages: $(UI_DEPS_FLAG_FILE) check-po
# generate l10n .json
ui-languages: $(UI_DEPS_FLAG_FILE) check-po
	$(NPM_BIN) --prefix awx/ui run languages

# generate l10n .mo
api-languages:
	@if [ "$(VENV_BASE)" ]; then \
		. $(VENV_BASE)/tower/bin/activate; \
	fi; \
@@ -592,8 +595,7 @@ ui-devel: $(UI_DEPS_FLAG_FILE)

ui-release: $(UI_RELEASE_FLAG_FILE)

# todo: include languages target when .po deliverables are added to source control
$(UI_RELEASE_FLAG_FILE): $(UI_DEPS_FLAG_FILE)
$(UI_RELEASE_FLAG_FILE): ui-languages $(UI_DEPS_FLAG_FILE)
	$(NPM_BIN) --prefix awx/ui run build-release
	touch $(UI_RELEASE_FLAG_FILE)
@@ -13,7 +13,7 @@ from django.utils.translation import ugettext_lazy as _
from rest_framework import exceptions
from rest_framework import metadata
from rest_framework import serializers
from rest_framework.relations import RelatedField
from rest_framework.relations import RelatedField, ManyRelatedField
from rest_framework.request import clone_request

# Ansible Tower
@@ -75,7 +75,7 @@ class Metadata(metadata.SimpleMetadata):
        elif getattr(field, 'fields', None):
            field_info['children'] = self.get_serializer_info(field)

        if hasattr(field, 'choices') and not isinstance(field, RelatedField):
        if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
            field_info['choices'] = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]

        # Indicate if a field is write-only.
@@ -16,6 +16,7 @@ class Pagination(pagination.PageNumberPagination):
        if not self.page.has_next():
            return None
        url = self.request and self.request.get_full_path() or ''
        url = url.encode('utf-8')
        page_number = self.page.next_page_number()
        return replace_query_param(url, self.page_query_param, page_number)

@@ -23,5 +24,6 @@ class Pagination(pagination.PageNumberPagination):
        if not self.page.has_previous():
            return None
        url = self.request and self.request.get_full_path() or ''
        url = url.encode('utf-8')
        page_number = self.page.previous_page_number()
        return replace_query_param(url, self.page_query_param, page_number)
@@ -251,6 +251,7 @@ class BaseSerializer(serializers.ModelSerializer):
            'inventory_update': _('Inventory Sync'),
            'system_job': _('Management Job'),
            'workflow_job': _('Workflow Job'),
            'workflow_job_template': _('Workflow Template'),
        }
        choices = []
        for t in self.get_types():
@@ -1619,7 +1620,8 @@ class ResourceAccessListElementSerializer(UserSerializer):
                'name': role.name,
                'description': role.description,
                'team_id': team_role.object_id,
                'team_name': team_role.content_object.name
                'team_name': team_role.content_object.name,
                'team_organization_name': team_role.content_object.organization.name,
            }
            if role.content_type is not None:
                role_dict['resource_name'] = role.content_object.name
@@ -2369,7 +2371,7 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):
        if view and view.request:
            request_method = view.request.method
        if request_method in ['PATCH']:
            obj = view.get_object()
            obj = self.instance
            char_prompts = copy.copy(obj.char_prompts)
            char_prompts.update(self.extract_char_prompts(data))
        else:
@@ -2708,18 +2710,15 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
    variables_needed_to_start = serializers.ReadOnlyField()
    survey_enabled = serializers.SerializerMethodField()
    extra_vars = VerbatimField(required=False, write_only=True)
    warnings = serializers.SerializerMethodField()
    workflow_job_template_data = serializers.SerializerMethodField()

    class Meta:
        model = WorkflowJobTemplate
        fields = ('can_start_without_user_input', 'extra_vars', 'warnings',
        fields = ('can_start_without_user_input', 'extra_vars',
                  'survey_enabled', 'variables_needed_to_start',
                  'node_templates_missing', 'node_prompts_rejected',
                  'workflow_job_template_data')

    def get_warnings(self, obj):
        return obj.get_warnings()

    def get_survey_enabled(self, obj):
        if obj:
            return obj.survey_enabled and 'spec' in obj.survey_spec
@@ -13,7 +13,7 @@ Which will act on data older than 30 days.

For `cleanup_facts`:

`{"older_than": "4w", `granularity`: "3d"}`
`{"older_than": "4w", "granularity": "3d"}`

Which will reduce the granularity of scan data to one scan per 3 days when the data is older than 4w.
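A hedged invocation example for the same options, using the manage.py entry point the Makefile hunks above already rely on; the flag names match the cleanup_facts command options shown later in this diff.

# Sketch only: run the cleanup_facts management command with the values from
# the documentation above (assumes execution from the AWX project root).
import subprocess

subprocess.check_call([
    "python", "manage.py", "cleanup_facts",
    "--older_than", "4w",   # facts newer than 4 weeks keep full resolution
    "--granularity", "3d",  # older facts are thinned to one scan per 3-day window
])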
@ -851,6 +851,7 @@ class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView)
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_any'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
|
||||
@ -859,6 +860,7 @@ class OrganizationNotificationTemplatesErrorList(SubListCreateAttachDetachAPIVie
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_error'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
|
||||
@ -867,6 +869,7 @@ class OrganizationNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIV
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Organization
|
||||
relationship = 'notification_templates_success'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class OrganizationAccessList(ResourceAccessList):
|
||||
@ -921,6 +924,7 @@ class TeamRolesList(SubListCreateAttachDetachAPIView):
|
||||
metadata_class = RoleMetadata
|
||||
parent_model = Team
|
||||
relationship='member_role.children'
|
||||
new_in_300 = True
|
||||
|
||||
def get_queryset(self):
|
||||
team = get_object_or_404(Team, pk=self.kwargs['pk'])
|
||||
@ -1103,6 +1107,7 @@ class ProjectNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Project
|
||||
relationship = 'notification_templates_any'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class ProjectNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
|
||||
@ -1111,6 +1116,7 @@ class ProjectNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Project
|
||||
relationship = 'notification_templates_error'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class ProjectNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
|
||||
@ -1119,6 +1125,7 @@ class ProjectNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = Project
|
||||
relationship = 'notification_templates_success'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class ProjectUpdatesList(SubListAPIView):
|
||||
@ -1156,6 +1163,7 @@ class ProjectUpdateList(ListAPIView):
|
||||
|
||||
model = ProjectUpdate
|
||||
serializer_class = ProjectUpdateListSerializer
|
||||
new_in_13 = True
|
||||
|
||||
|
||||
class ProjectUpdateDetail(RetrieveDestroyAPIView):
|
||||
@ -1196,6 +1204,7 @@ class ProjectUpdateNotificationsList(SubListAPIView):
|
||||
serializer_class = NotificationSerializer
|
||||
parent_model = ProjectUpdate
|
||||
relationship = 'notifications'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class ProjectAccessList(ResourceAccessList):
|
||||
@ -1271,6 +1280,7 @@ class UserRolesList(SubListCreateAttachDetachAPIView):
|
||||
parent_model = User
|
||||
relationship='roles'
|
||||
permission_classes = (IsAuthenticated,)
|
||||
new_in_300 = True
|
||||
|
||||
def get_queryset(self):
|
||||
u = get_object_or_404(User, pk=self.kwargs['pk'])
|
||||
@ -1543,12 +1553,14 @@ class InventoryScriptList(ListCreateAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
new_in_210 = True
|
||||
|
||||
|
||||
class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
|
||||
|
||||
model = CustomInventoryScript
|
||||
serializer_class = CustomInventoryScriptSerializer
|
||||
new_in_210 = True
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
instance = self.get_object()
|
||||
@ -2170,6 +2182,7 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIVi
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = InventorySource
|
||||
relationship = 'notification_templates_any'
|
||||
new_in_300 = True
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
parent = self.get_parent_object()
|
||||
@ -2281,6 +2294,7 @@ class InventoryUpdateNotificationsList(SubListAPIView):
|
||||
serializer_class = NotificationSerializer
|
||||
parent_model = InventoryUpdate
|
||||
relationship = 'notifications'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class JobTemplateList(ListCreateAPIView):
|
||||
@@ -2316,7 +2330,10 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView):
    always_allow_superuser = False

    def update_raw_data(self, data):
        obj = self.get_object()
        try:
            obj = self.get_object()
        except PermissionDenied:
            return data
        extra_vars = data.pop('extra_vars', None) or {}
        if obj:
            for p in obj.passwords_needed_to_start:
@ -2396,6 +2413,7 @@ class JobTemplateSurveySpec(GenericAPIView):
|
||||
model = JobTemplate
|
||||
parent_model = JobTemplate
|
||||
serializer_class = EmptySerializer
|
||||
new_in_210 = True
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
@ -2462,6 +2480,7 @@ class WorkflowJobTemplateSurveySpec(WorkflowsEnforcementMixin, JobTemplateSurvey
|
||||
|
||||
model = WorkflowJobTemplate
|
||||
parent_model = WorkflowJobTemplate
|
||||
new_in_310 = True
|
||||
|
||||
|
||||
class JobTemplateActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
|
||||
@ -2479,6 +2498,7 @@ class JobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = JobTemplate
|
||||
relationship = 'notification_templates_any'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class JobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
|
||||
@ -2487,6 +2507,7 @@ class JobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = JobTemplate
|
||||
relationship = 'notification_templates_error'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class JobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
|
||||
@ -2495,6 +2516,7 @@ class JobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIVi
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = JobTemplate
|
||||
relationship = 'notification_templates_success'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class JobTemplateLabelList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView):
|
||||
@@ -2884,10 +2906,16 @@ class WorkflowJobTemplateCopy(WorkflowsEnforcementMixin, GenericAPIView):

    def get(self, request, *args, **kwargs):
        obj = self.get_object()
        data = {}
        copy_TF, messages = request.user.can_access_with_errors(self.model, 'copy', obj)
        data['can_copy'] = copy_TF
        data['warnings'] = messages
        can_copy, messages = request.user.can_access_with_errors(self.model, 'copy', obj)
        data = OrderedDict([
            ('can_copy', can_copy), ('can_copy_without_user_input', can_copy),
            ('templates_unable_to_copy', [] if can_copy else ['all']),
            ('credentials_unable_to_copy', [] if can_copy else ['all']),
            ('inventories_unable_to_copy', [] if can_copy else ['all'])
        ])
        if messages and can_copy:
            data['can_copy_without_user_input'] = False
        data.update(messages)
        return Response(data)

    def post(self, request, *args, **kwargs):
@ -2918,7 +2946,10 @@ class WorkflowJobTemplateLaunch(WorkflowsEnforcementMixin, RetrieveAPIView):
|
||||
always_allow_superuser = False
|
||||
|
||||
def update_raw_data(self, data):
|
||||
obj = self.get_object()
|
||||
try:
|
||||
obj = self.get_object()
|
||||
except PermissionDenied:
|
||||
return data
|
||||
extra_vars = data.pop('extra_vars', None) or {}
|
||||
if obj:
|
||||
for v in obj.variables_needed_to_start:
|
||||
@ -2953,6 +2984,14 @@ class WorkflowJobRelaunch(WorkflowsEnforcementMixin, GenericAPIView):
|
||||
model = WorkflowJob
|
||||
serializer_class = EmptySerializer
|
||||
is_job_start = True
|
||||
new_in_310 = True
|
||||
|
||||
def check_object_permissions(self, request, obj):
|
||||
if request.method == 'POST' and obj:
|
||||
relaunch_perm, messages = request.user.can_access_with_errors(self.model, 'start', obj)
|
||||
if not relaunch_perm and 'workflow_job_template' in messages:
|
||||
self.permission_denied(request, message=messages['workflow_job_template'])
|
||||
return super(WorkflowJobRelaunch, self).check_object_permissions(request, obj)
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
return Response({})
|
||||
@ -3129,6 +3168,7 @@ class SystemJobTemplateList(ListAPIView):
|
||||
|
||||
model = SystemJobTemplate
|
||||
serializer_class = SystemJobTemplateSerializer
|
||||
new_in_210 = True
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
if not request.user.is_superuser and not request.user.is_system_auditor:
|
||||
@ -3140,6 +3180,7 @@ class SystemJobTemplateDetail(RetrieveAPIView):
|
||||
|
||||
model = SystemJobTemplate
|
||||
serializer_class = SystemJobTemplateSerializer
|
||||
new_in_210 = True
|
||||
|
||||
|
||||
class SystemJobTemplateLaunch(GenericAPIView):
|
||||
@ -3147,6 +3188,7 @@ class SystemJobTemplateLaunch(GenericAPIView):
|
||||
model = SystemJobTemplate
|
||||
serializer_class = EmptySerializer
|
||||
is_job_start = True
|
||||
new_in_210 = True
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
return Response({})
|
||||
@@ -3154,8 +3196,8 @@ class SystemJobTemplateLaunch(GenericAPIView):
    def post(self, request, *args, **kwargs):
        obj = self.get_object()

        new_job = obj.create_unified_job(**request.data)
        new_job.signal_start(**request.data)
        new_job = obj.create_unified_job(extra_vars=request.data.get('extra_vars', {}))
        new_job.signal_start()
        data = dict(system_job=new_job.id)
        return Response(data, status=status.HTTP_201_CREATED)
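The hunk above narrows what the launch endpoint forwards from the request body. A hedged illustration (not the actual AWX view) of the effect: only extra_vars is honored, so unrelated POST fields no longer reach job creation or signal_start().

# Sketch under the assumption that system_job_template behaves like the model
# in the hunk above; create_unified_job and signal_start are taken from it.
def launch_system_job(system_job_template, request_data):
    extra_vars = request_data.get('extra_vars', {})  # the only field honored
    new_job = system_job_template.create_unified_job(extra_vars=extra_vars)
    new_job.signal_start()  # no request kwargs forwarded any more
    return new_job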
@ -3169,6 +3211,7 @@ class SystemJobTemplateSchedulesList(SubListCreateAttachDetachAPIView):
|
||||
parent_model = SystemJobTemplate
|
||||
relationship = 'schedules'
|
||||
parent_key = 'unified_job_template'
|
||||
new_in_210 = True
|
||||
|
||||
|
||||
class SystemJobTemplateJobsList(SubListAPIView):
|
||||
@ -3178,6 +3221,7 @@ class SystemJobTemplateJobsList(SubListAPIView):
|
||||
parent_model = SystemJobTemplate
|
||||
relationship = 'jobs'
|
||||
parent_key = 'system_job_template'
|
||||
new_in_210 = True
|
||||
|
||||
|
||||
class SystemJobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
|
||||
@ -3186,6 +3230,7 @@ class SystemJobTemplateNotificationTemplatesAnyList(SubListCreateAttachDetachAPI
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = SystemJobTemplate
|
||||
relationship = 'notification_templates_any'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class SystemJobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachAPIView):
|
||||
@ -3194,6 +3239,7 @@ class SystemJobTemplateNotificationTemplatesErrorList(SubListCreateAttachDetachA
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = SystemJobTemplate
|
||||
relationship = 'notification_templates_error'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class SystemJobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetachAPIView):
|
||||
@ -3202,6 +3248,7 @@ class SystemJobTemplateNotificationTemplatesSuccessList(SubListCreateAttachDetac
|
||||
serializer_class = NotificationTemplateSerializer
|
||||
parent_model = SystemJobTemplate
|
||||
relationship = 'notification_templates_success'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class JobList(ListCreateAPIView):
|
||||
@ -3239,10 +3286,12 @@ class JobLabelList(SubListAPIView):
|
||||
parent_model = Job
|
||||
relationship = 'labels'
|
||||
parent_key = 'job'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class WorkflowJobLabelList(WorkflowsEnforcementMixin, JobLabelList):
|
||||
parent_model = WorkflowJob
|
||||
new_in_310 = True
|
||||
|
||||
|
||||
class JobActivityStreamList(ActivityStreamEnforcementMixin, SubListAPIView):
|
||||
@ -3337,6 +3386,7 @@ class JobNotificationsList(SubListAPIView):
|
||||
serializer_class = NotificationSerializer
|
||||
parent_model = Job
|
||||
relationship = 'notifications'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class BaseJobHostSummariesList(SubListAPIView):
|
||||
@ -3847,12 +3897,14 @@ class AdHocCommandNotificationsList(SubListAPIView):
|
||||
serializer_class = NotificationSerializer
|
||||
parent_model = AdHocCommand
|
||||
relationship = 'notifications'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class SystemJobList(ListCreateAPIView):
|
||||
|
||||
model = SystemJob
|
||||
serializer_class = SystemJobListSerializer
|
||||
new_in_210 = True
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
if not request.user.is_superuser and not request.user.is_system_auditor:
|
||||
@ -3864,6 +3916,7 @@ class SystemJobDetail(RetrieveDestroyAPIView):
|
||||
|
||||
model = SystemJob
|
||||
serializer_class = SystemJobSerializer
|
||||
new_in_210 = True
|
||||
|
||||
|
||||
class SystemJobCancel(RetrieveAPIView):
|
||||
@ -3871,6 +3924,7 @@ class SystemJobCancel(RetrieveAPIView):
|
||||
model = SystemJob
|
||||
serializer_class = SystemJobCancelSerializer
|
||||
is_job_cancel = True
|
||||
new_in_210 = True
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
obj = self.get_object()
|
||||
@ -3887,6 +3941,7 @@ class SystemJobNotificationsList(SubListAPIView):
|
||||
serializer_class = NotificationSerializer
|
||||
parent_model = SystemJob
|
||||
relationship = 'notifications'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class UnifiedJobTemplateList(ListAPIView):
|
||||
@ -4009,6 +4064,7 @@ class UnifiedJobStdout(RetrieveAPIView):
|
||||
class ProjectUpdateStdout(UnifiedJobStdout):
|
||||
|
||||
model = ProjectUpdate
|
||||
new_in_13 = True
|
||||
|
||||
|
||||
class InventoryUpdateStdout(UnifiedJobStdout):
|
||||
@ -4079,6 +4135,7 @@ class NotificationTemplateNotificationList(SubListAPIView):
|
||||
parent_model = NotificationTemplate
|
||||
relationship = 'notifications'
|
||||
parent_key = 'notification_template'
|
||||
new_in_300 = True
|
||||
|
||||
|
||||
class NotificationList(ListAPIView):
|
||||
|
||||
@@ -16,7 +16,10 @@ class ConfConfig(AppConfig):
        from .settings import SettingsWrapper
        SettingsWrapper.initialize()
        if settings.LOG_AGGREGATOR_ENABLED:
            LOGGING = settings.LOGGING
            LOGGING['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler'
            configure_logging(settings.LOGGING_CONFIG, LOGGING)
            LOGGING_DICT = settings.LOGGING
            LOGGING_DICT['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler'
            if 'awx' in settings.LOG_AGGREGATOR_LOGGERS:
                if 'http_receiver' not in LOGGING_DICT['loggers']['awx']['handlers']:
                    LOGGING_DICT['loggers']['awx']['handlers'] += ['http_receiver']
            configure_logging(settings.LOGGING_CONFIG, LOGGING_DICT)
        # checks.register(SettingsWrapper._check_settings)
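The same dict-config mutation reappears in awx/main/tasks.py later in this diff. A minimal standalone sketch of the idea, with the stdlib's dictConfig standing in for Django's configure_logging; the 'http_receiver' handler and the 'awx' logger are assumed to already exist in the config dict, as they do in the hunk above.

# Hedged sketch, not AWX code: attach an extra handler to the 'awx' logger and
# re-apply the logging configuration.
import logging.config

def enable_http_receiver(logging_dict, handler_class):
    logging_dict['handlers']['http_receiver']['class'] = handler_class
    handlers = logging_dict['loggers']['awx']['handlers']
    if 'http_receiver' not in handlers:
        handlers.append('http_receiver')
    logging.config.dictConfig(logging_dict)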
@@ -13,7 +13,7 @@ import awx.main.signals
from awx.conf import settings_registry
from awx.conf.models import Setting
from awx.conf.serializers import SettingSerializer
from awx.main.tasks import clear_cache_keys
from awx.main.tasks import process_cache_changes

logger = logging.getLogger('awx.conf.signals')

@@ -32,7 +32,7 @@ def handle_setting_change(key, for_delete=False):
    cache_keys = set([Setting.get_cache_key(k) for k in setting_keys])
    logger.debug('sending signals to delete cache keys(%r)', cache_keys)
    cache.delete_many(cache_keys)
    clear_cache_keys.delay(list(cache_keys))
    process_cache_changes.delay(list(cache_keys))

    # Send setting_changed signal with new value for each setting.
    for setting_key in setting_keys:
@ -181,7 +181,7 @@ class EventContext(object):
|
||||
event_data['res'] = {}
|
||||
event_dict = dict(event=event, event_data=event_data)
|
||||
for key in event_data.keys():
|
||||
if key in ('job_id', 'ad_hoc_command_id', 'uuid', 'parent_uuid', 'created', 'artifact_data'):
|
||||
if key in ('job_id', 'ad_hoc_command_id', 'uuid', 'parent_uuid', 'created',):
|
||||
event_dict[key] = event_data.pop(key)
|
||||
elif key in ('verbosity', 'pid'):
|
||||
event_dict[key] = event_data[key]
|
||||
|
||||
@ -19,8 +19,6 @@ from __future__ import (absolute_import, division, print_function)
|
||||
|
||||
# Python
|
||||
import contextlib
|
||||
import copy
|
||||
import re
|
||||
import sys
|
||||
import uuid
|
||||
|
||||
@ -77,45 +75,11 @@ class BaseCallbackModule(CallbackBase):
|
||||
super(BaseCallbackModule, self).__init__()
|
||||
self.task_uuids = set()
|
||||
|
||||
def censor_result(self, res, no_log=False):
|
||||
if not isinstance(res, dict):
|
||||
if no_log:
|
||||
return "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
|
||||
return res
|
||||
if res.get('_ansible_no_log', no_log):
|
||||
new_res = {}
|
||||
for k in self.CENSOR_FIELD_WHITELIST:
|
||||
if k in res:
|
||||
new_res[k] = res[k]
|
||||
if k == 'cmd' and k in res:
|
||||
if isinstance(res['cmd'], list):
|
||||
res['cmd'] = ' '.join(res['cmd'])
|
||||
if re.search(r'\s', res['cmd']):
|
||||
new_res['cmd'] = re.sub(r'^(([^\s\\]|\\\s)+).*$',
|
||||
r'\1 <censored>',
|
||||
res['cmd'])
|
||||
new_res['censored'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
|
||||
res = new_res
|
||||
if 'results' in res:
|
||||
if isinstance(res['results'], list):
|
||||
for i in xrange(len(res['results'])):
|
||||
res['results'][i] = self.censor_result(res['results'][i], res.get('_ansible_no_log', no_log))
|
||||
elif res.get('_ansible_no_log', False):
|
||||
res['results'] = "the output has been hidden due to the fact that 'no_log: true' was specified for this result"
|
||||
return res
|
||||
|
||||
    @contextlib.contextmanager
    def capture_event_data(self, event, **event_data):

        event_data.setdefault('uuid', str(uuid.uuid4()))

        if 'res' in event_data:
            event_data['res'] = self.censor_result(copy.copy(event_data['res']))
        res = event_data.get('res', None)
        if res and isinstance(res, dict):
            if 'artifact_data' in res:
                event_data['artifact_data'] = res['artifact_data']

        if event not in self.EVENTS_WITHOUT_TASK:
            task = event_data.pop('task', None)
        else:
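Read together with the JobEvent changes later in this diff, the hunk above moves artifacts out of the censoring path: the callback lifts res['artifact_data'] to a top-level event field, and the event consumer pops it off before persisting. A simplified, hedged sketch of that hand-off (function names are hypothetical; the key names come from the surrounding hunks):

def lift_artifacts(event_data):
    # callback side: promote artifacts out of the task result
    res = event_data.get('res')
    if isinstance(res, dict) and 'artifact_data' in res:
        event_data['artifact_data'] = res['artifact_data']
    return event_data

def consume_event(event_data):
    # receiver side: detach artifacts before saving the event itself
    artifact_dict = event_data.pop('artifact_data', None)
    return event_data, artifact_dict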
@ -262,7 +226,7 @@ class BaseCallbackModule(CallbackBase):
|
||||
if task_uuid in self.task_uuids:
|
||||
# FIXME: When this task UUID repeats, it means the play is using the
|
||||
# free strategy, so different hosts may be running different tasks
|
||||
# within a play.
|
||||
# within a play.
|
||||
return
|
||||
self.task_uuids.add(task_uuid)
|
||||
self.set_task(task)
|
||||
@ -319,6 +283,9 @@ class BaseCallbackModule(CallbackBase):
|
||||
with self.capture_event_data('playbook_on_notify', **event_data):
|
||||
super(BaseCallbackModule, self).v2_playbook_on_notify(result, handler)
|
||||
|
||||
'''
|
||||
ansible_stats is, retoractively, added in 2.2
|
||||
'''
|
||||
def v2_playbook_on_stats(self, stats):
|
||||
self.clear_play()
|
||||
# FIXME: Add count of plays/tasks.
|
||||
@ -329,7 +296,9 @@ class BaseCallbackModule(CallbackBase):
|
||||
ok=stats.ok,
|
||||
processed=stats.processed,
|
||||
skipped=stats.skipped,
|
||||
artifact_data=stats.custom.get('_run', {}) if hasattr(stats, 'custom') else {}
|
||||
)
|
||||
|
||||
with self.capture_event_data('playbook_on_stats', **event_data):
|
||||
super(BaseCallbackModule, self).v2_playbook_on_stats(stats)
|
||||
|
||||
|
||||
New files added in this commit (diffs not shown):
  awx/locale/en-us/LC_MESSAGES/django.mo (binary file not shown)
  awx/locale/en-us/LC_MESSAGES/django.po (3637 lines; diff suppressed because it is too large)
  awx/locale/fr/LC_MESSAGES/ansible-tower-ui.mo (binary file not shown)
  awx/locale/fr/LC_MESSAGES/ansible-tower-ui.po (2963 lines; diff suppressed because it is too large)
  awx/locale/fr/LC_MESSAGES/django.mo (binary file not shown)
  awx/locale/fr/LC_MESSAGES/django.po (4303 lines; diff suppressed because it is too large)
  awx/locale/ja/LC_MESSAGES/ansible-tower-ui.mo (binary file not shown)
  awx/locale/ja/LC_MESSAGES/ansible-tower-ui.po (2803 lines; diff suppressed because it is too large)
  awx/locale/ja/LC_MESSAGES/django.mo (binary file not shown)
  awx/locale/ja/LC_MESSAGES/django.po (3984 lines; diff suppressed because it is too large)
@@ -353,7 +353,7 @@ class BaseAccess(object):

        # Shortcuts in certain cases by deferring to earlier property
        if display_method == 'schedule':
            user_capabilities['schedule'] = user_capabilities['edit']
            user_capabilities['schedule'] = user_capabilities['start']
            continue
        elif display_method == 'delete' and not isinstance(obj, (User, UnifiedJob)):
            user_capabilities['delete'] = user_capabilities['edit']
@@ -363,27 +363,30 @@ class BaseAccess(object):
            continue

        # Compute permission
        data = {}
        access_method = getattr(self, "can_%s" % method)
        if method in ['change']: # 3 args
            user_capabilities[display_method] = access_method(obj, data)
        elif method in ['delete', 'run_ad_hoc_commands', 'copy']:
            user_capabilities[display_method] = access_method(obj)
        elif method in ['start']:
            user_capabilities[display_method] = access_method(obj, validate_license=False)
        elif method in ['add']: # 2 args with data
            user_capabilities[display_method] = access_method(data)
        elif method in ['attach', 'unattach']: # parent/sub-object call
            if type(parent_obj) == Team:
                relationship = 'parents'
                parent_obj = parent_obj.member_role
            else:
                relationship = 'members'
            user_capabilities[display_method] = access_method(
                obj, parent_obj, relationship, skip_sub_obj_read_check=True, data=data)
        user_capabilities[display_method] = self.get_method_capability(method, obj, parent_obj)

        return user_capabilities

    def get_method_capability(self, method, obj, parent_obj):
        if method in ['change']: # 3 args
            return self.can_change(obj, {})
        elif method in ['delete', 'run_ad_hoc_commands', 'copy']:
            access_method = getattr(self, "can_%s" % method)
            return access_method(obj)
        elif method in ['start']:
            return self.can_start(obj, validate_license=False)
        elif method in ['add']: # 2 args with data
            return self.can_add({})
        elif method in ['attach', 'unattach']: # parent/sub-object call
            access_method = getattr(self, "can_%s" % method)
            if type(parent_obj) == Team:
                relationship = 'parents'
                parent_obj = parent_obj.member_role
            else:
                relationship = 'members'
            return access_method(obj, parent_obj, relationship, skip_sub_obj_read_check=True, data={})
        return False

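This refactor pulls the per-method dispatch into an overridable get_method_capability hook. A hedged sketch of the pattern it enables, modeled on the WorkflowJobAccess override that appears later in this diff; the subclass name here is hypothetical.

# Sketch only: special-case one capability and defer everything else to the
# BaseAccess dispatch shown above.
class ExampleJobAccess(BaseAccess):  # hypothetical subclass
    def get_method_capability(self, method, obj, parent_obj):
        if method == 'start':
            # cheap approximation for list views; the detailed check runs on POST
            return self.user.is_superuser
        return super(ExampleJobAccess, self).get_method_capability(method, obj, parent_obj)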
class UserAccess(BaseAccess):
|
||||
'''
|
||||
@ -982,8 +985,6 @@ class ProjectUpdateAccess(BaseAccess):
|
||||
|
||||
@check_superuser
|
||||
def can_cancel(self, obj):
|
||||
if not obj.can_cancel:
|
||||
return False
|
||||
if self.user == obj.created_by:
|
||||
return True
|
||||
# Project updates cascade delete with project, admin role descends from org admin
|
||||
@ -1040,7 +1041,7 @@ class JobTemplateAccess(BaseAccess):
|
||||
Project.accessible_objects(self.user, 'use_role').exists() or
|
||||
Inventory.accessible_objects(self.user, 'use_role').exists())
|
||||
|
||||
# if reference_obj is provided, determine if it can be coppied
|
||||
# if reference_obj is provided, determine if it can be copied
|
||||
reference_obj = data.get('reference_obj', None)
|
||||
|
||||
if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:
|
||||
@ -1392,7 +1393,8 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
|
||||
qs = self.model.objects.filter(
|
||||
workflow_job_template__in=WorkflowJobTemplate.accessible_objects(
|
||||
self.user, 'read_role'))
|
||||
qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')
|
||||
qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes',
|
||||
'unified_job_template')
|
||||
return qs
|
||||
|
||||
def can_use_prompted_resources(self, data):
|
||||
@ -1478,8 +1480,14 @@ class WorkflowJobNodeAccess(BaseAccess):
|
||||
qs = qs.prefetch_related('success_nodes', 'failure_nodes', 'always_nodes')
|
||||
return qs
|
||||
|
||||
@check_superuser
|
||||
def can_add(self, data):
|
||||
return False
|
||||
if data is None: # Hide direct creation in API browser
|
||||
return False
|
||||
return (
|
||||
self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role') and
|
||||
self.check_related('credential', Credential, data, role_field='use_role') and
|
||||
self.check_related('inventory', Inventory, data, role_field='use_role'))
|
||||
|
||||
def can_change(self, obj, data):
|
||||
return False
|
||||
@ -1528,22 +1536,28 @@ class WorkflowJobTemplateAccess(BaseAccess):
|
||||
|
||||
def can_copy(self, obj):
|
||||
if self.save_messages:
|
||||
wfjt_errors = {}
|
||||
missing_ujt = []
|
||||
missing_credentials = []
|
||||
missing_inventories = []
|
||||
qs = obj.workflow_job_template_nodes
|
||||
qs.select_related('unified_job_template', 'inventory', 'credential')
|
||||
for node in qs.all():
|
||||
node_errors = {}
|
||||
if node.inventory and self.user not in node.inventory.use_role:
|
||||
node_errors['inventory'] = 'Prompted inventory %s can not be coppied.' % node.inventory.name
|
||||
missing_inventories.append(node.inventory.name)
|
||||
if node.credential and self.user not in node.credential.use_role:
|
||||
node_errors['credential'] = 'Prompted credential %s can not be coppied.' % node.credential.name
|
||||
missing_credentials.append(node.credential.name)
|
||||
ujt = node.unified_job_template
|
||||
if ujt and not self.user.can_access(UnifiedJobTemplate, 'start', ujt, validate_license=False):
|
||||
node_errors['unified_job_template'] = (
|
||||
'Prompted %s %s can not be coppied.' % (ujt._meta.verbose_name_raw, ujt.name))
|
||||
missing_ujt.append(ujt.name)
|
||||
if node_errors:
|
||||
wfjt_errors[node.id] = node_errors
|
||||
self.messages.update(wfjt_errors)
|
||||
if missing_ujt:
|
||||
self.messages['templates_unable_to_copy'] = missing_ujt
|
||||
if missing_credentials:
|
||||
self.messages['credentials_unable_to_copy'] = missing_credentials
|
||||
if missing_inventories:
|
||||
self.messages['inventories_unable_to_copy'] = missing_inventories
|
||||
|
||||
return self.check_related('organization', Organization, {'reference_obj': obj}, mandatory=True)
|
||||
|
||||
@@ -1611,11 +1625,19 @@ class WorkflowJobAccess(BaseAccess):
    def can_change(self, obj, data):
        return False

    @check_superuser
    def can_delete(self, obj):
        if obj.workflow_job_template is None:
            # only superusers can delete orphaned workflow jobs
            return self.user.is_superuser
        return self.user in obj.workflow_job_template.admin_role
        return (obj.workflow_job_template and
                obj.workflow_job_template.organization and
                self.user in obj.workflow_job_template.organization.admin_role)

    def get_method_capability(self, method, obj, parent_obj):
        if method == 'start':
            # Return simplistic permission, will perform detailed check on POST
            if not obj.workflow_job_template:
                return self.user.is_superuser
            return self.user in obj.workflow_job_template.execute_role
        return super(WorkflowJobAccess, self).get_method_capability(method, obj, parent_obj)

    def can_start(self, obj, validate_license=True):
        if validate_license:
@@ -1624,7 +1646,29 @@ class WorkflowJobAccess(BaseAccess):
        if self.user.is_superuser:
            return True

        return (obj.workflow_job_template and self.user in obj.workflow_job_template.execute_role)
        wfjt = obj.workflow_job_template
        # only superusers can relaunch orphans
        if not wfjt:
            return False

        # execute permission to WFJT is mandatory for any relaunch
        if self.user not in wfjt.execute_role:
            return False

        # user's WFJT access doesn't guarentee permission to launch, introspect nodes
        return self.can_recreate(obj)

    def can_recreate(self, obj):
        node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credential', 'unified_job_template')
        node_access = WorkflowJobNodeAccess(user=self.user)
        wj_add_perm = True
        for node in node_qs:
            if not node_access.can_add({'reference_obj': node}):
                wj_add_perm = False
        if not wj_add_perm and self.save_messages:
            self.messages['workflow_job_template'] = _('You do not have permission to the workflow job '
                                                       'resources required for relaunch.')
        return wj_add_perm

def can_cancel(self, obj):
|
||||
if not obj.can_cancel:
|
||||
@ -1912,11 +1956,17 @@ class ScheduleAccess(BaseAccess):
|
||||
|
||||
@check_superuser
|
||||
def can_add(self, data):
|
||||
return self.check_related('unified_job_template', UnifiedJobTemplate, data, mandatory=True)
|
||||
return self.check_related('unified_job_template', UnifiedJobTemplate, data, role_field='execute_role', mandatory=True)
|
||||
|
||||
@check_superuser
|
||||
def can_change(self, obj, data):
|
||||
return self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, mandatory=True)
|
||||
if self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, mandatory=True):
|
||||
return True
|
||||
# Users with execute role can modify the schedules they created
|
||||
return (
|
||||
obj.created_by == self.user and
|
||||
self.check_related('unified_job_template', UnifiedJobTemplate, data, obj=obj, role_field='execute_role', mandatory=True))
|
||||
|
||||
|
||||
def can_delete(self, obj):
|
||||
return self.can_change(obj, {})
|
||||
|
||||
@@ -96,12 +96,12 @@ class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--older_than',
                    dest='older_than',
                    default=None,
                    help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y).'),
                    default='30d',
                    help='Specify the relative time to consider facts older than (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 30d.'),
        make_option('--granularity',
                    dest='granularity',
                    default=None,
                    help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y).'),
                    default='1w',
                    help='Window duration to group same hosts by for deletion (w)eek (d)ay or (y)ear (i.e. 5d, 2w, 1y). Defaults to 1w.'),
        make_option('--module',
                    dest='module',
                    default=None,
@@ -12,7 +12,7 @@ from django.db import transaction
from django.utils.timezone import now

# AWX
from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob
from awx.main.models import Job, AdHocCommand, ProjectUpdate, InventoryUpdate, SystemJob, WorkflowJob


class Command(NoArgsCommand):
@@ -30,19 +30,22 @@ class Command(NoArgsCommand):
                    'be removed)'),
        make_option('--jobs', dest='only_jobs', action='store_true',
                    default=False,
                    help='Only remove jobs'),
                    help='Remove jobs'),
        make_option('--ad-hoc-commands', dest='only_ad_hoc_commands',
                    action='store_true', default=False,
                    help='Only remove ad hoc commands'),
                    help='Remove ad hoc commands'),
        make_option('--project-updates', dest='only_project_updates',
                    action='store_true', default=False,
                    help='Only remove project updates'),
                    help='Remove project updates'),
        make_option('--inventory-updates', dest='only_inventory_updates',
                    action='store_true', default=False,
                    help='Only remove inventory updates'),
                    help='Remove inventory updates'),
        make_option('--management-jobs', default=False,
                    action='store_true', dest='only_management_jobs',
                    help='Only remove management jobs')
                    help='Remove management jobs'),
        make_option('--workflow-jobs', default=False,
                    action='store_true', dest='only_workflow_jobs',
                    help='Remove workflow jobs')
    )

def cleanup_jobs(self):
|
||||
@@ -169,6 +172,28 @@ class Command(NoArgsCommand):
        self.logger.addHandler(handler)
        self.logger.propagate = False

    def cleanup_workflow_jobs(self):
        skipped, deleted = 0, 0
        for workflow_job in WorkflowJob.objects.all():
            workflow_job_display = '"{}" (started {}, {} nodes)'.format(
                unicode(workflow_job), unicode(workflow_job.created),
                workflow_job.workflow_nodes.count())
            if workflow_job.status in ('pending', 'waiting', 'running'):
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s job %s', action_text, workflow_job.status, workflow_job_display)
                skipped += 1
            elif workflow_job.created >= self.cutoff:
                action_text = 'would skip' if self.dry_run else 'skipping'
                self.logger.debug('%s %s', action_text, workflow_job_display)
                skipped += 1
            else:
                action_text = 'would delete' if self.dry_run else 'deleting'
                self.logger.info('%s %s', action_text, workflow_job_display)
                if not self.dry_run:
                    workflow_job.delete()
                deleted += 1
        return skipped, deleted

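A hedged usage example for the new flag; --workflow-jobs and --days are the options this commit wires up (RunSystemJob appends --workflow-jobs for cleanup_jobs later in this diff), and the manage.py entry point is assumed as in the Makefile.

# Sketch only: remove workflow jobs older than 30 days via the management command.
import subprocess

subprocess.check_call([
    "python", "manage.py", "cleanup_jobs",
    "--days", "30",        # same cutoff option used by the other cleanup flags
    "--workflow-jobs",     # new flag added by this commit
])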
@transaction.atomic
|
||||
def handle_noargs(self, **options):
|
||||
self.verbosity = int(options.get('verbosity', 1))
|
||||
@ -179,7 +204,7 @@ class Command(NoArgsCommand):
|
||||
self.cutoff = now() - datetime.timedelta(days=self.days)
|
||||
except OverflowError:
|
||||
raise CommandError('--days specified is too large. Try something less than 99999 (about 270 years).')
|
||||
model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates', 'management_jobs')
|
||||
model_names = ('jobs', 'ad_hoc_commands', 'project_updates', 'inventory_updates', 'management_jobs', 'workflow_jobs')
|
||||
models_to_cleanup = set()
|
||||
for m in model_names:
|
||||
if options.get('only_%s' % m, False):
|
||||
|
||||
@ -64,7 +64,7 @@ class MemObject(object):
|
||||
all_vars = {}
|
||||
files_found = 0
|
||||
for suffix in ('', '.yml', '.yaml', '.json'):
|
||||
path = ''.join([base_path, suffix])
|
||||
path = ''.join([base_path, suffix]).encode("utf-8")
|
||||
if not os.path.exists(path):
|
||||
continue
|
||||
if not os.path.isfile(path):
|
||||
@ -462,7 +462,7 @@ class ExecutableJsonLoader(BaseLoader):
|
||||
# to set their variables
|
||||
for k,v in self.all_group.all_hosts.iteritems():
|
||||
if 'hostvars' not in _meta:
|
||||
data = self.command_to_json([self.source, '--host', k])
|
||||
data = self.command_to_json([self.source, '--host', k.encode("utf-8")])
|
||||
else:
|
||||
data = _meta['hostvars'].get(k, {})
|
||||
if isinstance(data, dict):
|
||||
|
||||
@ -20,7 +20,7 @@ from django.core.urlresolvers import reverse
|
||||
# AWX
|
||||
from awx.main.models.base import * # noqa
|
||||
from awx.main.models.unified_jobs import * # noqa
|
||||
from awx.main.models.notifications import JobNotificationMixin
|
||||
from awx.main.models.notifications import JobNotificationMixin, NotificationTemplate
|
||||
from awx.main.fields import JSONField
|
||||
|
||||
logger = logging.getLogger('awx.main.models.ad_hoc_commands')
|
||||
@ -157,18 +157,20 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
|
||||
|
||||
@property
|
||||
def notification_templates(self):
|
||||
all_inventory_sources = set()
|
||||
all_orgs = set()
|
||||
for h in self.hosts.all():
|
||||
for invsrc in h.inventory_sources.all():
|
||||
all_inventory_sources.add(invsrc)
|
||||
all_orgs.add(h.inventory.organization)
|
||||
active_templates = dict(error=set(),
|
||||
success=set(),
|
||||
any=set())
|
||||
for invsrc in all_inventory_sources:
|
||||
notifications_dict = invsrc.notification_templates
|
||||
for notification_type in active_templates.keys():
|
||||
for templ in notifications_dict[notification_type]:
|
||||
active_templates[notification_type].add(templ)
|
||||
base_notification_templates = NotificationTemplate.objects
|
||||
for org in all_orgs:
|
||||
for templ in base_notification_templates.filter(organization_notification_templates_for_errors=org):
|
||||
active_templates['error'].add(templ)
|
||||
for templ in base_notification_templates.filter(organization_notification_templates_for_success=org):
|
||||
active_templates['success'].add(templ)
|
||||
for templ in base_notification_templates.filter(organization_notification_templates_for_any=org):
|
||||
active_templates['any'].add(templ)
|
||||
active_templates['error'] = list(active_templates['error'])
|
||||
active_templates['any'] = list(active_templates['any'])
|
||||
active_templates['success'] = list(active_templates['success'])
|
||||
|
||||
@ -1002,9 +1002,8 @@ class InventorySourceOptions(BaseModel):
|
||||
if r not in valid_regions and r not in invalid_regions:
|
||||
invalid_regions.append(r)
|
||||
if invalid_regions:
|
||||
raise ValidationError(_('Invalid %(source)s region%(plural)s: %(region)s') % {
|
||||
'source': self.source, 'plural': '' if len(invalid_regions) == 1 else 's',
|
||||
'region': ', '.join(invalid_regions)})
|
||||
raise ValidationError(_('Invalid %(source)s region: %(region)s') % {
|
||||
'source': self.source, 'region': ', '.join(invalid_regions)})
|
||||
return ','.join(regions)
|
||||
|
||||
source_vars_dict = VarsDictProperty('source_vars')
|
||||
@ -1028,9 +1027,8 @@ class InventorySourceOptions(BaseModel):
|
||||
if instance_filter_name not in self.INSTANCE_FILTER_NAMES:
|
||||
invalid_filters.append(instance_filter)
|
||||
if invalid_filters:
|
||||
raise ValidationError(_('Invalid filter expression%(plural)s: %(filter)s') %
|
||||
{'plural': '' if len(invalid_filters) == 1 else 's',
|
||||
'filter': ', '.join(invalid_filters)})
|
||||
raise ValidationError(_('Invalid filter expression: %(filter)s') %
|
||||
{'filter': ', '.join(invalid_filters)})
|
||||
return instance_filters
|
||||
|
||||
def clean_group_by(self):
|
||||
@ -1047,9 +1045,8 @@ class InventorySourceOptions(BaseModel):
|
||||
if c not in valid_choices and c not in invalid_choices:
|
||||
invalid_choices.append(c)
|
||||
if invalid_choices:
|
||||
raise ValidationError(_('Invalid group by choice%(plural)s: %(choice)s') %
|
||||
{'plural': '' if len(invalid_choices) == 1 else 's',
|
||||
'choice': ', '.join(invalid_choices)})
|
||||
raise ValidationError(_('Invalid group by choice: %(choice)s') %
|
||||
{'choice': ', '.join(invalid_choices)})
|
||||
return ','.join(choices)
|
||||
|
||||
|
||||
|
||||
@ -606,6 +606,11 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
|
||||
evars.update(extra_vars)
|
||||
self.update_fields(extra_vars=json.dumps(evars))
|
||||
|
||||
def _resources_sufficient_for_launch(self):
|
||||
if self.job_type == PERM_INVENTORY_SCAN:
|
||||
return self.inventory_id is not None
|
||||
return not (self.inventory_id is None or self.project_id is None)
|
||||
|
||||
def display_artifacts(self):
|
||||
'''
|
||||
Hides artifacts if they are marked as no_log type artifacts.
|
||||
@ -1175,7 +1180,6 @@ class JobEvent(CreatedModifiedModel):
|
||||
# Save UUID and parent UUID for determining parent-child relationship.
|
||||
job_event_uuid = kwargs.get('uuid', None)
|
||||
parent_event_uuid = kwargs.get('parent_uuid', None)
|
||||
artifact_dict = kwargs.get('artifact_data', None)
|
||||
|
||||
# Sanity check: Don't honor keys that we don't recognize.
|
||||
valid_keys = {'job_id', 'event', 'event_data', 'playbook', 'play',
|
||||
@ -1185,6 +1189,11 @@ class JobEvent(CreatedModifiedModel):
|
||||
if key not in valid_keys:
|
||||
kwargs.pop(key)
|
||||
|
||||
event_data = kwargs.get('event_data', None)
|
||||
artifact_dict = None
|
||||
if event_data:
|
||||
artifact_dict = event_data.pop('artifact_data', None)
|
||||
|
||||
# Try to find a parent event based on UUID.
|
||||
if parent_event_uuid:
|
||||
cache_key = '{}_{}'.format(kwargs['job_id'], parent_event_uuid)
|
||||
@ -1208,12 +1217,21 @@ class JobEvent(CreatedModifiedModel):
|
||||
|
||||
# Save artifact data to parent job (if provided).
|
||||
if artifact_dict:
|
||||
event_data = kwargs.get('event_data', None)
|
||||
if event_data and isinstance(event_data, dict):
|
||||
res = event_data.get('res', None)
|
||||
if res and isinstance(res, dict):
|
||||
if res.get('_ansible_no_log', False):
|
||||
artifact_dict['_ansible_no_log'] = True
|
||||
# Note: Core has not added support for marking artifacts as
|
||||
# sensitive yet. Going forward, core will not use
|
||||
# _ansible_no_log to denote sensitive set_stats calls.
|
||||
# Instead, they plan to add a flag outside of the traditional
|
||||
# no_log mechanism. no_log will not work for this feature,
|
||||
# in core, because sensitive data is scrubbed before sending
|
||||
# data to the callback. The playbook_on_stats is the callback
|
||||
# in which the set_stats data is used.
|
||||
|
||||
# Again, the sensitive artifact feature has not yet landed in
|
||||
# core. The below is how we mark artifacts payload as
|
||||
# senstive
|
||||
# artifact_dict['_ansible_no_log'] = True
|
||||
#
|
||||
parent_job = Job.objects.filter(pk=kwargs['job_id']).first()
|
||||
if parent_job and parent_job.artifacts != artifact_dict:
|
||||
parent_job.artifacts = artifact_dict
|
||||
|
||||
@ -561,6 +561,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
"Override in child classes, None value indicates this is not configurable"
|
||||
return None
|
||||
|
||||
def _resources_sufficient_for_launch(self):
|
||||
return True
|
||||
|
||||
def __unicode__(self):
|
||||
return u'%s-%s-%s' % (self.created, self.id, self.status)
|
||||
|
||||
|
||||
@ -134,7 +134,7 @@ class WorkflowNodeBase(CreatedModifiedModel):
|
||||
scan_errors = ujt_obj._extra_job_type_errors(accepted_fields)
|
||||
ignored_dict.update(scan_errors)
|
||||
for fd in ['inventory', 'credential']:
|
||||
if getattr(ujt_obj, fd) is None and not (ask_for_vars_dict.get(fd, False) and fd in prompts_dict):
|
||||
if getattr(ujt_obj, "{}_id".format(fd)) is None and not (ask_for_vars_dict.get(fd, False) and fd in prompts_dict):
|
||||
missing_dict[fd] = 'Job Template does not have this field and workflow node does not provide it'
|
||||
|
||||
data = {}
|
||||
@ -418,18 +418,22 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
||||
|
||||
def can_start_without_user_input(self):
|
||||
'''Return whether WFJT can be launched without survey passwords.'''
|
||||
return not bool(self.variables_needed_to_start)
|
||||
return not bool(
|
||||
self.variables_needed_to_start or
|
||||
self.node_templates_missing() or
|
||||
self.node_prompts_rejected())
|
||||
|
||||
def get_warnings(self):
|
||||
warning_data = {}
|
||||
for node in self.workflow_job_template_nodes.all():
|
||||
if node.unified_job_template is None:
|
||||
warning_data[node.pk] = 'Node is missing a linked unified_job_template'
|
||||
continue
|
||||
def node_templates_missing(self):
|
||||
return [node.pk for node in self.workflow_job_template_nodes.filter(
|
||||
unified_job_template__isnull=True).all()]
|
||||
|
||||
def node_prompts_rejected(self):
|
||||
node_list = []
|
||||
for node in self.workflow_job_template_nodes.select_related('unified_job_template').all():
|
||||
node_prompts_warnings = node.get_prompts_warnings()
|
||||
if node_prompts_warnings:
|
||||
warning_data[node.pk] = node_prompts_warnings
|
||||
return warning_data
|
||||
node_list.append(node.pk)
|
||||
return node_list
|
||||
|
||||
def user_copy(self, user):
|
||||
new_wfjt = self.copy_unified_jt()
|
||||
|
||||
@ -10,6 +10,7 @@ from sets import Set
|
||||
from django.conf import settings
|
||||
from django.db import transaction, connection
|
||||
from django.db.utils import DatabaseError
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
# AWX
|
||||
from awx.main.models import * # noqa
|
||||
@ -114,14 +115,20 @@ class TaskManager():
|
||||
dag = WorkflowDAG(workflow_job)
|
||||
spawn_nodes = dag.bfs_nodes_to_run()
|
||||
for spawn_node in spawn_nodes:
|
||||
if spawn_node.unified_job_template is None:
|
||||
continue
|
||||
kv = spawn_node.get_job_kwargs()
|
||||
job = spawn_node.unified_job_template.create_unified_job(**kv)
|
||||
spawn_node.job = job
|
||||
spawn_node.save()
|
||||
can_start = job.signal_start(**kv)
|
||||
if job._resources_sufficient_for_launch():
|
||||
can_start = job.signal_start(**kv)
|
||||
else:
|
||||
can_start = False
|
||||
if not can_start:
|
||||
job.status = 'failed'
|
||||
job.job_explanation = "Workflow job could not start because it was not in the right state or required manual credentials"
|
||||
job.job_explanation = _("Job spawned from workflow could not start because it "
|
||||
"was not in the right state or required manual credentials")
|
||||
job.save(update_fields=['status', 'job_explanation'])
|
||||
connection.on_commit(lambda: job.websocket_emit_status('failed'))
|
||||
|
||||
|
||||
@ -67,6 +67,8 @@ class WorkflowDAG(SimpleDAG):
|
||||
obj = n['node_object']
|
||||
job = obj.job
|
||||
|
||||
if obj.unified_job_template is None:
|
||||
continue
|
||||
if not job:
|
||||
return False
|
||||
# Job is about to run or is running. Hold our horses and wait for
|
||||
|
||||
@ -34,11 +34,13 @@ def run_job_complete(job_id):
|
||||
|
||||
@task
|
||||
def run_task_manager():
|
||||
logger.debug("Running Tower task manager.")
|
||||
TaskManager().schedule()
|
||||
|
||||
|
||||
@task
|
||||
def run_fail_inconsistent_running_jobs():
|
||||
logger.debug("Running task to fail inconsistent running jobs.")
|
||||
with transaction.atomic():
|
||||
# Lock
|
||||
try:
|
||||
|
||||
@ -32,7 +32,8 @@ import pexpect
|
||||
|
||||
# Celery
|
||||
from celery import Task, task
|
||||
from celery.signals import celeryd_init
|
||||
from celery.signals import celeryd_init, worker_process_init
|
||||
from celery import current_app
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
@ -75,7 +76,8 @@ logger = logging.getLogger('awx.main.tasks')
|
||||
def celery_startup(conf=None, **kwargs):
|
||||
# Re-init all schedules
|
||||
# NOTE: Rework this during the Rampart work
|
||||
logger.info("Syncing Tower Schedules")
|
||||
startup_logger = logging.getLogger('awx.main.tasks')
|
||||
startup_logger.info("Syncing Tower Schedules")
|
||||
for sch in Schedule.objects.all():
|
||||
try:
|
||||
sch.update_computed_fields()
|
||||
@@ -84,7 +86,28 @@ def celery_startup(conf=None, **kwargs):
            logger.error("Failed to rebuild schedule {}: {}".format(sch, e))


def uwsgi_reload():
def _setup_tower_logger():
    global logger
    from django.utils.log import configure_logging
    LOGGING_DICT = settings.LOGGING
    if settings.LOG_AGGREGATOR_ENABLED:
        LOGGING_DICT['handlers']['http_receiver']['class'] = 'awx.main.utils.handlers.HTTPSHandler'
        LOGGING_DICT['handlers']['http_receiver']['async'] = False
        if 'awx' in settings.LOG_AGGREGATOR_LOGGERS:
            if 'http_receiver' not in LOGGING_DICT['loggers']['awx']['handlers']:
                LOGGING_DICT['loggers']['awx']['handlers'] += ['http_receiver']
    configure_logging(settings.LOGGING_CONFIG, LOGGING_DICT)
    logger = logging.getLogger('awx.main.tasks')


@worker_process_init.connect
def task_set_logger_pre_run(*args, **kwargs):
    if settings.LOG_AGGREGATOR_ENABLED:
        _setup_tower_logger()
        logger.debug('Custom Tower logger configured for worker process.')


def _uwsgi_reload():
    # http://uwsgi-docs.readthedocs.io/en/latest/MasterFIFO.html#available-commands
    logger.warn('Initiating uWSGI chain reload of server')
    TRIGGER_CHAIN_RELOAD = 'c'
@@ -92,14 +115,28 @@ def uwsgi_reload():
        awxfifo.write(TRIGGER_CHAIN_RELOAD)


@task(queue='broadcast_all')
def clear_cache_keys(cache_keys):
    set_of_keys = set([key for key in cache_keys])
def _reset_celery_logging():
    # Worker logger reloaded, now send signal to restart pool
    app = current_app._get_current_object()
    app.control.broadcast('pool_restart', arguments={'reload': True},
                          destination=['celery@{}'.format(settings.CLUSTER_HOST_ID)], reply=False)


def _clear_cache_keys(set_of_keys):
    logger.debug('cache delete_many(%r)', set_of_keys)
    cache.delete_many(set_of_keys)


@task(queue='broadcast_all')
def process_cache_changes(cache_keys):
    logger.warn('Processing cache changes, task args: {0.args!r} kwargs: {0.kwargs!r}'.format(
        process_cache_changes.request))
    set_of_keys = set([key for key in cache_keys])
    _clear_cache_keys(set_of_keys)
    for setting_key in set_of_keys:
        if setting_key.startswith('LOG_AGGREGATOR_'):
            _uwsgi_reload()
            _reset_celery_logging()
            break

@ -129,6 +166,7 @@ def send_notifications(notification_list, job_id=None):
|
||||
|
||||
@task(bind=True, queue='default')
|
||||
def run_administrative_checks(self):
|
||||
logger.warn("Running administrative checks.")
|
||||
if not settings.TOWER_ADMIN_ALERTS:
|
||||
return
|
||||
validation_info = TaskEnhancer().validate_enhancements()
|
||||
@ -150,11 +188,13 @@ def run_administrative_checks(self):
|
||||
|
||||
@task(bind=True, queue='default')
|
||||
def cleanup_authtokens(self):
|
||||
logger.warn("Cleaning up expired authtokens.")
|
||||
AuthToken.objects.filter(expires__lt=now()).delete()
|
||||
|
||||
|
||||
@task(bind=True)
|
||||
def cluster_node_heartbeat(self):
|
||||
logger.debug("Cluster node heartbeat task.")
|
||||
inst = Instance.objects.filter(hostname=settings.CLUSTER_HOST_ID)
|
||||
if inst.exists():
|
||||
inst = inst[0]
|
||||
@ -1832,7 +1872,7 @@ class RunSystemJob(BaseTask):
|
||||
if 'days' in json_vars and system_job.job_type != 'cleanup_facts':
|
||||
args.extend(['--days', str(json_vars.get('days', 60))])
|
||||
if system_job.job_type == 'cleanup_jobs':
|
||||
args.extend(['--jobs', '--project-updates', '--inventory-updates', '--management-jobs', '--ad-hoc-commands'])
|
||||
args.extend(['--jobs', '--project-updates', '--inventory-updates', '--management-jobs', '--ad-hoc-commands', '--workflow-jobs'])
|
||||
if system_job.job_type == 'cleanup_facts':
|
||||
if 'older_than' in json_vars:
|
||||
args.extend(['--older_than', str(json_vars['older_than'])])
|
||||
|
||||
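For a cleanup_jobs system job, the net effect of the lines above is an argument list that now also prunes workflow jobs. A hypothetical example with extra vars {"days": 30}:

    args = []
    json_vars = {'days': 30}             # hypothetical system job extra vars
    args.extend(['--days', str(json_vars.get('days', 60))])
    args.extend(['--jobs', '--project-updates', '--inventory-updates',
                 '--management-jobs', '--ad-hoc-commands', '--workflow-jobs'])
    # args -> ['--days', '30', '--jobs', ..., '--workflow-jobs']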
@ -65,6 +65,17 @@ def test_edit_sensitive_fields(patch, job_template_factory, alice, grant_project
|
||||
}, alice, expect=expect)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_reject_dict_extra_vars_patch(patch, job_template_factory, admin_user):
|
||||
# extra_vars is expected to be a string; raise a 400 for a dict payload that
# would otherwise have been saved incorrectly
|
||||
jt = job_template_factory(
|
||||
'jt', organization='org1', project='prj', inventory='inv', credential='cred'
|
||||
).job_template
|
||||
patch(reverse('api:job_template_detail', args=(jt.id,)),
|
||||
{'extra_vars': {'foo': 5}}, admin_user, expect=400)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_edit_playbook(patch, job_template_factory, alice):
|
||||
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
|
||||
|
||||
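By contrast with the rejected dict payload in the test above, the accepted form keeps extra_vars as a JSON/YAML string. A sketch reusing the same test helpers; the 200 status is an assumption about the success path:

    @pytest.mark.django_db
    def test_accept_string_extra_vars_patch(patch, job_template_factory, admin_user):
        jt = job_template_factory(
            'jt', organization='org1', project='prj', inventory='inv', credential='cred'
        ).job_template
        patch(reverse('api:job_template_detail', args=(jt.id,)),
              {'extra_vars': 'foo: 5'}, admin_user, expect=200)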
@ -1,3 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import mock # noqa
|
||||
import pytest
|
||||
|
||||
@ -22,6 +24,84 @@ def team_project_list(organization_factory):
|
||||
return objects
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_project_paged_list(get, organization_factory):
|
||||
'Test project listing that spans multiple pages'
|
||||
|
||||
# 3 total projects, 1 per page, 3 pages
|
||||
objects = organization_factory(
|
||||
'org1',
|
||||
projects=['project-%s' % i for i in range(3)],
|
||||
users=['alice'],
|
||||
roles=['project-%s.admin_role:alice' % i for i in range(3)],
|
||||
)
|
||||
|
||||
# first page has first project and no previous page
|
||||
pk = objects.users.alice.pk
|
||||
url = reverse('api:user_projects_list', args=(pk,))
|
||||
results = get(url, objects.users.alice, QUERY_STRING='page_size=1').data
|
||||
assert results['count'] == 3
|
||||
assert len(results['results']) == 1
|
||||
assert results['previous'] is None
|
||||
assert results['next'] == (
|
||||
'/api/v1/users/%s/projects/?page=2&page_size=1' % pk
|
||||
)
|
||||
|
||||
# second page has one more, a previous and next page
|
||||
results = get(url, objects.users.alice,
|
||||
QUERY_STRING='page=2&page_size=1').data
|
||||
assert len(results['results']) == 1
|
||||
assert results['previous'] == (
|
||||
'/api/v1/users/%s/projects/?page=1&page_size=1' % pk
|
||||
)
|
||||
assert results['next'] == (
|
||||
'/api/v1/users/%s/projects/?page=3&page_size=1' % pk
|
||||
)
|
||||
|
||||
# third page has last project and a previous page
|
||||
results = get(url, objects.users.alice,
|
||||
QUERY_STRING='page=3&page_size=1').data
|
||||
assert len(results['results']) == 1
|
||||
assert results['previous'] == (
|
||||
'/api/v1/users/%s/projects/?page=2&page_size=1' % pk
|
||||
)
|
||||
assert results['next'] is None
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_project_paged_list_with_unicode(get, organization_factory):
|
||||
'Test project listing that contains unicode chars in the next/prev links'
|
||||
|
||||
# Create 2 projects that contain a "cloud" unicode character, make sure we
|
||||
# can search it and properly generate next/previous page links
|
||||
objects = organization_factory(
|
||||
'org1',
|
||||
projects=['project-☁-1','project-☁-2'],
|
||||
users=['alice'],
|
||||
roles=['project-☁-1.admin_role:alice','project-☁-2.admin_role:alice'],
|
||||
)
|
||||
pk = objects.users.alice.pk
|
||||
url = reverse('api:user_projects_list', args=(pk,))
|
||||
|
||||
# first on first page, next page link contains unicode char
|
||||
results = get(url, objects.users.alice,
|
||||
QUERY_STRING='page_size=1&search=%E2%98%81').data
|
||||
assert results['count'] == 2
|
||||
assert len(results['results']) == 1
|
||||
assert results['next'] == (
|
||||
'/api/v1/users/%s/projects/?page=2&page_size=1&search=%%E2%%98%%81' % pk # noqa
|
||||
)
|
||||
|
||||
# second project on second page, previous page link contains unicode char
|
||||
results = get(url, objects.users.alice,
|
||||
QUERY_STRING='page=2&page_size=1&search=%E2%98%81').data
|
||||
assert results['count'] == 2
|
||||
assert len(results['results']) == 1
|
||||
assert results['previous'] == (
|
||||
'/api/v1/users/%s/projects/?page=1&page_size=1&search=%%E2%%98%%81' % pk # noqa
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_project_list(get, organization_factory):
|
||||
'List of projects a user has access to, filtered by projects you can also see'
|
||||
|
||||
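The unicode paging test above hard-codes the percent-encoded form of the cloud character in the expected next/previous links. For reference, a small sketch of where %E2%98%81 comes from (Python 2 urllib, matching the codebase's vintage):

    from urllib import quote

    search = u'\u2601'.encode('utf-8')       # the cloud character as UTF-8 bytes
    assert quote(search) == '%E2%98%81'
    # hence links such as:
    # /api/v1/users/<pk>/projects/?page=2&page_size=1&search=%E2%98%81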
@ -259,22 +259,37 @@ def test_associate_label(label, user, job_template):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_move_schedule_to_JT_no_access(job_template, rando):
|
||||
schedule = Schedule.objects.create(
|
||||
unified_job_template=job_template,
|
||||
rrule='DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1')
|
||||
job_template.admin_role.members.add(rando)
|
||||
jt2 = JobTemplate.objects.create(name="other-jt")
|
||||
access = ScheduleAccess(rando)
|
||||
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk))
|
||||
class TestJobTemplateSchedules:
|
||||
|
||||
rrule = 'DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1'
|
||||
rrule2 = 'DTSTART:20151117T050000Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1'
|
||||
|
||||
@pytest.fixture
|
||||
def jt2(self):
|
||||
return JobTemplate.objects.create(name="other-jt")
|
||||
|
||||
def test_move_schedule_to_JT_no_access(self, job_template, rando, jt2):
|
||||
schedule = Schedule.objects.create(unified_job_template=job_template, rrule=self.rrule)
|
||||
job_template.admin_role.members.add(rando)
|
||||
access = ScheduleAccess(rando)
|
||||
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk))
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_move_schedule_from_JT_no_access(job_template, rando):
|
||||
schedule = Schedule.objects.create(
|
||||
unified_job_template=job_template,
|
||||
rrule='DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1')
|
||||
jt2 = JobTemplate.objects.create(name="other-jt")
|
||||
jt2.admin_role.members.add(rando)
|
||||
access = ScheduleAccess(rando)
|
||||
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk))
|
||||
def test_move_schedule_from_JT_no_access(self, job_template, rando, jt2):
|
||||
schedule = Schedule.objects.create(unified_job_template=job_template, rrule=self.rrule)
|
||||
jt2.admin_role.members.add(rando)
|
||||
access = ScheduleAccess(rando)
|
||||
assert not access.can_change(schedule, data=dict(unified_job_template=jt2.pk))
|
||||
|
||||
|
||||
def test_can_create_schedule_with_execute(self, job_template, rando):
|
||||
job_template.execute_role.members.add(rando)
|
||||
access = ScheduleAccess(rando)
|
||||
assert access.can_add({'unified_job_template': job_template})
|
||||
|
||||
|
||||
def test_can_modify_ones_own_schedule(self, job_template, rando):
|
||||
job_template.execute_role.members.add(rando)
|
||||
schedule = Schedule.objects.create(unified_job_template=job_template, rrule=self.rrule, created_by=rando)
|
||||
access = ScheduleAccess(rando)
|
||||
assert access.can_change(schedule, {'rrule': self.rrule2})
|
||||
|
||||
@ -86,11 +86,15 @@ class TestWorkflowJobTemplateNodeAccess:
|
||||
@pytest.mark.django_db
|
||||
class TestWorkflowJobAccess:
|
||||
|
||||
def test_wfjt_admin_delete(self, wfjt, workflow_job, rando):
|
||||
wfjt.admin_role.members.add(rando)
|
||||
access = WorkflowJobAccess(rando)
|
||||
def test_org_admin_can_delete_workflow_job(self, workflow_job, org_admin):
|
||||
access = WorkflowJobAccess(org_admin)
|
||||
assert access.can_delete(workflow_job)
|
||||
|
||||
def test_wfjt_admin_can_delete_workflow_job(self, workflow_job, rando):
|
||||
workflow_job.workflow_job_template.admin_role.members.add(rando)
|
||||
access = WorkflowJobAccess(rando)
|
||||
assert not access.can_delete(workflow_job)
|
||||
|
||||
def test_cancel_your_own_job(self, wfjt, workflow_job, rando):
|
||||
wfjt.execute_role.members.add(rando)
|
||||
workflow_job.created_by = rando
|
||||
@ -120,13 +124,11 @@ class TestWorkflowJobAccess:
|
||||
access = WorkflowJobTemplateAccess(rando, save_messages=True)
|
||||
assert not access.can_copy(wfjt)
|
||||
warnings = access.messages
|
||||
assert 1 in warnings
|
||||
assert 'inventory' in warnings[1]
|
||||
assert 'inventories_unable_to_copy' in warnings
|
||||
|
||||
def test_workflow_copy_warnings_jt(self, wfjt, rando, job_template):
|
||||
wfjt.workflow_job_template_nodes.create(unified_job_template=job_template)
|
||||
access = WorkflowJobTemplateAccess(rando, save_messages=True)
|
||||
assert not access.can_copy(wfjt)
|
||||
warnings = access.messages
|
||||
assert 1 in warnings
|
||||
assert 'unified_job_template' in warnings[1]
|
||||
assert 'templates_unable_to_copy' in warnings
|
||||
|
||||
@ -125,6 +125,7 @@ class TestWorkflowJobTemplateNodeSerializerCharPrompts():
|
||||
serializer = WorkflowJobTemplateNodeSerializer()
|
||||
node = WorkflowJobTemplateNode(pk=1)
|
||||
node.char_prompts = {'limit': 'webservers'}
|
||||
serializer.instance = node
|
||||
view = FakeView(node)
|
||||
view.request = FakeRequest()
|
||||
view.request.method = "PATCH"
|
||||
|
||||
@ -5,7 +5,7 @@ import pytest
|
||||
# AWX
|
||||
from awx.main.scheduler.dag_simple import SimpleDAG
|
||||
from awx.main.scheduler.dag_workflow import WorkflowDAG
|
||||
from awx.main.models import Job
|
||||
from awx.main.models import Job, JobTemplate
|
||||
from awx.main.models.workflow import WorkflowJobNode
|
||||
|
||||
|
||||
@ -72,6 +72,7 @@ def factory_node():
|
||||
if status:
|
||||
j = Job(status=status)
|
||||
wfn.job = j
|
||||
wfn.unified_job_template = JobTemplate(name='JT{}'.format(id))
|
||||
return wfn
|
||||
return fn
|
||||
|
||||
|
||||
@ -30,6 +30,7 @@ PARAM_NAMES = {
|
||||
'password': 'LOG_AGGREGATOR_PASSWORD',
|
||||
'enabled_loggers': 'LOG_AGGREGATOR_LOGGERS',
|
||||
'indv_facts': 'LOG_AGGREGATOR_INDIVIDUAL_FACTS',
|
||||
'enabled_flag': 'LOG_AGGREGATOR_ENABLED',
|
||||
}
|
||||
|
||||
|
||||
@ -48,6 +49,7 @@ class HTTPSHandler(logging.Handler):
|
||||
def __init__(self, fqdn=False, **kwargs):
|
||||
super(HTTPSHandler, self).__init__()
|
||||
self.fqdn = fqdn
|
||||
self.async = kwargs.get('async', True)
|
||||
for fd in PARAM_NAMES:
|
||||
# settings values take precedence over the input params
|
||||
settings_name = PARAM_NAMES[fd]
|
||||
@ -100,11 +102,21 @@ class HTTPSHandler(logging.Handler):
|
||||
payload_str = json.dumps(payload_input)
|
||||
else:
|
||||
payload_str = payload_input
|
||||
return dict(data=payload_str, background_callback=unused_callback)
|
||||
if self.async:
|
||||
return dict(data=payload_str, background_callback=unused_callback)
|
||||
else:
|
||||
return dict(data=payload_str)
|
||||
|
||||
def skip_log(self, logger_name):
|
||||
if self.host == '' or (not self.enabled_flag):
|
||||
return True
|
||||
if not logger_name.startswith('awx.analytics'):
|
||||
# Tower log emission is only turned off by enablement setting
|
||||
return False
|
||||
return self.enabled_loggers is None or logger_name.split('.')[-1] not in self.enabled_loggers
|
||||
|
||||
def emit(self, record):
|
||||
if (self.host == '' or self.enabled_loggers is None or
|
||||
record.name.split('.')[-1] not in self.enabled_loggers):
|
||||
if self.skip_log(record.name):
|
||||
return
|
||||
try:
|
||||
payload = self.format(record)
|
||||
@ -123,7 +135,10 @@ class HTTPSHandler(logging.Handler):
|
||||
self.session.post(host, **self.get_post_kwargs(fact_payload))
|
||||
return
|
||||
|
||||
self.session.post(host, **self.get_post_kwargs(payload))
|
||||
if self.async:
|
||||
self.session.post(host, **self.get_post_kwargs(payload))
|
||||
else:
|
||||
requests.post(host, auth=requests.auth.HTTPBasicAuth(self.username, self.password), **self.get_post_kwargs(payload))
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except:
|
||||
|
||||
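The handler changes above make the log-shipping mode switchable: the background session is used only while async stays enabled, otherwise the record is posted inline with basic auth. A condensed sketch of that branch, with attribute names taken from the surrounding hunks rather than the full class:

    import requests

    def post_log_record(handler, host, payload):
        kwargs = handler.get_post_kwargs(payload)
        # 'async' is a plain attribute name in this Python 2 codebase
        if getattr(handler, 'async'):
            # background session (fire-and-forget), as before this change
            handler.session.post(host, **kwargs)
        else:
            # synchronous fallback, e.g. inside Celery worker processes
            requests.post(host,
                          auth=requests.auth.HTTPBasicAuth(handler.username, handler.password),
                          **kwargs)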
@ -185,8 +185,9 @@ def vars_validate_or_raise(vars_str):
    except ValueError:
        pass
    try:
        yaml.safe_load(vars_str)
        return vars_str
        r = yaml.safe_load(vars_str)
        if not (isinstance(r, basestring) and r.startswith('OrderedDict(')):
            return vars_str
    except yaml.YAMLError:
        pass
    raise RestValidationError(_('Must be valid JSON or YAML.'))
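The extra guard above exists because yaml.safe_load accepts almost any plain scalar, so input that is not real JSON/YAML structure still "parses". A small illustration of the cases the validator now distinguishes; the inputs are made up:

    import yaml

    yaml.safe_load('foo: bar')                   # -> {'foo': 'bar'}, a real mapping
    yaml.safe_load('just a sentence')            # -> 'just a sentence', a plain scalar
    yaml.safe_load("OrderedDict([('a', 1)])")    # -> the same text back, as a string
    # The last case is what the new isinstance/startswith check rejects: it is the
    # telltale of a Python repr having been stored instead of JSON or YAML.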
@ -115,6 +115,12 @@
|
||||
chdir: "{{project_path|quote}}/roles"
|
||||
when: doesRequirementsExist.stat.exists and scm_full_checkout|bool
|
||||
|
||||
# format provided by ansible is ["Revision: 12345", "URL: ..."]
|
||||
- name: parse subversion version string properly
|
||||
set_fact:
|
||||
scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}"
|
||||
when: scm_type == 'svn'
|
||||
|
||||
- name: Repository Version
|
||||
debug: msg="Repository Version {{ scm_version }}"
|
||||
when: scm_version is defined
|
||||
|
||||
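The regex_replace filter above reduces Ansible's "Revision: NNNNN" status line to the bare revision number. The same substitution expressed in plain Python, on a hypothetical input:

    import re

    raw = "Revision: 12345"                                      # hypothetical svn output line
    revision = re.sub(r'^.*Revision: ([0-9]+).*$', r'\1', raw)
    print(revision)                                              # -> 12345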
@ -73,7 +73,7 @@ DATABASES = {
|
||||
# timezone as the operating system.
|
||||
# If running in a Windows environment this must be set to the same as your
|
||||
# system time zone.
|
||||
TIME_ZONE = 'America/New_York'
|
||||
TIME_ZONE = None
|
||||
|
||||
# Language code for this installation. All choices can be found here:
|
||||
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||
@ -154,7 +154,7 @@ STDOUT_MAX_BYTES_DISPLAY = 1048576
|
||||
|
||||
# Returned in the header on event api lists as a recommendation to the UI
|
||||
# on how many events to display before truncating/hiding
|
||||
RECOMMENDED_MAX_EVENTS_DISPLAY_HEADER = 10000
|
||||
RECOMMENDED_MAX_EVENTS_DISPLAY_HEADER = 4000
|
||||
|
||||
# The maximum size of the ansible callback event's res data structure
|
||||
# beyond this limit and the value will be removed
|
||||
@ -167,6 +167,15 @@ JOB_EVENT_WORKERS = 4
|
||||
|
||||
JOB_EVENT_MAX_QUEUE_SIZE = 5000
|
||||
|
||||
# Disallow sending session cookies over insecure connections
|
||||
SESSION_COOKIE_SECURE = True
|
||||
|
||||
# Disallow sending csrf cookies over insecure connections
|
||||
CSRF_COOKIE_SECURE = True
|
||||
|
||||
# Limit CSRF cookies to browser sessions
|
||||
CSRF_COOKIE_AGE = None
|
||||
|
||||
TEMPLATE_CONTEXT_PROCESSORS = ( # NOQA
|
||||
'django.contrib.auth.context_processors.auth',
|
||||
'django.core.context_processors.debug',
|
||||
@ -380,6 +389,7 @@ CELERY_ACCEPT_CONTENT = ['json']
|
||||
CELERY_TRACK_STARTED = True
|
||||
CELERYD_TASK_TIME_LIMIT = None
|
||||
CELERYD_TASK_SOFT_TIME_LIMIT = None
|
||||
CELERYD_POOL_RESTARTS = True
|
||||
CELERYBEAT_SCHEDULER = 'celery.beat.PersistentScheduler'
|
||||
CELERYBEAT_MAX_LOOP_INTERVAL = 60
|
||||
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
|
||||
@ -882,7 +892,7 @@ LOGGING = {
|
||||
},
|
||||
'http_receiver': {
|
||||
'class': 'awx.main.utils.handlers.HTTPSNullHandler',
|
||||
'level': 'INFO',
|
||||
'level': 'DEBUG',
|
||||
'formatter': 'json',
|
||||
'host': '',
|
||||
},
|
||||
@ -981,7 +991,7 @@ LOGGING = {
|
||||
'handlers': ['callback_receiver'],
|
||||
},
|
||||
'awx.main.tasks': {
|
||||
'handlers': ['task_system']
|
||||
'handlers': ['task_system'],
|
||||
},
|
||||
'awx.main.scheduler': {
|
||||
'handlers': ['task_system'],
|
||||
@ -1009,18 +1019,6 @@ LOGGING = {
|
||||
'level': 'INFO',
|
||||
'propagate': False
|
||||
},
|
||||
'awx.analytics.job_events': {
|
||||
'handlers': ['null'],
|
||||
'level': 'INFO'
|
||||
},
|
||||
'awx.analytics.activity_stream': {
|
||||
'handlers': ['null'],
|
||||
'level': 'INFO'
|
||||
},
|
||||
'awx.analytics.system_tracking': {
|
||||
'handlers': ['null'],
|
||||
'level': 'INFO'
|
||||
},
|
||||
'django_auth_ldap': {
|
||||
'handlers': ['console', 'file', 'tower_warnings'],
|
||||
'level': 'DEBUG',
|
||||
|
||||
@ -24,11 +24,11 @@ ALLOWED_HOSTS = ['*']
|
||||
mimetypes.add_type("image/svg+xml", ".svg", True)
|
||||
mimetypes.add_type("image/svg+xml", ".svgz", True)
|
||||
|
||||
MONGO_HOST = '127.0.0.1'
|
||||
MONGO_PORT = 27017
|
||||
MONGO_USERNAME = None
|
||||
MONGO_PASSWORD = None
|
||||
MONGO_DB = 'system_tracking_dev'
|
||||
# Disallow sending session cookies over insecure connections
|
||||
SESSION_COOKIE_SECURE = False
|
||||
|
||||
# Disallow sending csrf cookies over insecure connections
|
||||
CSRF_COOKIE_SECURE = False
|
||||
|
||||
# Override django.template.loaders.cached.Loader in defaults.py
|
||||
TEMPLATE_LOADERS = (
|
||||
|
||||
@ -114,7 +114,7 @@ SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
# timezone as the operating system.
|
||||
# If running in a Windows environment this must be set to the same as your
|
||||
# system time zone.
|
||||
TIME_ZONE = 'America/New_York'
|
||||
TIME_ZONE = None
|
||||
|
||||
# Language code for this installation. All choices can be found here:
|
||||
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||
|
||||
@ -71,7 +71,7 @@ SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
# timezone as the operating system.
|
||||
# If running in a Windows environment this must be set to the same as your
|
||||
# system time zone.
|
||||
TIME_ZONE = 'America/New_York'
|
||||
TIME_ZONE = None
|
||||
|
||||
# Language code for this installation. All choices can be found here:
|
||||
# http://www.i18nguy.com/unicode/language-identifiers.html
|
||||
|
||||
@ -299,7 +299,10 @@ class LDAPGroupTypeField(fields.ChoiceField):
        data = super(LDAPGroupTypeField, self).to_internal_value(data)
        if not data:
            return None
        return getattr(django_auth_ldap.config, data)()
        if data.endswith('MemberDNGroupType'):
            return getattr(django_auth_ldap.config, data)(member_attr='member')
        else:
            return getattr(django_auth_ldap.config, data)()
|
||||
|
||||
|
||||
class LDAPUserFlagsField(fields.DictField):
|
||||
|
||||
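The branch added above matters because the MemberDNGroupType family in django-auth-ldap requires a member attribute at construction time, while the other group types take no required arguments. A short sketch of the two instantiation paths; the group type names are only examples:

    import django_auth_ldap.config

    def build_group_type(name):
        if name.endswith('MemberDNGroupType'):
            # MemberDNGroupType and NestedMemberDNGroupType need member_attr
            return getattr(django_auth_ldap.config, name)(member_attr='member')
        return getattr(django_auth_ldap.config, name)()

    build_group_type('MemberDNGroupType')    # -> MemberDNGroupType(member_attr='member')
    build_group_type('PosixGroupType')       # -> PosixGroupType()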
@ -52,7 +52,7 @@
|
||||
<div class="col-sm-6">
|
||||
</div>
|
||||
<div class="col-sm-6 footer-copyright">
|
||||
Copyright © 2016 <a href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc. All Rights Reserved.
|
||||
Copyright © 2017 <a href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc. All Rights Reserved.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@ -729,18 +729,6 @@ legend {
|
||||
.navigation {
|
||||
margin: 15px 0 15px 0;
|
||||
}
|
||||
.modal-body {
|
||||
.alert {
|
||||
padding: 0;
|
||||
border: none;
|
||||
margin: 0;
|
||||
}
|
||||
.alert-danger {
|
||||
background-color: @default-bg;
|
||||
border: none;
|
||||
color: @default-interface-txt;
|
||||
}
|
||||
}
|
||||
|
||||
.footer-navigation {
|
||||
margin: 10px 0 10px 0;
|
||||
@ -1638,17 +1626,19 @@ tr td button i {
|
||||
}
|
||||
|
||||
/* overrides to TB modal */
|
||||
.modal-content {
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
.modal-header {
|
||||
color: @default-interface-txt;
|
||||
margin: .1em 0;
|
||||
white-space: nowrap;
|
||||
width: 90%;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
width: 100%;
|
||||
border: none;
|
||||
padding: 12px 14px 0 12px;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.modal {
|
||||
@ -1677,8 +1667,18 @@ tr td button i {
|
||||
}
|
||||
|
||||
.modal-body {
|
||||
padding: 20px 14px 7px 14px;
|
||||
min-height: 120px;
|
||||
padding: 20px 0;
|
||||
|
||||
.alert {
|
||||
padding: 10px;
|
||||
margin: 0;
|
||||
}
|
||||
.alert-danger {
|
||||
background-color: @default-bg;
|
||||
border: none;
|
||||
color: @default-interface-txt;
|
||||
}
|
||||
}
|
||||
|
||||
#prompt-modal .modal-body {
|
||||
@ -1690,15 +1690,15 @@ tr td button i {
|
||||
}
|
||||
|
||||
.modal-footer {
|
||||
padding: .3em 1em .5em .4em;
|
||||
padding: 0;
|
||||
border: none;
|
||||
margin-top: 0;
|
||||
|
||||
.btn.btn-primary {
|
||||
text-transform: uppercase;
|
||||
background-color: @default-succ;
|
||||
border-color: @default-succ;
|
||||
padding: 5px 15px;
|
||||
margin: .5em .4em .5em 0;
|
||||
cursor: pointer;
|
||||
|
||||
&:hover {
|
||||
@ -1720,8 +1720,7 @@ tr td button i {
|
||||
|
||||
|
||||
/* PW progress bar */
|
||||
.pw-progress {
|
||||
margin-top: 10px;
|
||||
.pw-progress { margin-top: 10px;
|
||||
|
||||
li {
|
||||
line-height: normal;
|
||||
@ -2219,10 +2218,6 @@ a:hover {
|
||||
font-family: 'Open Sans';
|
||||
}
|
||||
|
||||
.modal-body .alert {
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
.WorkflowBadge{
|
||||
background-color: @b7grey;
|
||||
border-radius: 10px;
|
||||
|
||||
@ -44,11 +44,10 @@
|
||||
color: @list-header-txt;
|
||||
font-size: 14px;
|
||||
font-weight: bold;
|
||||
padding-bottom: 25px;
|
||||
min-height: 45px;
|
||||
word-break: break-all;
|
||||
max-width: 90%;
|
||||
word-wrap: break-word;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.Form-secondaryTitle{
|
||||
|
||||
@ -11,19 +11,19 @@
|
||||
<!-- Don't indent this properly, you'll break the cow -->
|
||||
<pre class="About-cowsay--code">
|
||||
________________
|
||||
/ Tower {{version_str}} \\
|
||||
\\<span>{{version}}</span>/
|
||||
/ Tower {{version_str}} \
|
||||
\<span>{{version}}</span>/
|
||||
----------------
|
||||
\\ ^__^
|
||||
\\ (oo)\\_______
|
||||
(__) A )\\/\\
|
||||
\ ^__^
|
||||
\ (oo)\_______
|
||||
(__) A )\/\
|
||||
||----w |
|
||||
|| ||
|
||||
</pre>
|
||||
</div>
|
||||
<div class="About-modal--footer">
|
||||
<img class="About-brand--redhat img-responsive" src="/static/assets/tower-logo-login.svg" />
|
||||
<p class="text-right">Copyright © 2016 Red Hat, Inc. <br>
|
||||
<p class="text-right">Copyright © 2017 Red Hat, Inc. <br>
|
||||
Visit <a href="http://www.ansible.com/" target="_blank">Ansible.com</a> for more information.<br>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@ -15,7 +15,8 @@ export default ['templateUrl', '$state',
|
||||
usersDataset: '=',
|
||||
teamsDataset: '=',
|
||||
resourceData: '=',
|
||||
withoutTeamPermissions: '@'
|
||||
withoutTeamPermissions: '@',
|
||||
title: '@'
|
||||
},
|
||||
controller: controller,
|
||||
templateUrl: templateUrl('access/add-rbac-resource/rbac-resource'),
|
||||
|
||||
@ -6,9 +6,9 @@
|
||||
<div class="List-header">
|
||||
<div class="List-title">
|
||||
<div class="List-titleText ng-binding">
|
||||
{{ object.name }}
|
||||
{{ object.name || object.username }}
|
||||
<div class="List-titleLockup"></div>
|
||||
Add Permissions
|
||||
{{ title }}
|
||||
</div>
|
||||
</div>
|
||||
<div class="Form-exitHolder">
|
||||
|
||||
@ -42,8 +42,6 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
|
||||
|
||||
list.listTitleBadge = false;
|
||||
|
||||
// @issue - fix field.columnClass values for this view
|
||||
|
||||
switch(scope.resourceType){
|
||||
|
||||
case 'projects':
|
||||
@ -51,6 +49,8 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
|
||||
name: list.fields.name,
|
||||
scm_type: list.fields.scm_type
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
|
||||
list.fields.scm_type.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
|
||||
case 'inventories':
|
||||
@ -58,6 +58,8 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
|
||||
name: list.fields.name,
|
||||
organization: list.fields.organization
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
|
||||
list.fields.organization.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
|
||||
case 'job_templates':
|
||||
@ -67,6 +69,8 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
|
||||
name: list.fields.name,
|
||||
description: list.fields.description
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
|
||||
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
|
||||
case 'workflow_templates':
|
||||
@ -77,12 +81,16 @@ export default ['$compile','templateUrl', 'i18n', 'generateList',
|
||||
name: list.fields.name,
|
||||
description: list.fields.description
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
|
||||
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
case 'credentials':
|
||||
list.fields = {
|
||||
name: list.fields.name,
|
||||
description: list.fields.description
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-5 col-sm-5 col-xs-10';
|
||||
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
}
|
||||
|
||||
list.fields = _.each(list.fields, (field) => field.nosort = true);
|
||||
|
||||
@ -11,7 +11,8 @@ export default ['templateUrl',
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
resolve: "="
|
||||
resolve: "=",
|
||||
title: "@",
|
||||
},
|
||||
controller: controller,
|
||||
templateUrl: templateUrl('access/add-rbac-user-team/rbac-user-team'),
|
||||
|
||||
@ -7,9 +7,9 @@
|
||||
<div class="List-header">
|
||||
<div class="List-title">
|
||||
<div class="List-titleText ng-binding">
|
||||
{{ owner.name }}
|
||||
{{ owner.name || owner.username }}
|
||||
<div class="List-titleLockup"></div>
|
||||
Add Permissions
|
||||
{{ title }}
|
||||
</div>
|
||||
</div>
|
||||
<div class="Form-exitHolder">
|
||||
|
||||
@ -51,6 +51,10 @@
|
||||
padding-top: 20px;
|
||||
}
|
||||
|
||||
.AddPermissions-list {
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.AddPermissions-list .List-searchRow {
|
||||
height: 0px;
|
||||
}
|
||||
|
||||
@ -34,7 +34,7 @@
|
||||
username: {
|
||||
key: true,
|
||||
label: 'Username',
|
||||
columnClass: 'col-md-3 col-sm-3 col-xs-9'
|
||||
columnClass: 'col-md-5 col-sm-5 col-xs-11'
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@ -43,6 +43,8 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
|
||||
name: list.fields.name,
|
||||
scm_type: list.fields.scm_type
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
|
||||
list.fields.scm_type.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
|
||||
case 'Inventories':
|
||||
@ -50,6 +52,8 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
|
||||
name: list.fields.name,
|
||||
organization: list.fields.organization
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
|
||||
list.fields.organization.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
|
||||
case 'JobTemplates':
|
||||
@ -59,6 +63,8 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
|
||||
name: list.fields.name,
|
||||
description: list.fields.description
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
|
||||
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
|
||||
case 'WorkflowTemplates':
|
||||
@ -69,6 +75,8 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
|
||||
name: list.fields.name,
|
||||
description: list.fields.description
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
|
||||
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
case 'Users':
|
||||
list.fields = {
|
||||
@ -76,12 +84,25 @@ export default ['addPermissionsTeamsList', 'addPermissionsUsersList', 'TemplateL
|
||||
first_name: list.fields.first_name,
|
||||
last_name: list.fields.last_name
|
||||
};
|
||||
list.fields.username.columnClass = 'col-md-5 col-sm-5 col-xs-11';
|
||||
list.fields.first_name.columnClass = 'col-md-3 col-sm-3 hidden-xs';
|
||||
list.fields.last_name.columnClass = 'col-md-3 col-sm-3 hidden-xs';
|
||||
break;
|
||||
case 'Teams':
|
||||
list.fields = {
|
||||
name: list.fields.name,
|
||||
organization: list.fields.organization,
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
|
||||
list.fields.organization.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
break;
|
||||
default:
|
||||
list.fields = {
|
||||
name: list.fields.name,
|
||||
description: list.fields.description
|
||||
};
|
||||
list.fields.name.columnClass = 'col-md-6 col-sm-6 col-xs-11';
|
||||
list.fields.description.columnClass = 'col-md-5 col-sm-5 hidden-xs';
|
||||
}
|
||||
|
||||
list_html = generateList.build({
|
||||
|
||||
@ -9,7 +9,7 @@
|
||||
<div class="RoleList-tag"
|
||||
ng-class="{'RoleList-tag--deletable': entry.explicit && entry.user_capabilities.unattach,
|
||||
'RoleList-tag--team': entry.team_id}"
|
||||
aw-tool-tip='{{entry.team_name | sanitize}}' aw-tip-placement='bottom'>
|
||||
aw-tool-tip='<div>Organization: {{ entry.team_organization_name | sanitize }}</div><div>Team: {{entry.team_name | sanitize}}</div>' aw-tip-placement='bottom'>
|
||||
<span class="RoleList-name">{{ entry.name }}</span>
|
||||
<i ng-show='entry.team_id' class="fa fa-users"></i>
|
||||
</div>
|
||||
|
||||
@ -12,6 +12,7 @@
|
||||
function activityStreamController($scope, $state, subTitle, Stream, GetTargetTitle, list, Dataset) {
|
||||
|
||||
init();
|
||||
initOmitSmartTags();
|
||||
|
||||
function init() {
|
||||
// search init
|
||||
@ -33,6 +34,20 @@ function activityStreamController($scope, $state, subTitle, Stream, GetTargetTit
|
||||
});
|
||||
}
|
||||
|
||||
// Specification of smart-tags omission from the UI is done in the route/state init.
// A limitation is that this specification is static and the key to be omitted from
// the smart-tags must be known at that time.
// In the case of the activity stream, we want to dynamically omit the resource for which
// we are displaying the activity stream, i.e. 'project', 'credential', etc.
|
||||
function initOmitSmartTags() {
|
||||
let defaults, route = _.find($state.$current.path, (step) => {
|
||||
return step.params.hasOwnProperty('activity_search');
|
||||
});
|
||||
if (route && $state.params.target !== undefined) {
|
||||
defaults = route.params.activity_search.config.value;
|
||||
defaults[$state.params.target] = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default ['$scope', '$state', 'subTitle', 'Stream', 'GetTargetTitle', 'StreamList', 'Dataset', activityStreamController];
|
||||
|
||||
@ -35,5 +35,6 @@
|
||||
margin-bottom: 0;
|
||||
max-height: 200px;
|
||||
overflow: scroll;
|
||||
overflow-x: auto;
|
||||
color: @as-detail-changes-txt;
|
||||
}
|
||||
|
||||
@ -22,7 +22,7 @@
|
||||
</div>
|
||||
</div>
|
||||
<div class="Modal-footer">
|
||||
<a href="#" data-target="#stream-detail-modal" data-dismiss="modal" id="action_cancel_btn" class="btn btn-primary StreamDetail-actionButton">OK</a>
|
||||
<a href="#" data-target="#stream-detail-modal" data-dismiss="modal" id="action_cancel_btn" class="btn btn-default StreamDetail-actionButton">OK</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@ -47,6 +47,9 @@ export default
|
||||
order_by: '-timestamp',
|
||||
page_size: '20',
|
||||
};
|
||||
if (streamConfig.activityStreamTarget && streamConfig.activityStreamId) {
|
||||
stateGoParams.activity_search[streamConfig.activityStreamTarget] = $state.params[streamConfig.activityStreamId];
|
||||
}
|
||||
}
|
||||
else {
|
||||
stateGoParams.activity_search = {
|
||||
|
||||
@ -21,7 +21,7 @@ export default
|
||||
});
|
||||
});
|
||||
// Remove the clone from the dom
|
||||
$breadcrumbClone.remove();console.log(availableWidth);
|
||||
$breadcrumbClone.remove();
|
||||
if(expandedBreadcrumbWidth > availableWidth) {
|
||||
let widthToTrim = expandedBreadcrumbWidth - availableWidth;
|
||||
// Sort the crumbs from biggest to smallest
|
||||
|
||||
@ -16,7 +16,8 @@
|
||||
reset: 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY'
|
||||
},
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: {
|
||||
type: 'text',
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
reset: 'SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET'
|
||||
},
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: {
|
||||
@ -38,8 +39,8 @@
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -16,7 +16,8 @@ export default ['i18n', function(i18n) {
|
||||
reset: 'SOCIAL_AUTH_GITHUB_ORG_KEY'
|
||||
},
|
||||
SOCIAL_AUTH_GITHUB_ORG_SECRET: {
|
||||
type: 'text',
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
reset: 'SOCIAL_AUTH_GITHUB_ORG_SECRET'
|
||||
},
|
||||
SOCIAL_AUTH_GITHUB_ORG_NAME: {
|
||||
@ -28,8 +29,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -16,7 +16,8 @@ export default ['i18n', function(i18n) {
|
||||
reset: 'SOCIAL_AUTH_GITHUB_TEAM_KEY'
|
||||
},
|
||||
SOCIAL_AUTH_GITHUB_TEAM_SECRET: {
|
||||
type: 'text',
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
reset: 'SOCIAL_AUTH_GITHUB_TEAM_SECRET'
|
||||
},
|
||||
SOCIAL_AUTH_GITHUB_TEAM_ID: {
|
||||
@ -28,8 +29,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -16,7 +16,8 @@ export default ['i18n', function(i18n) {
|
||||
reset: 'SOCIAL_AUTH_GITHUB_KEY'
|
||||
},
|
||||
SOCIAL_AUTH_GITHUB_SECRET: {
|
||||
type: 'text',
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
reset: 'SOCIAL_AUTH_GITHUB_SECRET'
|
||||
}
|
||||
},
|
||||
@ -24,8 +25,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -16,7 +16,8 @@ export default ['i18n', function(i18n) {
|
||||
reset: 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY'
|
||||
},
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: {
|
||||
type: 'text',
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
reset: 'SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET'
|
||||
},
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: {
|
||||
@ -36,8 +37,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -21,7 +21,8 @@ export default ['i18n', function(i18n) {
|
||||
reset: 'AUTH_LDAP_BIND_DN'
|
||||
},
|
||||
AUTH_LDAP_BIND_PASSWORD: {
|
||||
type: 'password'
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
},
|
||||
AUTH_LDAP_USER_SEARCH: {
|
||||
type: 'textarea',
|
||||
@ -84,8 +85,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -21,7 +21,8 @@ export default ['i18n', function(i18n) {
|
||||
reset: 'RADIUS_PORT'
|
||||
},
|
||||
RADIUS_SECRET: {
|
||||
type: 'text',
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
reset: 'RADIUS_SECRET'
|
||||
}
|
||||
},
|
||||
@ -29,8 +30,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -20,7 +20,8 @@ export default ['i18n', function(i18n) {
|
||||
reset: 'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT'
|
||||
},
|
||||
SOCIAL_AUTH_SAML_SP_PRIVATE_KEY: {
|
||||
type: 'text',
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
reset: 'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY'
|
||||
},
|
||||
SOCIAL_AUTH_SAML_ORG_INFO: {
|
||||
@ -56,8 +57,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -12,6 +12,19 @@
|
||||
float: right
|
||||
}
|
||||
|
||||
.Form-resetAll {
|
||||
border: none;
|
||||
padding: 0;
|
||||
background-color: @white;
|
||||
margin-right: auto;
|
||||
color: @default-link;
|
||||
font-size: 12px;
|
||||
|
||||
&:hover {
|
||||
color: @default-link-hov;
|
||||
}
|
||||
}
|
||||
|
||||
.Form-tab {
|
||||
min-width: 77px;
|
||||
}
|
||||
|
||||
@ -64,8 +64,8 @@
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -22,8 +22,8 @@
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -30,7 +30,8 @@
|
||||
reset: 'LOG_AGGREGATOR_USERNAME'
|
||||
},
|
||||
LOG_AGGREGATOR_PASSWORD: {
|
||||
type: 'text',
|
||||
type: 'sensitive',
|
||||
hasShowInputButton: true,
|
||||
reset: 'LOG_AGGREGATOR_PASSWORD'
|
||||
},
|
||||
LOG_AGGREGATOR_LOGGERS: {
|
||||
@ -48,8 +49,8 @@
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -26,8 +26,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -32,8 +32,8 @@ export default ['i18n', function(i18n) {
|
||||
buttons: {
|
||||
reset: {
|
||||
ngClick: 'vm.resetAllConfirm()',
|
||||
label: i18n._('Reset All'),
|
||||
class: 'Form-button--left Form-cancelButton'
|
||||
label: i18n._('Revert all to default'),
|
||||
class: 'Form-resetAll'
|
||||
},
|
||||
cancel: {
|
||||
ngClick: 'vm.formCancel()',
|
||||
|
||||
@ -113,7 +113,7 @@ CredentialsList.$inject = ['$scope', '$rootScope', '$location', '$log',
|
||||
|
||||
export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
|
||||
$stateParams, CredentialForm, GenerateForm, Rest, Alert, ProcessErrors,
|
||||
ClearScope, GetBasePath, GetChoices, Empty, KindChange,
|
||||
ClearScope, GetBasePath, GetChoices, Empty, KindChange, BecomeMethodChange,
|
||||
OwnerChange, FormSave, $state, CreateSelect2) {
|
||||
ClearScope();
|
||||
|
||||
@ -221,6 +221,10 @@ export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
|
||||
KindChange({ scope: $scope, form: form, reset: true });
|
||||
};
|
||||
|
||||
$scope.becomeMethodChange = function() {
|
||||
BecomeMethodChange({ scope: $scope });
|
||||
};
|
||||
|
||||
// Save
|
||||
$scope.formSave = function() {
|
||||
if ($scope[form.name + '_form'].$valid) {
|
||||
@ -276,14 +280,14 @@ export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
|
||||
|
||||
CredentialsAdd.$inject = ['$scope', '$rootScope', '$compile', '$location',
|
||||
'$log', '$stateParams', 'CredentialForm', 'GenerateForm', 'Rest', 'Alert',
|
||||
'ProcessErrors', 'ClearScope', 'GetBasePath', 'GetChoices', 'Empty', 'KindChange',
|
||||
'ProcessErrors', 'ClearScope', 'GetBasePath', 'GetChoices', 'Empty', 'KindChange', 'BecomeMethodChange',
|
||||
'OwnerChange', 'FormSave', '$state', 'CreateSelect2'
|
||||
];
|
||||
|
||||
export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
|
||||
$stateParams, CredentialForm, Rest, Alert, ProcessErrors, ClearScope, Prompt,
|
||||
GetBasePath, GetChoices, KindChange, Empty, OwnerChange, FormSave, Wait,
|
||||
$state, CreateSelect2, Authorization) {
|
||||
GetBasePath, GetChoices, KindChange, BecomeMethodChange, Empty, OwnerChange, FormSave, Wait,
|
||||
$state, CreateSelect2, Authorization, i18n) {
|
||||
|
||||
ClearScope();
|
||||
|
||||
@ -336,19 +340,15 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
|
||||
});
|
||||
}
|
||||
|
||||
// if the credential is assigned to an organization, allow permission delegation
|
||||
// do NOT use $scope.organization in a view directive to determine if a credential is associated with an org
|
||||
// @todo why not? ^ and what is this type check for a number doing - should this be a type check for undefined?
|
||||
$scope.disablePermissionAssignment = typeof($scope.organization) === 'number' ? false : true;
|
||||
if ($scope.disablePermissionAssignment) {
|
||||
$scope.permissionsTooltip = 'Credentials are only shared within an organization. Assign credentials to an organization to delegate credential permissions. The organization cannot be edited after credentials are assigned.';
|
||||
}
|
||||
setAskCheckboxes();
|
||||
KindChange({
|
||||
scope: $scope,
|
||||
form: form,
|
||||
reset: false
|
||||
$scope.$watch('organization', function(val) {
|
||||
if (val === undefined) {
|
||||
$scope.permissionsTooltip = i18n._('Credentials are only shared within an organization. Assign credentials to an organization to delegate credential permissions. The organization cannot be edited after credentials are assigned.');
|
||||
} else {
|
||||
$scope.permissionsTooltip = '';
|
||||
}
|
||||
});
|
||||
|
||||
setAskCheckboxes();
|
||||
OwnerChange({ scope: $scope });
|
||||
$scope.$watch("ssh_key_data", function(val) {
|
||||
if (val === "" || val === null || val === undefined) {
|
||||
@ -453,6 +453,13 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
KindChange({
|
||||
scope: $scope,
|
||||
form: form,
|
||||
reset: false
|
||||
});
|
||||
|
||||
master.kind = $scope.kind;
|
||||
|
||||
CreateSelect2({
|
||||
@ -518,6 +525,10 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
|
||||
KindChange({ scope: $scope, form: form, reset: true });
|
||||
};
|
||||
|
||||
$scope.becomeMethodChange = function() {
|
||||
BecomeMethodChange({ scope: $scope });
|
||||
};
|
||||
|
||||
$scope.formCancel = function() {
|
||||
$state.transitionTo('credentials');
|
||||
};
|
||||
@ -612,6 +623,6 @@ export function CredentialsEdit($scope, $rootScope, $compile, $location, $log,
|
||||
CredentialsEdit.$inject = ['$scope', '$rootScope', '$compile', '$location',
|
||||
'$log', '$stateParams', 'CredentialForm', 'Rest', 'Alert',
|
||||
'ProcessErrors', 'ClearScope', 'Prompt', 'GetBasePath', 'GetChoices',
|
||||
'KindChange', 'Empty', 'OwnerChange',
|
||||
'FormSave', 'Wait', '$state', 'CreateSelect2', 'Authorization'
|
||||
'KindChange', 'BecomeMethodChange', 'Empty', 'OwnerChange',
|
||||
'FormSave', 'Wait', '$state', 'CreateSelect2', 'Authorization', 'i18n',
|
||||
];
|
||||
|
||||
36
awx/ui/client/src/credentials/ownerList.block.less
Normal file
@ -0,0 +1,36 @@
|
||||
/** @define OwnerList */
|
||||
@import "./client/src/shared/branding/colors.default.less";
|
||||
|
||||
.OwnerList {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
align-items: flex-start;
|
||||
}
|
||||
|
||||
.OwnerList-seeBase {
|
||||
display: flex;
|
||||
max-width: 100%;
|
||||
|
||||
color: @default-link;
|
||||
text-transform: uppercase;
|
||||
padding: 2px 15px;
|
||||
cursor: pointer;
|
||||
border-radius: 5px;
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
.OwnerList-seeBase:hover {
|
||||
color: @default-link-hov;
|
||||
}
|
||||
|
||||
.OwnerList-seeLess {
|
||||
.OwnerList-seeBase;
|
||||
}
|
||||
|
||||
.OwnerList-seeMore {
|
||||
.OwnerList-seeBase;
|
||||
}
|
||||
|
||||
.OwnerList-Container {
|
||||
margin-right: 5px;
|
||||
}
|
||||
@ -1,5 +1,12 @@
|
||||
<div ng-repeat="owner in owners_list">
|
||||
<a ng-if="owner.type === 'organization'" ui-sref="organizations.edit({ organization_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
|
||||
<a ng-if="owner.type === 'user'" ui-sref="users.edit({ user_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
|
||||
<a ng-if="owner.type === 'team'" ui-sref="teams.edit({ team_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
|
||||
</div>
|
||||
<div class="OwnerList" ng-init="ownersLimit = 5; ownersLimitConst = 5; ">
|
||||
<div class="OwnerList-Container" ng-repeat="owner in owners_list | limitTo:ownersLimit">
|
||||
<a ng-if="owner.type === 'organization'" ui-sref="organizations.edit({ organization_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
|
||||
<a ng-if="owner.type === 'user'" ui-sref="users.edit({ user_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
|
||||
<a ng-if="owner.type === 'team'" ui-sref="teams.edit({ team_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
|
||||
</div>
|
||||
|
||||
<div class="OwnerList-seeMore" ng-show="owners_list.length > ownersLimitConst && ownersLimit == ownersLimitConst"
|
||||
ng-click="ownersLimit = owners_list.length">View More</div>
|
||||
<div class="OwnerList-seeLess" ng-show="owners_list.length > ownersLimitConst && ownersLimit != ownersLimitConst"
|
||||
ng-click="ownersLimit = ownersLimitConst">View Less</div>
|
||||
</div>
|
||||
@ -32,34 +32,21 @@
|
||||
}
|
||||
|
||||
.DashboardList-viewAll {
|
||||
color: @btn-txt;
|
||||
background-color: @btn-bg;
|
||||
font-size: 12px;
|
||||
border: 1px solid @default-icon-hov;
|
||||
border-radius: 5px;
|
||||
font-size: 11px;
|
||||
margin-right: 15px;
|
||||
margin-top: 10px;
|
||||
margin-top: 13px;
|
||||
margin-bottom: 10px;
|
||||
padding-left: 10px;
|
||||
padding-right: 10px;
|
||||
padding-bottom: 5px;
|
||||
padding-top: 5px;
|
||||
transition: background-color 0.2s;
|
||||
}
|
||||
|
||||
.DashboardList-viewAll:hover {
|
||||
color: @btn-txt;
|
||||
background-color: @btn-bg-hov;
|
||||
}
|
||||
|
||||
.DashboardList-viewAll:focus {
|
||||
color: @btn-txt;
|
||||
}
|
||||
|
||||
.DashboardList-container {
|
||||
flex: 1;
|
||||
width: 100%;
|
||||
padding: 20px;
|
||||
padding-top: 0;
|
||||
}
|
||||
|
||||
.DashboardList-tableHeader--name {
|
||||
|
||||
@ -1,3 +1,3 @@
|
||||
<footer class='Footer'>
|
||||
<div class="Footer-copyright" ng-class="{'is-loggedOut' : !current_user || !current_user.username}">Copyright © 2016 <a class="Footer-link" href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc.</div>
|
||||
<div class="Footer-copyright" ng-class="{'is-loggedOut' : !current_user || !current_user.username}">Copyright © 2017 <a class="Footer-link" href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc.</div>
|
||||
</footer>
|
||||
|
||||
@ -288,7 +288,8 @@ export default
|
||||
dataPlacement: 'right',
|
||||
dataContainer: "body",
|
||||
subForm: 'credentialSubForm',
|
||||
ngDisabled: '!(credential_obj.summary_fields.user_capabilities.edit || canAdd)'
|
||||
ngDisabled: '!(credential_obj.summary_fields.user_capabilities.edit || canAdd)',
|
||||
ngChange: 'becomeMethodChange()',
|
||||
},
|
||||
"become_username": {
|
||||
labelBind: 'becomeUsernameLabel',
|
||||
@ -420,9 +421,12 @@ export default
|
||||
|
||||
related: {
|
||||
permissions: {
|
||||
disabled: 'disablePermissionAssignment',
|
||||
disabled: '(organization === undefined ? true : false)',
|
||||
// Do not transition the state if organization is undefined
|
||||
ngClick: `(organization === undefined ? true : false)||$state.go('credentials.edit.permissions')`,
|
||||
awToolTip: '{{permissionsTooltip}}',
|
||||
dataTipWatch: 'permissionsTooltip',
|
||||
awToolTipTabEnabledInEditMode: true,
|
||||
dataPlacement: 'top',
|
||||
basePath: 'api/v1/credentials/{{$stateParams.credential_id}}/access_list/',
|
||||
search: {
|
||||
|
||||
@ -43,7 +43,7 @@ export default
|
||||
label: 'Variables',
|
||||
type: 'textarea',
|
||||
class: 'Form-textAreaLabel Form-formGroup--fullWidth',
|
||||
rows: 12,
|
||||
rows: 6,
|
||||
'default': '---',
|
||||
dataTitle: 'Group Variables',
|
||||
dataPlacement: 'right',
|
||||
@ -69,6 +69,11 @@ export default
|
||||
ngModel: 'source'
|
||||
},
|
||||
credential: {
|
||||
// initializes a default value for this search param
|
||||
// search params with default values set will not generate user-interactable search tags
|
||||
search: {
|
||||
kind: null
|
||||
},
|
||||
label: 'Cloud Credential',
|
||||
type: 'lookup',
|
||||
list: 'CredentialList',
|
||||
|
||||
@ -78,7 +78,7 @@ angular.module('InventoryFormDefinition', ['ScanJobsListDefinition'])
|
||||
},
|
||||
close: {
|
||||
ngClick: 'formCancel()',
|
||||
ngHide: '(inventory_obj.summary_fields.user_capabilities.edit || canAdd)'
|
||||
ngShow: '!(inventory_obj.summary_fields.user_capabilities.edit || canAdd)'
|
||||
},
|
||||
save: {
|
||||
ngClick: 'formSave()',
|
||||
@ -103,7 +103,7 @@ angular.module('InventoryFormDefinition', ['ScanJobsListDefinition'])
|
||||
add: {
|
||||
label: i18n._('Add'),
|
||||
ngClick: "$state.go('.add')",
|
||||
awToolTip: 'Add a permission',
|
||||
awToolTip: i18n._('Add a permission'),
|
||||
actionClass: 'btn List-buttonSubmit',
|
||||
buttonContent: '+ ADD',
|
||||
ngShow: '(inventory_obj.summary_fields.user_capabilities.edit || canAdd)'
|
||||
|
||||
@ -68,7 +68,7 @@ export default
|
||||
searchType: 'select',
|
||||
actions: {
|
||||
add: {
|
||||
ngClick: "addPermission",
|
||||
ngClick: "$state.go('.add')",
|
||||
label: i18n._('Add'),
|
||||
awToolTip: i18n._('Add a permission'),
|
||||
actionClass: 'btn List-buttonSubmit',
|
||||
|
||||
@ -121,6 +121,7 @@ export default
|
||||
organizations: {
|
||||
awToolTip: i18n._('Please save before assigning to organizations'),
|
||||
basePath: 'api/v1/users/{{$stateParams.user_id}}/organizations',
|
||||
emptyListText: i18n._('Please add user to an Organization.'),
|
||||
search: {
|
||||
page_size: '10'
|
||||
},
|
||||
|
||||
@ -122,7 +122,7 @@ export default
|
||||
add: {
|
||||
ngClick: "$state.go('.add')",
|
||||
label: i18n._('Add'),
|
||||
awToolTip: 'Add a permission',
|
||||
awToolTip: i18n._('Add a permission'),
|
||||
actionClass: 'btn List-buttonSubmit',
|
||||
buttonContent: '+ '+ i18n._('ADD'),
|
||||
ngShow: '(workflow_job_template_obj.summary_fields.user_capabilities.edit || canAddWorkflowJobTemplate)'
|
||||
|
||||
@ -74,6 +74,7 @@ angular.module('CredentialsHelper', ['Utilities'])
|
||||
scope.projectPopOver = "<p>" + i18n._("The project value") + "</p>";
|
||||
scope.hostPopOver = "<p>" + i18n._("The host value") + "</p>";
|
||||
scope.ssh_key_data_api_error = '';
|
||||
|
||||
if (!Empty(scope.kind)) {
|
||||
// Apply kind specific settings
|
||||
switch (scope.kind.value) {
|
||||
@ -204,6 +205,111 @@ angular.module('CredentialsHelper', ['Utilities'])
|
||||
}
|
||||
])
|
||||
|
||||
.factory('BecomeMethodChange', ['Empty', 'i18n',
|
||||
function (Empty, i18n) {
|
||||
return function (params) {
|
||||
console.log('become method has changed');
|
||||
var scope = params.scope;
|
||||
|
||||
if (!Empty(scope.kind)) {
|
||||
// Apply kind specific settings
|
||||
switch (scope.kind.value) {
|
||||
case 'aws':
|
||||
scope.aws_required = true;
|
||||
break;
|
||||
case 'rax':
|
||||
scope.rackspace_required = true;
|
||||
scope.username_required = true;
|
||||
break;
|
||||
case 'ssh':
|
||||
scope.usernameLabel = i18n._('Username'); // formerly 'SSH Username'
|
||||
scope.becomeUsernameLabel = i18n._('Privilege Escalation Username');
|
||||
scope.becomePasswordLabel = i18n._('Privilege Escalation Password');
|
||||
break;
|
||||
case 'scm':
|
||||
scope.sshKeyDataLabel = i18n._('SCM Private Key');
|
||||
scope.passwordLabel = i18n._('Password');
|
||||
break;
|
||||
case 'gce':
|
||||
scope.usernameLabel = i18n._('Service Account Email Address');
|
||||
scope.sshKeyDataLabel = i18n._('RSA Private Key');
|
||||
scope.email_required = true;
|
||||
scope.key_required = true;
|
||||
scope.project_required = true;
|
||||
scope.key_description = i18n._('Paste the contents of the PEM file associated with the service account email.');
|
||||
scope.projectLabel = i18n._("Project");
|
||||
scope.project_required = false;
|
||||
scope.projectPopOver = "<p>" + i18n._("The Project ID is the " +
|
||||
"GCE assigned identification. It is constructed as " +
|
||||
"two words followed by a three digit number. Such " +
|
||||
"as: ") + "</p><p>adjective-noun-000</p>";
|
||||
break;
|
||||
case 'azure':
|
||||
scope.sshKeyDataLabel = i18n._('Management Certificate');
|
||||
scope.subscription_required = true;
|
||||
scope.key_required = true;
|
||||
scope.key_description = i18n._("Paste the contents of the PEM file that corresponds to the certificate you uploaded in the Microsoft Azure console.");
|
||||
break;
|
||||
case 'azure_rm':
|
||||
scope.usernameLabel = i18n._("Username");
|
||||
scope.subscription_required = true;
|
||||
scope.passwordLabel = i18n._('Password');
|
||||
scope.azure_rm_required = true;
|
||||
break;
|
||||
case 'vmware':
|
||||
scope.username_required = true;
|
||||
scope.host_required = true;
|
||||
scope.password_required = true;
|
||||
scope.hostLabel = "vCenter Host";
|
||||
scope.passwordLabel = i18n._('Password');
|
||||
scope.hostPopOver = i18n._("Enter the hostname or IP address which corresponds to your VMware vCenter.");
|
||||
break;
|
||||
case 'openstack':
|
||||
scope.hostLabel = i18n._("Host (Authentication URL)");
|
||||
scope.projectLabel = i18n._("Project (Tenant Name)");
|
||||
scope.domainLabel = i18n._("Domain Name");
|
||||
scope.password_required = true;
|
||||
scope.project_required = true;
|
||||
scope.host_required = true;
|
||||
scope.username_required = true;
|
||||
scope.projectPopOver = "<p>" + i18n._("This is the tenant name. " +
|
||||
" This value is usually the same " +
|
||||
" as the username.") + "</p>";
|
||||
scope.hostPopOver = "<p>" + i18n._("The host to authenticate with.") +
|
||||
"<br />" + i18n.sprintf(i18n._("For example, %s"), "https://openstack.business.com/v2.0/");
|
||||
break;
|
||||
case 'satellite6':
|
||||
scope.username_required = true;
|
||||
scope.password_required = true;
|
||||
scope.passwordLabel = i18n._('Password');
|
||||
scope.host_required = true;
|
||||
scope.hostLabel = i18n._("Satellite 6 URL");
|
||||
scope.hostPopOver = i18n.sprintf(i18n._("Enter the URL which corresponds to your %s" +
|
||||
"Red Hat Satellite 6 server. %s" +
|
||||
"For example, %s"), "<br />", "<br />", "https://satellite.example.org");
|
||||
break;
|
||||
case 'cloudforms':
|
||||
scope.username_required = true;
|
||||
scope.password_required = true;
|
||||
scope.passwordLabel = i18n._('Password');
|
||||
scope.host_required = true;
|
||||
scope.hostLabel = i18n._("CloudForms URL");
|
||||
scope.hostPopOver = i18n.sprintf(i18n._("Enter the URL for the virtual machine which %s" +
|
||||
"corresponds to your CloudForm instance. %s" +
|
||||
"For example, %s"), "<br />", "<br />", "https://cloudforms.example.org");
|
||||
break;
|
||||
case 'net':
|
||||
scope.username_required = true;
|
||||
scope.password_required = false;
|
||||
scope.passwordLabel = i18n._('Password');
|
||||
scope.sshKeyDataLabel = i18n._('SSH Key');
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
])

.factory('OwnerChange', [
    function () {
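The switch statement in the hunk above is the body of a credential kind-change handler: the factory returns a function that takes a params object, reads params.scope.kind.value, and flips the *_required flags and i18n labels the credential form binds to. A minimal usage sketch follows; the factory name (KindChange here), the module name, and the ng-change wiring are assumptions, since only the returned handler appears in this hunk.

// Sketch only -- "KindChange", the module name, and the controller wiring are assumed.
angular.module('credentials')
    .controller('CredentialFormSketchCtrl', ['$scope', 'KindChange',
        function ($scope, KindChange) {
            // Bound to ng-change on the credential type <select>; the handler
            // shown above then reads $scope.kind.value and adjusts labels/flags.
            $scope.kindChange = function () {
                KindChange({ scope: $scope });
            };
        }
    ]);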
@ -233,7 +233,7 @@ export default
        hdr: hdr,
        body: (action_label === 'cancel' || job.status === 'new') ? cancelBody : deleteBody,
        action: action,
        actionText: (action_label === 'cancel' || job.status === 'new') ? "YES" : "DELETE"
        actionText: (action_label === 'cancel' || job.status === 'new') ? "OK" : "DELETE"
    });
});
@ -24,10 +24,7 @@ export default
    var langUrl = langInfo.replace('-', '_');
    //gettextCatalog.debug = true;
    gettextCatalog.setCurrentLanguage(langInfo);
    // TODO: the line below is commented out temporarily until
    // the .po files are received from the i18n team, in order to avoid
    // 404 file not found console errors in dev
    // gettextCatalog.loadRemote('/static/languages/' + langUrl + '.json');
    gettextCatalog.loadRemote('/static/languages/' + langUrl + '.json');
};
}])
.factory('i18n', ['gettextCatalog',
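The hunk above re-enables remote catalog loading now that the ui-languages make target produces the .json catalogs. For context, a minimal angular-gettext run block around these lines might look like the sketch below; how langInfo is detected is an assumption, while the two gettextCatalog calls are the ones shown above.

// Minimal sketch, assuming angular-gettext and browser-based language detection.
angular.module('I18N', ['gettext'])
    .run(['gettextCatalog', function (gettextCatalog) {
        var langInfo = (navigator.language || 'en-us').toLowerCase();   // assumed detection
        var langUrl = langInfo.replace('-', '_');
        gettextCatalog.setCurrentLanguage(langInfo);
        // loadRemote() fetches the compiled catalog produced by "make ui-languages"
        // and returns a promise once the strings are available.
        gettextCatalog.loadRemote('/static/languages/' + langUrl + '.json');
    }]);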
@ -11,10 +11,16 @@
 */

function InventoriesAdd($scope, $rootScope, $compile, $location, $log,
    $stateParams, GenerateForm, InventoryForm, Rest, Alert, ProcessErrors,
    $stateParams, GenerateForm, InventoryForm, rbacUiControlService, Rest, Alert, ProcessErrors,
    ClearScope, GetBasePath, ParseTypeChange, Wait, ToJSON,
    $state) {

    $scope.canAdd = false;
    rbacUiControlService.canAdd(GetBasePath('inventory'))
        .then(function(canAdd) {
            $scope.canAdd = canAdd;
        });

    Rest.setUrl(GetBasePath('inventory'));
    Rest.options()
        .success(function(data) {
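The new rbacUiControlService.canAdd() call gates the Add button on what the API reports for the current user. Its implementation is not part of this hunk; a plausible sketch, assuming it inspects the OPTIONS response for an advertised POST action, would be:

// Assumed implementation sketch -- not taken from this commit.
angular.module('RbacUiControl', [])
    .service('rbacUiControlService', ['Rest', function (Rest) {
        this.canAdd = function (apiPath) {
            Rest.setUrl(apiPath);
            // Resolve true only if the endpoint advertises POST for this user.
            return Rest.options().then(function (res) {
                return !!(res.data && res.data.actions && res.data.actions.POST);
            });
        };
    }]);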
@ -91,7 +97,7 @@ function InventoriesAdd($scope, $rootScope, $compile, $location, $log,
}

export default ['$scope', '$rootScope', '$compile', '$location',
    '$log', '$stateParams', 'GenerateForm', 'InventoryForm', 'Rest', 'Alert',
    '$log', '$stateParams', 'GenerateForm', 'InventoryForm', 'rbacUiControlService', 'Rest', 'Alert',
    'ProcessErrors', 'ClearScope', 'GetBasePath', 'ParseTypeChange',
    'Wait', 'ToJSON', '$state', InventoriesAdd
];
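Note that the injection annotation array and the InventoriesAdd parameter list change in lockstep: the string array is what the $injector reads, so it must stay positionally aligned with the function's parameters, or minified builds inject the wrong services. A reduced sketch of the pattern, with hypothetical names:

// Reduced sketch of annotated DI (names hypothetical): keep the strings and
// the parameters in the same order, ending with the function itself.
export default ['$scope', 'rbacUiControlService', 'Rest',
    function ($scope, rbacUiControlService, Rest) {
        // controller body
    }
];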
@ -32,7 +32,7 @@ function InventoriesEdit($scope, $rootScope, $compile, $location,
    form.formFieldSize = null;
    $scope.inventory_id = inventory_id;

    $scope.$watch('invnentory_obj.summary_fields.user_capabilities.edit', function(val) {
    $scope.$watch('inventory_obj.summary_fields.user_capabilities.edit', function(val) {
        if (val === false) {
            $scope.canAdd = false;
        }
@ -66,7 +66,19 @@ angular.module('inventory', [
            ],
            ParentObject: ['groupData', function(groupData) {
                return groupData;
            }]
            }],
            UnifiedJobsOptions: ['Rest', 'GetBasePath', '$stateParams', '$q',
                function(Rest, GetBasePath, $stateParams, $q) {
                    Rest.setUrl(GetBasePath('unified_jobs'));
                    var val = $q.defer();
                    Rest.options()
                        .then(function(data) {
                            val.resolve(data.data);
                        }, function(data) {
                            val.reject(data);
                        });
                    return val.promise;
                }]
        },
        views: {
            // clear form template when views render in this substate
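The new UnifiedJobsOptions resolve wraps Rest.options() in an explicit $q deferred. Since Rest.options() already returns a promise, an equivalent sketch without the deferred (an observation about the pattern, not part of the commit) would be:

// Equivalent sketch: return the promise chain directly and let rejection propagate.
UnifiedJobsOptions: ['Rest', 'GetBasePath',
    function (Rest, GetBasePath) {
        Rest.setUrl(GetBasePath('unified_jobs'));
        return Rest.options().then(function (res) {
            return res.data;   // same value the deferred above resolves with
        });
    }]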
@ -11,7 +11,7 @@

    $scope.item = group;
    $scope.submitMode = $stateParams.groups === undefined ? 'move' : 'copy';
    $scope['toggle_'+ list.iterator] = function(id){
    $scope.toggle_row = function(id){
        // toggle off anything else currently selected
        _.forEach($scope.groups, (item) => {return item.id === id ? item.checked = 1 : item.checked = null;});
        // yoink the currently selected thing
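The renamed toggle_row handler implements single-select by walking every row: the clicked id gets checked = 1, every other row is reset to null. The arrow-function-with-ternary form above is terse; a behaviorally equivalent sketch reads:

// Equivalent sketch of the single-select toggle (same behavior, plainer form).
$scope.toggle_row = function (id) {
    _.forEach($scope.groups, function (item) {
        item.checked = (item.id === id) ? 1 : null;
    });
};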
@ -60,9 +60,6 @@
    };

    function init(){
        var url = GetBasePath('inventory') + $stateParams.inventory_id + '/groups/';
        url += $stateParams.group ? '?not__id__in=' + group.id + ',' + _.last($stateParams.group) : '?not__id=' + group.id;
        list.basePath = url;
        $scope.atRootLevel = $stateParams.group ? false : true;

        // search init
@ -8,10 +8,10 @@
['$scope', '$state', '$stateParams', 'generateList', 'HostManageService', 'GetBasePath', 'CopyMoveGroupList', 'host', 'Dataset',
    function($scope, $state, $stateParams, GenerateList, HostManageService, GetBasePath, CopyMoveGroupList, host, Dataset){
        var list = CopyMoveGroupList;

        $scope.item = host;
        $scope.submitMode = 'copy';
        $scope['toggle_'+ list.iterator] = function(id){
        $scope.toggle_row = function(id){
            // toggle off anything else currently selected
            _.forEach($scope.groups, (item) => {return item.id === id ? item.checked = 1 : item.checked = null;});
            // yoink the currently selected thing
@ -30,8 +30,8 @@ var copyMoveGroupRoute = {
    resolve: {
        Dataset: ['CopyMoveGroupList', 'QuerySet', '$stateParams', 'GetBasePath', 'group',
            function(list, qs, $stateParams, GetBasePath, group) {
                $stateParams.copy_search.not__id__in = ($stateParams.group.length > 0 ? group.id + ',' + _.last($stateParams.group) : group.id);
                let path = GetBasePath(list.name);
                $stateParams.copy_search.not__id__in = ($stateParams.group && $stateParams.group.length > 0 ? group.id + ',' + _.last($stateParams.group) : group.id.toString());
                let path = GetBasePath('inventory') + $stateParams.inventory_id + '/groups/';
                return qs.search(path, $stateParams.copy_search);
            }
        ],
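The Dataset resolve above now builds the groups path from the inventory id and passes copy_search straight to QuerySet.search(), which serializes it into the querystring. A rough sketch of the resulting call, with hypothetical ids and filter values:

// Hypothetical values for illustration only.
var path = GetBasePath('inventory') + 42 + '/groups/';   // the inventory being managed
var params = { not__id__in: '7,12' };                     // exclude the group itself and its current parent
qs.search(path, params).then(function (data) {
    // the resolved data is what gets injected into the controller as Dataset
});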
@ -66,7 +66,7 @@ var copyMoveHostRoute = {
    resolve: {
        Dataset: ['CopyMoveGroupList', 'QuerySet', '$stateParams', 'GetBasePath',
            function(list, qs, $stateParams, GetBasePath) {
                let path = GetBasePath(list.name);
                let path = GetBasePath('inventory') + $stateParams.inventory_id + '/hosts/';
                return qs.search(path, $stateParams.copy_search);
            }
        ],
@ -80,7 +80,9 @@ var copyMoveHostRoute = {
        controller: CopyMoveHostsController,
    },
    'copyMoveList@inventoryManage.copyMoveHost': {
        templateProvider: function(CopyMoveGroupList, generateList) {
        templateProvider: function(CopyMoveGroupList, generateList, $stateParams, GetBasePath) {
            let list = CopyMoveGroupList;
            list.basePath = GetBasePath('inventory') + $stateParams.inventory_id + '/hosts/';
            let html = generateList.build({
                list: CopyMoveGroupList,
                mode: 'lookup',
Some files were not shown because too many files have changed in this diff.