Mirror of https://github.com/ansible/awx.git (synced 2026-01-11 10:00:01 -03:30)

commit 3166390a84
Merge branch 'i18n-test' of https://github.com/sundeep-co-in/ansible-tower into STAGE
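The diff below applies one mechanical i18n pattern across the API code: import `ugettext_lazy as _` and wrap user-facing message strings in `_()` so they can be collected into translation catalogs. A minimal sketch of that pattern, assuming a hypothetical validator (the function and message are illustrative, not taken from the AWX sources):

```python
# Sketch of the marking pattern this commit applies throughout the API code.
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers


def validate_name(value):
    if not value:
        # Lazy translation: the string is resolved against the active locale's
        # catalog when it is rendered, not when this module is imported.
        raise serializers.ValidationError(_('Name must not be empty.'))
    return value
```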
@@ -9,6 +9,7 @@ import logging
from django.conf import settings
from django.utils.timezone import now as tz_now
from django.utils.encoding import smart_text
+ from django.utils.translation import ugettext_lazy as _

# Django REST Framework
from rest_framework import authentication

@@ -62,10 +63,10 @@ class TokenAuthentication(authentication.TokenAuthentication):
return None

if len(auth) == 1:
- msg = 'Invalid token header. No credentials provided.'
+ msg = _('Invalid token header. No credentials provided.')
raise exceptions.AuthenticationFailed(msg)
elif len(auth) > 2:
- msg = 'Invalid token header. Token string should not contain spaces.'
+ msg = _('Invalid token header. Token string should not contain spaces.')
raise exceptions.AuthenticationFailed(msg)

return self.authenticate_credentials(auth[1])

@@ -100,7 +101,7 @@ class TokenAuthentication(authentication.TokenAuthentication):

# If the user is inactive, then return an error.
if not token.user.is_active:
- raise exceptions.AuthenticationFailed('User inactive or deleted')
+ raise exceptions.AuthenticationFailed(_('User inactive or deleted'))

# Refresh the token.
# The token is extended from "right now" + configurable setting amount.

@@ -151,7 +152,7 @@ class TaskAuthentication(authentication.BaseAuthentication):
return None
token = unified_job.task_auth_token
if auth[1] != token:
- raise exceptions.AuthenticationFailed('Invalid task token')
+ raise exceptions.AuthenticationFailed(_('Invalid task token'))
return (None, token)

def authenticate_header(self, request):

@@ -15,6 +15,7 @@ from django.template.loader import render_to_string
from django.utils.encoding import smart_text
from django.utils.safestring import mark_safe
from django.contrib.contenttypes.models import ContentType
+ from django.utils.translation import ugettext_lazy as _

# Django REST Framework
from rest_framework.authentication import get_authorization_header

@@ -422,7 +423,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
sub_id = request.data.get('id', None)
res = None
if not sub_id:
- data = dict(msg='"id" is required to disassociate')
+ data = dict(msg=_('"id" is required to disassociate'))
res = Response(data, status=status.HTTP_400_BAD_REQUEST)
return (sub_id, res)

@@ -5,6 +5,7 @@ import json
# Django
from django.conf import settings
from django.utils import six
+ from django.utils.translation import ugettext_lazy as _

# Django REST Framework
from rest_framework import parsers

@@ -27,4 +28,4 @@ class JSONParser(parsers.JSONParser):
data = stream.read().decode(encoding)
return json.loads(data, object_pairs_hook=OrderedDict)
except ValueError as exc:
- raise ParseError('JSON parse error - %s' % six.text_type(exc))
+ raise ParseError(_('JSON parse error - %s') % six.text_type(exc))
@@ -242,11 +242,11 @@ class BaseSerializer(serializers.ModelSerializer):

def get_type_choices(self):
type_name_map = {
- 'job': 'Playbook Run',
- 'ad_hoc_command': 'Command',
- 'project_update': 'SCM Update',
- 'inventory_update': 'Inventory Sync',
- 'system_job': 'Management Job',
+ 'job': _('Playbook Run'),
+ 'ad_hoc_command': _('Command'),
+ 'project_update': _('SCM Update'),
+ 'inventory_update': _('Inventory Sync'),
+ 'system_job': _('Management Job'),
}
choices = []
for t in self.get_types():

@@ -623,8 +623,9 @@ class UnifiedJobSerializer(BaseSerializer):
def get_result_stdout(self, obj):
obj_size = obj.result_stdout_size
if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
- return "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size,
- settings.STDOUT_MAX_BYTES_DISPLAY)
+ return _("Standard Output too large to display (%(text_size)d bytes), "
+ "only download supported for sizes over %(supported_size)d bytes") \
+ % {'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
return obj.result_stdout

@@ -680,8 +681,9 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
def get_result_stdout(self, obj):
obj_size = obj.result_stdout_size
if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
- return "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size,
- settings.STDOUT_MAX_BYTES_DISPLAY)
+ return _("Standard Output too large to display (%(text_size)d bytes), "
+ "only download supported for sizes over %(supported_size)d bytes") \
+ % {'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
return obj.result_stdout

def get_types(self):

@@ -720,7 +722,7 @@ class UserSerializer(BaseSerializer):

def validate_password(self, value):
if not self.instance and value in (None, ''):
- raise serializers.ValidationError('Password required for new User.')
+ raise serializers.ValidationError(_('Password required for new User.'))
return value

def _update_password(self, obj, new_password):

@@ -804,7 +806,7 @@ class UserSerializer(BaseSerializer):
ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
if field_name in ldap_managed_fields:
if value != getattr(self.instance, field_name):
- raise serializers.ValidationError('Unable to change %s on user managed by LDAP.' % field_name)
+ raise serializers.ValidationError(_('Unable to change %s on user managed by LDAP.') % field_name)
return value

def validate_username(self, value):

@@ -955,7 +957,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
view = self.context.get('view', None)
if not organization and not view.request.user.is_superuser:
# Only allow super users to create orgless projects
- raise serializers.ValidationError('Organization is missing')
+ raise serializers.ValidationError(_('Organization is missing'))
return super(ProjectSerializer, self).validate(attrs)

@@ -1143,7 +1145,7 @@ class HostSerializer(BaseSerializerWithVariables):
if port < 1 or port > 65535:
raise ValueError
except ValueError:
- raise serializers.ValidationError(u'Invalid port specification: %s' % force_text(port))
+ raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_text(port))
return name, port

def validate_name(self, value):

@@ -1171,7 +1173,7 @@ class HostSerializer(BaseSerializerWithVariables):
vars_dict['ansible_ssh_port'] = port
attrs['variables'] = yaml.dump(vars_dict)
except (yaml.YAMLError, TypeError):
- raise serializers.ValidationError('Must be valid JSON or YAML.')
+ raise serializers.ValidationError(_('Must be valid JSON or YAML.'))

return super(HostSerializer, self).validate(attrs)

@@ -1228,7 +1230,7 @@ class GroupSerializer(BaseSerializerWithVariables):

def validate_name(self, value):
if value in ('all', '_meta'):
- raise serializers.ValidationError('Invalid group name.')
+ raise serializers.ValidationError(_('Invalid group name.'))
return value

def to_representation(self, obj):

@@ -1302,7 +1304,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):

def validate_script(self, value):
if not value.startswith("#!"):
- raise serializers.ValidationError('Script must begin with a hashbang sequence: i.e.... #!/usr/bin/env python')
+ raise serializers.ValidationError(_('Script must begin with a hashbang sequence: i.e.... #!/usr/bin/env python'))
return value

def to_representation(self, obj):

@@ -1355,13 +1357,13 @@ class InventorySourceOptionsSerializer(BaseSerializer):
source_script = attrs.get('source_script', self.instance and self.instance.source_script or '')
if source == 'custom':
if source_script is None or source_script == '':
- errors['source_script'] = "If 'source' is 'custom', 'source_script' must be provided."
+ errors['source_script'] = _("If 'source' is 'custom', 'source_script' must be provided.")
else:
try:
if source_script.organization != self.instance.inventory.organization:
- errors['source_script'] = "The 'source_script' does not belong to the same organization as the inventory."
+ errors['source_script'] = _("The 'source_script' does not belong to the same organization as the inventory.")
except Exception as exc:
- errors['source_script'] = "'source_script' doesn't exist."
+ errors['source_script'] = _("'source_script' doesn't exist.")
logger.error(str(exc))

if errors:
@@ -1747,7 +1749,7 @@ class CredentialSerializerCreate(CredentialSerializer):
else:
attrs.pop(field)
if not owner_fields:
- raise serializers.ValidationError({"detail": "Missing 'user', 'team', or 'organization'."})
+ raise serializers.ValidationError({"detail": _("Missing 'user', 'team', or 'organization'.")})
return super(CredentialSerializerCreate, self).validate(attrs)

def create(self, validated_data):

@@ -1760,7 +1762,7 @@ class CredentialSerializerCreate(CredentialSerializer):
credential.admin_role.members.add(user)
if team:
if not credential.organization or team.organization.id != credential.organization.id:
- raise serializers.ValidationError({"detail": "Credential organization must be set and match before assigning to a team"})
+ raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})
credential.admin_role.parents.add(team.admin_role)
credential.use_role.parents.add(team.member_role)
return credential

@@ -1846,11 +1848,11 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
playbook = attrs.get('playbook', self.instance and self.instance.playbook or '')
job_type = attrs.get('job_type', self.instance and self.instance.job_type or None)
if not project and job_type != PERM_INVENTORY_SCAN:
- raise serializers.ValidationError({'project': 'This field is required.'})
+ raise serializers.ValidationError({'project': _('This field is required.')})
if project and playbook and force_text(playbook) not in project.playbooks:
- raise serializers.ValidationError({'playbook': 'Playbook not found for project.'})
+ raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
if project and not playbook:
- raise serializers.ValidationError({'playbook': 'Must select playbook for project.'})
+ raise serializers.ValidationError({'playbook': _('Must select playbook for project.')})

return super(JobOptionsSerializer, self).validate(attrs)

@@ -1903,12 +1905,12 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):

if job_type == "scan":
if inventory is None or attrs.get('ask_inventory_on_launch', False):
- raise serializers.ValidationError({'inventory': 'Scan jobs must be assigned a fixed inventory.'})
+ raise serializers.ValidationError({'inventory': _('Scan jobs must be assigned a fixed inventory.')})
elif project is None:
- raise serializers.ValidationError({'project': "Job types 'run' and 'check' must have assigned a project."})
+ raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")})

if survey_enabled and job_type == PERM_INVENTORY_SCAN:
- raise serializers.ValidationError({'survey_enabled': 'Survey Enabled can not be used with scan jobs.'})
+ raise serializers.ValidationError({'survey_enabled': _('Survey Enabled can not be used with scan jobs.')})

return super(JobTemplateSerializer, self).validate(attrs)

@@ -1968,7 +1970,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
try:
job_template = JobTemplate.objects.get(pk=data['job_template'])
except JobTemplate.DoesNotExist:
- raise serializers.ValidationError({'job_template': 'Invalid job template.'})
+ raise serializers.ValidationError({'job_template': _('Invalid job template.')})
data.setdefault('name', job_template.name)
data.setdefault('description', job_template.description)
data.setdefault('job_type', job_template.job_type)

@@ -2053,11 +2055,11 @@ class JobRelaunchSerializer(JobSerializer):
def validate(self, attrs):
obj = self.context.get('obj')
if not obj.credential:
- raise serializers.ValidationError(dict(credential=["Credential not found or deleted."]))
+ raise serializers.ValidationError(dict(credential=[_("Credential not found or deleted.")]))
if obj.job_type != PERM_INVENTORY_SCAN and obj.project is None:
- raise serializers.ValidationError(dict(errors=["Job Template Project is missing or undefined."]))
+ raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")]))
if obj.inventory is None:
- raise serializers.ValidationError(dict(errors=["Job Template Inventory is missing or undefined."]))
+ raise serializers.ValidationError(dict(errors=[_("Job Template Inventory is missing or undefined.")]))
attrs = super(JobRelaunchSerializer, self).validate(attrs)
return attrs

@@ -2320,12 +2322,12 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):
job_types = [t for t, v in JOB_TYPE_CHOICES]
if attrs['char_prompts']['job_type'] not in job_types:
raise serializers.ValidationError({
- "job_type": "%s is not a valid job type. The choices are %s." % (
- attrs['char_prompts']['job_type'], job_types)})
+ "job_type": _("%(job_type)s is not a valid job type. The choices are %(choices)s.") % {
+ 'job_type': attrs['char_prompts']['job_type'], 'choices': job_types}})
ujt_obj = attrs.get('unified_job_template', None)
if isinstance(ujt_obj, (WorkflowJobTemplate, SystemJobTemplate)):
raise serializers.ValidationError({
- "unified_job_template": "Can not nest a %s inside a WorkflowJobTemplate" % ujt_obj.__class__.__name__})
+ "unified_job_template": _("Can not nest a %s inside a WorkflowJobTemplate") % ujt_obj.__class__.__name__})
return super(WorkflowJobTemplateNodeSerializer, self).validate(attrs)

class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer):

@@ -2539,7 +2541,7 @@ class JobLaunchSerializer(BaseSerializer):

for field in obj.resources_needed_to_start:
if not (attrs.get(field, False) and obj._ask_for_vars_dict().get(field, False)):
- errors[field] = "Job Template '%s' is missing or undefined." % field
+ errors[field] = _("Job Template '%s' is missing or undefined.") % field

if (not obj.ask_credential_on_launch) or (not attrs.get('credential', None)):
credential = obj.credential

@@ -2565,7 +2567,7 @@ class JobLaunchSerializer(BaseSerializer):
extra_vars = yaml.safe_load(extra_vars)
assert isinstance(extra_vars, dict)
except (yaml.YAMLError, TypeError, AttributeError, AssertionError):
- errors['extra_vars'] = 'Must be a valid JSON or YAML dictionary.'
+ errors['extra_vars'] = _('Must be a valid JSON or YAML dictionary.')

if not isinstance(extra_vars, dict):
extra_vars = {}

@@ -2649,7 +2651,7 @@ class NotificationTemplateSerializer(BaseSerializer):
else:
notification_type = None
if not notification_type:
- raise serializers.ValidationError('Missing required fields for Notification Configuration: notification_type')
+ raise serializers.ValidationError(_('Missing required fields for Notification Configuration: notification_type'))

notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[notification_type]
missing_fields = []

@@ -2672,16 +2674,16 @@ class NotificationTemplateSerializer(BaseSerializer):
incorrect_type_fields.append((field, field_type))
continue
if field_type == "list" and len(field_val) < 1:
- error_list.append("No values specified for field '{}'".format(field))
+ error_list.append(_("No values specified for field '{}'").format(field))
continue
if field_type == "password" and field_val == "$encrypted$" and object_actual is not None:
attrs['notification_configuration'][field] = object_actual.notification_configuration[field]
if missing_fields:
- error_list.append("Missing required fields for Notification Configuration: {}.".format(missing_fields))
+ error_list.append(_("Missing required fields for Notification Configuration: {}.").format(missing_fields))
if incorrect_type_fields:
for type_field_error in incorrect_type_fields:
- error_list.append("Configuration field '{}' incorrect type, expected {}.".format(type_field_error[0],
- type_field_error[1]))
+ error_list.append(_("Configuration field '{}' incorrect type, expected {}.").format(type_field_error[0],
+ type_field_error[1]))
if error_list:
raise serializers.ValidationError(error_list)
return attrs
@@ -2730,7 +2732,7 @@ class ScheduleSerializer(BaseSerializer):

def validate_unified_job_template(self, value):
if type(value) == InventorySource and value.source not in SCHEDULEABLE_PROVIDERS:
- raise serializers.ValidationError('Inventory Source must be a cloud resource.')
+ raise serializers.ValidationError(_('Inventory Source must be a cloud resource.'))
return value

# We reject rrules if:

@@ -2752,37 +2754,37 @@ class ScheduleSerializer(BaseSerializer):
match_multiple_dtstart = re.findall(".*?(DTSTART\:[0-9]+T[0-9]+Z)", rrule_value)
match_multiple_rrule = re.findall(".*?(RRULE\:)", rrule_value)
if not len(match_multiple_dtstart):
- raise serializers.ValidationError('DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ')
+ raise serializers.ValidationError(_('DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ'))
if len(match_multiple_dtstart) > 1:
- raise serializers.ValidationError('Multiple DTSTART is not supported.')
+ raise serializers.ValidationError(_('Multiple DTSTART is not supported.'))
if not len(match_multiple_rrule):
- raise serializers.ValidationError('RRULE require in rrule.')
+ raise serializers.ValidationError(_('RRULE require in rrule.'))
if len(match_multiple_rrule) > 1:
- raise serializers.ValidationError('Multiple RRULE is not supported.')
+ raise serializers.ValidationError(_('Multiple RRULE is not supported.'))
if 'interval' not in rrule_value.lower():
- raise serializers.ValidationError('INTERVAL required in rrule.')
+ raise serializers.ValidationError(_('INTERVAL required in rrule.'))
if 'tzid' in rrule_value.lower():
- raise serializers.ValidationError('TZID is not supported.')
+ raise serializers.ValidationError(_('TZID is not supported.'))
if 'secondly' in rrule_value.lower():
- raise serializers.ValidationError('SECONDLY is not supported.')
+ raise serializers.ValidationError(_('SECONDLY is not supported.'))
if re.match(multi_by_month_day, rrule_value):
- raise serializers.ValidationError('Multiple BYMONTHDAYs not supported.')
+ raise serializers.ValidationError(_('Multiple BYMONTHDAYs not supported.'))
if re.match(multi_by_month, rrule_value):
- raise serializers.ValidationError('Multiple BYMONTHs not supported.')
+ raise serializers.ValidationError(_('Multiple BYMONTHs not supported.'))
if re.match(by_day_with_numeric_prefix, rrule_value):
- raise serializers.ValidationError("BYDAY with numeric prefix not supported.")
+ raise serializers.ValidationError(_("BYDAY with numeric prefix not supported."))
if 'byyearday' in rrule_value.lower():
- raise serializers.ValidationError("BYYEARDAY not supported.")
+ raise serializers.ValidationError(_("BYYEARDAY not supported."))
if 'byweekno' in rrule_value.lower():
- raise serializers.ValidationError("BYWEEKNO not supported.")
+ raise serializers.ValidationError(_("BYWEEKNO not supported."))
if match_count:
count_val = match_count.groups()[0].strip().split("=")
if int(count_val[1]) > 999:
- raise serializers.ValidationError("COUNT > 999 is unsupported.")
+ raise serializers.ValidationError(_("COUNT > 999 is unsupported."))
try:
rrule.rrulestr(rrule_value)
except Exception:
- raise serializers.ValidationError("rrule parsing failed validation.")
+ raise serializers.ValidationError(_("rrule parsing failed validation."))
return value

class ActivityStreamSerializer(BaseSerializer):

@@ -2907,9 +2909,9 @@ class AuthTokenSerializer(serializers.Serializer):
attrs['user'] = user
return attrs
else:
- raise serializers.ValidationError('Unable to login with provided credentials.')
+ raise serializers.ValidationError(_('Unable to login with provided credentials.'))
else:
- raise serializers.ValidationError('Must include "username" and "password".')
+ raise serializers.ValidationError(_('Must include "username" and "password".'))


class FactVersionSerializer(BaseFactSerializer):
awx/api/views.py (190 lines changed)
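Several hunks in this commit also switch positional `%d`/`%s` placeholders to named `%(name)d` placeholders when a string gains a translation marker, so translators can reorder the substitutions. A minimal sketch of the difference, with illustrative variable values rather than anything from the AWX sources:

```python
from django.utils.translation import ugettext_lazy as _

obj_size, limit = 1048576, 65536

# Positional placeholders tie the translation to the original argument order.
msg_positional = "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size, limit)

# Named placeholders let a translated string reorder or repeat the values.
msg_named = _("Standard Output too large to display (%(text_size)d bytes), "
              "only download supported for sizes over %(supported_size)d bytes") % {
    'text_size': obj_size, 'supported_size': limit}
```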
@@ -233,29 +233,29 @@ class ApiV1ConfigView(APIView):
if not request.user.is_superuser:
return Response(None, status=status.HTTP_404_NOT_FOUND)
if not isinstance(request.data, dict):
- return Response({"error": "Invalid license data"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST)
if "eula_accepted" not in request.data:
- return Response({"error": "Missing 'eula_accepted' property"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
try:
eula_accepted = to_python_boolean(request.data["eula_accepted"])
except ValueError:
- return Response({"error": "'eula_accepted' value is invalid"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("'eula_accepted' value is invalid")}, status=status.HTTP_400_BAD_REQUEST)

if not eula_accepted:
- return Response({"error": "'eula_accepted' must be True"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("'eula_accepted' must be True")}, status=status.HTTP_400_BAD_REQUEST)
request.data.pop("eula_accepted")
try:
data_actual = json.dumps(request.data)
except Exception:
# FIX: Log
- return Response({"error": "Invalid JSON"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
try:
from awx.main.task_engine import TaskEnhancer
license_data = json.loads(data_actual)
license_data_validated = TaskEnhancer(**license_data).validate_enhancements()
except Exception:
# FIX: Log
- return Response({"error": "Invalid License"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)

# If the license is valid, write it to the database.
if license_data_validated['valid_key']:

@@ -263,7 +263,7 @@ class ApiV1ConfigView(APIView):
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
return Response(license_data_validated)

- return Response({"error": "Invalid license"}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)

def delete(self, request):
if not request.user.is_superuser:

@@ -274,7 +274,7 @@ class ApiV1ConfigView(APIView):
return Response(status=status.HTTP_204_NO_CONTENT)
except:
# FIX: Log
- return Response({"error": "Failed to remove license (%s)" % has_error}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("Failed to remove license (%s)") % has_error}, status=status.HTTP_400_BAD_REQUEST)


class DashboardView(APIView):

@@ -420,7 +420,7 @@ class DashboardJobsGraphView(APIView):
end_date = start_date - dateutil.relativedelta.relativedelta(days=1)
interval = 'hours'
else:
- return Response({'error': 'Unknown period "%s"' % str(period)}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)

dashboard_data = {"jobs": {"successful": [], "failed": []}}
for element in success_qss.time_series(end_date, start_date, interval=interval):

@@ -653,8 +653,8 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
# if no organizations exist in the system.
if (not feature_enabled('multiple_organizations') and
self.model.objects.exists()):
- raise LicenseForbids('Your Tower license only permits a single '
- 'organization to exist.')
+ raise LicenseForbids(_('Your Tower license only permits a single '
+ 'organization to exist.'))

# Okay, create the organization as usual.
return super(OrganizationList, self).create(request, *args, **kwargs)

@@ -764,8 +764,8 @@ class OrganizationActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(OrganizationActivityStreamList, self).get(request, *args, **kwargs)
@@ -858,20 +858,20 @@ class TeamRolesList(SubListCreateAttachDetachAPIView):
# Forbid implicit role creation here
sub_id = request.data.get('id', None)
if not sub_id:
- data = dict(msg="Role 'id' field is missing.")
+ data = dict(msg=_("Role 'id' field is missing."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

role = get_object_or_400(Role, pk=sub_id)
org_content_type = ContentType.objects.get_for_model(Organization)
if role.content_type == org_content_type:
- data = dict(msg="You cannot assign an Organization role as a child role for a Team.")
+ data = dict(msg=_("You cannot assign an Organization role as a child role for a Team."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

team = get_object_or_404(Team, pk=self.kwargs['pk'])
credential_content_type = ContentType.objects.get_for_model(Credential)
if role.content_type == credential_content_type:
if not role.content_object.organization or role.content_object.organization.id != team.organization.id:
- data = dict(msg="You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization")
+ data = dict(msg=_("You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

return super(TeamRolesList, self).post(request, *args, **kwargs)

@@ -917,8 +917,8 @@ class TeamActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(TeamActivityStreamList, self).get(request, *args, **kwargs)

@@ -964,7 +964,7 @@ class ProjectDetail(RetrieveUpdateDestroyAPIView):
obj = self.get_object()
can_delete = request.user.can_access(Project, 'delete', obj)
if not can_delete:
- raise PermissionDenied("Cannot delete project.")
+ raise PermissionDenied(_("Cannot delete project."))
for pu in obj.project_updates.filter(status__in=['new', 'pending', 'waiting', 'running']):
pu.cancel()
return super(ProjectDetail, self).destroy(request, *args, **kwargs)

@@ -1011,8 +1011,8 @@ class ProjectActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(ProjectActivityStreamList, self).get(request, *args, **kwargs)

@@ -1192,26 +1192,26 @@ class UserRolesList(SubListCreateAttachDetachAPIView):
# Forbid implicit role creation here
sub_id = request.data.get('id', None)
if not sub_id:
- data = dict(msg="Role 'id' field is missing.")
+ data = dict(msg=_("Role 'id' field is missing."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

if sub_id == self.request.user.admin_role.pk:
- raise PermissionDenied('You may not perform any action with your own admin_role.')
+ raise PermissionDenied(_('You may not perform any action with your own admin_role.'))

user = get_object_or_400(User, pk=self.kwargs['pk'])
role = get_object_or_400(Role, pk=sub_id)
user_content_type = ContentType.objects.get_for_model(User)
if role.content_type == user_content_type:
- raise PermissionDenied('You may not change the membership of a users admin_role')
+ raise PermissionDenied(_('You may not change the membership of a users admin_role'))

credential_content_type = ContentType.objects.get_for_model(Credential)
if role.content_type == credential_content_type:
if role.content_object.organization and user not in role.content_object.organization.member_role:
- data = dict(msg="You cannot grant credential access to a user not in the credentials' organization")
+ data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

if not role.content_object.organization and not request.user.is_superuser:
- data = dict(msg="You cannot grant private credential access to another user")
+ data = dict(msg=_("You cannot grant private credential access to another user"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

@@ -1274,8 +1274,8 @@ class UserActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(UserActivityStreamList, self).get(request, *args, **kwargs)

@@ -1315,13 +1315,13 @@ class UserDetail(RetrieveUpdateDestroyAPIView):
if left is not None and right is not None and left != right:
bad_changes[field] = (left, right)
if bad_changes:
- raise PermissionDenied('Cannot change %s.' % ', '.join(bad_changes.keys()))
+ raise PermissionDenied(_('Cannot change %s.') % ', '.join(bad_changes.keys()))

def destroy(self, request, *args, **kwargs):
obj = self.get_object()
can_delete = request.user.can_access(User, 'delete', obj)
if not can_delete:
- raise PermissionDenied('Cannot delete user.')
+ raise PermissionDenied(_('Cannot delete user.'))
return super(UserDetail, self).destroy(request, *args, **kwargs)

class UserAccessList(ResourceAccessList):

@@ -1433,8 +1433,8 @@ class CredentialActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(CredentialActivityStreamList, self).get(request, *args, **kwargs)

@@ -1471,7 +1471,7 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
instance = self.get_object()
can_delete = request.user.can_access(self.model, 'delete', instance)
if not can_delete:
- raise PermissionDenied("Cannot delete inventory script.")
+ raise PermissionDenied(_("Cannot delete inventory script."))
for inv_src in InventorySource.objects.filter(source_script=instance):
inv_src.source_script = None
inv_src.save()

@@ -1522,8 +1522,8 @@ class InventoryActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(InventoryActivityStreamList, self).get(request, *args, **kwargs)

@@ -1655,8 +1655,8 @@ class HostActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(HostActivityStreamList, self).get(request, *args, **kwargs)

@@ -1673,8 +1673,8 @@ class SystemTrackingEnforcementMixin(APIView):
'''
def check_permissions(self, request):
if not feature_enabled("system_tracking"):
- raise LicenseForbids("Your license does not permit use "
- "of system tracking.")
+ raise LicenseForbids(_("Your license does not permit use "
+ "of system tracking."))
return super(SystemTrackingEnforcementMixin, self).check_permissions(request)

class HostFactVersionsList(ListAPIView, ParentMixin, SystemTrackingEnforcementMixin):

@@ -1718,7 +1718,7 @@ class HostFactCompareView(SubDetailAPIView, SystemTrackingEnforcementMixin):

fact_entry = Fact.get_host_fact(host_obj.id, module_spec, datetime_actual)
if not fact_entry:
- return Response({'detail': 'Fact not found.'}, status=status.HTTP_404_NOT_FOUND)
+ return Response({'detail': _('Fact not found.')}, status=status.HTTP_404_NOT_FOUND)
return Response(self.serializer_class(instance=fact_entry).data)

class GroupList(ListCreateAPIView):

@@ -1840,8 +1840,8 @@ class GroupActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(GroupActivityStreamList, self).get(request, *args, **kwargs)
@@ -2056,7 +2056,7 @@ class InventorySourceDetail(RetrieveUpdateAPIView):
obj = self.get_object()
can_delete = request.user.can_access(InventorySource, 'delete', obj)
if not can_delete:
- raise PermissionDenied("Cannot delete inventory source.")
+ raise PermissionDenied(_("Cannot delete inventory source."))
for pu in obj.inventory_updates.filter(status__in=['new', 'pending', 'waiting', 'running']):
pu.cancel()
return super(InventorySourceDetail, self).destroy(request, *args, **kwargs)

@@ -2084,8 +2084,8 @@ class InventorySourceActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(InventorySourceActivityStreamList, self).get(request, *args, **kwargs)

@@ -2100,7 +2100,7 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIVi
def post(self, request, *args, **kwargs):
parent = self.get_parent_object()
if parent.source not in CLOUD_INVENTORY_SOURCES:
- return Response(dict(msg="Notification Templates can only be assigned when source is one of {}."
+ return Response(dict(msg=_("Notification Templates can only be assigned when source is one of {}.")
.format(CLOUD_INVENTORY_SOURCES, parent.source)),
status=status.HTTP_400_BAD_REQUEST)
return super(InventorySourceNotificationTemplatesAnyList, self).post(request, *args, **kwargs)

@@ -2302,8 +2302,8 @@ class JobTemplateSurveySpec(GenericAPIView):
def get(self, request, *args, **kwargs):
obj = self.get_object()
if not feature_enabled('surveys'):
- raise LicenseForbids('Your license does not allow '
- 'adding surveys.')
+ raise LicenseForbids(_('Your license does not allow '
+ 'adding surveys.'))
return Response(obj.survey_spec)

def post(self, request, *args, **kwargs):

@@ -2312,42 +2312,43 @@ class JobTemplateSurveySpec(GenericAPIView):
# Sanity check: Are surveys available on this license?
# If not, do not allow them to be used.
if not feature_enabled('surveys'):
- raise LicenseForbids('Your license does not allow '
- 'adding surveys.')
+ raise LicenseForbids(_('Your license does not allow '
+ 'adding surveys.'))

if not request.user.can_access(self.model, 'change', obj, None):
raise PermissionDenied()
try:
obj.survey_spec = json.dumps(request.data)
except ValueError:
- return Response(dict(error="Invalid JSON when parsing survey spec."), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("Invalid JSON when parsing survey spec.")), status=status.HTTP_400_BAD_REQUEST)
if "name" not in obj.survey_spec:
- return Response(dict(error="'name' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'name' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
if "description" not in obj.survey_spec:
- return Response(dict(error="'description' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'description' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
if "spec" not in obj.survey_spec:
- return Response(dict(error="'spec' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'spec' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
if not isinstance(obj.survey_spec["spec"], list):
- return Response(dict(error="'spec' must be a list of items."), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'spec' must be a list of items.")), status=status.HTTP_400_BAD_REQUEST)
if len(obj.survey_spec["spec"]) < 1:
- return Response(dict(error="'spec' doesn't contain any items."), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'spec' doesn't contain any items.")), status=status.HTTP_400_BAD_REQUEST)
idx = 0
variable_set = set()
for survey_item in obj.survey_spec["spec"]:
if not isinstance(survey_item, dict):
- return Response(dict(error="Survey question %s is not a json object." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("Survey question %s is not a json object.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if "type" not in survey_item:
- return Response(dict(error="'type' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'type' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if "question_name" not in survey_item:
- return Response(dict(error="'question_name' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'question_name' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if "variable" not in survey_item:
- return Response(dict(error="'variable' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'variable' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if survey_item['variable'] in variable_set:
- return Response(dict(error="'variable' '%s' duplicated in survey question %s." % (survey_item['variable'], str(idx))), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'variable' '%(item)s' duplicated in survey question %(survey)s.") %
+ {'item': survey_item['variable'], 'survey': str(idx)}), status=status.HTTP_400_BAD_REQUEST)
else:
variable_set.add(survey_item['variable'])
if "required" not in survey_item:
- return Response(dict(error="'required' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+ return Response(dict(error=_("'required' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
idx += 1
obj.save()
return Response()

@@ -2372,8 +2373,8 @@ class JobTemplateActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(JobTemplateActivityStreamList, self).get(request, *args, **kwargs)
@@ -2540,22 +2541,22 @@ class JobTemplateCallback(GenericAPIView):
matching_hosts = self.find_matching_hosts()
# Check matching hosts.
if not matching_hosts:
- data = dict(msg='No matching host could be found!')
+ data = dict(msg=_('No matching host could be found!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
elif len(matching_hosts) > 1:
- data = dict(msg='Multiple hosts matched the request!')
+ data = dict(msg=_('Multiple hosts matched the request!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
else:
host = list(matching_hosts)[0]
if not job_template.can_start_without_user_input():
- data = dict(msg='Cannot start automatically, user input required!')
+ data = dict(msg=_('Cannot start automatically, user input required!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
limit = host.name

# NOTE: We limit this to one job waiting per host per callblack to keep them from stacking crazily
if Job.objects.filter(status__in=['pending', 'waiting', 'running'], job_template=job_template,
limit=limit).count() > 0:
- data = dict(msg='Host callback job already pending.')
+ data = dict(msg=_('Host callback job already pending.'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

# Everything is fine; actually create the job.

@@ -2568,7 +2569,7 @@ class JobTemplateCallback(GenericAPIView):
kv['extra_vars'] = extra_vars
result = job.signal_start(**kv)
if not result:
- data = dict(msg='Error starting job!')
+ data = dict(msg=_('Error starting job!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

# Return the location of the new job.

@@ -2789,7 +2790,7 @@ class SystemJobTemplateList(ListAPIView):

def get(self, request, *args, **kwargs):
if not request.user.is_superuser and not request.user.is_system_auditor:
- raise PermissionDenied("Superuser privileges needed.")
+ raise PermissionDenied(_("Superuser privileges needed."))
return super(SystemJobTemplateList, self).get(request, *args, **kwargs)

class SystemJobTemplateDetail(RetrieveAPIView):

@@ -2893,8 +2894,8 @@ class JobActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(JobActivityStreamList, self).get(request, *args, **kwargs)

@@ -3160,15 +3161,15 @@ class JobJobTasksList(BaseJobEventsList):
# If there's no event ID specified, this will return a 404.
job = Job.objects.filter(pk=self.kwargs['pk'])
if not job.exists():
- return ({'detail': 'Job not found.'}, -1, status.HTTP_404_NOT_FOUND)
+ return ({'detail': _('Job not found.')}, -1, status.HTTP_404_NOT_FOUND)
job = job[0]

if 'event_id' not in request.query_params:
- return ({"detail": "'event_id' not provided."}, -1, status.HTTP_400_BAD_REQUEST)
+ return ({"detail": _("'event_id' not provided.")}, -1, status.HTTP_400_BAD_REQUEST)

parent_task = job.job_events.filter(pk=int(request.query_params.get('event_id', -1)))
if not parent_task.exists():
- return ({'detail': 'Parent event not found.'}, -1, status.HTTP_404_NOT_FOUND)
+ return ({'detail': _('Parent event not found.')}, -1, status.HTTP_404_NOT_FOUND)
parent_task = parent_task[0]

STARTING_EVENTS = ('playbook_on_task_start', 'playbook_on_setup')

@@ -3488,8 +3489,8 @@ class AdHocCommandActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(AdHocCommandActivityStreamList, self).get(request, *args, **kwargs)

@@ -3510,7 +3511,7 @@ class SystemJobList(ListCreateAPIView):

def get(self, request, *args, **kwargs):
if not request.user.is_superuser and not request.user.is_system_auditor:
- raise PermissionDenied("Superuser privileges needed.")
+ raise PermissionDenied(_("Superuser privileges needed."))
return super(SystemJobList, self).get(request, *args, **kwargs)
@@ -3566,8 +3567,9 @@ class UnifiedJobStdout(RetrieveAPIView):
unified_job = self.get_object()
obj_size = unified_job.result_stdout_size
if request.accepted_renderer.format != 'txt_download' and obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
- response_message = "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size,
- settings.STDOUT_MAX_BYTES_DISPLAY)
+ response_message = "Standard Output too large to display (%(text_size)d bytes), " \
+ "only download supported for sizes over %(supported_size)d bytes" % \
+ {'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
if request.accepted_renderer.format == 'json':
return Response({'range': {'start': 0, 'end': 1, 'absolute_end': 1}, 'content': response_message})
else:

@@ -3610,7 +3612,7 @@ class UnifiedJobStdout(RetrieveAPIView):
response["Content-Disposition"] = 'attachment; filename="job_%s.txt"' % str(unified_job.id)
return response
except Exception as e:
- return Response({"error": "Error generating stdout download file: %s" % str(e)}, status=status.HTTP_400_BAD_REQUEST)
+ return Response({"error": _("Error generating stdout download file: %s") % str(e)}, status=status.HTTP_400_BAD_REQUEST)
elif request.accepted_renderer.format == 'txt':
return Response(unified_job.result_stdout)
else:

@@ -3650,7 +3652,7 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView):
if not request.user.can_access(self.model, 'delete', obj):
return Response(status=status.HTTP_404_NOT_FOUND)
if obj.notifications.filter(status='pending').exists():
- return Response({"error": "Delete not allowed while there are pending notifications"},
+ return Response({"error": _("Delete not allowed while there are pending notifications")},
status=status.HTTP_405_METHOD_NOT_ALLOWED)
return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs)

@@ -3717,8 +3719,8 @@ class ActivityStreamList(SimpleListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(ActivityStreamList, self).get(request, *args, **kwargs)

@@ -3734,8 +3736,8 @@ class ActivityStreamDetail(RetrieveAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
- raise LicenseForbids('Your license does not allow use of '
- 'the activity stream.')
+ raise LicenseForbids(_('Your license does not allow use of '
+ 'the activity stream.'))

# Okay, let it through.
return super(ActivityStreamDetail, self).get(request, *args, **kwargs)

@@ -3785,26 +3787,26 @@ class RoleUsersList(SubListCreateAttachDetachAPIView):
# Forbid implicit user creation here
sub_id = request.data.get('id', None)
if not sub_id:
- data = dict(msg="User 'id' field is missing.")
+ data = dict(msg=_("User 'id' field is missing."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

user = get_object_or_400(User, pk=sub_id)
role = self.get_parent_object()
if role == self.request.user.admin_role:
- raise PermissionDenied('You may not perform any action with your own admin_role.')
+ raise PermissionDenied(_('You may not perform any action with your own admin_role.'))

user_content_type = ContentType.objects.get_for_model(User)
if role.content_type == user_content_type:
- raise PermissionDenied('You may not change the membership of a users admin_role')
+ raise PermissionDenied(_('You may not change the membership of a users admin_role'))

credential_content_type = ContentType.objects.get_for_model(Credential)
if role.content_type == credential_content_type:
if role.content_object.organization and user not in role.content_object.organization.member_role:
- data = dict(msg="You cannot grant credential access to a user not in the credentials' organization")
+ data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

if not role.content_object.organization and not request.user.is_superuser:
- data = dict(msg="You cannot grant private credential access to another user")
+ data = dict(msg=_("You cannot grant private credential access to another user"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

return super(RoleUsersList, self).post(request, *args, **kwargs)

@@ -3828,7 +3830,7 @@ class RoleTeamsList(SubListAPIView):
# Forbid implicit team creation here
sub_id = request.data.get('id', None)
if not sub_id:
- data = dict(msg="Team 'id' field is missing.")
+ data = dict(msg=_("Team 'id' field is missing."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

team = get_object_or_400(Team, pk=sub_id)

@@ -3836,13 +3838,13 @@ class RoleTeamsList(SubListAPIView):

organization_content_type = ContentType.objects.get_for_model(Organization)
if role.content_type == organization_content_type:
- data = dict(msg="You cannot assign an Organization role as a child role for a Team.")
+ data = dict(msg=_("You cannot assign an Organization role as a child role for a Team."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

credential_content_type = ContentType.objects.get_for_model(Credential)
if role.content_type == credential_content_type:
if not role.content_object.organization or role.content_object.organization.id != team.organization.id:
- data = dict(msg="You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization")
+ data = dict(msg=_("You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)

action = 'attach'
@ -14,6 +14,7 @@ from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _

# Tower
from awx import MODE
@ -36,27 +37,27 @@ class Command(BaseCommand):
action='store_true',
dest='dry_run',
default=False,
help='Only show which settings would be commented/migrated.',
help=_('Only show which settings would be commented/migrated.'),
)
parser.add_argument(
'--skip-errors',
action='store_true',
dest='skip_errors',
default=False,
help='Skip over settings that would raise an error when commenting/migrating.',
help=_('Skip over settings that would raise an error when commenting/migrating.'),
)
parser.add_argument(
'--no-comment',
action='store_true',
dest='no_comment',
default=False,
help='Skip commenting out settings in files.',
help=_('Skip commenting out settings in files.'),
)
parser.add_argument(
'--backup-suffix',
dest='backup_suffix',
default=now().strftime('.%Y%m%d%H%M%S'),
help='Backup existing settings files with this suffix.',
help=_('Backup existing settings files with this suffix.'),
)

@transaction.atomic

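The help texts above are defined while the management command's argument parser is being built, which is why they are marked with ugettext_lazy rather than ugettext: the lazy proxy defers the catalog lookup until the string is actually rendered, when the active language is known. A minimal sketch of that difference, assuming a configured Django project; the French locale is only an example:

from django.utils import translation
from django.utils.translation import ugettext, ugettext_lazy

eager = ugettext('Skip commenting out settings in files.')          # looked up immediately, in whatever language is active now
deferred = ugettext_lazy('Skip commenting out settings in files.')  # a lazy proxy; no lookup has happened yet

with translation.override('fr'):
    # The lazy proxy is evaluated here, against the French catalog if one exists;
    # the eager string keeps the language that was active when it was created.
    print(unicode(deferred))
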
@ -417,7 +417,7 @@ class OrganizationAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__inventory__organization=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True

@ -491,7 +491,7 @@ class InventoryAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__inventory=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True

@ -613,7 +613,7 @@ class GroupAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__in=obj.inventory_sources.all(), status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True

@ -890,7 +890,7 @@ class ProjectAccess(BaseAccess):
active_jobs.extend([dict(type="project_update", id=o.id)
for o in ProjectUpdate.objects.filter(project=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True

@ -1132,7 +1132,7 @@ class JobTemplateAccess(BaseAccess):
active_jobs = [dict(type="job", id=o.id)
for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True

@ -1524,7 +1524,7 @@ class WorkflowJobTemplateAccess(BaseAccess):
active_jobs = [dict(type="job", id=o.id)
for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True

@ -94,14 +94,14 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
def clean_inventory(self):
inv = self.inventory
if not inv:
raise ValidationError('No valid inventory.')
raise ValidationError(_('No valid inventory.'))
return inv

def clean_credential(self):
cred = self.credential
if cred and cred.kind != 'ssh':
raise ValidationError(
'You must provide a machine / SSH credential.',
_('You must provide a machine / SSH credential.'),
)
return cred

@ -112,18 +112,18 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):

def clean_module_name(self):
if type(self.module_name) not in (str, unicode):
raise ValidationError("Invalid type for ad hoc command")
raise ValidationError(_("Invalid type for ad hoc command"))
module_name = self.module_name.strip() or 'command'
if module_name not in settings.AD_HOC_COMMANDS:
raise ValidationError('Unsupported module for ad hoc commands.')
raise ValidationError(_('Unsupported module for ad hoc commands.'))
return module_name

def clean_module_args(self):
if type(self.module_args) not in (str, unicode):
raise ValidationError("Invalid type for ad hoc command")
raise ValidationError(_("Invalid type for ad hoc command"))
module_args = self.module_args
if self.module_name in ('command', 'shell') and not module_args:
raise ValidationError('No argument passed to %s module.' % self.module_name)
raise ValidationError(_('No argument passed to %s module.') % self.module_name)
return module_args

@property

@ -278,9 +278,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
"""
host = self.host or ''
if not host and self.kind == 'vmware':
raise ValidationError('Host required for VMware credential.')
raise ValidationError(_('Host required for VMware credential.'))
if not host and self.kind == 'openstack':
raise ValidationError('Host required for OpenStack credential.')
raise ValidationError(_('Host required for OpenStack credential.'))
return host

def clean_domain(self):
@ -289,32 +289,32 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
def clean_username(self):
username = self.username or ''
if not username and self.kind == 'aws':
raise ValidationError('Access key required for AWS credential.')
raise ValidationError(_('Access key required for AWS credential.'))
if not username and self.kind == 'rax':
raise ValidationError('Username required for Rackspace '
'credential.')
raise ValidationError(_('Username required for Rackspace '
'credential.'))
if not username and self.kind == 'vmware':
raise ValidationError('Username required for VMware credential.')
raise ValidationError(_('Username required for VMware credential.'))
if not username and self.kind == 'openstack':
raise ValidationError('Username required for OpenStack credential.')
raise ValidationError(_('Username required for OpenStack credential.'))
return username

def clean_password(self):
password = self.password or ''
if not password and self.kind == 'aws':
raise ValidationError('Secret key required for AWS credential.')
raise ValidationError(_('Secret key required for AWS credential.'))
if not password and self.kind == 'rax':
raise ValidationError('API key required for Rackspace credential.')
raise ValidationError(_('API key required for Rackspace credential.'))
if not password and self.kind == 'vmware':
raise ValidationError('Password required for VMware credential.')
raise ValidationError(_('Password required for VMware credential.'))
if not password and self.kind == 'openstack':
raise ValidationError('Password or API key required for OpenStack credential.')
raise ValidationError(_('Password or API key required for OpenStack credential.'))
return password

def clean_project(self):
project = self.project or ''
if self.kind == 'openstack' and not project:
raise ValidationError('Project name required for OpenStack credential.')
raise ValidationError(_('Project name required for OpenStack credential.'))
return project

def clean_ssh_key_data(self):
@ -341,13 +341,13 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):

def clean_ssh_key_unlock(self):
if self.has_encrypted_ssh_key_data and not self.ssh_key_unlock:
raise ValidationError('SSH key unlock must be set when SSH key '
'is encrypted.')
raise ValidationError(_('SSH key unlock must be set when SSH key '
'is encrypted.'))
return self.ssh_key_unlock

def clean(self):
if self.deprecated_user and self.deprecated_team:
raise ValidationError('Credential cannot be assigned to both a user and team.')
raise ValidationError(_('Credential cannot be assigned to both a user and team.'))

def _password_field_allows_ask(self, field):
return bool(self.kind == 'ssh' and field != 'ssh_key_data')

@ -890,16 +890,16 @@ class InventorySourceOptions(BaseModel):
@classmethod
def get_ec2_group_by_choices(cls):
return [
('availability_zone', 'Availability Zone'),
('ami_id', 'Image ID'),
('instance_id', 'Instance ID'),
('instance_type', 'Instance Type'),
('key_pair', 'Key Name'),
('region', 'Region'),
('security_group', 'Security Group'),
('tag_keys', 'Tags'),
('vpc_id', 'VPC ID'),
('tag_none', 'Tag None'),
('availability_zone', _('Availability Zone')),
('ami_id', _('Image ID')),
('instance_id', _('Instance ID')),
('instance_type', _('Instance Type')),
('key_pair', _('Key Name')),
('region', _('Region')),
('security_group', _('Security Group')),
('tag_keys', _('Tags')),
('vpc_id', _('VPC ID')),
('tag_none', _('Tag None')),
]

@classmethod
@ -970,14 +970,14 @@ class InventorySourceOptions(BaseModel):
# credentials; Rackspace requires Rackspace credentials; etc...)
if self.source.replace('ec2', 'aws') != cred.kind:
raise ValidationError(
'Cloud-based inventory sources (such as %s) require '
'credentials for the matching cloud service.' % self.source
_('Cloud-based inventory sources (such as %s) require '
'credentials for the matching cloud service.') % self.source
)
# Allow an EC2 source to omit the credential. If Tower is running on
# an EC2 instance with an IAM Role assigned, boto will use credentials
# from the instance metadata instead of those explicitly provided.
elif self.source in CLOUD_PROVIDERS and self.source != 'ec2':
raise ValidationError('Credential is required for a cloud source.')
raise ValidationError(_('Credential is required for a cloud source.'))
return cred

def clean_source_regions(self):
@ -1002,9 +1002,9 @@ class InventorySourceOptions(BaseModel):
if r not in valid_regions and r not in invalid_regions:
invalid_regions.append(r)
if invalid_regions:
raise ValidationError('Invalid %s region%s: %s' % (self.source,
'' if len(invalid_regions) == 1 else 's',
', '.join(invalid_regions)))
raise ValidationError(_('Invalid %(source)s region%(plural)s: %(region)s') % {
'source': self.source, 'plural': '' if len(invalid_regions) == 1 else 's',
'region': ', '.join(invalid_regions)})
return ','.join(regions)

source_vars_dict = VarsDictProperty('source_vars')
@ -1028,9 +1028,9 @@ class InventorySourceOptions(BaseModel):
if instance_filter_name not in self.INSTANCE_FILTER_NAMES:
invalid_filters.append(instance_filter)
if invalid_filters:
raise ValidationError('Invalid filter expression%s: %s' %
('' if len(invalid_filters) == 1 else 's',
', '.join(invalid_filters)))
raise ValidationError(_('Invalid filter expression%(plural)s: %(filter)s') %
{'plural': '' if len(invalid_filters) == 1 else 's',
'filter': ', '.join(invalid_filters)})
return instance_filters

def clean_group_by(self):
@ -1047,9 +1047,9 @@ class InventorySourceOptions(BaseModel):
if c not in valid_choices and c not in invalid_choices:
invalid_choices.append(c)
if invalid_choices:
raise ValidationError('Invalid group by choice%s: %s' %
('' if len(invalid_choices) == 1 else 's',
', '.join(invalid_choices)))
raise ValidationError(_('Invalid group by choice%(plural)s: %(choice)s') %
{'plural': '' if len(invalid_choices) == 1 else 's',
'choice': ', '.join(invalid_choices)})
return ','.join(choices)


@ -1195,7 +1195,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions):
existing_sources = qs.exclude(pk=self.pk)
if existing_sources.count():
s = u', '.join([x.group.name for x in existing_sources])
raise ValidationError('Unable to configure this item for cloud sync. It is already managed by %s.' % s)
raise ValidationError(_('Unable to configure this item for cloud sync. It is already managed by %s.') % s)
return source

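The region, filter, and group-by messages above also switch from positional '%s' placeholders to named '%(...)s' placeholders. That is the usual convention for translatable strings: a translator can reorder named placeholders to match the target language's word order, which positional formatting does not allow. A small illustrative interpolation, with made-up values that are not taken from the commit:

from django.utils.translation import ugettext_lazy as _

msg = _('Invalid %(source)s region%(plural)s: %(region)s') % {
    'source': 'ec2',                      # example source
    'plural': 's',
    'region': 'us-fake-1, us-fake-2',     # example region names
}
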
@ -154,7 +154,7 @@ class JobOptions(BaseModel):
cred = self.credential
if cred and cred.kind != 'ssh':
raise ValidationError(
'You must provide a machine / SSH credential.',
_('You must provide a machine / SSH credential.'),
)
return cred

@ -162,7 +162,7 @@ class JobOptions(BaseModel):
cred = self.network_credential
if cred and cred.kind != 'net':
raise ValidationError(
'You must provide a network credential.',
_('You must provide a network credential.'),
)
return cred

@ -170,8 +170,8 @@ class JobOptions(BaseModel):
cred = self.cloud_credential
if cred and cred.kind not in CLOUD_PROVIDERS + ('aws',):
raise ValidationError(
'Must provide a credential for a cloud provider, such as '
'Amazon Web Services or Rackspace.',
_('Must provide a credential for a cloud provider, such as '
'Amazon Web Services or Rackspace.'),
)
return cred

@ -270,19 +270,19 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
if self.inventory is None:
resources_needed_to_start.append('inventory')
if not self.ask_inventory_on_launch:
validation_errors['inventory'] = ["Job Template must provide 'inventory' or allow prompting for it.",]
validation_errors['inventory'] = [_("Job Template must provide 'inventory' or allow prompting for it."),]
if self.credential is None:
resources_needed_to_start.append('credential')
if not self.ask_credential_on_launch:
validation_errors['credential'] = ["Job Template must provide 'credential' or allow prompting for it.",]
validation_errors['credential'] = [_("Job Template must provide 'credential' or allow prompting for it."),]

# Job type dependent checks
if self.job_type == PERM_INVENTORY_SCAN:
if self.inventory is None or self.ask_inventory_on_launch:
validation_errors['inventory'] = ["Scan jobs must be assigned a fixed inventory.",]
validation_errors['inventory'] = [_("Scan jobs must be assigned a fixed inventory."),]
elif self.project is None:
resources_needed_to_start.append('project')
validation_errors['project'] = ["Job types 'run' and 'check' must have assigned a project.",]
validation_errors['project'] = [_("Job types 'run' and 'check' must have assigned a project."),]

return (validation_errors, resources_needed_to_start)

@ -491,10 +491,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
if 'job_type' in data and self.ask_job_type_on_launch:
if ((self.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or
(data['job_type'] == PERM_INVENTORY_SCAN and not self.job_type == PERM_INVENTORY_SCAN)):
errors['job_type'] = 'Can not override job_type to or from a scan job.'
errors['job_type'] = _('Can not override job_type to or from a scan job.')
if (self.job_type == PERM_INVENTORY_SCAN and ('inventory' in data) and self.ask_inventory_on_launch and
self.inventory != data['inventory']):
errors['inventory'] = 'Inventory can not be changed at runtime for scan jobs.'
errors['inventory'] = _('Inventory can not be changed at runtime for scan jobs.')
return errors

@property

@ -267,7 +267,7 @@ class AuthToken(BaseModel):

def invalidate(self, reason='timeout_reached', save=True):
if not AuthToken.reason_long(reason):
raise ValueError('Invalid reason specified')
raise ValueError(_('Invalid reason specified'))
self.reason = reason
if save:
self.save()

@ -125,10 +125,10 @@ class ProjectOptions(models.Model):
scm_url = update_scm_url(self.scm_type, scm_url,
check_special_cases=False)
except ValueError as e:
raise ValidationError((e.args or ('Invalid SCM URL.',))[0])
raise ValidationError((e.args or (_('Invalid SCM URL.'),))[0])
scm_url_parts = urlparse.urlsplit(scm_url)
if self.scm_type and not any(scm_url_parts):
raise ValidationError('SCM URL is required.')
raise ValidationError(_('SCM URL is required.'))
return unicode(self.scm_url or '')

def clean_credential(self):
@ -137,7 +137,7 @@ class ProjectOptions(models.Model):
cred = self.credential
if cred:
if cred.kind != 'scm':
raise ValidationError("Credential kind must be 'scm'.")
raise ValidationError(_("Credential kind must be 'scm'."))
try:
scm_url = update_scm_url(self.scm_type, self.scm_url,
check_special_cases=False)
@ -152,7 +152,7 @@ class ProjectOptions(models.Model):
update_scm_url(self.scm_type, self.scm_url, scm_username,
scm_password)
except ValueError as e:
raise ValidationError((e.args or ('Invalid credential.',))[0])
raise ValidationError((e.args or (_('Invalid credential.'),))[0])
except ValueError:
pass
return cred

@ -62,12 +62,12 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
]

COMMON_STATUS_CHOICES = JOB_STATUS_CHOICES + [
('never updated', 'Never Updated'), # A job has never been run using this template.
('never updated', _('Never Updated')), # A job has never been run using this template.
]

PROJECT_STATUS_CHOICES = COMMON_STATUS_CHOICES + [
('ok', 'OK'), # Project is not configured for SCM and path exists.
('missing', 'Missing'), # Project path does not exist.
('ok', _('OK')), # Project is not configured for SCM and path exists.
('missing', _('Missing')), # Project path does not exist.
]

INVENTORY_SOURCE_STATUS_CHOICES = COMMON_STATUS_CHOICES + [

@ -5,6 +5,8 @@ import json

from django.utils.encoding import smart_text
from django.core.mail.backends.base import BaseEmailBackend
from django.utils.translation import ugettext_lazy as _


class TowerBaseEmailBackend(BaseEmailBackend):

@ -12,9 +14,8 @@ class TowerBaseEmailBackend(BaseEmailBackend):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
body['id'],
body['status'],
body['url']))
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual

@ -5,6 +5,8 @@ import json

from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _


class CustomEmailBackend(EmailBackend):

@ -23,9 +25,8 @@ class CustomEmailBackend(EmailBackend):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
body['id'],
body['status'],
body['url']))
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual

@ -6,11 +6,12 @@ import logging
import requests

from django.utils.encoding import smart_text

from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend

logger = logging.getLogger('awx.main.notifications.hipchat_backend')


class HipChatBackend(TowerBaseEmailBackend):

init_parameters = {"token": {"label": "Token", "type": "password"},
@ -42,8 +43,8 @@ class HipChatBackend(TowerBaseEmailBackend):
"from": m.from_email,
"message_format": "text"})
if r.status_code != 204:
logger.error(smart_text("Error sending messages: {}".format(r.text)))
logger.error(smart_text(_("Error sending messages: {}").format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending message to hipchat: {}".format(r.text)))
raise Exception(smart_text(_("Error sending message to hipchat: {}").format(r.text)))
sent_messages += 1
return sent_messages

@ -8,11 +8,12 @@ import logging
import irc.client

from django.utils.encoding import smart_text

from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend

logger = logging.getLogger('awx.main.notifications.irc_backend')


class IrcBackend(TowerBaseEmailBackend):

init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
@ -50,7 +51,7 @@ class IrcBackend(TowerBaseEmailBackend):
connect_factory=connection_factory,
)
except irc.client.ServerConnectionError as e:
logger.error(smart_text("Exception connecting to irc server: {}".format(e)))
logger.error(smart_text(_("Exception connecting to irc server: {}").format(e)))
if not self.fail_silently:
raise
return True

@ -5,11 +5,12 @@ import logging
import pygerduty

from django.utils.encoding import smart_text

from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend

logger = logging.getLogger('awx.main.notifications.pagerduty_backend')


class PagerDutyBackend(TowerBaseEmailBackend):

init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
@ -35,7 +36,7 @@ class PagerDutyBackend(TowerBaseEmailBackend):
except Exception as e:
if not self.fail_silently:
raise
logger.error(smart_text("Exception connecting to PagerDuty: {}".format(e)))
logger.error(smart_text(_("Exception connecting to PagerDuty: {}").format(e)))
for m in messages:
try:
pager.trigger_incident(m.recipients()[0],
@ -44,7 +45,7 @@ class PagerDutyBackend(TowerBaseEmailBackend):
client=m.from_email)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
logger.error(smart_text(_("Exception sending messages: {}").format(e)))
if not self.fail_silently:
raise
return sent_messages

@ -5,11 +5,12 @@ import logging
from slackclient import SlackClient

from django.utils.encoding import smart_text

from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend

logger = logging.getLogger('awx.main.notifications.slack_backend')


class SlackBackend(TowerBaseEmailBackend):

init_parameters = {"token": {"label": "Token", "type": "password"},
@ -48,7 +49,7 @@ class SlackBackend(TowerBaseEmailBackend):
self.connection.rtm_send_message(r, m.subject)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
logger.error(smart_text(_("Exception sending messages: {}").format(e)))
if not self.fail_silently:
raise
return sent_messages

@ -6,11 +6,12 @@ import logging
from twilio.rest import TwilioRestClient

from django.utils.encoding import smart_text

from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend

logger = logging.getLogger('awx.main.notifications.twilio_backend')


class TwilioBackend(TowerBaseEmailBackend):

init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
@ -32,7 +33,7 @@ class TwilioBackend(TowerBaseEmailBackend):
except Exception as e:
if not self.fail_silently:
raise
logger.error(smart_text("Exception connecting to Twilio: {}".format(e)))
logger.error(smart_text(_("Exception connecting to Twilio: {}").format(e)))

for m in messages:
try:
@ -42,7 +43,7 @@ class TwilioBackend(TowerBaseEmailBackend):
body=m.subject)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
logger.error(smart_text(_("Exception sending messages: {}").format(e)))
if not self.fail_silently:
raise
return sent_messages

@ -5,12 +5,13 @@ import logging
import requests

from django.utils.encoding import smart_text

from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version

logger = logging.getLogger('awx.main.notifications.webhook_backend')


class WebhookBackend(TowerBaseEmailBackend):

init_parameters = {"url": {"label": "Target URL", "type": "string"},
@ -34,8 +35,8 @@ class WebhookBackend(TowerBaseEmailBackend):
json=m.body,
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
logger.error(smart_text(_("Error sending notification webhook: {}").format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
raise Exception(smart_text(_("Error sending notification webhook: {}").format(r.text)))
sent_messages += 1
return sent_messages

@ -42,6 +42,7 @@ from django.utils.timezone import now
from django.utils.encoding import smart_str
from django.core.mail import send_mail
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _

# AWX
from awx.main.constants import CLOUD_PROVIDERS
@ -111,12 +112,12 @@ def run_administrative_checks(self):
tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True)
if (used_percentage * 100) > 90:
send_mail("Ansible Tower host usage over 90%",
"Ansible Tower host usage over 90%",
_("Ansible Tower host usage over 90%"),
tower_admin_emails,
fail_silently=True)
if validation_info.get('date_warning', False):
send_mail("Ansible Tower license will expire soon",
"Ansible Tower license will expire soon",
_("Ansible Tower license will expire soon"),
tower_admin_emails,
fail_silently=True)

@ -181,7 +182,7 @@ def tower_periodic_scheduler(self):

def _send_notification_templates(instance, status_str):
if status_str not in ['succeeded', 'failed']:
raise ValueError("status_str must be either succeeded or failed")
raise ValueError(_("status_str must be either succeeded or failed"))
notification_templates = instance.get_notification_templates()
if notification_templates:
all_notification_templates = set(notification_templates.get('success', []) + notification_templates.get('any', []))

@ -19,6 +19,9 @@ import tempfile
# Decorator
from decorator import decorator

# Django
from django.utils.translation import ugettext_lazy as _

# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied
from django.utils.encoding import smart_str
@ -77,7 +80,7 @@ def to_python_boolean(value, allow_none=False):
elif allow_none and value.lower() in ('none', 'null'):
return None
else:
raise ValueError(u'Unable to convert "%s" to boolean' % unicode(value))
raise ValueError(_(u'Unable to convert "%s" to boolean') % unicode(value))

def camelcase_to_underscore(s):
'''
@ -192,7 +195,7 @@ def decrypt_field(instance, field_name, subfield=None):
return value
algo, b64data = value[len('$encrypted$'):].split('$', 1)
if algo != 'AES':
raise ValueError('unsupported algorithm: %s' % algo)
raise ValueError(_('unsupported algorithm: %s') % algo)
encrypted = base64.b64decode(b64data)
key = get_encryption_key(instance, field_name)
cipher = AES.new(key, AES.MODE_ECB)
@ -213,16 +216,16 @@ def update_scm_url(scm_type, url, username=True, password=True,
# hg: http://www.selenic.com/mercurial/hg.1.html#url-paths
# svn: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.advanced.reposurls
if scm_type not in ('git', 'hg', 'svn'):
raise ValueError('Unsupported SCM type "%s"' % str(scm_type))
raise ValueError(_('Unsupported SCM type "%s"') % str(scm_type))
if not url.strip():
return ''
parts = urlparse.urlsplit(url)
try:
parts.port
except ValueError:
raise ValueError('Invalid %s URL' % scm_type)
raise ValueError(_('Invalid %s URL') % scm_type)
if parts.scheme == 'git+ssh' and not scp_format:
raise ValueError('Unsupported %s URL' % scm_type)
raise ValueError(_('Unsupported %s URL') % scm_type)

if '://' not in url:
# Handle SCP-style URLs for git (e.g. [user@]host.xz:path/to/repo.git/).
@ -232,7 +235,7 @@ def update_scm_url(scm_type, url, username=True, password=True,
else:
userpass, hostpath = '', url
if hostpath.count(':') > 1:
raise ValueError('Invalid %s URL' % scm_type)
raise ValueError(_('Invalid %s URL') % scm_type)
host, path = hostpath.split(':', 1)
#if not path.startswith('/') and not path.startswith('~/'):
# path = '~/%s' % path
@ -251,7 +254,7 @@ def update_scm_url(scm_type, url, username=True, password=True,
else:
parts = urlparse.urlsplit('file://%s' % url)
else:
raise ValueError('Invalid %s URL' % scm_type)
raise ValueError(_('Invalid %s URL') % scm_type)

# Validate that scheme is valid for given scm_type.
scm_type_schemes = {
@ -260,11 +263,11 @@ def update_scm_url(scm_type, url, username=True, password=True,
'svn': ('http', 'https', 'svn', 'svn+ssh', 'file'),
}
if parts.scheme not in scm_type_schemes.get(scm_type, ()):
raise ValueError('Unsupported %s URL' % scm_type)
raise ValueError(_('Unsupported %s URL') % scm_type)
if parts.scheme == 'file' and parts.netloc not in ('', 'localhost'):
raise ValueError('Unsupported host "%s" for file:// URL' % (parts.netloc))
raise ValueError(_('Unsupported host "%s" for file:// URL') % (parts.netloc))
elif parts.scheme != 'file' and not parts.netloc:
raise ValueError('Host is required for %s URL' % parts.scheme)
raise ValueError(_('Host is required for %s URL') % parts.scheme)
if username is True:
netloc_username = parts.username or ''
elif username:
@ -282,13 +285,13 @@ def update_scm_url(scm_type, url, username=True, password=True,
if check_special_cases:
special_git_hosts = ('github.com', 'bitbucket.org', 'altssh.bitbucket.org')
if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_username != 'git':
raise ValueError('Username must be "git" for SSH access to %s.' % parts.hostname)
raise ValueError(_('Username must be "git" for SSH access to %s.') % parts.hostname)
if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_password:
#raise ValueError('Password not allowed for SSH access to %s.' % parts.hostname)
netloc_password = ''
special_hg_hosts = ('bitbucket.org', 'altssh.bitbucket.org')
if scm_type == 'hg' and parts.scheme == 'ssh' and parts.hostname in special_hg_hosts and netloc_username != 'hg':
raise ValueError('Username must be "hg" for SSH access to %s.' % parts.hostname)
raise ValueError(_('Username must be "hg" for SSH access to %s.') % parts.hostname)
if scm_type == 'hg' and parts.scheme == 'ssh' and netloc_password:
#raise ValueError('Password not supported for SSH with Mercurial.')
netloc_password = ''

@ -188,4 +188,4 @@ def vars_validate_or_raise(vars_str):
return vars_str
except yaml.YAMLError:
pass
raise RestValidationError('Must be valid JSON or YAML.')
raise RestValidationError(_('Must be valid JSON or YAML.'))

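Throughout these hunks the pattern is to wrap only the string literal and interpolate afterwards, e.g. _('Invalid %s URL') % scm_type rather than _('Invalid %s URL' % scm_type). A short sketch of why the order matters, with an example value that is not taken from the commit: the message catalog keeps one stable msgid with the placeholder in place, and the value is substituted at runtime after the lookup.

from django.utils.translation import ugettext_lazy as _

scm_type = 'git'  # example value only

# Preferred: the lookup key stays 'Invalid %s URL', matching the catalog entry,
# and the value is filled in after translation.
good = _('Invalid %s URL') % scm_type

# Avoided: interpolating first changes the lookup key with every value
# ('Invalid git URL', 'Invalid hg URL', ...), so no catalog entry matches.
bad = _('Invalid %s URL' % scm_type)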