diff --git a/.gitignore b/.gitignore
index afd8aa7187..ca9dd12298 100644
--- a/.gitignore
+++ b/.gitignore
@@ -106,6 +106,7 @@ reports
 *.log.[0-9]
 *.results
 local/
+*.mo
 
 # AWX python libs populated by requirements.txt
 awx/lib/.deps_built
diff --git a/Makefile b/Makefile
index a1392da135..f4a7bdaf3e 100644
--- a/Makefile
+++ b/Makefile
@@ -505,9 +505,10 @@ test_tox:
 # Alias existing make target so old versions run against Jenkins the same way
 test_jenkins : test_coverage
 
-# UI TASKS
+# l10n TASKS
 # --------------------------------------
 
+# check for UI po files
 HAVE_PO := $(shell ls awx/ui/po/*.po 2>/dev/null)
 check-po:
 ifdef HAVE_PO
@@ -537,14 +538,32 @@ else
 	@echo No PO files
 endif
 
-# generate l10n .json
-languages: $(UI_DEPS_FLAG_FILE) check-po
-	$(NPM_BIN) --prefix awx/ui run languages
-
-# generate .pot
+# generate UI .pot
 pot: $(UI_DEPS_FLAG_FILE)
 	$(NPM_BIN) --prefix awx/ui run pot
 
+# generate django .pot .po
+LANG = "en-us"
+messages:
+	@if [ "$(VENV_BASE)" ]; then \
+		. $(VENV_BASE)/tower/bin/activate; \
+	fi; \
+	$(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
+
+# generate l10n .json .mo
+languages: $(UI_DEPS_FLAG_FILE) check-po
+	$(NPM_BIN) --prefix awx/ui run languages
+	@if [ "$(VENV_BASE)" ]; then \
+		. $(VENV_BASE)/tower/bin/activate; \
+	fi; \
+	$(PYTHON) manage.py compilemessages
+
+# End l10n TASKS
+# --------------------------------------
+
+# UI TASKS
+# --------------------------------------
+
 ui-deps: $(UI_DEPS_FLAG_FILE)
 
 $(UI_DEPS_FLAG_FILE): awx/ui/package.json
diff --git a/awx/api/authentication.py b/awx/api/authentication.py
index 6be5447507..f00950ed85 100644
--- a/awx/api/authentication.py
+++ b/awx/api/authentication.py
@@ -9,6 +9,7 @@ import logging
 from django.conf import settings
 from django.utils.timezone import now as tz_now
 from django.utils.encoding import smart_text
+from django.utils.translation import ugettext_lazy as _
 
 # Django REST Framework
 from rest_framework import authentication
@@ -62,10 +63,10 @@ class TokenAuthentication(authentication.TokenAuthentication):
             return None
 
         if len(auth) == 1:
-            msg = 'Invalid token header. No credentials provided.'
+            msg = _('Invalid token header. No credentials provided.')
             raise exceptions.AuthenticationFailed(msg)
         elif len(auth) > 2:
-            msg = 'Invalid token header. Token string should not contain spaces.'
+            msg = _('Invalid token header. Token string should not contain spaces.')
            raise exceptions.AuthenticationFailed(msg)
 
         return self.authenticate_credentials(auth[1])
@@ -100,7 +101,7 @@ class TokenAuthentication(authentication.TokenAuthentication):
 
         # If the user is inactive, then return an error.
         if not token.user.is_active:
-            raise exceptions.AuthenticationFailed('User inactive or deleted')
+            raise exceptions.AuthenticationFailed(_('User inactive or deleted'))
 
         # Refresh the token.
         # The token is extended from "right now" + configurable setting amount.
@@ -151,7 +152,7 @@ class TaskAuthentication(authentication.BaseAuthentication):
             return None
         token = unified_job.task_auth_token
         if auth[1] != token:
-            raise exceptions.AuthenticationFailed('Invalid task token')
+            raise exceptions.AuthenticationFailed(_('Invalid task token'))
         return (None, token)
 
     def authenticate_header(self, request):
diff --git a/awx/api/generics.py b/awx/api/generics.py
index e474a1bafc..a6b7bb12fb 100644
--- a/awx/api/generics.py
+++ b/awx/api/generics.py
@@ -15,6 +15,7 @@ from django.template.loader import render_to_string
 from django.utils.encoding import smart_text
 from django.utils.safestring import mark_safe
 from django.contrib.contenttypes.models import ContentType
+from django.utils.translation import ugettext_lazy as _
 
 # Django REST Framework
 from rest_framework.authentication import get_authorization_header
@@ -432,7 +433,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
         sub_id = request.data.get('id', None)
         res = None
         if not sub_id:
-            data = dict(msg='"id" is required to disassociate')
+            data = dict(msg=_('"id" is required to disassociate'))
             res = Response(data, status=status.HTTP_400_BAD_REQUEST)
         return (sub_id, res)
 
diff --git a/awx/api/metadata.py b/awx/api/metadata.py
index b329d83793..54850f285e 100644
--- a/awx/api/metadata.py
+++ b/awx/api/metadata.py
@@ -7,6 +7,7 @@ from collections import OrderedDict
 from django.core.exceptions import PermissionDenied
 from django.http import Http404
 from django.utils.encoding import force_text, smart_text
+from django.utils.translation import ugettext_lazy as _
 
 # Django REST Framework
 from rest_framework import exceptions
@@ -46,15 +47,15 @@ class Metadata(metadata.SimpleMetadata):
         serializer = getattr(field, 'parent', None)
         if serializer:
             field_help_text = {
-                'id': 'Database ID for this {}.',
-                'name': 'Name of this {}.',
-                'description': 'Optional description of this {}.',
-                'type': 'Data type for this {}.',
-                'url': 'URL for this {}.',
-                'related': 'Data structure with URLs of related resources.',
-                'summary_fields': 'Data structure with name/description for related resources.',
-                'created': 'Timestamp when this {} was created.',
-                'modified': 'Timestamp when this {} was last modified.',
+                'id': _('Database ID for this {}.'),
+                'name': _('Name of this {}.'),
+                'description': _('Optional description of this {}.'),
+                'type': _('Data type for this {}.'),
+                'url': _('URL for this {}.'),
+                'related': _('Data structure with URLs of related resources.'),
+                'summary_fields': _('Data structure with name/description for related resources.'),
+                'created': _('Timestamp when this {} was created.'),
+                'modified': _('Timestamp when this {} was last modified.'),
             }
             if field.field_name in field_help_text:
                 if hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
diff --git a/awx/api/parsers.py b/awx/api/parsers.py
index 94ddbec561..8c720201a2 100644
--- a/awx/api/parsers.py
+++ b/awx/api/parsers.py
@@ -5,6 +5,7 @@ import json
 # Django
 from django.conf import settings
 from django.utils import six
+from django.utils.translation import ugettext_lazy as _
 
 # Django REST Framework
 from rest_framework import parsers
@@ -27,4 +28,4 @@ class JSONParser(parsers.JSONParser):
             data = stream.read().decode(encoding)
             return json.loads(data, object_pairs_hook=OrderedDict)
         except ValueError as exc:
-            raise ParseError('JSON parse error - %s' % six.text_type(exc))
+            raise ParseError(_('JSON parse error - %s') % six.text_type(exc))
diff --git a/awx/api/serializers.py b/awx/api/serializers.py
index c44758ead1..d7c17238d7
100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -20,7 +20,7 @@ from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.core.exceptions import ObjectDoesNotExist, ValidationError as DjangoValidationError from django.db import models -# from django.utils.translation import ugettext_lazy as _ +from django.utils.translation import ugettext_lazy as _ from django.utils.encoding import force_text from django.utils.text import capfirst @@ -242,11 +242,11 @@ class BaseSerializer(serializers.ModelSerializer): def get_type_choices(self): type_name_map = { - 'job': 'Playbook Run', - 'ad_hoc_command': 'Command', - 'project_update': 'SCM Update', - 'inventory_update': 'Inventory Sync', - 'system_job': 'Management Job', + 'job': _('Playbook Run'), + 'ad_hoc_command': _('Command'), + 'project_update': _('SCM Update'), + 'inventory_update': _('Inventory Sync'), + 'system_job': _('Management Job'), } choices = [] for t in self.get_types(): @@ -623,8 +623,9 @@ class UnifiedJobSerializer(BaseSerializer): def get_result_stdout(self, obj): obj_size = obj.result_stdout_size if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY: - return "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size, - settings.STDOUT_MAX_BYTES_DISPLAY) + return _("Standard Output too large to display (%(text_size)d bytes), " + "only download supported for sizes over %(supported_size)d bytes") % { + 'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY} return obj.result_stdout @@ -680,8 +681,9 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer): def get_result_stdout(self, obj): obj_size = obj.result_stdout_size if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY: - return "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size, - settings.STDOUT_MAX_BYTES_DISPLAY) + return _("Standard Output too large to display (%(text_size)d bytes), " + "only download supported for sizes over %(supported_size)d bytes") % { + 'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY} return obj.result_stdout def get_types(self): @@ -694,9 +696,9 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer): class UserSerializer(BaseSerializer): password = serializers.CharField(required=False, default='', write_only=True, - help_text='Write-only field used to change the password.') + help_text=_('Write-only field used to change the password.')) ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True) - external_account = serializers.SerializerMethodField(help_text='Set if the account is managed by an external service') + external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service')) is_system_auditor = serializers.BooleanField(default=False) show_capabilities = ['edit', 'delete'] @@ -720,7 +722,7 @@ class UserSerializer(BaseSerializer): def validate_password(self, value): if not self.instance and value in (None, ''): - raise serializers.ValidationError('Password required for new User.') + raise serializers.ValidationError(_('Password required for new User.')) return value def _update_password(self, obj, new_password): @@ -804,7 +806,7 @@ class UserSerializer(BaseSerializer): ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys()) if field_name in ldap_managed_fields: if value != getattr(self.instance, field_name): - raise 
serializers.ValidationError('Unable to change %s on user managed by LDAP.' % field_name) + raise serializers.ValidationError(_('Unable to change %s on user managed by LDAP.') % field_name) return value def validate_username(self, value): @@ -955,13 +957,13 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): view = self.context.get('view', None) if not organization and not view.request.user.is_superuser: # Only allow super users to create orgless projects - raise serializers.ValidationError('Organization is missing') + raise serializers.ValidationError(_('Organization is missing')) return super(ProjectSerializer, self).validate(attrs) class ProjectPlaybooksSerializer(ProjectSerializer): - playbooks = serializers.SerializerMethodField(help_text='Array of playbooks available within this project.') + playbooks = serializers.SerializerMethodField(help_text=_('Array of playbooks available within this project.')) class Meta: model = Project @@ -1143,7 +1145,7 @@ class HostSerializer(BaseSerializerWithVariables): if port < 1 or port > 65535: raise ValueError except ValueError: - raise serializers.ValidationError(u'Invalid port specification: %s' % force_text(port)) + raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_text(port)) return name, port def validate_name(self, value): @@ -1171,7 +1173,7 @@ class HostSerializer(BaseSerializerWithVariables): vars_dict['ansible_ssh_port'] = port attrs['variables'] = yaml.dump(vars_dict) except (yaml.YAMLError, TypeError): - raise serializers.ValidationError('Must be valid JSON or YAML.') + raise serializers.ValidationError(_('Must be valid JSON or YAML.')) return super(HostSerializer, self).validate(attrs) @@ -1228,7 +1230,7 @@ class GroupSerializer(BaseSerializerWithVariables): def validate_name(self, value): if value in ('all', '_meta'): - raise serializers.ValidationError('Invalid group name.') + raise serializers.ValidationError(_('Invalid group name.')) return value def to_representation(self, obj): @@ -1302,7 +1304,7 @@ class CustomInventoryScriptSerializer(BaseSerializer): def validate_script(self, value): if not value.startswith("#!"): - raise serializers.ValidationError('Script must begin with a hashbang sequence: i.e.... #!/usr/bin/env python') + raise serializers.ValidationError(_('Script must begin with a hashbang sequence: i.e.... #!/usr/bin/env python')) return value def to_representation(self, obj): @@ -1355,13 +1357,13 @@ class InventorySourceOptionsSerializer(BaseSerializer): source_script = attrs.get('source_script', self.instance and self.instance.source_script or '') if source == 'custom': if source_script is None or source_script == '': - errors['source_script'] = "If 'source' is 'custom', 'source_script' must be provided." + errors['source_script'] = _("If 'source' is 'custom', 'source_script' must be provided.") else: try: if source_script.organization != self.instance.inventory.organization: - errors['source_script'] = "The 'source_script' does not belong to the same organization as the inventory." + errors['source_script'] = _("The 'source_script' does not belong to the same organization as the inventory.") except Exception as exc: - errors['source_script'] = "'source_script' doesn't exist." 
+ errors['source_script'] = _("'source_script' doesn't exist.") logger.error(str(exc)) if errors: @@ -1721,18 +1723,18 @@ class CredentialSerializerCreate(CredentialSerializer): user = serializers.PrimaryKeyRelatedField( queryset=User.objects.all(), required=False, default=None, write_only=True, allow_null=True, - help_text='Write-only field used to add user to owner role. If provided, ' - 'do not give either team or organization. Only valid for creation.') + help_text=_('Write-only field used to add user to owner role. If provided, ' + 'do not give either team or organization. Only valid for creation.')) team = serializers.PrimaryKeyRelatedField( queryset=Team.objects.all(), required=False, default=None, write_only=True, allow_null=True, - help_text='Write-only field used to add team to owner role. If provided, ' - 'do not give either user or organization. Only valid for creation.') + help_text=_('Write-only field used to add team to owner role. If provided, ' + 'do not give either user or organization. Only valid for creation.')) organization = serializers.PrimaryKeyRelatedField( queryset=Organization.objects.all(), required=False, default=None, write_only=True, allow_null=True, - help_text='Write-only field used to add organization to owner role. If provided, ' - 'do not give either team or team. Only valid for creation.') + help_text=_('Write-only field used to add organization to owner role. If provided, ' + 'do not give either team or team. Only valid for creation.')) class Meta: model = Credential @@ -1747,7 +1749,7 @@ class CredentialSerializerCreate(CredentialSerializer): else: attrs.pop(field) if not owner_fields: - raise serializers.ValidationError({"detail": "Missing 'user', 'team', or 'organization'."}) + raise serializers.ValidationError({"detail": _("Missing 'user', 'team', or 'organization'.")}) return super(CredentialSerializerCreate, self).validate(attrs) def create(self, validated_data): @@ -1760,7 +1762,7 @@ class CredentialSerializerCreate(CredentialSerializer): credential.admin_role.members.add(user) if team: if not credential.organization or team.organization.id != credential.organization.id: - raise serializers.ValidationError({"detail": "Credential organization must be set and match before assigning to a team"}) + raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")}) credential.admin_role.parents.add(team.admin_role) credential.use_role.parents.add(team.member_role) return credential @@ -1846,11 +1848,11 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer): playbook = attrs.get('playbook', self.instance and self.instance.playbook or '') job_type = attrs.get('job_type', self.instance and self.instance.job_type or None) if not project and job_type != PERM_INVENTORY_SCAN: - raise serializers.ValidationError({'project': 'This field is required.'}) + raise serializers.ValidationError({'project': _('This field is required.')}) if project and playbook and force_text(playbook) not in project.playbook_files: - raise serializers.ValidationError({'playbook': 'Playbook not found for project.'}) + raise serializers.ValidationError({'playbook': _('Playbook not found for project.')}) if project and not playbook: - raise serializers.ValidationError({'playbook': 'Must select playbook for project.'}) + raise serializers.ValidationError({'playbook': _('Must select playbook for project.')}) return super(JobOptionsSerializer, self).validate(attrs) @@ -1903,12 +1905,12 @@ class 
JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer): if job_type == "scan": if inventory is None or attrs.get('ask_inventory_on_launch', False): - raise serializers.ValidationError({'inventory': 'Scan jobs must be assigned a fixed inventory.'}) + raise serializers.ValidationError({'inventory': _('Scan jobs must be assigned a fixed inventory.')}) elif project is None: - raise serializers.ValidationError({'project': "Job types 'run' and 'check' must have assigned a project."}) + raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")}) if survey_enabled and job_type == PERM_INVENTORY_SCAN: - raise serializers.ValidationError({'survey_enabled': 'Survey Enabled can not be used with scan jobs.'}) + raise serializers.ValidationError({'survey_enabled': _('Survey Enabled can not be used with scan jobs.')}) return super(JobTemplateSerializer, self).validate(attrs) @@ -1968,7 +1970,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): try: job_template = JobTemplate.objects.get(pk=data['job_template']) except JobTemplate.DoesNotExist: - raise serializers.ValidationError({'job_template': 'Invalid job template.'}) + raise serializers.ValidationError({'job_template': _('Invalid job template.')}) data.setdefault('name', job_template.name) data.setdefault('description', job_template.description) data.setdefault('job_type', job_template.job_type) @@ -2053,11 +2055,11 @@ class JobRelaunchSerializer(JobSerializer): def validate(self, attrs): obj = self.context.get('obj') if not obj.credential: - raise serializers.ValidationError(dict(credential=["Credential not found or deleted."])) + raise serializers.ValidationError(dict(credential=[_("Credential not found or deleted.")])) if obj.job_type != PERM_INVENTORY_SCAN and obj.project is None: - raise serializers.ValidationError(dict(errors=["Job Template Project is missing or undefined."])) + raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")])) if obj.inventory is None: - raise serializers.ValidationError(dict(errors=["Job Template Inventory is missing or undefined."])) + raise serializers.ValidationError(dict(errors=[_("Job Template Inventory is missing or undefined.")])) attrs = super(JobRelaunchSerializer, self).validate(attrs) return attrs @@ -2315,15 +2317,17 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer): job_types = [t for t, v in JOB_TYPE_CHOICES] if attrs['char_prompts']['job_type'] not in job_types: raise serializers.ValidationError({ - "job_type": "%s is not a valid job type. The choices are %s." % ( - attrs['char_prompts']['job_type'], job_types)}) + "job_type": _("%(job_type)s is not a valid job type. 
The choices are %(choices)s.") % { + 'job_type': attrs['char_prompts']['job_type'], 'choices': job_types}}) if self.instance is None and ('workflow_job_template' not in attrs or attrs['workflow_job_template'] is None): - raise serializers.ValidationError({"workflow_job_template": "Workflow job template is missing during creation"}) + raise serializers.ValidationError({ + "workflow_job_template": _("Workflow job template is missing during creation") + }) ujt_obj = attrs.get('unified_job_template', None) if isinstance(ujt_obj, (WorkflowJobTemplate, SystemJobTemplate)): raise serializers.ValidationError({ - "unified_job_template": "Can not nest a %s inside a WorkflowJobTemplate" % ujt_obj.__class__.__name__}) + "unified_job_template": _("Can not nest a %s inside a WorkflowJobTemplate") % ujt_obj.__class__.__name__}) return super(WorkflowJobTemplateNodeSerializer, self).validate(attrs) class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer): @@ -2531,7 +2535,7 @@ class JobLaunchSerializer(BaseSerializer): for field in obj.resources_needed_to_start: if not (attrs.get(field, False) and obj._ask_for_vars_dict().get(field, False)): - errors[field] = "Job Template '%s' is missing or undefined." % field + errors[field] = _("Job Template '%s' is missing or undefined.") % field if (not obj.ask_credential_on_launch) or (not attrs.get('credential', None)): credential = obj.credential @@ -2557,7 +2561,7 @@ class JobLaunchSerializer(BaseSerializer): extra_vars = yaml.safe_load(extra_vars) assert isinstance(extra_vars, dict) except (yaml.YAMLError, TypeError, AttributeError, AssertionError): - errors['extra_vars'] = 'Must be a valid JSON or YAML dictionary.' + errors['extra_vars'] = _('Must be a valid JSON or YAML dictionary.') if not isinstance(extra_vars, dict): extra_vars = {} @@ -2641,7 +2645,7 @@ class NotificationTemplateSerializer(BaseSerializer): else: notification_type = None if not notification_type: - raise serializers.ValidationError('Missing required fields for Notification Configuration: notification_type') + raise serializers.ValidationError(_('Missing required fields for Notification Configuration: notification_type')) notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[notification_type] missing_fields = [] @@ -2664,16 +2668,16 @@ class NotificationTemplateSerializer(BaseSerializer): incorrect_type_fields.append((field, field_type)) continue if field_type == "list" and len(field_val) < 1: - error_list.append("No values specified for field '{}'".format(field)) + error_list.append(_("No values specified for field '{}'").format(field)) continue if field_type == "password" and field_val == "$encrypted$" and object_actual is not None: attrs['notification_configuration'][field] = object_actual.notification_configuration[field] if missing_fields: - error_list.append("Missing required fields for Notification Configuration: {}.".format(missing_fields)) + error_list.append(_("Missing required fields for Notification Configuration: {}.").format(missing_fields)) if incorrect_type_fields: for type_field_error in incorrect_type_fields: - error_list.append("Configuration field '{}' incorrect type, expected {}.".format(type_field_error[0], - type_field_error[1])) + error_list.append(_("Configuration field '{}' incorrect type, expected {}.").format(type_field_error[0], + type_field_error[1])) if error_list: raise serializers.ValidationError(error_list) return attrs @@ -2722,7 +2726,7 @@ class ScheduleSerializer(BaseSerializer): def validate_unified_job_template(self, value): if 
type(value) == InventorySource and value.source not in SCHEDULEABLE_PROVIDERS: - raise serializers.ValidationError('Inventory Source must be a cloud resource.') + raise serializers.ValidationError(_('Inventory Source must be a cloud resource.')) return value # We reject rrules if: @@ -2744,37 +2748,37 @@ class ScheduleSerializer(BaseSerializer): match_multiple_dtstart = re.findall(".*?(DTSTART\:[0-9]+T[0-9]+Z)", rrule_value) match_multiple_rrule = re.findall(".*?(RRULE\:)", rrule_value) if not len(match_multiple_dtstart): - raise serializers.ValidationError('DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ') + raise serializers.ValidationError(_('DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ')) if len(match_multiple_dtstart) > 1: - raise serializers.ValidationError('Multiple DTSTART is not supported.') + raise serializers.ValidationError(_('Multiple DTSTART is not supported.')) if not len(match_multiple_rrule): - raise serializers.ValidationError('RRULE require in rrule.') + raise serializers.ValidationError(_('RRULE require in rrule.')) if len(match_multiple_rrule) > 1: - raise serializers.ValidationError('Multiple RRULE is not supported.') + raise serializers.ValidationError(_('Multiple RRULE is not supported.')) if 'interval' not in rrule_value.lower(): - raise serializers.ValidationError('INTERVAL required in rrule.') + raise serializers.ValidationError(_('INTERVAL required in rrule.')) if 'tzid' in rrule_value.lower(): - raise serializers.ValidationError('TZID is not supported.') + raise serializers.ValidationError(_('TZID is not supported.')) if 'secondly' in rrule_value.lower(): - raise serializers.ValidationError('SECONDLY is not supported.') + raise serializers.ValidationError(_('SECONDLY is not supported.')) if re.match(multi_by_month_day, rrule_value): - raise serializers.ValidationError('Multiple BYMONTHDAYs not supported.') + raise serializers.ValidationError(_('Multiple BYMONTHDAYs not supported.')) if re.match(multi_by_month, rrule_value): - raise serializers.ValidationError('Multiple BYMONTHs not supported.') + raise serializers.ValidationError(_('Multiple BYMONTHs not supported.')) if re.match(by_day_with_numeric_prefix, rrule_value): - raise serializers.ValidationError("BYDAY with numeric prefix not supported.") + raise serializers.ValidationError(_("BYDAY with numeric prefix not supported.")) if 'byyearday' in rrule_value.lower(): - raise serializers.ValidationError("BYYEARDAY not supported.") + raise serializers.ValidationError(_("BYYEARDAY not supported.")) if 'byweekno' in rrule_value.lower(): - raise serializers.ValidationError("BYWEEKNO not supported.") + raise serializers.ValidationError(_("BYWEEKNO not supported.")) if match_count: count_val = match_count.groups()[0].strip().split("=") if int(count_val[1]) > 999: - raise serializers.ValidationError("COUNT > 999 is unsupported.") + raise serializers.ValidationError(_("COUNT > 999 is unsupported.")) try: rrule.rrulestr(rrule_value) except Exception: - raise serializers.ValidationError("rrule parsing failed validation.") + raise serializers.ValidationError(_("rrule parsing failed validation.")) return value class ActivityStreamSerializer(BaseSerializer): @@ -2791,15 +2795,15 @@ class ActivityStreamSerializer(BaseSerializer): ret = super(ActivityStreamSerializer, self).get_fields() for key, field in ret.items(): if key == 'changes': - field.help_text = 'A summary of the new and changed values when an object is created, updated, or deleted' + field.help_text = _('A 
summary of the new and changed values when an object is created, updated, or deleted') if key == 'object1': - field.help_text = ('For create, update, and delete events this is the object type that was affected. ' - 'For associate and disassociate events this is the object type associated or disassociated with object2.') + field.help_text = _('For create, update, and delete events this is the object type that was affected. ' + 'For associate and disassociate events this is the object type associated or disassociated with object2.') if key == 'object2': - field.help_text = ('Unpopulated for create, update, and delete events. For associate and disassociate ' - 'events this is the object type that object1 is being associated with.') + field.help_text = _('Unpopulated for create, update, and delete events. For associate and disassociate ' + 'events this is the object type that object1 is being associated with.') if key == 'operation': - field.help_text = 'The action taken with respect to the given object(s).' + field.help_text = _('The action taken with respect to the given object(s).') return ret def get_changes(self, obj): @@ -2822,7 +2826,7 @@ class ActivityStreamSerializer(BaseSerializer): rel = {} if obj.actor is not None: rel['actor'] = reverse('api:user_detail', args=(obj.actor.pk,)) - for fk, _ in SUMMARIZABLE_FK_FIELDS.items(): + for fk, __ in SUMMARIZABLE_FK_FIELDS.items(): if not hasattr(obj, fk): continue allm2m = getattr(obj, fk).distinct() @@ -2899,9 +2903,9 @@ class AuthTokenSerializer(serializers.Serializer): attrs['user'] = user return attrs else: - raise serializers.ValidationError('Unable to login with provided credentials.') + raise serializers.ValidationError(_('Unable to login with provided credentials.')) else: - raise serializers.ValidationError('Must include "username" and "password".') + raise serializers.ValidationError(_('Must include "username" and "password".')) class FactVersionSerializer(BaseFactSerializer): diff --git a/awx/api/views.py b/awx/api/views.py index bd538641b4..53df80bff1 100644 --- a/awx/api/views.py +++ b/awx/api/views.py @@ -30,6 +30,7 @@ from django.template.loader import render_to_string from django.core.servers.basehttp import FileWrapper from django.http import HttpResponse from django.contrib.contenttypes.models import ContentType +from django.utils.translation import ugettext_lazy as _ # Django REST Framework @@ -88,14 +89,14 @@ class ApiRootView(APIView): authentication_classes = [] permission_classes = (AllowAny,) - view_name = 'REST API' + view_name = _('REST API') def get(self, request, format=None): ''' list supported API versions ''' current = reverse('api:api_v1_root_view', args=[]) data = dict( - description = 'Ansible Tower REST API', + description = _('Ansible Tower REST API'), current_version = current, available_versions = dict( v1 = current @@ -107,7 +108,7 @@ class ApiV1RootView(APIView): authentication_classes = [] permission_classes = (AllowAny,) - view_name = 'Version 1' + view_name = _('Version 1') def get(self, request, format=None): ''' list top level resources ''' @@ -158,7 +159,7 @@ class ApiV1PingView(APIView): """ permission_classes = (AllowAny,) authentication_classes = () - view_name = 'Ping' + view_name = _('Ping') new_in_210 = True def get(self, request, format=None): @@ -185,7 +186,7 @@ class ApiV1PingView(APIView): class ApiV1ConfigView(APIView): permission_classes = (IsAuthenticated,) - view_name = 'Configuration' + view_name = _('Configuration') def get(self, request, format=None): '''Return various sitewide 
configuration settings.''' @@ -235,29 +236,29 @@ class ApiV1ConfigView(APIView): if not request.user.is_superuser: return Response(None, status=status.HTTP_404_NOT_FOUND) if not isinstance(request.data, dict): - return Response({"error": "Invalid license data"}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST) if "eula_accepted" not in request.data: - return Response({"error": "Missing 'eula_accepted' property"}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST) try: eula_accepted = to_python_boolean(request.data["eula_accepted"]) except ValueError: - return Response({"error": "'eula_accepted' value is invalid"}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("'eula_accepted' value is invalid")}, status=status.HTTP_400_BAD_REQUEST) if not eula_accepted: - return Response({"error": "'eula_accepted' must be True"}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("'eula_accepted' must be True")}, status=status.HTTP_400_BAD_REQUEST) request.data.pop("eula_accepted") try: data_actual = json.dumps(request.data) except Exception: # FIX: Log - return Response({"error": "Invalid JSON"}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST) try: from awx.main.task_engine import TaskEnhancer license_data = json.loads(data_actual) license_data_validated = TaskEnhancer(**license_data).validate_enhancements() except Exception: # FIX: Log - return Response({"error": "Invalid License"}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST) # If the license is valid, write it to the database. 
if license_data_validated['valid_key']: @@ -265,7 +266,7 @@ class ApiV1ConfigView(APIView): settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host()) return Response(license_data_validated) - return Response({"error": "Invalid license"}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST) def delete(self, request): if not request.user.is_superuser: @@ -276,12 +277,12 @@ class ApiV1ConfigView(APIView): return Response(status=status.HTTP_204_NO_CONTENT) except: # FIX: Log - return Response({"error": "Failed to remove license (%s)" % has_error}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Failed to remove license (%s)") % has_error}, status=status.HTTP_400_BAD_REQUEST) class DashboardView(APIView): - view_name = "Dashboard" + view_name = _("Dashboard") new_in_14 = True def get(self, request, format=None): @@ -386,7 +387,7 @@ class DashboardView(APIView): class DashboardJobsGraphView(APIView): - view_name = "Dashboard Jobs Graphs" + view_name = _("Dashboard Jobs Graphs") new_in_200 = True def get(self, request, format=None): @@ -422,7 +423,7 @@ class DashboardJobsGraphView(APIView): end_date = start_date - dateutil.relativedelta.relativedelta(days=1) interval = 'hours' else: - return Response({'error': 'Unknown period "%s"' % str(period)}, status=status.HTTP_400_BAD_REQUEST) + return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST) dashboard_data = {"jobs": {"successful": [], "failed": []}} for element in success_qss.time_series(end_date, start_date, interval=interval): @@ -436,7 +437,7 @@ class DashboardJobsGraphView(APIView): class ScheduleList(ListAPIView): - view_name = "Schedules" + view_name = _("Schedules") model = Schedule serializer_class = ScheduleSerializer new_in_148 = True @@ -453,7 +454,7 @@ class ScheduleUnifiedJobsList(SubListAPIView): serializer_class = UnifiedJobSerializer parent_model = Schedule relationship = 'unifiedjob_set' - view_name = 'Schedule Jobs List' + view_name = _('Schedule Jobs List') new_in_148 = True class AuthView(APIView): @@ -655,8 +656,8 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView): # if no organizations exist in the system. if (not feature_enabled('multiple_organizations') and self.model.objects.exists()): - raise LicenseForbids('Your Tower license only permits a single ' - 'organization to exist.') + raise LicenseForbids(_('Your Tower license only permits a single ' + 'organization to exist.')) # Okay, create the organization as usual. return super(OrganizationList, self).create(request, *args, **kwargs) @@ -766,8 +767,8 @@ class OrganizationActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. 
return super(OrganizationActivityStreamList, self).get(request, *args, **kwargs) @@ -860,20 +861,20 @@ class TeamRolesList(SubListCreateAttachDetachAPIView): # Forbid implicit role creation here sub_id = request.data.get('id', None) if not sub_id: - data = dict(msg="Role 'id' field is missing.") + data = dict(msg=_("Role 'id' field is missing.")) return Response(data, status=status.HTTP_400_BAD_REQUEST) role = get_object_or_400(Role, pk=sub_id) org_content_type = ContentType.objects.get_for_model(Organization) if role.content_type == org_content_type: - data = dict(msg="You cannot assign an Organization role as a child role for a Team.") + data = dict(msg=_("You cannot assign an Organization role as a child role for a Team.")) return Response(data, status=status.HTTP_400_BAD_REQUEST) team = get_object_or_404(Team, pk=self.kwargs['pk']) credential_content_type = ContentType.objects.get_for_model(Credential) if role.content_type == credential_content_type: if not role.content_object.organization or role.content_object.organization.id != team.organization.id: - data = dict(msg="You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization") + data = dict(msg=_("You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization")) return Response(data, status=status.HTTP_400_BAD_REQUEST) return super(TeamRolesList, self).post(request, *args, **kwargs) @@ -919,8 +920,8 @@ class TeamActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(TeamActivityStreamList, self).get(request, *args, **kwargs) @@ -966,7 +967,7 @@ class ProjectDetail(RetrieveUpdateDestroyAPIView): obj = self.get_object() can_delete = request.user.can_access(Project, 'delete', obj) if not can_delete: - raise PermissionDenied("Cannot delete project.") + raise PermissionDenied(_("Cannot delete project.")) for pu in obj.project_updates.filter(status__in=['new', 'pending', 'waiting', 'running']): pu.cancel() return super(ProjectDetail, self).destroy(request, *args, **kwargs) @@ -992,7 +993,7 @@ class ProjectTeamsList(ListAPIView): class ProjectSchedulesList(SubListCreateAttachDetachAPIView): - view_name = "Project Schedules" + view_name = _("Project Schedules") model = Schedule serializer_class = ScheduleSerializer @@ -1013,8 +1014,8 @@ class ProjectActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. 
return super(ProjectActivityStreamList, self).get(request, *args, **kwargs) @@ -1161,7 +1162,7 @@ class UserMeList(ListAPIView): model = User serializer_class = UserSerializer - view_name = 'Me' + view_name = _('Me') def get_queryset(self): return self.model.objects.filter(pk=self.request.user.pk) @@ -1199,26 +1200,26 @@ class UserRolesList(SubListCreateAttachDetachAPIView): # Forbid implicit role creation here sub_id = request.data.get('id', None) if not sub_id: - data = dict(msg="Role 'id' field is missing.") + data = dict(msg=_("Role 'id' field is missing.")) return Response(data, status=status.HTTP_400_BAD_REQUEST) if sub_id == self.request.user.admin_role.pk: - raise PermissionDenied('You may not perform any action with your own admin_role.') + raise PermissionDenied(_('You may not perform any action with your own admin_role.')) user = get_object_or_400(User, pk=self.kwargs['pk']) role = get_object_or_400(Role, pk=sub_id) user_content_type = ContentType.objects.get_for_model(User) if role.content_type == user_content_type: - raise PermissionDenied('You may not change the membership of a users admin_role') + raise PermissionDenied(_('You may not change the membership of a users admin_role')) credential_content_type = ContentType.objects.get_for_model(Credential) if role.content_type == credential_content_type: if role.content_object.organization and user not in role.content_object.organization.member_role: - data = dict(msg="You cannot grant credential access to a user not in the credentials' organization") + data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization")) return Response(data, status=status.HTTP_400_BAD_REQUEST) if not role.content_object.organization and not request.user.is_superuser: - data = dict(msg="You cannot grant private credential access to another user") + data = dict(msg=_("You cannot grant private credential access to another user")) return Response(data, status=status.HTTP_400_BAD_REQUEST) @@ -1281,8 +1282,8 @@ class UserActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(UserActivityStreamList, self).get(request, *args, **kwargs) @@ -1322,13 +1323,13 @@ class UserDetail(RetrieveUpdateDestroyAPIView): if left is not None and right is not None and left != right: bad_changes[field] = (left, right) if bad_changes: - raise PermissionDenied('Cannot change %s.' % ', '.join(bad_changes.keys())) + raise PermissionDenied(_('Cannot change %s.') % ', '.join(bad_changes.keys())) def destroy(self, request, *args, **kwargs): obj = self.get_object() can_delete = request.user.can_access(User, 'delete', obj) if not can_delete: - raise PermissionDenied('Cannot delete user.') + raise PermissionDenied(_('Cannot delete user.')) return super(UserDetail, self).destroy(request, *args, **kwargs) class UserAccessList(ResourceAccessList): @@ -1440,8 +1441,8 @@ class CredentialActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. 
if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(CredentialActivityStreamList, self).get(request, *args, **kwargs) @@ -1478,7 +1479,7 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView): instance = self.get_object() can_delete = request.user.can_access(self.model, 'delete', instance) if not can_delete: - raise PermissionDenied("Cannot delete inventory script.") + raise PermissionDenied(_("Cannot delete inventory script.")) for inv_src in InventorySource.objects.filter(source_script=instance): inv_src.source_script = None inv_src.save() @@ -1529,8 +1530,8 @@ class InventoryActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(InventoryActivityStreamList, self).get(request, *args, **kwargs) @@ -1662,8 +1663,8 @@ class HostActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(HostActivityStreamList, self).get(request, *args, **kwargs) @@ -1680,8 +1681,8 @@ class SystemTrackingEnforcementMixin(APIView): ''' def check_permissions(self, request): if not feature_enabled("system_tracking"): - raise LicenseForbids("Your license does not permit use " - "of system tracking.") + raise LicenseForbids(_("Your license does not permit use " + "of system tracking.")) return super(SystemTrackingEnforcementMixin, self).check_permissions(request) class HostFactVersionsList(ListAPIView, ParentMixin, SystemTrackingEnforcementMixin): @@ -1725,7 +1726,7 @@ class HostFactCompareView(SubDetailAPIView, SystemTrackingEnforcementMixin): fact_entry = Fact.get_host_fact(host_obj.id, module_spec, datetime_actual) if not fact_entry: - return Response({'detail': 'Fact not found.'}, status=status.HTTP_404_NOT_FOUND) + return Response({'detail': _('Fact not found.')}, status=status.HTTP_404_NOT_FOUND) return Response(self.serializer_class(instance=fact_entry).data) class GroupList(ListCreateAPIView): @@ -1847,8 +1848,8 @@ class GroupActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(GroupActivityStreamList, self).get(request, *args, **kwargs) @@ -2037,7 +2038,7 @@ class InventoryInventorySourcesList(SubListAPIView): serializer_class = InventorySourceSerializer parent_model = Inventory relationship = None # Not defined since using get_queryset(). 
- view_name = 'Inventory Source List' + view_name = _('Inventory Source List') new_in_14 = True def get_queryset(self): @@ -2063,14 +2064,14 @@ class InventorySourceDetail(RetrieveUpdateAPIView): obj = self.get_object() can_delete = request.user.can_access(InventorySource, 'delete', obj) if not can_delete: - raise PermissionDenied("Cannot delete inventory source.") + raise PermissionDenied(_("Cannot delete inventory source.")) for pu in obj.inventory_updates.filter(status__in=['new', 'pending', 'waiting', 'running']): pu.cancel() return super(InventorySourceDetail, self).destroy(request, *args, **kwargs) class InventorySourceSchedulesList(SubListCreateAttachDetachAPIView): - view_name = "Inventory Source Schedules" + view_name = _("Inventory Source Schedules") model = Schedule serializer_class = ScheduleSerializer @@ -2091,8 +2092,8 @@ class InventorySourceActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(InventorySourceActivityStreamList, self).get(request, *args, **kwargs) @@ -2107,7 +2108,7 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIVi def post(self, request, *args, **kwargs): parent = self.get_parent_object() if parent.source not in CLOUD_INVENTORY_SOURCES: - return Response(dict(msg="Notification Templates can only be assigned when source is one of {}." + return Response(dict(msg=_("Notification Templates can only be assigned when source is one of {}.") .format(CLOUD_INVENTORY_SOURCES, parent.source)), status=status.HTTP_400_BAD_REQUEST) return super(InventorySourceNotificationTemplatesAnyList, self).post(request, *args, **kwargs) @@ -2297,7 +2298,7 @@ class JobTemplateLaunch(RetrieveAPIView, GenericAPIView): class JobTemplateSchedulesList(SubListCreateAttachDetachAPIView): - view_name = "Job Template Schedules" + view_name = _("Job Template Schedules") model = Schedule serializer_class = ScheduleSerializer @@ -2315,8 +2316,8 @@ class JobTemplateSurveySpec(GenericAPIView): def get(self, request, *args, **kwargs): obj = self.get_object() if not feature_enabled('surveys'): - raise LicenseForbids('Your license does not allow ' - 'adding surveys.') + raise LicenseForbids(_('Your license does not allow ' + 'adding surveys.')) return Response(obj.survey_spec) def post(self, request, *args, **kwargs): @@ -2325,42 +2326,43 @@ class JobTemplateSurveySpec(GenericAPIView): # Sanity check: Are surveys available on this license? # If not, do not allow them to be used. 
if not feature_enabled('surveys'): - raise LicenseForbids('Your license does not allow ' - 'adding surveys.') + raise LicenseForbids(_('Your license does not allow ' + 'adding surveys.')) if not request.user.can_access(self.model, 'change', obj, None): raise PermissionDenied() try: obj.survey_spec = json.dumps(request.data) except ValueError: - return Response(dict(error="Invalid JSON when parsing survey spec."), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("Invalid JSON when parsing survey spec.")), status=status.HTTP_400_BAD_REQUEST) if "name" not in obj.survey_spec: - return Response(dict(error="'name' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'name' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST) if "description" not in obj.survey_spec: - return Response(dict(error="'description' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'description' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST) if "spec" not in obj.survey_spec: - return Response(dict(error="'spec' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'spec' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST) if not isinstance(obj.survey_spec["spec"], list): - return Response(dict(error="'spec' must be a list of items."), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'spec' must be a list of items.")), status=status.HTTP_400_BAD_REQUEST) if len(obj.survey_spec["spec"]) < 1: - return Response(dict(error="'spec' doesn't contain any items."), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'spec' doesn't contain any items.")), status=status.HTTP_400_BAD_REQUEST) idx = 0 variable_set = set() for survey_item in obj.survey_spec["spec"]: if not isinstance(survey_item, dict): - return Response(dict(error="Survey question %s is not a json object." % str(idx)), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("Survey question %s is not a json object.") % str(idx)), status=status.HTTP_400_BAD_REQUEST) if "type" not in survey_item: - return Response(dict(error="'type' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'type' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST) if "question_name" not in survey_item: - return Response(dict(error="'question_name' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'question_name' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST) if "variable" not in survey_item: - return Response(dict(error="'variable' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'variable' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST) if survey_item['variable'] in variable_set: - return Response(dict(error="'variable' '%s' duplicated in survey question %s." 
% (survey_item['variable'], str(idx))), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'variable' '%(item)s' duplicated in survey question %(survey)s.") % { + 'item': survey_item['variable'], 'survey': str(idx)}), status=status.HTTP_400_BAD_REQUEST) else: variable_set.add(survey_item['variable']) if "required" not in survey_item: - return Response(dict(error="'required' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST) + return Response(dict(error=_("'required' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST) idx += 1 obj.save() return Response() @@ -2385,8 +2387,8 @@ class JobTemplateActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(JobTemplateActivityStreamList, self).get(request, *args, **kwargs) @@ -2554,22 +2556,22 @@ class JobTemplateCallback(GenericAPIView): matching_hosts = self.find_matching_hosts() # Check matching hosts. if not matching_hosts: - data = dict(msg='No matching host could be found!') + data = dict(msg=_('No matching host could be found!')) return Response(data, status=status.HTTP_400_BAD_REQUEST) elif len(matching_hosts) > 1: - data = dict(msg='Multiple hosts matched the request!') + data = dict(msg=_('Multiple hosts matched the request!')) return Response(data, status=status.HTTP_400_BAD_REQUEST) else: host = list(matching_hosts)[0] if not job_template.can_start_without_user_input(): - data = dict(msg='Cannot start automatically, user input required!') + data = dict(msg=_('Cannot start automatically, user input required!')) return Response(data, status=status.HTTP_400_BAD_REQUEST) limit = host.name # NOTE: We limit this to one job waiting per host per callblack to keep them from stacking crazily if Job.objects.filter(status__in=['pending', 'waiting', 'running'], job_template=job_template, limit=limit).count() > 0: - data = dict(msg='Host callback job already pending.') + data = dict(msg=_('Host callback job already pending.')) return Response(data, status=status.HTTP_400_BAD_REQUEST) # Everything is fine; actually create the job. @@ -2582,7 +2584,7 @@ class JobTemplateCallback(GenericAPIView): kv['extra_vars'] = extra_vars result = job.signal_start(**kv) if not result: - data = dict(msg='Error starting job!') + data = dict(msg=_('Error starting job!')) return Response(data, status=status.HTTP_400_BAD_REQUEST) # Return the location of the new job. 
@@ -2856,7 +2858,7 @@ class SystemJobTemplateList(ListAPIView): def get(self, request, *args, **kwargs): if not request.user.is_superuser and not request.user.is_system_auditor: - raise PermissionDenied("Superuser privileges needed.") + raise PermissionDenied(_("Superuser privileges needed.")) return super(SystemJobTemplateList, self).get(request, *args, **kwargs) class SystemJobTemplateDetail(RetrieveAPIView): @@ -2883,7 +2885,7 @@ class SystemJobTemplateLaunch(GenericAPIView): class SystemJobTemplateSchedulesList(SubListCreateAttachDetachAPIView): - view_name = "System Job Template Schedules" + view_name = _("System Job Template Schedules") model = Schedule serializer_class = ScheduleSerializer @@ -2966,8 +2968,8 @@ class JobActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(JobActivityStreamList, self).get(request, *args, **kwargs) @@ -3060,7 +3062,7 @@ class BaseJobHostSummariesList(SubListAPIView): serializer_class = JobHostSummarySerializer parent_model = None # Subclasses must define this attribute. relationship = 'job_host_summaries' - view_name = 'Job Host Summaries List' + view_name = _('Job Host Summaries List') class HostJobHostSummariesList(BaseJobHostSummariesList): @@ -3095,7 +3097,7 @@ class JobEventChildrenList(SubListAPIView): serializer_class = JobEventSerializer parent_model = JobEvent relationship = 'children' - view_name = 'Job Event Children List' + view_name = _('Job Event Children List') class JobEventHostsList(SubListAPIView): @@ -3103,7 +3105,7 @@ class JobEventHostsList(SubListAPIView): serializer_class = HostSerializer parent_model = JobEvent relationship = 'hosts' - view_name = 'Job Event Hosts List' + view_name = _('Job Event Hosts List') class BaseJobEventsList(SubListAPIView): @@ -3111,7 +3113,7 @@ class BaseJobEventsList(SubListAPIView): serializer_class = JobEventSerializer parent_model = None # Subclasses must define this attribute. relationship = 'job_events' - view_name = 'Job Events List' + view_name = _('Job Events List') class HostJobEventsList(BaseJobEventsList): @@ -3128,7 +3130,7 @@ class JobJobEventsList(BaseJobEventsList): class JobJobPlaysList(BaseJobEventsList): parent_model = Job - view_name = 'Job Plays List' + view_name = _('Job Plays List') new_in_200 = True @paginated @@ -3203,7 +3205,7 @@ class JobJobTasksList(BaseJobEventsList): and their completion status. """ parent_model = Job - view_name = 'Job Play Tasks List' + view_name = _('Job Play Tasks List') new_in_200 = True @paginated @@ -3218,15 +3220,15 @@ class JobJobTasksList(BaseJobEventsList): # If there's no event ID specified, this will return a 404. 
job = Job.objects.filter(pk=self.kwargs['pk']) if not job.exists(): - return ({'detail': 'Job not found.'}, -1, status.HTTP_404_NOT_FOUND) + return ({'detail': _('Job not found.')}, -1, status.HTTP_404_NOT_FOUND) job = job[0] if 'event_id' not in request.query_params: - return ({"detail": "'event_id' not provided."}, -1, status.HTTP_400_BAD_REQUEST) + return ({"detail": _("'event_id' not provided.")}, -1, status.HTTP_400_BAD_REQUEST) parent_task = job.job_events.filter(pk=int(request.query_params.get('event_id', -1))) if not parent_task.exists(): - return ({'detail': 'Parent event not found.'}, -1, status.HTTP_404_NOT_FOUND) + return ({'detail': _('Parent event not found.')}, -1, status.HTTP_404_NOT_FOUND) parent_task = parent_task[0] STARTING_EVENTS = ('playbook_on_task_start', 'playbook_on_setup') @@ -3498,7 +3500,7 @@ class BaseAdHocCommandEventsList(SubListAPIView): serializer_class = AdHocCommandEventSerializer parent_model = None # Subclasses must define this attribute. relationship = 'ad_hoc_command_events' - view_name = 'Ad Hoc Command Events List' + view_name = _('Ad Hoc Command Events List') new_in_220 = True @@ -3529,8 +3531,8 @@ class AdHocCommandActivityStreamList(SubListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(AdHocCommandActivityStreamList, self).get(request, *args, **kwargs) @@ -3551,7 +3553,7 @@ class SystemJobList(ListCreateAPIView): def get(self, request, *args, **kwargs): if not request.user.is_superuser and not request.user.is_system_auditor: - raise PermissionDenied("Superuser privileges needed.") + raise PermissionDenied(_("Superuser privileges needed.")) return super(SystemJobList, self).get(request, *args, **kwargs) @@ -3607,8 +3609,9 @@ class UnifiedJobStdout(RetrieveAPIView): unified_job = self.get_object() obj_size = unified_job.result_stdout_size if request.accepted_renderer.format != 'txt_download' and obj_size > settings.STDOUT_MAX_BYTES_DISPLAY: - response_message = "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size, - settings.STDOUT_MAX_BYTES_DISPLAY) + response_message = _("Standard Output too large to display (%(text_size)d bytes), " + "only download supported for sizes over %(supported_size)d bytes") % { + 'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY} if request.accepted_renderer.format == 'json': return Response({'range': {'start': 0, 'end': 1, 'absolute_end': 1}, 'content': response_message}) else: @@ -3655,7 +3658,7 @@ class UnifiedJobStdout(RetrieveAPIView): response["Content-Disposition"] = 'attachment; filename="job_%s.txt"' % str(unified_job.id) return response except Exception as e: - return Response({"error": "Error generating stdout download file: %s" % str(e)}, status=status.HTTP_400_BAD_REQUEST) + return Response({"error": _("Error generating stdout download file: %s") % str(e)}, status=status.HTTP_400_BAD_REQUEST) elif request.accepted_renderer.format == 'txt': return Response(unified_job.result_stdout) else: @@ -3695,13 +3698,13 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView): if not request.user.can_access(self.model, 'delete', obj): return Response(status=status.HTTP_404_NOT_FOUND) if 
obj.notifications.filter(status='pending').exists(): - return Response({"error": "Delete not allowed while there are pending notifications"}, + return Response({"error": _("Delete not allowed while there are pending notifications")}, status=status.HTTP_405_METHOD_NOT_ALLOWED) return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs) class NotificationTemplateTest(GenericAPIView): - view_name = 'NotificationTemplate Test' + view_name = _('NotificationTemplate Test') model = NotificationTemplate serializer_class = EmptySerializer new_in_300 = True @@ -3762,8 +3765,8 @@ class ActivityStreamList(SimpleListAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(ActivityStreamList, self).get(request, *args, **kwargs) @@ -3779,8 +3782,8 @@ class ActivityStreamDetail(RetrieveAPIView): # Sanity check: Does this license allow activity streams? # If not, forbid this request. if not feature_enabled('activity_streams'): - raise LicenseForbids('Your license does not allow use of ' - 'the activity stream.') + raise LicenseForbids(_('Your license does not allow use of ' + 'the activity stream.')) # Okay, let it through. return super(ActivityStreamDetail, self).get(request, *args, **kwargs) @@ -3830,26 +3833,26 @@ class RoleUsersList(SubListCreateAttachDetachAPIView): # Forbid implicit user creation here sub_id = request.data.get('id', None) if not sub_id: - data = dict(msg="User 'id' field is missing.") + data = dict(msg=_("User 'id' field is missing.")) return Response(data, status=status.HTTP_400_BAD_REQUEST) user = get_object_or_400(User, pk=sub_id) role = self.get_parent_object() if role == self.request.user.admin_role: - raise PermissionDenied('You may not perform any action with your own admin_role.') + raise PermissionDenied(_('You may not perform any action with your own admin_role.')) user_content_type = ContentType.objects.get_for_model(User) if role.content_type == user_content_type: - raise PermissionDenied('You may not change the membership of a users admin_role') + raise PermissionDenied(_('You may not change the membership of a users admin_role')) credential_content_type = ContentType.objects.get_for_model(Credential) if role.content_type == credential_content_type: if role.content_object.organization and user not in role.content_object.organization.member_role: - data = dict(msg="You cannot grant credential access to a user not in the credentials' organization") + data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization")) return Response(data, status=status.HTTP_400_BAD_REQUEST) if not role.content_object.organization and not request.user.is_superuser: - data = dict(msg="You cannot grant private credential access to another user") + data = dict(msg=_("You cannot grant private credential access to another user")) return Response(data, status=status.HTTP_400_BAD_REQUEST) return super(RoleUsersList, self).post(request, *args, **kwargs) @@ -3873,7 +3876,7 @@ class RoleTeamsList(SubListAPIView): # Forbid implicit team creation here sub_id = request.data.get('id', None) if not sub_id: - data = dict(msg="Team 'id' field is missing.") + data = dict(msg=_("Team 'id' field is missing.")) return Response(data, 
status=status.HTTP_400_BAD_REQUEST) team = get_object_or_400(Team, pk=sub_id) @@ -3881,13 +3884,13 @@ class RoleTeamsList(SubListAPIView): organization_content_type = ContentType.objects.get_for_model(Organization) if role.content_type == organization_content_type: - data = dict(msg="You cannot assign an Organization role as a child role for a Team.") + data = dict(msg=_("You cannot assign an Organization role as a child role for a Team.")) return Response(data, status=status.HTTP_400_BAD_REQUEST) credential_content_type = ContentType.objects.get_for_model(Credential) if role.content_type == credential_content_type: if not role.content_object.organization or role.content_object.organization.id != team.organization.id: - data = dict(msg="You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization") + data = dict(msg=_("You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization")) return Response(data, status=status.HTTP_400_BAD_REQUEST) action = 'attach' diff --git a/awx/conf/management/commands/migrate_to_database_settings.py b/awx/conf/management/commands/migrate_to_database_settings.py index f708ae4a1d..c936e223c2 100644 --- a/awx/conf/management/commands/migrate_to_database_settings.py +++ b/awx/conf/management/commands/migrate_to_database_settings.py @@ -14,6 +14,7 @@ from django.core.management.base import BaseCommand, CommandError from django.db import transaction from django.utils.text import slugify from django.utils.timezone import now +from django.utils.translation import ugettext_lazy as _ # Tower from awx import MODE @@ -36,27 +37,27 @@ class Command(BaseCommand): action='store_true', dest='dry_run', default=False, - help='Only show which settings would be commented/migrated.', + help=_('Only show which settings would be commented/migrated.'), ) parser.add_argument( '--skip-errors', action='store_true', dest='skip_errors', default=False, - help='Skip over settings that would raise an error when commenting/migrating.', + help=_('Skip over settings that would raise an error when commenting/migrating.'), ) parser.add_argument( '--no-comment', action='store_true', dest='no_comment', default=False, - help='Skip commenting out settings in files.', + help=_('Skip commenting out settings in files.'), ) parser.add_argument( '--backup-suffix', dest='backup_suffix', default=now().strftime('.%Y%m%d%H%M%S'), - help='Backup existing settings files with this suffix.', + help=_('Backup existing settings files with this suffix.'), ) @transaction.atomic diff --git a/awx/main/access.py b/awx/main/access.py index 39fba096ca..0fea7b8852 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -11,6 +11,7 @@ from django.conf import settings from django.db.models import Q from django.contrib.auth.models import User from django.contrib.contenttypes.models import ContentType +from django.utils.translation import ugettext_lazy as _ # Django REST Framework from rest_framework.exceptions import ParseError, PermissionDenied, ValidationError @@ -199,24 +200,24 @@ class BaseAccess(object): validation_info['grace_period_remaining'] = 99999999 if check_expiration and validation_info.get('time_remaining', None) is None: - raise PermissionDenied("License is missing.") + raise PermissionDenied(_("License is missing.")) if check_expiration and validation_info.get("grace_period_remaining") <= 0: - raise PermissionDenied("License has expired.") + raise PermissionDenied(_("License has 
expired.")) free_instances = validation_info.get('free_instances', 0) available_instances = validation_info.get('available_instances', 0) if add_host and free_instances == 0: - raise PermissionDenied("License count of %s instances has been reached." % available_instances) + raise PermissionDenied(_("License count of %s instances has been reached.") % available_instances) elif add_host and free_instances < 0: - raise PermissionDenied("License count of %s instances has been exceeded." % available_instances) + raise PermissionDenied(_("License count of %s instances has been exceeded.") % available_instances) elif not add_host and free_instances < 0: - raise PermissionDenied("Host count exceeds available instances.") + raise PermissionDenied(_("Host count exceeds available instances.")) if feature is not None: if "features" in validation_info and not validation_info["features"].get(feature, False): - raise LicenseForbids("Feature %s is not enabled in the active license." % feature) + raise LicenseForbids(_("Feature %s is not enabled in the active license.") % feature) elif "features" not in validation_info: - raise LicenseForbids("Features not found in active license.") + raise LicenseForbids(_("Features not found in active license.")) def get_user_capabilities(self, obj, method_list=[], parent_obj=None): if obj is None: @@ -416,7 +417,7 @@ class OrganizationAccess(BaseAccess): active_jobs.extend([dict(type="inventory_update", id=o.id) for o in InventoryUpdate.objects.filter(inventory_source__inventory__organization=obj, status__in=ACTIVE_STATES)]) if len(active_jobs) > 0: - raise StateConflict({"conflict": "Resource is being used by running jobs", + raise StateConflict({"conflict": _("Resource is being used by running jobs"), "active_jobs": active_jobs}) return True @@ -490,7 +491,7 @@ class InventoryAccess(BaseAccess): active_jobs.extend([dict(type="inventory_update", id=o.id) for o in InventoryUpdate.objects.filter(inventory_source__inventory=obj, status__in=ACTIVE_STATES)]) if len(active_jobs) > 0: - raise StateConflict({"conflict": "Resource is being used by running jobs", + raise StateConflict({"conflict": _("Resource is being used by running jobs"), "active_jobs": active_jobs}) return True @@ -535,7 +536,7 @@ class HostAccess(BaseAccess): # Prevent moving a host to a different inventory. inventory_pk = get_pk_from_dict(data, 'inventory') if obj and inventory_pk and obj.inventory.pk != inventory_pk: - raise PermissionDenied('Unable to change inventory on a host.') + raise PermissionDenied(_('Unable to change inventory on a host.')) # Checks for admin or change permission on inventory, controls whether # the user can edit variable data. return obj and self.user in obj.inventory.admin_role @@ -547,7 +548,7 @@ class HostAccess(BaseAccess): return False # Prevent assignments between different inventories. if obj.inventory != sub_obj.inventory: - raise ParseError('Cannot associate two items from different inventories.') + raise ParseError(_('Cannot associate two items from different inventories.')) return True def can_delete(self, obj): @@ -581,7 +582,7 @@ class GroupAccess(BaseAccess): # Prevent moving a group to a different inventory. 
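# --- i18n sketch --------------------------------------------------------------
# Illustrative values, not Tower code.  The literal handed to _() must stay
# constant so makemessages can extract it; the runtime value is interpolated
# into the translated string afterwards.
from django.utils.translation import ugettext_lazy as _

available_instances = 10                     # illustrative value

# As above: constant msgid, value substituted after the catalog lookup.
msg = _("License count of %s instances has been reached.") % available_instances

# Named placeholders (used for the multi-value messages in this change) let
# translators reorder the substituted values in the localized sentence.
detail = _("Invalid %(source)s region%(plural)s: %(region)s") % {
    'source': 'ec2', 'plural': '', 'region': 'us-nowhere-1'}
# -------------------------------------------------------------------------------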
inventory_pk = get_pk_from_dict(data, 'inventory') if obj and inventory_pk and obj.inventory.pk != inventory_pk: - raise PermissionDenied('Unable to change inventory on a group.') + raise PermissionDenied(_('Unable to change inventory on a group.')) # Checks for admin or change permission on inventory, controls whether # the user can attach subgroups or edit variable data. return obj and self.user in obj.inventory.admin_role @@ -593,7 +594,7 @@ class GroupAccess(BaseAccess): return False # Prevent assignments between different inventories. if obj.inventory != sub_obj.inventory: - raise ParseError('Cannot associate two items from different inventories.') + raise ParseError(_('Cannot associate two items from different inventories.')) # Prevent group from being assigned as its own (grand)child. if type(obj) == type(sub_obj): parent_pks = set(obj.all_parents.values_list('pk', flat=True)) @@ -612,7 +613,7 @@ class GroupAccess(BaseAccess): active_jobs.extend([dict(type="inventory_update", id=o.id) for o in InventoryUpdate.objects.filter(inventory_source__in=obj.inventory_sources.all(), status__in=ACTIVE_STATES)]) if len(active_jobs) > 0: - raise StateConflict({"conflict": "Resource is being used by running jobs", + raise StateConflict({"conflict": _("Resource is being used by running jobs"), "active_jobs": active_jobs}) return True @@ -804,7 +805,7 @@ class TeamAccess(BaseAccess): # Prevent moving a team to a different organization. org_pk = get_pk_from_dict(data, 'organization') if obj and org_pk and obj.organization.pk != org_pk: - raise PermissionDenied('Unable to change organization on a team.') + raise PermissionDenied(_('Unable to change organization on a team.')) if self.user.is_superuser: return True return self.user in obj.admin_role @@ -817,9 +818,9 @@ class TeamAccess(BaseAccess): of a resource role to the team.""" if isinstance(sub_obj, Role): if sub_obj.content_object is None: - raise PermissionDenied("The {} role cannot be assigned to a team".format(sub_obj.name)) + raise PermissionDenied(_("The {} role cannot be assigned to a team").format(sub_obj.name)) elif isinstance(sub_obj.content_object, User): - raise PermissionDenied("The admin_role for a User cannot be assigned to a team") + raise PermissionDenied(_("The admin_role for a User cannot be assigned to a team")) if isinstance(sub_obj.content_object, ResourceMixin): role_access = RoleAccess(self.user) @@ -888,7 +889,7 @@ class ProjectAccess(BaseAccess): active_jobs.extend([dict(type="project_update", id=o.id) for o in ProjectUpdate.objects.filter(project=obj, status__in=ACTIVE_STATES)]) if len(active_jobs) > 0: - raise StateConflict({"conflict": "Resource is being used by running jobs", + raise StateConflict({"conflict": _("Resource is being used by running jobs"), "active_jobs": active_jobs}) return True @@ -1130,7 +1131,7 @@ class JobTemplateAccess(BaseAccess): active_jobs = [dict(type="job", id=o.id) for o in obj.jobs.filter(status__in=ACTIVE_STATES)] if len(active_jobs) > 0: - raise StateConflict({"conflict": "Resource is being used by running jobs", + raise StateConflict({"conflict": _("Resource is being used by running jobs"), "active_jobs": active_jobs}) return True @@ -1542,7 +1543,7 @@ class WorkflowJobTemplateAccess(BaseAccess): active_jobs = [dict(type="job", id=o.id) for o in obj.jobs.filter(status__in=ACTIVE_STATES)] if len(active_jobs) > 0: - raise StateConflict({"conflict": "Resource is being used by running jobs", + raise StateConflict({"conflict": _("Resource is being used by running jobs"), "active_jobs": 
active_jobs}) return True diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index aadd34c190..9cb1d96f32 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -95,14 +95,14 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): def clean_inventory(self): inv = self.inventory if not inv: - raise ValidationError('No valid inventory.') + raise ValidationError(_('No valid inventory.')) return inv def clean_credential(self): cred = self.credential if cred and cred.kind != 'ssh': raise ValidationError( - 'You must provide a machine / SSH credential.', + _('You must provide a machine / SSH credential.'), ) return cred @@ -113,18 +113,18 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): def clean_module_name(self): if type(self.module_name) not in (str, unicode): - raise ValidationError("Invalid type for ad hoc command") + raise ValidationError(_("Invalid type for ad hoc command")) module_name = self.module_name.strip() or 'command' if module_name not in settings.AD_HOC_COMMANDS: - raise ValidationError('Unsupported module for ad hoc commands.') + raise ValidationError(_('Unsupported module for ad hoc commands.')) return module_name def clean_module_args(self): if type(self.module_args) not in (str, unicode): - raise ValidationError("Invalid type for ad hoc command") + raise ValidationError(_("Invalid type for ad hoc command")) module_args = self.module_args if self.module_name in ('command', 'shell') and not module_args: - raise ValidationError('No argument passed to %s module.' % self.module_name) + raise ValidationError(_('No argument passed to %s module.') % self.module_name) return module_args @property diff --git a/awx/main/models/credential.py b/awx/main/models/credential.py index d7bc6a640e..aa0bf3243c 100644 --- a/awx/main/models/credential.py +++ b/awx/main/models/credential.py @@ -278,9 +278,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin): """ host = self.host or '' if not host and self.kind == 'vmware': - raise ValidationError('Host required for VMware credential.') + raise ValidationError(_('Host required for VMware credential.')) if not host and self.kind == 'openstack': - raise ValidationError('Host required for OpenStack credential.') + raise ValidationError(_('Host required for OpenStack credential.')) return host def clean_domain(self): @@ -289,32 +289,32 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin): def clean_username(self): username = self.username or '' if not username and self.kind == 'aws': - raise ValidationError('Access key required for AWS credential.') + raise ValidationError(_('Access key required for AWS credential.')) if not username and self.kind == 'rax': - raise ValidationError('Username required for Rackspace ' - 'credential.') + raise ValidationError(_('Username required for Rackspace ' + 'credential.')) if not username and self.kind == 'vmware': - raise ValidationError('Username required for VMware credential.') + raise ValidationError(_('Username required for VMware credential.')) if not username and self.kind == 'openstack': - raise ValidationError('Username required for OpenStack credential.') + raise ValidationError(_('Username required for OpenStack credential.')) return username def clean_password(self): password = self.password or '' if not password and self.kind == 'aws': - raise ValidationError('Secret key required for AWS credential.') + raise ValidationError(_('Secret key required for AWS 
credential.')) if not password and self.kind == 'rax': - raise ValidationError('API key required for Rackspace credential.') + raise ValidationError(_('API key required for Rackspace credential.')) if not password and self.kind == 'vmware': - raise ValidationError('Password required for VMware credential.') + raise ValidationError(_('Password required for VMware credential.')) if not password and self.kind == 'openstack': - raise ValidationError('Password or API key required for OpenStack credential.') + raise ValidationError(_('Password or API key required for OpenStack credential.')) return password def clean_project(self): project = self.project or '' if self.kind == 'openstack' and not project: - raise ValidationError('Project name required for OpenStack credential.') + raise ValidationError(_('Project name required for OpenStack credential.')) return project def clean_ssh_key_data(self): @@ -341,13 +341,13 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin): def clean_ssh_key_unlock(self): if self.has_encrypted_ssh_key_data and not self.ssh_key_unlock: - raise ValidationError('SSH key unlock must be set when SSH key ' - 'is encrypted.') + raise ValidationError(_('SSH key unlock must be set when SSH key ' + 'is encrypted.')) return self.ssh_key_unlock def clean(self): if self.deprecated_user and self.deprecated_team: - raise ValidationError('Credential cannot be assigned to both a user and team.') + raise ValidationError(_('Credential cannot be assigned to both a user and team.')) def _password_field_allows_ask(self, field): return bool(self.kind == 'ssh' and field != 'ssh_key_data') diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 551efa732b..cbea0be124 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -889,16 +889,16 @@ class InventorySourceOptions(BaseModel): @classmethod def get_ec2_group_by_choices(cls): return [ - ('availability_zone', 'Availability Zone'), - ('ami_id', 'Image ID'), - ('instance_id', 'Instance ID'), - ('instance_type', 'Instance Type'), - ('key_pair', 'Key Name'), - ('region', 'Region'), - ('security_group', 'Security Group'), - ('tag_keys', 'Tags'), - ('vpc_id', 'VPC ID'), - ('tag_none', 'Tag None'), + ('availability_zone', _('Availability Zone')), + ('ami_id', _('Image ID')), + ('instance_id', _('Instance ID')), + ('instance_type', _('Instance Type')), + ('key_pair', _('Key Name')), + ('region', _('Region')), + ('security_group', _('Security Group')), + ('tag_keys', _('Tags')), + ('vpc_id', _('VPC ID')), + ('tag_none', _('Tag None')), ] @classmethod @@ -969,14 +969,14 @@ class InventorySourceOptions(BaseModel): # credentials; Rackspace requires Rackspace credentials; etc...) if self.source.replace('ec2', 'aws') != cred.kind: raise ValidationError( - 'Cloud-based inventory sources (such as %s) require ' - 'credentials for the matching cloud service.' % self.source + _('Cloud-based inventory sources (such as %s) require ' + 'credentials for the matching cloud service.') % self.source ) # Allow an EC2 source to omit the credential. If Tower is running on # an EC2 instance with an IAM Role assigned, boto will use credentials # from the instance metadata instead of those explicitly provided. 
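# --- i18n sketch --------------------------------------------------------------
# Hypothetical model (not Tower code) showing the choices pattern used above:
# only the display label is wrapped in _(); the stored value remains a stable,
# untranslated key, so database contents and API filters are unaffected by the
# active language, while get_group_by_display() returns the localized label.
from django.db import models
from django.utils.translation import ugettext_lazy as _


class ExampleSource(models.Model):
    GROUP_BY_CHOICES = [
        ('availability_zone', _('Availability Zone')),
        ('instance_type', _('Instance Type')),
    ]
    group_by = models.CharField(max_length=32, choices=GROUP_BY_CHOICES,
                                default='availability_zone')
# -------------------------------------------------------------------------------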
elif self.source in CLOUD_PROVIDERS and self.source != 'ec2': - raise ValidationError('Credential is required for a cloud source.') + raise ValidationError(_('Credential is required for a cloud source.')) return cred def clean_source_regions(self): @@ -1001,9 +1001,9 @@ class InventorySourceOptions(BaseModel): if r not in valid_regions and r not in invalid_regions: invalid_regions.append(r) if invalid_regions: - raise ValidationError('Invalid %s region%s: %s' % (self.source, - '' if len(invalid_regions) == 1 else 's', - ', '.join(invalid_regions))) + raise ValidationError(_('Invalid %(source)s region%(plural)s: %(region)s') % { + 'source': self.source, 'plural': '' if len(invalid_regions) == 1 else 's', + 'region': ', '.join(invalid_regions)}) return ','.join(regions) source_vars_dict = VarsDictProperty('source_vars') @@ -1027,9 +1027,9 @@ class InventorySourceOptions(BaseModel): if instance_filter_name not in self.INSTANCE_FILTER_NAMES: invalid_filters.append(instance_filter) if invalid_filters: - raise ValidationError('Invalid filter expression%s: %s' % - ('' if len(invalid_filters) == 1 else 's', - ', '.join(invalid_filters))) + raise ValidationError(_('Invalid filter expression%(plural)s: %(filter)s') % + {'plural': '' if len(invalid_filters) == 1 else 's', + 'filter': ', '.join(invalid_filters)}) return instance_filters def clean_group_by(self): @@ -1046,9 +1046,9 @@ class InventorySourceOptions(BaseModel): if c not in valid_choices and c not in invalid_choices: invalid_choices.append(c) if invalid_choices: - raise ValidationError('Invalid group by choice%s: %s' % - ('' if len(invalid_choices) == 1 else 's', - ', '.join(invalid_choices))) + raise ValidationError(_('Invalid group by choice%(plural)s: %(choice)s') % + {'plural': '' if len(invalid_choices) == 1 else 's', + 'choice': ', '.join(invalid_choices)}) return ','.join(choices) @@ -1194,7 +1194,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions): existing_sources = qs.exclude(pk=self.pk) if existing_sources.count(): s = u', '.join([x.group.name for x in existing_sources]) - raise ValidationError('Unable to configure this item for cloud sync. It is already managed by %s.' % s) + raise ValidationError(_('Unable to configure this item for cloud sync. 
It is already managed by %s.') % s) return source diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 8a4d6c07ae..5c8599052b 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -159,7 +159,7 @@ class JobOptions(BaseModel): cred = self.credential if cred and cred.kind != 'ssh': raise ValidationError( - 'You must provide a machine / SSH credential.', + _('You must provide a machine / SSH credential.'), ) return cred @@ -167,7 +167,7 @@ class JobOptions(BaseModel): cred = self.network_credential if cred and cred.kind != 'net': raise ValidationError( - 'You must provide a network credential.', + _('You must provide a network credential.'), ) return cred @@ -175,8 +175,8 @@ class JobOptions(BaseModel): cred = self.cloud_credential if cred and cred.kind not in CLOUD_PROVIDERS + ('aws',): raise ValidationError( - 'Must provide a credential for a cloud provider, such as ' - 'Amazon Web Services or Rackspace.', + _('Must provide a credential for a cloud provider, such as ' + 'Amazon Web Services or Rackspace.'), ) return cred @@ -275,19 +275,19 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin): if self.inventory is None: resources_needed_to_start.append('inventory') if not self.ask_inventory_on_launch: - validation_errors['inventory'] = ["Job Template must provide 'inventory' or allow prompting for it.",] + validation_errors['inventory'] = [_("Job Template must provide 'inventory' or allow prompting for it."),] if self.credential is None: resources_needed_to_start.append('credential') if not self.ask_credential_on_launch: - validation_errors['credential'] = ["Job Template must provide 'credential' or allow prompting for it.",] + validation_errors['credential'] = [_("Job Template must provide 'credential' or allow prompting for it."),] # Job type dependent checks if self.job_type == PERM_INVENTORY_SCAN: if self.inventory is None or self.ask_inventory_on_launch: - validation_errors['inventory'] = ["Scan jobs must be assigned a fixed inventory.",] + validation_errors['inventory'] = [_("Scan jobs must be assigned a fixed inventory."),] elif self.project is None: resources_needed_to_start.append('project') - validation_errors['project'] = ["Job types 'run' and 'check' must have assigned a project.",] + validation_errors['project'] = [_("Job types 'run' and 'check' must have assigned a project."),] return (validation_errors, resources_needed_to_start) @@ -496,10 +496,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin): if 'job_type' in data and self.ask_job_type_on_launch: if ((self.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or (data['job_type'] == PERM_INVENTORY_SCAN and not self.job_type == PERM_INVENTORY_SCAN)): - errors['job_type'] = 'Can not override job_type to or from a scan job.' + errors['job_type'] = _('Can not override job_type to or from a scan job.') if (self.job_type == PERM_INVENTORY_SCAN and ('inventory' in data) and self.ask_inventory_on_launch and self.inventory != data['inventory']): - errors['inventory'] = 'Inventory can not be changed at runtime for scan jobs.' 
+ errors['inventory'] = _('Inventory can not be changed at runtime for scan jobs.') return errors @property diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py index 4225bd7e70..e02ad2d75c 100644 --- a/awx/main/models/organization.py +++ b/awx/main/models/organization.py @@ -267,7 +267,7 @@ class AuthToken(BaseModel): def invalidate(self, reason='timeout_reached', save=True): if not AuthToken.reason_long(reason): - raise ValueError('Invalid reason specified') + raise ValueError(_('Invalid reason specified')) self.reason = reason if save: self.save() diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index aa8e726b64..378035c3ac 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -124,10 +124,10 @@ class ProjectOptions(models.Model): scm_url = update_scm_url(self.scm_type, scm_url, check_special_cases=False) except ValueError as e: - raise ValidationError((e.args or ('Invalid SCM URL.',))[0]) + raise ValidationError((e.args or (_('Invalid SCM URL.'),))[0]) scm_url_parts = urlparse.urlsplit(scm_url) if self.scm_type and not any(scm_url_parts): - raise ValidationError('SCM URL is required.') + raise ValidationError(_('SCM URL is required.')) return unicode(self.scm_url or '') def clean_credential(self): @@ -136,7 +136,7 @@ class ProjectOptions(models.Model): cred = self.credential if cred: if cred.kind != 'scm': - raise ValidationError("Credential kind must be 'scm'.") + raise ValidationError(_("Credential kind must be 'scm'.")) try: scm_url = update_scm_url(self.scm_type, self.scm_url, check_special_cases=False) @@ -151,7 +151,7 @@ class ProjectOptions(models.Model): update_scm_url(self.scm_type, self.scm_url, scm_username, scm_password) except ValueError as e: - raise ValidationError((e.args or ('Invalid credential.',))[0]) + raise ValidationError((e.args or (_('Invalid credential.'),))[0]) except ValueError: pass return cred diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 1fab5fcbb3..e9925a6b17 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -62,12 +62,12 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio ] COMMON_STATUS_CHOICES = JOB_STATUS_CHOICES + [ - ('never updated', 'Never Updated'), # A job has never been run using this template. + ('never updated', _('Never Updated')), # A job has never been run using this template. ] PROJECT_STATUS_CHOICES = COMMON_STATUS_CHOICES + [ - ('ok', 'OK'), # Project is not configured for SCM and path exists. - ('missing', 'Missing'), # Project path does not exist. + ('ok', _('OK')), # Project is not configured for SCM and path exists. + ('missing', _('Missing')), # Project path does not exist. 
] INVENTORY_SOURCE_STATUS_CHOICES = COMMON_STATUS_CHOICES + [ diff --git a/awx/main/notifications/base.py b/awx/main/notifications/base.py index a68c88ed46..58202f6612 100644 --- a/awx/main/notifications/base.py +++ b/awx/main/notifications/base.py @@ -5,6 +5,8 @@ import json from django.utils.encoding import smart_text from django.core.mail.backends.base import BaseEmailBackend +from django.utils.translation import ugettext_lazy as _ + class TowerBaseEmailBackend(BaseEmailBackend): @@ -12,9 +14,8 @@ class TowerBaseEmailBackend(BaseEmailBackend): if "body" in body: body_actual = body['body'] else: - body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], - body['id'], - body['status'], - body['url'])) + body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format( + body['friendly_name'], body['id'], body['status'], body['url']) + ) body_actual += json.dumps(body, indent=4) return body_actual diff --git a/awx/main/notifications/email_backend.py b/awx/main/notifications/email_backend.py index 7ca5690b28..b3c1db67f7 100644 --- a/awx/main/notifications/email_backend.py +++ b/awx/main/notifications/email_backend.py @@ -5,6 +5,8 @@ import json from django.utils.encoding import smart_text from django.core.mail.backends.smtp import EmailBackend +from django.utils.translation import ugettext_lazy as _ + class CustomEmailBackend(EmailBackend): @@ -23,9 +25,8 @@ class CustomEmailBackend(EmailBackend): if "body" in body: body_actual = body['body'] else: - body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'], - body['id'], - body['status'], - body['url'])) + body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format( + body['friendly_name'], body['id'], body['status'], body['url']) + ) body_actual += json.dumps(body, indent=4) return body_actual diff --git a/awx/main/notifications/hipchat_backend.py b/awx/main/notifications/hipchat_backend.py index b34c5e5fd1..586754bd92 100644 --- a/awx/main/notifications/hipchat_backend.py +++ b/awx/main/notifications/hipchat_backend.py @@ -6,11 +6,12 @@ import logging import requests from django.utils.encoding import smart_text - +from django.utils.translation import ugettext_lazy as _ from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.hipchat_backend') + class HipChatBackend(TowerBaseEmailBackend): init_parameters = {"token": {"label": "Token", "type": "password"}, @@ -42,8 +43,8 @@ class HipChatBackend(TowerBaseEmailBackend): "from": m.from_email, "message_format": "text"}) if r.status_code != 204: - logger.error(smart_text("Error sending messages: {}".format(r.text))) + logger.error(smart_text(_("Error sending messages: {}").format(r.text))) if not self.fail_silently: - raise Exception(smart_text("Error sending message to hipchat: {}".format(r.text))) + raise Exception(smart_text(_("Error sending message to hipchat: {}").format(r.text))) sent_messages += 1 return sent_messages diff --git a/awx/main/notifications/irc_backend.py b/awx/main/notifications/irc_backend.py index 61158bbe5d..277364cf07 100644 --- a/awx/main/notifications/irc_backend.py +++ b/awx/main/notifications/irc_backend.py @@ -8,11 +8,12 @@ import logging import irc.client from django.utils.encoding import smart_text - +from django.utils.translation import ugettext_lazy as _ from awx.main.notifications.base import 
TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.irc_backend') + class IrcBackend(TowerBaseEmailBackend): init_parameters = {"server": {"label": "IRC Server Address", "type": "string"}, @@ -50,7 +51,7 @@ class IrcBackend(TowerBaseEmailBackend): connect_factory=connection_factory, ) except irc.client.ServerConnectionError as e: - logger.error(smart_text("Exception connecting to irc server: {}".format(e))) + logger.error(smart_text(_("Exception connecting to irc server: {}").format(e))) if not self.fail_silently: raise return True diff --git a/awx/main/notifications/pagerduty_backend.py b/awx/main/notifications/pagerduty_backend.py index 390fac3d20..76322c18cf 100644 --- a/awx/main/notifications/pagerduty_backend.py +++ b/awx/main/notifications/pagerduty_backend.py @@ -5,11 +5,12 @@ import logging import pygerduty from django.utils.encoding import smart_text - +from django.utils.translation import ugettext_lazy as _ from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.pagerduty_backend') + class PagerDutyBackend(TowerBaseEmailBackend): init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"}, @@ -35,7 +36,7 @@ class PagerDutyBackend(TowerBaseEmailBackend): except Exception as e: if not self.fail_silently: raise - logger.error(smart_text("Exception connecting to PagerDuty: {}".format(e))) + logger.error(smart_text(_("Exception connecting to PagerDuty: {}").format(e))) for m in messages: try: pager.trigger_incident(m.recipients()[0], @@ -44,7 +45,7 @@ class PagerDutyBackend(TowerBaseEmailBackend): client=m.from_email) sent_messages += 1 except Exception as e: - logger.error(smart_text("Exception sending messages: {}".format(e))) + logger.error(smart_text(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/slack_backend.py b/awx/main/notifications/slack_backend.py index ffc52bc44b..2da5c5d8a3 100644 --- a/awx/main/notifications/slack_backend.py +++ b/awx/main/notifications/slack_backend.py @@ -5,11 +5,12 @@ import logging from slackclient import SlackClient from django.utils.encoding import smart_text - +from django.utils.translation import ugettext_lazy as _ from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.slack_backend') + class SlackBackend(TowerBaseEmailBackend): init_parameters = {"token": {"label": "Token", "type": "password"}, @@ -48,7 +49,7 @@ class SlackBackend(TowerBaseEmailBackend): self.connection.rtm_send_message(r, m.subject) sent_messages += 1 except Exception as e: - logger.error(smart_text("Exception sending messages: {}".format(e))) + logger.error(smart_text(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/twilio_backend.py b/awx/main/notifications/twilio_backend.py index df411c68c5..e1f75837c2 100644 --- a/awx/main/notifications/twilio_backend.py +++ b/awx/main/notifications/twilio_backend.py @@ -6,11 +6,12 @@ import logging from twilio.rest import TwilioRestClient from django.utils.encoding import smart_text - +from django.utils.translation import ugettext_lazy as _ from awx.main.notifications.base import TowerBaseEmailBackend logger = logging.getLogger('awx.main.notifications.twilio_backend') + class TwilioBackend(TowerBaseEmailBackend): init_parameters = {"account_sid": {"label": "Account SID", "type": "string"}, @@ -32,7 +33,7 @@ 
class TwilioBackend(TowerBaseEmailBackend): except Exception as e: if not self.fail_silently: raise - logger.error(smart_text("Exception connecting to Twilio: {}".format(e))) + logger.error(smart_text(_("Exception connecting to Twilio: {}").format(e))) for m in messages: try: @@ -42,7 +43,7 @@ class TwilioBackend(TowerBaseEmailBackend): body=m.subject) sent_messages += 1 except Exception as e: - logger.error(smart_text("Exception sending messages: {}".format(e))) + logger.error(smart_text(_("Exception sending messages: {}").format(e))) if not self.fail_silently: raise return sent_messages diff --git a/awx/main/notifications/webhook_backend.py b/awx/main/notifications/webhook_backend.py index e74f39f654..8489a90f7b 100644 --- a/awx/main/notifications/webhook_backend.py +++ b/awx/main/notifications/webhook_backend.py @@ -5,12 +5,13 @@ import logging import requests from django.utils.encoding import smart_text - +from django.utils.translation import ugettext_lazy as _ from awx.main.notifications.base import TowerBaseEmailBackend from awx.main.utils import get_awx_version logger = logging.getLogger('awx.main.notifications.webhook_backend') + class WebhookBackend(TowerBaseEmailBackend): init_parameters = {"url": {"label": "Target URL", "type": "string"}, @@ -34,8 +35,8 @@ class WebhookBackend(TowerBaseEmailBackend): json=m.body, headers=self.headers) if r.status_code >= 400: - logger.error(smart_text("Error sending notification webhook: {}".format(r.text))) + logger.error(smart_text(_("Error sending notification webhook: {}").format(r.text))) if not self.fail_silently: - raise Exception(smart_text("Error sending notification webhook: {}".format(r.text))) + raise Exception(smart_text(_("Error sending notification webhook: {}").format(r.text))) sent_messages += 1 return sent_messages diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 6b98a12b22..c4597b22e2 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -41,6 +41,7 @@ from django.utils.timezone import now from django.utils.encoding import smart_str from django.core.mail import send_mail from django.contrib.auth.models import User +from django.utils.translation import ugettext_lazy as _ # AWX from awx.main.constants import CLOUD_PROVIDERS @@ -112,12 +113,12 @@ def run_administrative_checks(self): tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True) if (used_percentage * 100) > 90: send_mail("Ansible Tower host usage over 90%", - "Ansible Tower host usage over 90%", + _("Ansible Tower host usage over 90%"), tower_admin_emails, fail_silently=True) if validation_info.get('date_warning', False): send_mail("Ansible Tower license will expire soon", - "Ansible Tower license will expire soon", + _("Ansible Tower license will expire soon"), tower_admin_emails, fail_silently=True) @@ -165,7 +166,7 @@ def tower_periodic_scheduler(self): def _send_notification_templates(instance, status_str): if status_str not in ['succeeded', 'failed']: - raise ValueError("status_str must be either succeeded or failed") + raise ValueError(_("status_str must be either succeeded or failed")) notification_templates = instance.get_notification_templates() if notification_templates: all_notification_templates = set(notification_templates.get('success', []) + notification_templates.get('any', [])) diff --git a/awx/main/utils.py b/awx/main/utils.py index 76f28f090a..4597975a36 100644 --- a/awx/main/utils.py +++ b/awx/main/utils.py @@ -20,6 +20,9 @@ import tempfile # Decorator from decorator import decorator +# Django +from 
django.utils.translation import ugettext_lazy as _ + # Django REST Framework from rest_framework.exceptions import ParseError, PermissionDenied from django.utils.encoding import smart_str @@ -78,7 +81,7 @@ def to_python_boolean(value, allow_none=False): elif allow_none and value.lower() in ('none', 'null'): return None else: - raise ValueError(u'Unable to convert "%s" to boolean' % unicode(value)) + raise ValueError(_(u'Unable to convert "%s" to boolean') % unicode(value)) def camelcase_to_underscore(s): ''' @@ -193,7 +196,7 @@ def decrypt_field(instance, field_name, subfield=None): return value algo, b64data = value[len('$encrypted$'):].split('$', 1) if algo != 'AES': - raise ValueError('unsupported algorithm: %s' % algo) + raise ValueError(_('unsupported algorithm: %s') % algo) encrypted = base64.b64decode(b64data) key = get_encryption_key(instance, field_name) cipher = AES.new(key, AES.MODE_ECB) @@ -214,16 +217,16 @@ def update_scm_url(scm_type, url, username=True, password=True, # hg: http://www.selenic.com/mercurial/hg.1.html#url-paths # svn: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.advanced.reposurls if scm_type not in ('git', 'hg', 'svn'): - raise ValueError('Unsupported SCM type "%s"' % str(scm_type)) + raise ValueError(_('Unsupported SCM type "%s"') % str(scm_type)) if not url.strip(): return '' parts = urlparse.urlsplit(url) try: parts.port except ValueError: - raise ValueError('Invalid %s URL' % scm_type) + raise ValueError(_('Invalid %s URL') % scm_type) if parts.scheme == 'git+ssh' and not scp_format: - raise ValueError('Unsupported %s URL' % scm_type) + raise ValueError(_('Unsupported %s URL') % scm_type) if '://' not in url: # Handle SCP-style URLs for git (e.g. [user@]host.xz:path/to/repo.git/). @@ -233,7 +236,7 @@ def update_scm_url(scm_type, url, username=True, password=True, else: userpass, hostpath = '', url if hostpath.count(':') > 1: - raise ValueError('Invalid %s URL' % scm_type) + raise ValueError(_('Invalid %s URL') % scm_type) host, path = hostpath.split(':', 1) #if not path.startswith('/') and not path.startswith('~/'): # path = '~/%s' % path @@ -252,7 +255,7 @@ def update_scm_url(scm_type, url, username=True, password=True, else: parts = urlparse.urlsplit('file://%s' % url) else: - raise ValueError('Invalid %s URL' % scm_type) + raise ValueError(_('Invalid %s URL') % scm_type) # Validate that scheme is valid for given scm_type. 
scm_type_schemes = { @@ -261,11 +264,11 @@ def update_scm_url(scm_type, url, username=True, password=True, 'svn': ('http', 'https', 'svn', 'svn+ssh', 'file'), } if parts.scheme not in scm_type_schemes.get(scm_type, ()): - raise ValueError('Unsupported %s URL' % scm_type) + raise ValueError(_('Unsupported %s URL') % scm_type) if parts.scheme == 'file' and parts.netloc not in ('', 'localhost'): - raise ValueError('Unsupported host "%s" for file:// URL' % (parts.netloc)) + raise ValueError(_('Unsupported host "%s" for file:// URL') % (parts.netloc)) elif parts.scheme != 'file' and not parts.netloc: - raise ValueError('Host is required for %s URL' % parts.scheme) + raise ValueError(_('Host is required for %s URL') % parts.scheme) if username is True: netloc_username = parts.username or '' elif username: @@ -283,13 +286,13 @@ def update_scm_url(scm_type, url, username=True, password=True, if check_special_cases: special_git_hosts = ('github.com', 'bitbucket.org', 'altssh.bitbucket.org') if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_username != 'git': - raise ValueError('Username must be "git" for SSH access to %s.' % parts.hostname) + raise ValueError(_('Username must be "git" for SSH access to %s.') % parts.hostname) if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_password: #raise ValueError('Password not allowed for SSH access to %s.' % parts.hostname) netloc_password = '' special_hg_hosts = ('bitbucket.org', 'altssh.bitbucket.org') if scm_type == 'hg' and parts.scheme == 'ssh' and parts.hostname in special_hg_hosts and netloc_username != 'hg': - raise ValueError('Username must be "hg" for SSH access to %s.' % parts.hostname) + raise ValueError(_('Username must be "hg" for SSH access to %s.') % parts.hostname) if scm_type == 'hg' and parts.scheme == 'ssh' and netloc_password: #raise ValueError('Password not supported for SSH with Mercurial.') netloc_password = '' diff --git a/awx/main/validators.py b/awx/main/validators.py index 438fb98bcd..afe465c76e 100644 --- a/awx/main/validators.py +++ b/awx/main/validators.py @@ -188,4 +188,4 @@ def vars_validate_or_raise(vars_str): return vars_str except yaml.YAMLError: pass - raise RestValidationError('Must be valid JSON or YAML.') + raise RestValidationError(_('Must be valid JSON or YAML.')) diff --git a/awx/main/views.py b/awx/main/views.py index a1036a96e6..f476f81cfd 100644 --- a/awx/main/views.py +++ b/awx/main/views.py @@ -4,6 +4,7 @@ # Django from django.shortcuts import render from django.utils.html import format_html +from django.utils.translation import ugettext_lazy as _ # Django REST Framework from rest_framework import exceptions, permissions, views @@ -16,7 +17,7 @@ class ApiErrorView(views.APIView): metadata_class = None allowed_methods = ('GET', 'HEAD') exception_class = exceptions.APIException - view_name = 'API Error' + view_name = _('API Error') def get_view_name(self): return self.view_name @@ -45,31 +46,31 @@ def handle_error(request, status=404, **kwargs): def handle_400(request): kwargs = { - 'name': 'Bad Request', - 'content': 'The request could not be understood by the server.', + 'name': _('Bad Request'), + 'content': _('The request could not be understood by the server.'), } return handle_error(request, 400, **kwargs) def handle_403(request): kwargs = { - 'name': 'Forbidden', - 'content': 'You don\'t have permission to access the requested resource.', + 'name': _('Forbidden'), + 'content': _('You don\'t have 
permission to access the requested resource.'), } return handle_error(request, 403, **kwargs) def handle_404(request): kwargs = { - 'name': 'Not Found', - 'content': 'The requested resource could not be found.', + 'name': _('Not Found'), + 'content': _('The requested resource could not be found.'), } return handle_error(request, 404, **kwargs) def handle_500(request): kwargs = { - 'name': 'Server Error', - 'content': 'A server error has occurred.', + 'name': _('Server Error'), + 'content': _('A server error has occurred.'), } return handle_error(request, 500, **kwargs) diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index 02cbf3fc31..bc9573e896 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -10,8 +10,12 @@ from datetime import timedelta from kombu import Queue, Exchange -# Update this module's local settings from the global settings module. +# global settings from django.conf import global_settings +# ugettext lazy +from django.utils.translation import ugettext_lazy as _ + +# Update this module's local settings from the global settings module. this_module = sys.modules[__name__] for setting in dir(global_settings): if setting == setting.upper(): @@ -117,6 +121,11 @@ LOG_ROOT = os.path.join(BASE_DIR) # The heartbeat file for the tower scheduler SCHEDULE_METADATA_LOCATION = os.path.join(BASE_DIR, '.tower_cycle') +# Django gettext files path: locale//LC_MESSAGES/django.po, django.mo +LOCALE_PATHS = ( + os.path.join(BASE_DIR, 'locale'), +) + # Maximum number of the same job that can be waiting to run when launching from scheduler # Note: This setting may be overridden by database settings. SCHEDULE_MAX_JOBS = 10 @@ -154,8 +163,9 @@ TEMPLATE_CONTEXT_PROCESSORS = ( # NOQA ) MIDDLEWARE_CLASSES = ( # NOQA - 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.locale.LocaleMiddleware', + 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', @@ -571,12 +581,12 @@ AD_HOC_COMMANDS = [ # instead (based on docs from: # http://docs.rackspace.com/loadbalancers/api/v1.0/clb-devguide/content/Service_Access_Endpoints-d1e517.html) RAX_REGION_CHOICES = [ - ('ORD', 'Chicago'), - ('DFW', 'Dallas/Ft. Worth'), - ('IAD', 'Northern Virginia'), - ('LON', 'London'), - ('SYD', 'Sydney'), - ('HKG', 'Hong Kong'), + ('ORD', _('Chicago')), + ('DFW', _('Dallas/Ft. Worth')), + ('IAD', _('Northern Virginia')), + ('LON', _('London')), + ('SYD', _('Sydney')), + ('HKG', _('Hong Kong')), ] # Inventory variable name/values for determining if host is active/enabled. @@ -603,20 +613,20 @@ INV_ENV_VARIABLE_BLACKLIST = ("HOME", "USER", "_", "TERM") # list of names here. The available region IDs will be pulled from boto. 
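# --- i18n sketch --------------------------------------------------------------
# How the settings changes above fit together: LocaleMiddleware (placed after
# SessionMiddleware and before CommonMiddleware) activates a language for each
# request, and LOCALE_PATHS is where the compiled django.mo catalogs are looked
# up.  Assumes a compiled 'fr' catalog exists under awx/locale/.
from django.utils import translation
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _

msg = _('Server Error')             # proxy only; no catalog lookup yet

with translation.override('fr'):    # roughly what LocaleMiddleware does per request
    localized = force_text(msg)     # resolved against the LOCALE_PATHS catalogs
# -------------------------------------------------------------------------------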
# http://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region EC2_REGION_NAMES = { - 'us-east-1': 'US East (Northern Virginia)', - 'us-east-2': 'US East (Ohio)', - 'us-west-2': 'US West (Oregon)', - 'us-west-1': 'US West (Northern California)', - 'eu-central-1': 'EU (Frankfurt)', - 'eu-west-1': 'EU (Ireland)', - 'ap-southeast-1': 'Asia Pacific (Singapore)', - 'ap-southeast-2': 'Asia Pacific (Sydney)', - 'ap-northeast-1': 'Asia Pacific (Tokyo)', - 'ap-northeast-2': 'Asia Pacific (Seoul)', - 'ap-south-1': 'Asia Pacific (Mumbai)', - 'sa-east-1': 'South America (Sao Paulo)', - 'us-gov-west-1': 'US West (GovCloud)', - 'cn-north-1': 'China (Beijing)', + 'us-east-1': _('US East (Northern Virginia)'), + 'us-east-2': _('US East (Ohio)'), + 'us-west-2': _('US West (Oregon)'), + 'us-west-1': _('US West (Northern California)'), + 'eu-central-1': _('EU (Frankfurt)'), + 'eu-west-1': _('EU (Ireland)'), + 'ap-southeast-1': _('Asia Pacific (Singapore)'), + 'ap-southeast-2': _('Asia Pacific (Sydney)'), + 'ap-northeast-1': _('Asia Pacific (Tokyo)'), + 'ap-northeast-2': _('Asia Pacific (Seoul)'), + 'ap-south-1': _('Asia Pacific (Mumbai)'), + 'sa-east-1': _('South America (Sao Paulo)'), + 'us-gov-west-1': _('US West (GovCloud)'), + 'cn-north-1': _('China (Beijing)'), } EC2_REGIONS_BLACKLIST = [ @@ -665,19 +675,19 @@ VMWARE_EXCLUDE_EMPTY_GROUPS = True # provide a list here. # Source: https://developers.google.com/compute/docs/zones GCE_REGION_CHOICES = [ - ('us-east1-b', 'US East (B)'), - ('us-east1-c', 'US East (C)'), - ('us-east1-d', 'US East (D)'), - ('us-central1-a', 'US Central (A)'), - ('us-central1-b', 'US Central (B)'), - ('us-central1-c', 'US Central (C)'), - ('us-central1-f', 'US Central (F)'), - ('europe-west1-b', 'Europe West (B)'), - ('europe-west1-c', 'Europe West (C)'), - ('europe-west1-d', 'Europe West (D)'), - ('asia-east1-a', 'Asia East (A)'), - ('asia-east1-b', 'Asia East (B)'), - ('asia-east1-c', 'Asia East (C)'), + ('us-east1-b', _('US East (B)')), + ('us-east1-c', _('US East (C)')), + ('us-east1-d', _('US East (D)')), + ('us-central1-a', _('US Central (A)')), + ('us-central1-b', _('US Central (B)')), + ('us-central1-c', _('US Central (C)')), + ('us-central1-f', _('US Central (F)')), + ('europe-west1-b', _('Europe West (B)')), + ('europe-west1-c', _('Europe West (C)')), + ('europe-west1-d', _('Europe West (D)')), + ('asia-east1-a', _('Asia East (A)')), + ('asia-east1-b', _('Asia East (B)')), + ('asia-east1-c', _('Asia East (C)')), ] GCE_REGIONS_BLACKLIST = [] @@ -701,19 +711,19 @@ GCE_INSTANCE_ID_VAR = None # It's not possible to get zones in Azure without authenticating, so we # provide a list here. 
AZURE_REGION_CHOICES = [ - ('Central_US', 'US Central'), - ('East_US_1', 'US East'), - ('East_US_2', 'US East 2'), - ('North_Central_US', 'US North Central'), - ('South_Central_US', 'US South Central'), - ('West_US', 'US West'), - ('North_Europe', 'Europe North'), - ('West_Europe', 'Europe West'), - ('East_Asia_Pacific', 'Asia Pacific East'), - ('Southest_Asia_Pacific', 'Asia Pacific Southeast'), - ('East_Japan', 'Japan East'), - ('West_Japan', 'Japan West'), - ('South_Brazil', 'Brazil South'), + ('Central_US', _('US Central')), + ('East_US_1', _('US East')), + ('East_US_2', _('US East 2')), + ('North_Central_US', _('US North Central')), + ('South_Central_US', _('US South Central')), + ('West_US', _('US West')), + ('North_Europe', _('Europe North')), + ('West_Europe', _('Europe West')), + ('East_Asia_Pacific', _('Asia Pacific East')), + ('Southest_Asia_Pacific', _('Asia Pacific Southeast')), + ('East_Japan', _('Japan East')), + ('West_Japan', _('Japan West')), + ('South_Brazil', _('Brazil South')), ] AZURE_REGIONS_BLACKLIST = [] diff --git a/awx/sso/pipeline.py b/awx/sso/pipeline.py index 738a9b3b0c..2a16eb25b0 100644 --- a/awx/sso/pipeline.py +++ b/awx/sso/pipeline.py @@ -7,6 +7,9 @@ import re # Python Social Auth from social.exceptions import AuthException +# Django +from django.utils.translation import ugettext_lazy as _ + # Tower from awx.conf.license import feature_enabled @@ -18,13 +21,13 @@ class AuthNotFound(AuthException): super(AuthNotFound, self).__init__(backend, *args, **kwargs) def __str__(self): - return 'An account cannot be found for {0}'.format(self.email_or_uid) + return _('An account cannot be found for {0}').format(self.email_or_uid) class AuthInactive(AuthException): def __str__(self): - return 'Your account is inactive' + return _('Your account is inactive') def check_user_found_or_created(backend, details, user=None, *args, **kwargs): diff --git a/awx/templates/rest_framework/api.html b/awx/templates/rest_framework/api.html index c40d81ff63..746521f542 100644 --- a/awx/templates/rest_framework/api.html +++ b/awx/templates/rest_framework/api.html @@ -36,9 +36,9 @@ {% if user.is_authenticated %}
  Logged in as {{ user }}{% if user.get_full_name %} ({{ user.get_full_name }}){% endif %}
 {% endif %}
-  Ansible Tower API Guide
-  Back to Ansible Tower
+  {% trans 'Ansible Tower API Guide' %}
+  {% trans 'Back to Ansible Tower' %}
diff --git a/awx/templates/rest_framework/base.html b/awx/templates/rest_framework/base.html index 6ed3cd456f..a6c4169ebd 100644 --- a/awx/templates/rest_framework/base.html +++ b/awx/templates/rest_framework/base.html @@ -1,8 +1,8 @@ + {# Copy of base.html from rest_framework with minor Ansible Tower change. #} {% load staticfiles %} {% load rest_framework %} {% load i18n %} - {% block head %} @@ -75,21 +75,21 @@
    {% if api_settings.URL_FORMAT_OVERRIDE %}
    - GET + GET -
    {% else %} - GET + GET {% endif %}
    @@ -97,13 +97,13 @@ {% if options_form %}
    - +
    {% endif %} {% if delete_form %}
    - +
    {% endif %} @@ -169,7 +169,7 @@ {% csrf_token %} {{ post_form }}
    - +
    @@ -183,7 +183,7 @@
    {% include "rest_framework/raw_data_form.html" %}
    - +
    @@ -213,7 +213,7 @@
    {{ put_form }}
    - +
    @@ -227,10 +227,10 @@ {% include "rest_framework/raw_data_form.html" %}
    {% if raw_data_put_form %} - + {% endif %} {% if raw_data_patch_form %} - + {% endif %}
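{# --- i18n sketch: hypothetical markup, not one of the templates in this diff. #}
{# The pattern applied throughout these templates: load the i18n tag library    #}
{# once per template, then wrap user-visible literals in trans tags so          #}
{# makemessages extracts them into the same catalog as the Python strings.      #}
{% load i18n %}
<title>{% trans 'Ansible Tower' %}</title>
<a href="#">{% trans 'Ansible Tower API Guide' %}</a>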
    diff --git a/awx/ui/templates/ui/index.html b/awx/ui/templates/ui/index.html index c356a712a1..9c8d91918f 100644 --- a/awx/ui/templates/ui/index.html +++ b/awx/ui/templates/ui/index.html @@ -1,10 +1,11 @@ +{% load i18n %} - Ansible Tower + {% trans 'Ansible Tower' %} @@ -44,7 +45,7 @@ - + @@ -80,7 +81,7 @@
    @@ -99,7 +100,7 @@
    @@ -111,27 +112,27 @@ diff --git a/tools/scripts/manage_translations.py b/tools/scripts/manage_translations.py new file mode 100644 index 0000000000..384b2c22b4 --- /dev/null +++ b/tools/scripts/manage_translations.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python +# +# NOTE: This script is based on django's manage_translations.py script +# (https://github.com/django/django/blob/master/scripts/manage_translations.py) +# +# This python file contains utility scripts to manage Ansible-Tower translations. +# It has to be run inside the ansible-tower git root directory. +# +# The following commands are available: +# +# * update: check for new strings in ansible-tower catalogs, and +# output how much strings are new/changed. +# +# * stats: output statistics for each language +# +# * pull: pull/fetch translations from Zanata +# +# * push: update resources in Zanata with the local files +# +# Each command support the --lang option to limit their operation to +# the specified language(s). For example, +# to pull translations for Japanese and French, run: +# +# $ python tools/scripts/manage_translations.py pull --lang ja,fr + +import os +from argparse import ArgumentParser +from subprocess import PIPE, Popen +from xml.etree import ElementTree as ET +from xml.etree.ElementTree import ParseError + +import django +from django.conf import settings +from django.core.management import call_command + + +PROJECT_CONFIG = "tools/scripts/zanata_config/backend-translations.xml" +MIN_TRANS_PERCENT_SETTING = False +MIN_TRANS_PERCENT = '10' + + +def _get_zanata_project_url(): + project_url = '' + try: + zanata_config = ET.parse(PROJECT_CONFIG).getroot() + server_url = zanata_config.getchildren()[0].text + project_id = zanata_config.getchildren()[1].text + version_id = zanata_config.getchildren()[2].text + middle_url = "iteration/view/" if server_url[-1:] == '/' else "/iteration/view/" + project_url = server_url + middle_url + project_id + "/" + version_id + "/documents" + except (ParseError, IndexError): + print("Please re-check zanata project configuration.") + return project_url + + +def _handle_response(output, errors): + if not errors and '\n' in output: + for response in output.split('\n'): + print(response) + return True + else: + print(errors.strip()) + return False + + +def _check_diff(base_path): + """ + Output the approximate number of changed/added strings in the POT + """ + po_path = '%s/django.pot' % base_path + p = Popen("git diff -U0 %s | egrep '^[-+]msgid' | wc -l" % po_path, + stdout=PIPE, stderr=PIPE, shell=True) + output, errors = p.communicate() + num_changes = int(output.strip()) + print("[ %d ] changed/added messages in catalog." 
% num_changes) + + +def pull(lang=None, both=None): + """ + Pull translations .po from Zanata + """ + + command = "zanata pull --project-config %(config)s --disable-ssl-cert" + + if MIN_TRANS_PERCENT_SETTING: + command += " --min-doc-percent " + MIN_TRANS_PERCENT + if lang: + command += " --lang %s" % lang[0] + + p = Popen(command % {'config': PROJECT_CONFIG}, + stdout=PIPE, stderr=PIPE, shell=True) + output, errors = p.communicate() + _handle_response(output, errors) + + +def push(lang=None, both=None): + """ + Push django.pot to Zanata + At Zanata: + (1) project_type should be podir - {locale}/{filename}.po format + (2) only required languages should be kept enabled + """ + p = Popen("zanata push --project-config %(config)s --push-type source --disable-ssl-cert" % + {'config': PROJECT_CONFIG}, stdout=PIPE, stderr=PIPE, shell=True) + output, errors = p.communicate() + if _handle_response(output, errors): + print("Zanata URL: %s\n" % _get_zanata_project_url()) + + +def stats(lang=None, both=None): + """ + Get translation stats from Zanata + """ + command = "zanata stats --project-config %(config)s --disable-ssl-cert" + if lang: + command += " --lang %s" % lang[0] + + p = Popen(command % {'config': PROJECT_CONFIG}, + stdout=PIPE, stderr=PIPE, shell=True) + output, errors = p.communicate() + _handle_response(output, errors) + + +def update(lang=None, both=None): + """ + Update (1) awx/locale/django.pot and/or + (2) awx/ui/po/ansible-tower.pot files with + new/updated translatable strings. + """ + settings.configure() + django.setup() + print("Updating catalog for Ansible Tower:") + + if both: + print("Angular...") + p = Popen("make pot", stdout=PIPE, stderr=PIPE, shell=True) + output, errors = p.communicate() + _handle_response(output, errors) + + print("Django...") + lang = (lang[0].split(',') if ',' in lang[0] else lang) if lang else [] + os.chdir(os.path.join(os.getcwd(), 'awx')) + call_command('makemessages', '--keep-pot', locale=lang) + # Output changed stats + _check_diff(os.path.join(os.getcwd(), 'locale')) + + +if __name__ == "__main__": + + try: + devnull = open(os.devnull) + Popen(["zanata"], stdout=devnull, stderr=devnull).communicate() + except OSError as e: + if e.errno == os.errno.ENOENT: + print(''' + You need zanata-python-client, install it. + 1. Install zanata-python-client, use + $ dnf install zanata-python-client + 2. Create ~/.config/zanata.ini file: + $ vim ~/.config/zanata.ini + [servers] + translate_zanata_org.url=https://translate.engineering.redhat.com/ + translate_zanata_org.username=ansibletoweruser + translate_zanata_org.key= + ''') + + exit(1) + + RUNABLE_SCRIPTS = ('update', 'stats', 'pull', 'push') + + parser = ArgumentParser() + parser.add_argument('cmd', nargs=1, choices=RUNABLE_SCRIPTS) + parser.add_argument("-l", "--lang", action='append', help="specify comma seperated locales") + parser.add_argument("-u", "--both", action='store_true', help="specify to include ui tasks") + options = parser.parse_args() + + eval(options.cmd[0])(options.lang, options.both)
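For reference, the commands defined by the parser above combine into a workflow along these lines (run from the ansible-tower git root, with the zanata-python-client configuration from the script's error message already in place):

$ python tools/scripts/manage_translations.py update --both   # refresh django.pot (and, with --both, the UI .pot)
$ python tools/scripts/manage_translations.py push            # upload source catalogs to Zanata
$ python tools/scripts/manage_translations.py stats           # per-language translation progress
$ python tools/scripts/manage_translations.py pull --lang ja,fr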