update strings

sundeep-co-in 2016-10-25 16:12:41 +05:30
parent 78a8ce9479
commit d7b3b90f4d
50 changed files with 892 additions and 450 deletions

View File

@@ -185,7 +185,8 @@ UI_RELEASE_FLAG_FILE = awx/ui/.release_built
virtualbox-ovf virtualbox-centos-7 virtualbox-centos-6 \
clean-bundle setup_bundle_tarball \
ui-docker-machine ui-docker ui-release \
-ui-test ui-test-ci ui-test-saucelabs
+ui-test ui-deps ui-test-ci ui-test-saucelabs jlaska
# Remove setup build files
clean-tar:
@@ -391,11 +392,17 @@ flower:
fi; \
$(PYTHON) manage.py celery flower --address=0.0.0.0 --port=5555 --broker=amqp://guest:guest@$(RABBITMQ_HOST):5672//
-uwsgi:
+collectstatic:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
-uwsgi --socket :8050 --module=awx.wsgi:application --home=/venv/tower --chdir=/tower_devel/ --vacuum --processes=5 --harakiri=60 --static-map /static=/tower_devel/awx/ui/static
+mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
+uwsgi: collectstatic
+@if [ "$(VENV_BASE)" ]; then \
+. $(VENV_BASE)/tower/bin/activate; \
+fi; \
+uwsgi -b 32768 --socket :8050 --module=awx.wsgi:application --home=/venv/tower --chdir=/tower_devel/ --vacuum --processes=5 --harakiri=60 --py-autoreload 1
daphne:
@if [ "$(VENV_BASE)" ]; then \
@@ -421,7 +428,7 @@ celeryd:
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
fi; \
-$(PYTHON) manage.py celeryd -l DEBUG -B --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default,scheduler,$(COMPOSE_HOST)
+$(PYTHON) manage.py celeryd -l DEBUG -B --autoreload --autoscale=20,3 --schedule=$(CELERY_SCHEDULE_FILE) -Q projects,jobs,default,scheduler,$(COMPOSE_HOST)
#$(PYTHON) manage.py celery multi show projects jobs default -l DEBUG -Q:projects projects -Q:jobs jobs -Q:default default -c:projects 1 -c:jobs 3 -c:default 3 -Ofair -B --schedule=$(CELERY_SCHEDULE_FILE)
# Run to start the zeromq callback receiver
@@ -443,6 +450,9 @@ factcacher:
fi; \
$(PYTHON) manage.py run_fact_cache_receiver
+nginx:
+nginx -g "daemon off;"
reports:
mkdir -p $@
@@ -515,6 +525,8 @@ languages:
# UI TASKS
# --------------------------------------
+ui-deps: $(UI_DEPS_FLAG_FILE)
$(UI_DEPS_FLAG_FILE): awx/ui/package.json
$(NPM_BIN) --unsafe-perm --prefix awx/ui install awx/ui
touch $(UI_DEPS_FLAG_FILE)
@@ -794,7 +806,7 @@ docker-auth:
# Docker Compose Development environment
docker-compose: docker-auth
-TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml up --no-recreate nginx tower
+TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose.yml up --no-recreate tower
docker-compose-cluster: docker-auth
TAG=$(COMPOSE_TAG) docker-compose -f tools/docker-compose-cluster.yml up

View File

@@ -1,3 +1,4 @@
+nginx: make nginx
runworker: make runworker
daphne: make daphne
celeryd: make celeryd

View File

@@ -9,6 +9,7 @@ import logging
from django.conf import settings
from django.utils.timezone import now as tz_now
from django.utils.encoding import smart_text
+from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework import authentication
@@ -62,10 +63,10 @@ class TokenAuthentication(authentication.TokenAuthentication):
return None
if len(auth) == 1:
-msg = 'Invalid token header. No credentials provided.'
+msg = _('Invalid token header. No credentials provided.')
raise exceptions.AuthenticationFailed(msg)
elif len(auth) > 2:
-msg = 'Invalid token header. Token string should not contain spaces.'
+msg = _('Invalid token header. Token string should not contain spaces.')
raise exceptions.AuthenticationFailed(msg)
return self.authenticate_credentials(auth[1])
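
For reference, the pattern this commit applies throughout is Django's lazy translation marker; a minimal sketch (message text illustrative):

    # ugettext_lazy returns a proxy that defers translation until the
    # string is rendered, so the message is translated in the language
    # active for the request that finally displays it.
    from django.utils.translation import ugettext_lazy as _

    msg = _('Invalid token header. No credentials provided.')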
@@ -100,7 +101,7 @@ class TokenAuthentication(authentication.TokenAuthentication):
# If the user is inactive, then return an error.
if not token.user.is_active:
-raise exceptions.AuthenticationFailed('User inactive or deleted')
+raise exceptions.AuthenticationFailed(_('User inactive or deleted'))
# Refresh the token.
# The token is extended from "right now" + configurable setting amount.
@@ -151,7 +152,7 @@ class TaskAuthentication(authentication.BaseAuthentication):
return None
token = unified_job.task_auth_token
if auth[1] != token:
-raise exceptions.AuthenticationFailed('Invalid task token')
+raise exceptions.AuthenticationFailed(_('Invalid task token'))
return (None, token)
def authenticate_header(self, request):

View File

@@ -15,6 +15,7 @@ from django.template.loader import render_to_string
from django.utils.encoding import smart_text
from django.utils.safestring import mark_safe
from django.contrib.contenttypes.models import ContentType
+from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework.authentication import get_authorization_header
@@ -422,7 +423,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
sub_id = request.data.get('id', None)
res = None
if not sub_id:
-data = dict(msg='"id" is required to disassociate')
+data = dict(msg=_('"id" is required to disassociate'))
res = Response(data, status=status.HTTP_400_BAD_REQUEST)
return (sub_id, res)

View File

@@ -5,6 +5,7 @@ import json
# Django
from django.conf import settings
from django.utils import six
+from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework import parsers
@@ -27,4 +28,4 @@ class JSONParser(parsers.JSONParser):
data = stream.read().decode(encoding)
return json.loads(data, object_pairs_hook=OrderedDict)
except ValueError as exc:
-raise ParseError('JSON parse error - %s' % six.text_type(exc))
+raise ParseError(_('JSON parse error - %s') % six.text_type(exc))
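
The parser above preserves the client's key order by way of object_pairs_hook; for illustration:

    import json
    from collections import OrderedDict

    # object_pairs_hook receives the (key, value) pairs in document order,
    # so the resulting mapping keeps the order the client sent.
    data = json.loads('{"b": 1, "a": 2}', object_pairs_hook=OrderedDict)
    # OrderedDict([('b', 1), ('a', 2)])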

View File

@@ -242,11 +242,11 @@ class BaseSerializer(serializers.ModelSerializer):
def get_type_choices(self):
type_name_map = {
-'job': 'Playbook Run',
-'ad_hoc_command': 'Command',
-'project_update': 'SCM Update',
-'inventory_update': 'Inventory Sync',
-'system_job': 'Management Job',
+'job': _('Playbook Run'),
+'ad_hoc_command': _('Command'),
+'project_update': _('SCM Update'),
+'inventory_update': _('Inventory Sync'),
+'system_job': _('Management Job'),
}
choices = []
for t in self.get_types():
@@ -562,7 +562,7 @@ class UnifiedJobSerializer(BaseSerializer):
fields = ('*', 'unified_job_template', 'launch_type', 'status',
'failed', 'started', 'finished', 'elapsed', 'job_args',
'job_cwd', 'job_env', 'job_explanation', 'result_stdout',
-'result_traceback')
+'execution_node', 'result_traceback')
extra_kwargs = {
'unified_job_template': {
'source': 'unified_job_template_id',
@@ -623,8 +623,9 @@ class UnifiedJobSerializer(BaseSerializer):
def get_result_stdout(self, obj):
obj_size = obj.result_stdout_size
if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
-return "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size,
-settings.STDOUT_MAX_BYTES_DISPLAY)
+return _("Standard Output too large to display (%(text_size)d bytes), "
+"only download supported for sizes over %(supported_size)d bytes") \
+% {'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
return obj.result_stdout
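
The switch from positional %d placeholders to named %(...)d placeholders is deliberate: Django's i18n documentation recommends named interpolation for translated strings, since a translator can reorder named placeholders to fit the target language while positional ones fix the order. A sketch of the pattern (values illustrative):

    from django.utils.translation import ugettext_lazy as _

    # Interpolating a lazy string evaluates the translation at this point,
    # using whichever language is currently active.
    msg = _("Standard Output too large to display (%(text_size)d bytes), "
            "only download supported for sizes over %(supported_size)d bytes") \
        % {'text_size': 1048576, 'supported_size': 1024}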
@@ -680,8 +681,9 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
def get_result_stdout(self, obj):
obj_size = obj.result_stdout_size
if obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
-return "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size,
-settings.STDOUT_MAX_BYTES_DISPLAY)
+return _("Standard Output too large to display (%(text_size)d bytes), "
+"only download supported for sizes over %(supported_size)d bytes") \
+% {'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
return obj.result_stdout
def get_types(self):
@@ -720,7 +722,7 @@ class UserSerializer(BaseSerializer):
def validate_password(self, value):
if not self.instance and value in (None, ''):
-raise serializers.ValidationError('Password required for new User.')
+raise serializers.ValidationError(_('Password required for new User.'))
return value
def _update_password(self, obj, new_password):
@@ -804,7 +806,7 @@ class UserSerializer(BaseSerializer):
ldap_managed_fields.extend(getattr(settings, 'AUTH_LDAP_USER_FLAGS_BY_GROUP', {}).keys())
if field_name in ldap_managed_fields:
if value != getattr(self.instance, field_name):
-raise serializers.ValidationError('Unable to change %s on user managed by LDAP.' % field_name)
+raise serializers.ValidationError(_('Unable to change %s on user managed by LDAP.') % field_name)
return value
def validate_username(self, value):
@@ -914,7 +916,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
class Meta:
model = Project
fields = ('*', 'organization', 'scm_delete_on_next_update', 'scm_update_on_launch',
-'scm_update_cache_timeout') + \
+'scm_update_cache_timeout', 'scm_revision', 'timeout',) + \
('last_update_failed', 'last_updated') # Backwards compatibility
read_only_fields = ('scm_delete_on_next_update',)
@@ -955,18 +957,21 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
view = self.context.get('view', None)
if not organization and not view.request.user.is_superuser:
# Only allow super users to create orgless projects
-raise serializers.ValidationError('Organization is missing')
+raise serializers.ValidationError(_('Organization is missing'))
return super(ProjectSerializer, self).validate(attrs)
class ProjectPlaybooksSerializer(ProjectSerializer):
-playbooks = serializers.ReadOnlyField(help_text=_('Array of playbooks available within this project.'))
+playbooks = serializers.SerializerMethodField(help_text=_('Array of playbooks available within this project.'))
class Meta:
model = Project
fields = ('playbooks',)
+def get_playbooks(self, obj):
+return obj.playbook_files
@property
def data(self):
ret = super(ProjectPlaybooksSerializer, self).data
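
Switching playbooks from ReadOnlyField to SerializerMethodField routes the value through a get_<field_name> method instead of reading a same-named model attribute; a generic DRF sketch (class and attribute names illustrative):

    from rest_framework import serializers

    class ExampleSerializer(serializers.Serializer):
        playbooks = serializers.SerializerMethodField()

        def get_playbooks(self, obj):
            # Computed at serialization time; the field is read-only.
            return obj.playbook_files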
@@ -986,7 +991,7 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):
class Meta:
model = ProjectUpdate
-fields = ('*', 'project')
+fields = ('*', 'project', 'job_type')
def get_related(self, obj):
res = super(ProjectUpdateSerializer, self).get_related(obj)
@@ -1140,7 +1145,7 @@ class HostSerializer(BaseSerializerWithVariables):
if port < 1 or port > 65535:
raise ValueError
except ValueError:
-raise serializers.ValidationError(u'Invalid port specification: %s' % force_text(port))
+raise serializers.ValidationError(_(u'Invalid port specification: %s') % force_text(port))
return name, port
def validate_name(self, value):
@@ -1168,7 +1173,7 @@ class HostSerializer(BaseSerializerWithVariables):
vars_dict['ansible_ssh_port'] = port
attrs['variables'] = yaml.dump(vars_dict)
except (yaml.YAMLError, TypeError):
-raise serializers.ValidationError('Must be valid JSON or YAML.')
+raise serializers.ValidationError(_('Must be valid JSON or YAML.'))
return super(HostSerializer, self).validate(attrs)
@@ -1225,7 +1230,7 @@ class GroupSerializer(BaseSerializerWithVariables):
def validate_name(self, value):
if value in ('all', '_meta'):
-raise serializers.ValidationError('Invalid group name.')
+raise serializers.ValidationError(_('Invalid group name.'))
return value
def to_representation(self, obj):
@@ -1299,7 +1304,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):
def validate_script(self, value):
if not value.startswith("#!"):
-raise serializers.ValidationError('Script must begin with a hashbang sequence: i.e.... #!/usr/bin/env python')
+raise serializers.ValidationError(_('Script must begin with a hashbang sequence: i.e.... #!/usr/bin/env python'))
return value
def to_representation(self, obj):
@@ -1329,7 +1334,8 @@ class InventorySourceOptionsSerializer(BaseSerializer):
class Meta:
fields = ('*', 'source', 'source_path', 'source_script', 'source_vars', 'credential',
-'source_regions', 'instance_filters', 'group_by', 'overwrite', 'overwrite_vars')
+'source_regions', 'instance_filters', 'group_by', 'overwrite', 'overwrite_vars',
+'timeout')
def get_related(self, obj):
res = super(InventorySourceOptionsSerializer, self).get_related(obj)
@@ -1351,13 +1357,13 @@ class InventorySourceOptionsSerializer(BaseSerializer):
source_script = attrs.get('source_script', self.instance and self.instance.source_script or '')
if source == 'custom':
if source_script is None or source_script == '':
-errors['source_script'] = "If 'source' is 'custom', 'source_script' must be provided."
+errors['source_script'] = _("If 'source' is 'custom', 'source_script' must be provided.")
else:
try:
if source_script.organization != self.instance.inventory.organization:
-errors['source_script'] = "The 'source_script' does not belong to the same organization as the inventory."
+errors['source_script'] = _("The 'source_script' does not belong to the same organization as the inventory.")
except Exception as exc:
-errors['source_script'] = "'source_script' doesn't exist."
+errors['source_script'] = _("'source_script' doesn't exist.")
logger.error(str(exc))
if errors:
@@ -1743,7 +1749,7 @@ class CredentialSerializerCreate(CredentialSerializer):
else:
attrs.pop(field)
if not owner_fields:
-raise serializers.ValidationError({"detail": "Missing 'user', 'team', or 'organization'."})
+raise serializers.ValidationError({"detail": _("Missing 'user', 'team', or 'organization'.")})
return super(CredentialSerializerCreate, self).validate(attrs)
def create(self, validated_data):
@@ -1756,7 +1762,7 @@ class CredentialSerializerCreate(CredentialSerializer):
credential.admin_role.members.add(user)
if team:
if not credential.organization or team.organization.id != credential.organization.id:
-raise serializers.ValidationError({"detail": "Credential organization must be set and match before assigning to a team"})
+raise serializers.ValidationError({"detail": _("Credential organization must be set and match before assigning to a team")})
credential.admin_role.parents.add(team.admin_role)
credential.use_role.parents.add(team.member_role)
return credential
@@ -1783,13 +1789,23 @@ class OrganizationCredentialSerializerCreate(CredentialSerializerCreate):
fields = ('*', '-user', '-team')
-class JobOptionsSerializer(BaseSerializer):
+class LabelsListMixin(object):
+def _summary_field_labels(self, obj):
+return {'count': obj.labels.count(), 'results': [{'id': x.id, 'name': x.name} for x in obj.labels.all().order_by('name')[:10]]}
+def get_summary_fields(self, obj):
+res = super(LabelsListMixin, self).get_summary_fields(obj)
+res['labels'] = self._summary_field_labels(obj)
+return res
+class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
class Meta:
fields = ('*', 'job_type', 'inventory', 'project', 'playbook',
'credential', 'cloud_credential', 'network_credential', 'forks', 'limit',
'verbosity', 'extra_vars', 'job_tags', 'force_handlers',
-'skip_tags', 'start_at_task',)
+'skip_tags', 'start_at_task', 'timeout')
def get_related(self, obj):
res = super(JobOptionsSerializer, self).get_related(obj)
@@ -1808,14 +1824,6 @@ class JobOptionsSerializer(BaseSerializer):
args=(obj.network_credential.pk,))
return res
-def _summary_field_labels(self, obj):
-return {'count': obj.labels.count(), 'results': [{'id': x.id, 'name': x.name} for x in obj.labels.all().order_by('name')[:10]]}
-def get_summary_fields(self, obj):
-res = super(JobOptionsSerializer, self).get_summary_fields(obj)
-res['labels'] = self._summary_field_labels(obj)
-return res
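
The two methods removed here are the ones hoisted into LabelsListMixin above, so the same labels summary can be shared by the workflow serializers further down. A self-contained sketch of the cooperative-super pattern the mixin relies on (names illustrative):

    class Base(object):
        def get_summary_fields(self, obj):
            return {}

    class Mixin(object):
        def get_summary_fields(self, obj):
            # Listed first in the bases, so this runs, then delegates on.
            res = super(Mixin, self).get_summary_fields(obj)
            res['labels'] = {'count': 0, 'results': []}
            return res

    class Combined(Mixin, Base):
        pass

    Combined().get_summary_fields(None)  # {'labels': {'count': 0, 'results': []}}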
def to_representation(self, obj):
ret = super(JobOptionsSerializer, self).to_representation(obj)
if obj is None:
@@ -1840,11 +1848,11 @@ class JobOptionsSerializer(BaseSerializer):
playbook = attrs.get('playbook', self.instance and self.instance.playbook or '')
job_type = attrs.get('job_type', self.instance and self.instance.job_type or None)
if not project and job_type != PERM_INVENTORY_SCAN:
-raise serializers.ValidationError({'project': 'This field is required.'})
+raise serializers.ValidationError({'project': _('This field is required.')})
if project and playbook and force_text(playbook) not in project.playbooks:
-raise serializers.ValidationError({'playbook': 'Playbook not found for project.'})
+raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
if project and not playbook:
-raise serializers.ValidationError({'playbook': 'Must select playbook for project.'})
+raise serializers.ValidationError({'playbook': _('Must select playbook for project.')})
return super(JobOptionsSerializer, self).validate(attrs)
@@ -1897,12 +1905,12 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
if job_type == "scan":
if inventory is None or attrs.get('ask_inventory_on_launch', False):
-raise serializers.ValidationError({'inventory': 'Scan jobs must be assigned a fixed inventory.'})
+raise serializers.ValidationError({'inventory': _('Scan jobs must be assigned a fixed inventory.')})
elif project is None:
-raise serializers.ValidationError({'project': "Job types 'run' and 'check' must have assigned a project."})
+raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")})
if survey_enabled and job_type == PERM_INVENTORY_SCAN:
-raise serializers.ValidationError({'survey_enabled': 'Survey Enabled can not be used with scan jobs.'})
+raise serializers.ValidationError({'survey_enabled': _('Survey Enabled can not be used with scan jobs.')})
return super(JobTemplateSerializer, self).validate(attrs)
@@ -1927,7 +1935,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
fields = ('*', 'job_template', 'passwords_needed_to_start', 'ask_variables_on_launch',
'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch',
'ask_job_type_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch',
-'allow_simultaneous', 'artifacts',)
+'allow_simultaneous', 'artifacts', 'scm_revision',)
def get_related(self, obj):
res = super(JobSerializer, self).get_related(obj)
@@ -1962,7 +1970,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
try:
job_template = JobTemplate.objects.get(pk=data['job_template'])
except JobTemplate.DoesNotExist:
-raise serializers.ValidationError({'job_template': 'Invalid job template.'})
+raise serializers.ValidationError({'job_template': _('Invalid job template.')})
data.setdefault('name', job_template.name)
data.setdefault('description', job_template.description)
data.setdefault('job_type', job_template.job_type)
@@ -2047,11 +2055,11 @@ class JobRelaunchSerializer(JobSerializer):
def validate(self, attrs):
obj = self.context.get('obj')
if not obj.credential:
-raise serializers.ValidationError(dict(credential=["Credential not found or deleted."]))
+raise serializers.ValidationError(dict(credential=[_("Credential not found or deleted.")]))
if obj.job_type != PERM_INVENTORY_SCAN and obj.project is None:
-raise serializers.ValidationError(dict(errors=["Job Template Project is missing or undefined."]))
+raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")]))
if obj.inventory is None:
-raise serializers.ValidationError(dict(errors=["Job Template Inventory is missing or undefined."]))
+raise serializers.ValidationError(dict(errors=[_("Job Template Inventory is missing or undefined.")]))
attrs = super(JobRelaunchSerializer, self).validate(attrs)
return attrs
@@ -2178,7 +2186,7 @@ class SystemJobCancelSerializer(SystemJobSerializer):
class Meta:
fields = ('can_cancel',)
-class WorkflowJobTemplateSerializer(UnifiedJobTemplateSerializer):
+class WorkflowJobTemplateSerializer(LabelsListMixin, UnifiedJobTemplateSerializer):
show_capabilities = ['start', 'edit', 'delete']
class Meta:
@@ -2192,6 +2200,7 @@ class WorkflowJobTemplateSerializer(UnifiedJobTemplateSerializer):
#schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)),
launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)),
workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)),
+labels = reverse('api:workflow_job_template_label_list', args=(obj.pk,)),
# TODO: Implement notifications
#notification_templates_any = reverse('api:system_job_template_notification_templates_any_list', args=(obj.pk,)),
#notification_templates_success = reverse('api:system_job_template_notification_templates_success_list', args=(obj.pk,)),
@@ -2208,7 +2217,7 @@ class WorkflowJobTemplateListSerializer(WorkflowJobTemplateSerializer):
pass
# TODO:
-class WorkflowJobSerializer(UnifiedJobSerializer):
+class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
class Meta:
model = WorkflowJob
@@ -2222,6 +2231,7 @@ class WorkflowJobSerializer(UnifiedJobSerializer):
# TODO:
#res['notifications'] = reverse('api:system_job_notifications_list', args=(obj.pk,))
res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,))
+res['labels'] = reverse('api:workflow_job_label_list', args=(obj.pk,))
# TODO: Cancel job
'''
if obj.can_cancel or True:
@@ -2312,12 +2322,12 @@ class WorkflowJobTemplateNodeSerializer(WorkflowNodeBaseSerializer):
job_types = [t for t, v in JOB_TYPE_CHOICES]
if attrs['char_prompts']['job_type'] not in job_types:
raise serializers.ValidationError({
-"job_type": "%s is not a valid job type. The choices are %s." % (
-attrs['char_prompts']['job_type'], job_types)})
+"job_type": _("%(job_type)s is not a valid job type. The choices are %(choices)s.") % {
+'job_type': attrs['char_prompts']['job_type'], 'choices': job_types}})
ujt_obj = attrs.get('unified_job_template', None)
if isinstance(ujt_obj, (WorkflowJobTemplate, SystemJobTemplate)):
raise serializers.ValidationError({
-"unified_job_template": "Can not nest a %s inside a WorkflowJobTemplate" % ujt_obj.__class__.__name__})
+"unified_job_template": _("Can not nest a %s inside a WorkflowJobTemplate") % ujt_obj.__class__.__name__})
return super(WorkflowJobTemplateNodeSerializer, self).validate(attrs)
class WorkflowJobNodeSerializer(WorkflowNodeBaseSerializer):
@@ -2531,7 +2541,7 @@ class JobLaunchSerializer(BaseSerializer):
for field in obj.resources_needed_to_start:
if not (attrs.get(field, False) and obj._ask_for_vars_dict().get(field, False)):
-errors[field] = "Job Template '%s' is missing or undefined." % field
+errors[field] = _("Job Template '%s' is missing or undefined.") % field
if (not obj.ask_credential_on_launch) or (not attrs.get('credential', None)):
credential = obj.credential
@@ -2557,7 +2567,7 @@ class JobLaunchSerializer(BaseSerializer):
extra_vars = yaml.safe_load(extra_vars)
assert isinstance(extra_vars, dict)
except (yaml.YAMLError, TypeError, AttributeError, AssertionError):
-errors['extra_vars'] = 'Must be a valid JSON or YAML dictionary.'
+errors['extra_vars'] = _('Must be a valid JSON or YAML dictionary.')
if not isinstance(extra_vars, dict):
extra_vars = {}
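
The extra_vars check above leans on JSON being a subset of YAML, so a single yaml.safe_load call accepts both formats; for illustration:

    import yaml

    yaml.safe_load('{"answer": 42}')  # JSON input -> {'answer': 42}
    yaml.safe_load('answer: 42')      # YAML input -> {'answer': 42}
    yaml.safe_load('just a string')   # -> 'just a string', fails the dict check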
@@ -2641,7 +2651,7 @@ class NotificationTemplateSerializer(BaseSerializer):
else:
notification_type = None
if not notification_type:
-raise serializers.ValidationError('Missing required fields for Notification Configuration: notification_type')
+raise serializers.ValidationError(_('Missing required fields for Notification Configuration: notification_type'))
notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[notification_type]
missing_fields = []
@@ -2664,16 +2674,16 @@ class NotificationTemplateSerializer(BaseSerializer):
incorrect_type_fields.append((field, field_type))
continue
if field_type == "list" and len(field_val) < 1:
-error_list.append("No values specified for field '{}'".format(field))
+error_list.append(_("No values specified for field '{}'").format(field))
continue
if field_type == "password" and field_val == "$encrypted$" and object_actual is not None:
attrs['notification_configuration'][field] = object_actual.notification_configuration[field]
if missing_fields:
-error_list.append("Missing required fields for Notification Configuration: {}.".format(missing_fields))
+error_list.append(_("Missing required fields for Notification Configuration: {}.").format(missing_fields))
if incorrect_type_fields:
for type_field_error in incorrect_type_fields:
-error_list.append("Configuration field '{}' incorrect type, expected {}.".format(type_field_error[0],
-type_field_error[1]))
+error_list.append(_("Configuration field '{}' incorrect type, expected {}.").format(type_field_error[0],
+type_field_error[1]))
if error_list:
raise serializers.ValidationError(error_list)
return attrs
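
One subtlety in the hunk above: these messages call str.format on the lazy proxy. As far as Django's lazy text objects go, that forces the translation at call time, in the language active for the current request, so the messages still come out translated; a sketch:

    from django.utils.translation import ugettext_lazy as _

    # .format() on the proxy evaluates the translation immediately.
    error = _("No values specified for field '{}'").format('recipients')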
@@ -2722,7 +2732,7 @@ class ScheduleSerializer(BaseSerializer):
def validate_unified_job_template(self, value):
if type(value) == InventorySource and value.source not in SCHEDULEABLE_PROVIDERS:
-raise serializers.ValidationError('Inventory Source must be a cloud resource.')
+raise serializers.ValidationError(_('Inventory Source must be a cloud resource.'))
return value
# We reject rrules if:
@@ -2744,37 +2754,37 @@ class ScheduleSerializer(BaseSerializer):
match_multiple_dtstart = re.findall(".*?(DTSTART\:[0-9]+T[0-9]+Z)", rrule_value)
match_multiple_rrule = re.findall(".*?(RRULE\:)", rrule_value)
if not len(match_multiple_dtstart):
-raise serializers.ValidationError('DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ')
+raise serializers.ValidationError(_('DTSTART required in rrule. Value should match: DTSTART:YYYYMMDDTHHMMSSZ'))
if len(match_multiple_dtstart) > 1:
-raise serializers.ValidationError('Multiple DTSTART is not supported.')
+raise serializers.ValidationError(_('Multiple DTSTART is not supported.'))
if not len(match_multiple_rrule):
-raise serializers.ValidationError('RRULE require in rrule.')
+raise serializers.ValidationError(_('RRULE require in rrule.'))
if len(match_multiple_rrule) > 1:
-raise serializers.ValidationError('Multiple RRULE is not supported.')
+raise serializers.ValidationError(_('Multiple RRULE is not supported.'))
if 'interval' not in rrule_value.lower():
-raise serializers.ValidationError('INTERVAL required in rrule.')
+raise serializers.ValidationError(_('INTERVAL required in rrule.'))
if 'tzid' in rrule_value.lower():
-raise serializers.ValidationError('TZID is not supported.')
+raise serializers.ValidationError(_('TZID is not supported.'))
if 'secondly' in rrule_value.lower():
-raise serializers.ValidationError('SECONDLY is not supported.')
+raise serializers.ValidationError(_('SECONDLY is not supported.'))
if re.match(multi_by_month_day, rrule_value):
-raise serializers.ValidationError('Multiple BYMONTHDAYs not supported.')
+raise serializers.ValidationError(_('Multiple BYMONTHDAYs not supported.'))
if re.match(multi_by_month, rrule_value):
-raise serializers.ValidationError('Multiple BYMONTHs not supported.')
+raise serializers.ValidationError(_('Multiple BYMONTHs not supported.'))
if re.match(by_day_with_numeric_prefix, rrule_value):
-raise serializers.ValidationError("BYDAY with numeric prefix not supported.")
+raise serializers.ValidationError(_("BYDAY with numeric prefix not supported."))
if 'byyearday' in rrule_value.lower():
-raise serializers.ValidationError("BYYEARDAY not supported.")
+raise serializers.ValidationError(_("BYYEARDAY not supported."))
if 'byweekno' in rrule_value.lower():
-raise serializers.ValidationError("BYWEEKNO not supported.")
+raise serializers.ValidationError(_("BYWEEKNO not supported."))
if match_count:
count_val = match_count.groups()[0].strip().split("=")
if int(count_val[1]) > 999:
-raise serializers.ValidationError("COUNT > 999 is unsupported.")
+raise serializers.ValidationError(_("COUNT > 999 is unsupported."))
try:
rrule.rrulestr(rrule_value)
except Exception:
-raise serializers.ValidationError("rrule parsing failed validation.")
+raise serializers.ValidationError(_("rrule parsing failed validation."))
return value
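
For reference, a value that satisfies every check above (one DTSTART, one RRULE, an explicit INTERVAL, COUNT under 1000, no TZID/SECONDLY/BYYEARDAY/BYWEEKNO) and parses with dateutil looks roughly like this; the newline between the two parts is for dateutil's benefit and is an assumption here:

    from dateutil import rrule

    value = "DTSTART:20161025T120000Z\nRRULE:FREQ=DAILY;INTERVAL=1;COUNT=5"
    rule = rrule.rrulestr(value)
    list(rule)  # five daily occurrences starting 2016-10-25 12:00 UTC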
class ActivityStreamSerializer(BaseSerializer):
@@ -2899,9 +2909,9 @@ class AuthTokenSerializer(serializers.Serializer):
attrs['user'] = user
return attrs
else:
-raise serializers.ValidationError('Unable to login with provided credentials.')
+raise serializers.ValidationError(_('Unable to login with provided credentials.'))
else:
-raise serializers.ValidationError('Must include "username" and "password".')
+raise serializers.ValidationError(_('Must include "username" and "password".'))
class FactVersionSerializer(BaseFactSerializer):

View File

@@ -263,6 +263,7 @@ workflow_job_template_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/jobs/$', 'workflow_job_template_jobs_list'),
url(r'^(?P<pk>[0-9]+)/launch/$', 'workflow_job_template_launch'),
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'),
+url(r'^(?P<pk>[0-9]+)/labels/$', 'workflow_job_template_label_list'),
# url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_template_cancel'),
)
@@ -270,6 +271,7 @@ workflow_job_urls = patterns('awx.api.views',
url(r'^$', 'workflow_job_list'),
url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_detail'),
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_workflow_nodes_list'),
+url(r'^(?P<pk>[0-9]+)/labels/$', 'workflow_job_label_list'),
# url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_cancel'),
#url(r'^(?P<pk>[0-9]+)/notifications/$', 'workflow_job_notifications_list'),
)

View File

@@ -233,29 +233,29 @@ class ApiV1ConfigView(APIView):
if not request.user.is_superuser:
return Response(None, status=status.HTTP_404_NOT_FOUND)
if not isinstance(request.data, dict):
-return Response({"error": "Invalid license data"}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("Invalid license data")}, status=status.HTTP_400_BAD_REQUEST)
if "eula_accepted" not in request.data:
-return Response({"error": "Missing 'eula_accepted' property"}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("Missing 'eula_accepted' property")}, status=status.HTTP_400_BAD_REQUEST)
try:
eula_accepted = to_python_boolean(request.data["eula_accepted"])
except ValueError:
-return Response({"error": "'eula_accepted' value is invalid"}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("'eula_accepted' value is invalid")}, status=status.HTTP_400_BAD_REQUEST)
if not eula_accepted:
-return Response({"error": "'eula_accepted' must be True"}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("'eula_accepted' must be True")}, status=status.HTTP_400_BAD_REQUEST)
request.data.pop("eula_accepted")
try:
data_actual = json.dumps(request.data)
except Exception:
# FIX: Log
-return Response({"error": "Invalid JSON"}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("Invalid JSON")}, status=status.HTTP_400_BAD_REQUEST)
try:
from awx.main.task_engine import TaskEnhancer
license_data = json.loads(data_actual)
license_data_validated = TaskEnhancer(**license_data).validate_enhancements()
except Exception:
# FIX: Log
-return Response({"error": "Invalid License"}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("Invalid License")}, status=status.HTTP_400_BAD_REQUEST)
# If the license is valid, write it to the database.
if license_data_validated['valid_key']:
@@ -263,7 +263,7 @@ class ApiV1ConfigView(APIView):
settings.TOWER_URL_BASE = "{}://{}".format(request.scheme, request.get_host())
return Response(license_data_validated)
-return Response({"error": "Invalid license"}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("Invalid license")}, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request):
if not request.user.is_superuser:
@@ -274,7 +274,7 @@ class ApiV1ConfigView(APIView):
return Response(status=status.HTTP_204_NO_CONTENT)
except:
# FIX: Log
-return Response({"error": "Failed to remove license (%s)" % has_error}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("Failed to remove license (%s)") % has_error}, status=status.HTTP_400_BAD_REQUEST)
class DashboardView(APIView):
@@ -420,7 +420,7 @@ class DashboardJobsGraphView(APIView):
end_date = start_date - dateutil.relativedelta.relativedelta(days=1)
interval = 'hours'
else:
-return Response({'error': 'Unknown period "%s"' % str(period)}, status=status.HTTP_400_BAD_REQUEST)
+return Response({'error': _('Unknown period "%s"') % str(period)}, status=status.HTTP_400_BAD_REQUEST)
dashboard_data = {"jobs": {"successful": [], "failed": []}}
for element in success_qss.time_series(end_date, start_date, interval=interval):
@@ -653,8 +653,8 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
# if no organizations exist in the system.
if (not feature_enabled('multiple_organizations') and
self.model.objects.exists()):
-raise LicenseForbids('Your Tower license only permits a single '
-'organization to exist.')
+raise LicenseForbids(_('Your Tower license only permits a single '
+'organization to exist.'))
# Okay, create the organization as usual.
return super(OrganizationList, self).create(request, *args, **kwargs)
@@ -764,8 +764,8 @@ class OrganizationActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(OrganizationActivityStreamList, self).get(request, *args, **kwargs)
@@ -858,20 +858,20 @@ class TeamRolesList(SubListCreateAttachDetachAPIView):
# Forbid implicit role creation here
sub_id = request.data.get('id', None)
if not sub_id:
-data = dict(msg="Role 'id' field is missing.")
+data = dict(msg=_("Role 'id' field is missing."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
role = get_object_or_400(Role, pk=sub_id)
org_content_type = ContentType.objects.get_for_model(Organization)
if role.content_type == org_content_type:
-data = dict(msg="You cannot assign an Organization role as a child role for a Team.")
+data = dict(msg=_("You cannot assign an Organization role as a child role for a Team."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
team = get_object_or_404(Team, pk=self.kwargs['pk'])
credential_content_type = ContentType.objects.get_for_model(Credential)
if role.content_type == credential_content_type:
if not role.content_object.organization or role.content_object.organization.id != team.organization.id:
-data = dict(msg="You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization")
+data = dict(msg=_("You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
return super(TeamRolesList, self).post(request, *args, **kwargs)
@@ -917,8 +917,8 @@ class TeamActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(TeamActivityStreamList, self).get(request, *args, **kwargs)
@@ -955,15 +955,6 @@ class ProjectList(ListCreateAPIView):
)
return projects_qs
-def get(self, request, *args, **kwargs):
-# Not optimal, but make sure the project status and last_updated fields
-# are up to date here...
-projects_qs = Project.objects
-projects_qs = projects_qs.select_related('current_job', 'last_job')
-for project in projects_qs:
-project._set_status_and_last_job_run()
-return super(ProjectList, self).get(request, *args, **kwargs)
class ProjectDetail(RetrieveUpdateDestroyAPIView):
model = Project
@@ -973,7 +964,7 @@ class ProjectDetail(RetrieveUpdateDestroyAPIView):
obj = self.get_object()
can_delete = request.user.can_access(Project, 'delete', obj)
if not can_delete:
-raise PermissionDenied("Cannot delete project.")
+raise PermissionDenied(_("Cannot delete project."))
for pu in obj.project_updates.filter(status__in=['new', 'pending', 'waiting', 'running']):
pu.cancel()
return super(ProjectDetail, self).destroy(request, *args, **kwargs)
@@ -1020,8 +1011,8 @@ class ProjectActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(ProjectActivityStreamList, self).get(request, *args, **kwargs)
@@ -1201,26 +1192,26 @@ class UserRolesList(SubListCreateAttachDetachAPIView):
# Forbid implicit role creation here
sub_id = request.data.get('id', None)
if not sub_id:
-data = dict(msg="Role 'id' field is missing.")
+data = dict(msg=_("Role 'id' field is missing."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
if sub_id == self.request.user.admin_role.pk:
-raise PermissionDenied('You may not perform any action with your own admin_role.')
+raise PermissionDenied(_('You may not perform any action with your own admin_role.'))
user = get_object_or_400(User, pk=self.kwargs['pk'])
role = get_object_or_400(Role, pk=sub_id)
user_content_type = ContentType.objects.get_for_model(User)
if role.content_type == user_content_type:
-raise PermissionDenied('You may not change the membership of a users admin_role')
+raise PermissionDenied(_('You may not change the membership of a users admin_role'))
credential_content_type = ContentType.objects.get_for_model(Credential)
if role.content_type == credential_content_type:
if role.content_object.organization and user not in role.content_object.organization.member_role:
-data = dict(msg="You cannot grant credential access to a user not in the credentials' organization")
+data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
if not role.content_object.organization and not request.user.is_superuser:
-data = dict(msg="You cannot grant private credential access to another user")
+data = dict(msg=_("You cannot grant private credential access to another user"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
@@ -1283,8 +1274,8 @@ class UserActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(UserActivityStreamList, self).get(request, *args, **kwargs)
@@ -1324,13 +1315,13 @@ class UserDetail(RetrieveUpdateDestroyAPIView):
if left is not None and right is not None and left != right:
bad_changes[field] = (left, right)
if bad_changes:
-raise PermissionDenied('Cannot change %s.' % ', '.join(bad_changes.keys()))
+raise PermissionDenied(_('Cannot change %s.') % ', '.join(bad_changes.keys()))
def destroy(self, request, *args, **kwargs):
obj = self.get_object()
can_delete = request.user.can_access(User, 'delete', obj)
if not can_delete:
-raise PermissionDenied('Cannot delete user.')
+raise PermissionDenied(_('Cannot delete user.'))
return super(UserDetail, self).destroy(request, *args, **kwargs)
class UserAccessList(ResourceAccessList):
@@ -1442,8 +1433,8 @@ class CredentialActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(CredentialActivityStreamList, self).get(request, *args, **kwargs)
@@ -1480,7 +1471,7 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
instance = self.get_object()
can_delete = request.user.can_access(self.model, 'delete', instance)
if not can_delete:
-raise PermissionDenied("Cannot delete inventory script.")
+raise PermissionDenied(_("Cannot delete inventory script."))
for inv_src in InventorySource.objects.filter(source_script=instance):
inv_src.source_script = None
inv_src.save()
@@ -1531,8 +1522,8 @@ class InventoryActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(InventoryActivityStreamList, self).get(request, *args, **kwargs)
@@ -1664,8 +1655,8 @@ class HostActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(HostActivityStreamList, self).get(request, *args, **kwargs)
@@ -1682,8 +1673,8 @@ class SystemTrackingEnforcementMixin(APIView):
'''
def check_permissions(self, request):
if not feature_enabled("system_tracking"):
-raise LicenseForbids("Your license does not permit use "
-"of system tracking.")
+raise LicenseForbids(_("Your license does not permit use "
+"of system tracking."))
return super(SystemTrackingEnforcementMixin, self).check_permissions(request)
class HostFactVersionsList(ListAPIView, ParentMixin, SystemTrackingEnforcementMixin):
@@ -1727,7 +1718,7 @@ class HostFactCompareView(SubDetailAPIView, SystemTrackingEnforcementMixin):
fact_entry = Fact.get_host_fact(host_obj.id, module_spec, datetime_actual)
if not fact_entry:
-return Response({'detail': 'Fact not found.'}, status=status.HTTP_404_NOT_FOUND)
+return Response({'detail': _('Fact not found.')}, status=status.HTTP_404_NOT_FOUND)
return Response(self.serializer_class(instance=fact_entry).data)
class GroupList(ListCreateAPIView):
@@ -1849,8 +1840,8 @@ class GroupActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(GroupActivityStreamList, self).get(request, *args, **kwargs)
@@ -2065,7 +2056,7 @@ class InventorySourceDetail(RetrieveUpdateAPIView):
obj = self.get_object()
can_delete = request.user.can_access(InventorySource, 'delete', obj)
if not can_delete:
-raise PermissionDenied("Cannot delete inventory source.")
+raise PermissionDenied(_("Cannot delete inventory source."))
for pu in obj.inventory_updates.filter(status__in=['new', 'pending', 'waiting', 'running']):
pu.cancel()
return super(InventorySourceDetail, self).destroy(request, *args, **kwargs)
@@ -2093,8 +2084,8 @@ class InventorySourceActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(InventorySourceActivityStreamList, self).get(request, *args, **kwargs)
@@ -2109,7 +2100,7 @@ class InventorySourceNotificationTemplatesAnyList(SubListCreateAttachDetachAPIVi
def post(self, request, *args, **kwargs):
parent = self.get_parent_object()
if parent.source not in CLOUD_INVENTORY_SOURCES:
-return Response(dict(msg="Notification Templates can only be assigned when source is one of {}."
+return Response(dict(msg=_("Notification Templates can only be assigned when source is one of {}.")
.format(CLOUD_INVENTORY_SOURCES, parent.source)),
status=status.HTTP_400_BAD_REQUEST)
return super(InventorySourceNotificationTemplatesAnyList, self).post(request, *args, **kwargs)
@@ -2311,8 +2302,8 @@ class JobTemplateSurveySpec(GenericAPIView):
def get(self, request, *args, **kwargs):
obj = self.get_object()
if not feature_enabled('surveys'):
-raise LicenseForbids('Your license does not allow '
-'adding surveys.')
+raise LicenseForbids(_('Your license does not allow '
+'adding surveys.'))
return Response(obj.survey_spec)
def post(self, request, *args, **kwargs):
@@ -2321,42 +2312,43 @@ class JobTemplateSurveySpec(GenericAPIView):
# Sanity check: Are surveys available on this license?
# If not, do not allow them to be used.
if not feature_enabled('surveys'):
-raise LicenseForbids('Your license does not allow '
-'adding surveys.')
+raise LicenseForbids(_('Your license does not allow '
+'adding surveys.'))
if not request.user.can_access(self.model, 'change', obj, None):
raise PermissionDenied()
try:
obj.survey_spec = json.dumps(request.data)
except ValueError:
-return Response(dict(error="Invalid JSON when parsing survey spec."), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("Invalid JSON when parsing survey spec.")), status=status.HTTP_400_BAD_REQUEST)
if "name" not in obj.survey_spec:
-return Response(dict(error="'name' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'name' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
if "description" not in obj.survey_spec:
-return Response(dict(error="'description' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'description' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
if "spec" not in obj.survey_spec:
-return Response(dict(error="'spec' missing from survey spec."), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'spec' missing from survey spec.")), status=status.HTTP_400_BAD_REQUEST)
if not isinstance(obj.survey_spec["spec"], list):
-return Response(dict(error="'spec' must be a list of items."), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'spec' must be a list of items.")), status=status.HTTP_400_BAD_REQUEST)
if len(obj.survey_spec["spec"]) < 1:
-return Response(dict(error="'spec' doesn't contain any items."), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'spec' doesn't contain any items.")), status=status.HTTP_400_BAD_REQUEST)
idx = 0
variable_set = set()
for survey_item in obj.survey_spec["spec"]:
if not isinstance(survey_item, dict):
-return Response(dict(error="Survey question %s is not a json object." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("Survey question %s is not a json object.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if "type" not in survey_item:
-return Response(dict(error="'type' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'type' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if "question_name" not in survey_item:
-return Response(dict(error="'question_name' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'question_name' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if "variable" not in survey_item:
-return Response(dict(error="'variable' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'variable' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
if survey_item['variable'] in variable_set:
-return Response(dict(error="'variable' '%s' duplicated in survey question %s." % (survey_item['variable'], str(idx))), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'variable' '%(item)s' duplicated in survey question %(survey)s.") %
+{'item': survey_item['variable'], 'survey': str(idx)}), status=status.HTTP_400_BAD_REQUEST)
else:
variable_set.add(survey_item['variable'])
if "required" not in survey_item:
-return Response(dict(error="'required' missing from survey question %s." % str(idx)), status=status.HTTP_400_BAD_REQUEST)
+return Response(dict(error=_("'required' missing from survey question %s.") % str(idx)), status=status.HTTP_400_BAD_REQUEST)
idx += 1
obj.save()
return Response()
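
Putting the checks above together, the smallest spec that passes validation looks something like this (all values illustrative):

    survey_spec = {
        "name": "Example survey",
        "description": "Collects one variable.",
        "spec": [
            {
                "type": "text",              # required on every question
                "question_name": "Greeting",
                "variable": "greeting",      # must be unique across questions
                "required": False,
            },
        ],
    }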
@@ -2381,8 +2373,8 @@ class JobTemplateActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(JobTemplateActivityStreamList, self).get(request, *args, **kwargs)
@@ -2549,22 +2541,22 @@ class JobTemplateCallback(GenericAPIView):
matching_hosts = self.find_matching_hosts()
# Check matching hosts.
if not matching_hosts:
-data = dict(msg='No matching host could be found!')
+data = dict(msg=_('No matching host could be found!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
elif len(matching_hosts) > 1:
-data = dict(msg='Multiple hosts matched the request!')
+data = dict(msg=_('Multiple hosts matched the request!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
else:
host = list(matching_hosts)[0]
if not job_template.can_start_without_user_input():
-data = dict(msg='Cannot start automatically, user input required!')
+data = dict(msg=_('Cannot start automatically, user input required!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
limit = host.name
# NOTE: We limit this to one job waiting per host per callback to keep them from stacking crazily
if Job.objects.filter(status__in=['pending', 'waiting', 'running'], job_template=job_template,
limit=limit).count() > 0:
-data = dict(msg='Host callback job already pending.')
+data = dict(msg=_('Host callback job already pending.'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
# Everything is fine; actually create the job.
@@ -2577,7 +2569,7 @@ class JobTemplateCallback(GenericAPIView):
kv['extra_vars'] = extra_vars
result = job.signal_start(**kv)
if not result:
-data = dict(msg='Error starting job!')
+data = dict(msg=_('Error starting job!'))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
# Return the location of the new job.
@@ -2678,13 +2670,9 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
model = WorkflowJobNode
serializer_class = WorkflowJobNodeListSerializer
always_allow_superuser = True # TODO: RBAC
-parent_model = Job
+parent_model = WorkflowJobNode
relationship = ''
-'''
-enforce_parent_relationship = 'workflow_job_template'
-new_in_310 = True
-'''
#
#Limit the set of WorkflowJobeNodes to the related nodes of specified by
@@ -2729,6 +2717,11 @@ class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView):
serializer_class = WorkflowJobTemplateSerializer
always_allow_superuser = False
+class WorkflowJobTemplateLabelList(JobTemplateLabelList):
+parent_model = WorkflowJobTemplate
# TODO:
class WorkflowJobTemplateLaunch(GenericAPIView):
@@ -2797,7 +2790,7 @@ class SystemJobTemplateList(ListAPIView):
def get(self, request, *args, **kwargs):
if not request.user.is_superuser and not request.user.is_system_auditor:
-raise PermissionDenied("Superuser privileges needed.")
+raise PermissionDenied(_("Superuser privileges needed."))
return super(SystemJobTemplateList, self).get(request, *args, **kwargs)
class SystemJobTemplateDetail(RetrieveAPIView):
@@ -2886,6 +2879,9 @@ class JobLabelList(SubListAPIView):
relationship = 'labels'
parent_key = 'job'
+class WorkflowJobLabelList(JobLabelList):
+parent_model = WorkflowJob
class JobActivityStreamList(SubListAPIView):
model = ActivityStream
@@ -2898,8 +2894,8 @@ class JobActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(JobActivityStreamList, self).get(request, *args, **kwargs)
@@ -3165,15 +3161,15 @@ class JobJobTasksList(BaseJobEventsList):
# If there's no event ID specified, this will return a 404.
job = Job.objects.filter(pk=self.kwargs['pk'])
if not job.exists():
-return ({'detail': 'Job not found.'}, -1, status.HTTP_404_NOT_FOUND)
+return ({'detail': _('Job not found.')}, -1, status.HTTP_404_NOT_FOUND)
job = job[0]
if 'event_id' not in request.query_params:
-return ({"detail": "'event_id' not provided."}, -1, status.HTTP_400_BAD_REQUEST)
+return ({"detail": _("'event_id' not provided.")}, -1, status.HTTP_400_BAD_REQUEST)
parent_task = job.job_events.filter(pk=int(request.query_params.get('event_id', -1)))
if not parent_task.exists():
-return ({'detail': 'Parent event not found.'}, -1, status.HTTP_404_NOT_FOUND)
+return ({'detail': _('Parent event not found.')}, -1, status.HTTP_404_NOT_FOUND)
parent_task = parent_task[0]
STARTING_EVENTS = ('playbook_on_task_start', 'playbook_on_setup')
@@ -3493,8 +3489,8 @@ class AdHocCommandActivityStreamList(SubListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(AdHocCommandActivityStreamList, self).get(request, *args, **kwargs)
@@ -3515,7 +3511,7 @@ class SystemJobList(ListCreateAPIView):
def get(self, request, *args, **kwargs):
if not request.user.is_superuser and not request.user.is_system_auditor:
-raise PermissionDenied("Superuser privileges needed.")
+raise PermissionDenied(_("Superuser privileges needed."))
return super(SystemJobList, self).get(request, *args, **kwargs)
@@ -3571,8 +3567,9 @@ class UnifiedJobStdout(RetrieveAPIView):
unified_job = self.get_object()
obj_size = unified_job.result_stdout_size
if request.accepted_renderer.format != 'txt_download' and obj_size > settings.STDOUT_MAX_BYTES_DISPLAY:
-response_message = "Standard Output too large to display (%d bytes), only download supported for sizes over %d bytes" % (obj_size,
-settings.STDOUT_MAX_BYTES_DISPLAY)
+response_message = "Standard Output too large to display (%(text_size)d bytes), " \
+"only download supported for sizes over %(supported_size)d bytes" % \
+{'text_size': obj_size, 'supported_size': settings.STDOUT_MAX_BYTES_DISPLAY}
if request.accepted_renderer.format == 'json':
return Response({'range': {'start': 0, 'end': 1, 'absolute_end': 1}, 'content': response_message})
else:
@@ -3615,7 +3612,7 @@ class UnifiedJobStdout(RetrieveAPIView):
response["Content-Disposition"] = 'attachment; filename="job_%s.txt"' % str(unified_job.id)
return response
except Exception as e:
-return Response({"error": "Error generating stdout download file: %s" % str(e)}, status=status.HTTP_400_BAD_REQUEST)
+return Response({"error": _("Error generating stdout download file: %s") % str(e)}, status=status.HTTP_400_BAD_REQUEST)
elif request.accepted_renderer.format == 'txt':
return Response(unified_job.result_stdout)
else:
@@ -3655,7 +3652,7 @@ class NotificationTemplateDetail(RetrieveUpdateDestroyAPIView):
if not request.user.can_access(self.model, 'delete', obj):
return Response(status=status.HTTP_404_NOT_FOUND)
if obj.notifications.filter(status='pending').exists():
-return Response({"error": "Delete not allowed while there are pending notifications"},
+return Response({"error": _("Delete not allowed while there are pending notifications")},
status=status.HTTP_405_METHOD_NOT_ALLOWED)
return super(NotificationTemplateDetail, self).delete(request, *args, **kwargs)
@@ -3722,8 +3719,8 @@ class ActivityStreamList(SimpleListAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(ActivityStreamList, self).get(request, *args, **kwargs)
@@ -3739,8 +3736,8 @@ class ActivityStreamDetail(RetrieveAPIView):
# Sanity check: Does this license allow activity streams?
# If not, forbid this request.
if not feature_enabled('activity_streams'):
-raise LicenseForbids('Your license does not allow use of '
-'the activity stream.')
+raise LicenseForbids(_('Your license does not allow use of '
+'the activity stream.'))
# Okay, let it through.
return super(ActivityStreamDetail, self).get(request, *args, **kwargs)
@ -3790,26 +3787,26 @@ class RoleUsersList(SubListCreateAttachDetachAPIView):
# Forbid implicit user creation here
sub_id = request.data.get('id', None)
if not sub_id:
data = dict(msg="User 'id' field is missing.")
data = dict(msg=_("User 'id' field is missing."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
user = get_object_or_400(User, pk=sub_id)
role = self.get_parent_object()
if role == self.request.user.admin_role:
raise PermissionDenied('You may not perform any action with your own admin_role.')
raise PermissionDenied(_('You may not perform any action with your own admin_role.'))
user_content_type = ContentType.objects.get_for_model(User)
if role.content_type == user_content_type:
raise PermissionDenied('You may not change the membership of a users admin_role')
raise PermissionDenied(_("You may not change the membership of a user's admin_role"))
credential_content_type = ContentType.objects.get_for_model(Credential)
if role.content_type == credential_content_type:
if role.content_object.organization and user not in role.content_object.organization.member_role:
data = dict(msg="You cannot grant credential access to a user not in the credentials' organization")
data = dict(msg=_("You cannot grant credential access to a user not in the credentials' organization"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
if not role.content_object.organization and not request.user.is_superuser:
data = dict(msg="You cannot grant private credential access to another user")
data = dict(msg=_("You cannot grant private credential access to another user"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
return super(RoleUsersList, self).post(request, *args, **kwargs)
@ -3833,7 +3830,7 @@ class RoleTeamsList(SubListAPIView):
# Forbid implicit team creation here
sub_id = request.data.get('id', None)
if not sub_id:
data = dict(msg="Team 'id' field is missing.")
data = dict(msg=_("Team 'id' field is missing."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
team = get_object_or_400(Team, pk=sub_id)
@ -3841,13 +3838,13 @@ class RoleTeamsList(SubListAPIView):
organization_content_type = ContentType.objects.get_for_model(Organization)
if role.content_type == organization_content_type:
data = dict(msg="You cannot assign an Organization role as a child role for a Team.")
data = dict(msg=_("You cannot assign an Organization role as a child role for a Team."))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
credential_content_type = ContentType.objects.get_for_model(Credential)
if role.content_type == credential_content_type:
if not role.content_object.organization or role.content_object.organization.id != team.organization.id:
data = dict(msg="You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization")
data = dict(msg=_("You cannot grant credential access to a team when the Organization field isn't set, or belongs to a different organization"))
return Response(data, status=status.HTTP_400_BAD_REQUEST)
action = 'attach'
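The change repeated throughout this file is wrapping user-facing strings in ugettext_lazy so each message is translated in the requester's locale when the response is rendered. Two details of the pattern, as a minimal sketch (the function names are illustrative, not from this commit):

    from django.utils.translation import ugettext_lazy as _
    from rest_framework import exceptions

    def require_superuser(user):
        # The lazy proxy defers the catalog lookup until the message is
        # rendered into a response, so the request's active locale applies.
        if not (user.is_superuser or user.is_system_auditor):
            raise exceptions.PermissionDenied(_("Superuser privileges needed."))

    def stdout_download_error(exc):
        # Interpolate after translating: _('... %s') % value. Writing
        # _('... %s' % value) would look up the already-formatted string,
        # which is never in the catalog.
        return {'error': _('Error generating stdout download file: %s') % str(exc)}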

View File

@ -14,6 +14,7 @@ from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.text import slugify
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
# Tower
from awx import MODE
@ -36,27 +37,27 @@ class Command(BaseCommand):
action='store_true',
dest='dry_run',
default=False,
help='Only show which settings would be commented/migrated.',
help=_('Only show which settings would be commented/migrated.'),
)
parser.add_argument(
'--skip-errors',
action='store_true',
dest='skip_errors',
default=False,
help='Skip over settings that would raise an error when commenting/migrating.',
help=_('Skip over settings that would raise an error when commenting/migrating.'),
)
parser.add_argument(
'--no-comment',
action='store_true',
dest='no_comment',
default=False,
help='Skip commenting out settings in files.',
help=_('Skip commenting out settings in files.'),
)
parser.add_argument(
'--backup-suffix',
dest='backup_suffix',
default=now().strftime('.%Y%m%d%H%M%S'),
help='Backup existing settings files with this suffix.',
help=_('Backup existing settings files with this suffix.'),
)
@transaction.atomic
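The help strings in add_arguments() are marked lazy because argument definitions are built when the command is loaded, before any locale is active; ugettext_lazy defers the lookup until argparse actually renders the help. Condensed sketch (a hypothetical command, not the one in this file):

    from django.core.management.base import BaseCommand
    from django.utils.translation import ugettext_lazy as _

    class Command(BaseCommand):
        def add_arguments(self, parser):
            parser.add_argument(
                '--dry-run', action='store_true', dest='dry_run', default=False,
                # Resolved when --help is printed, not at import time.
                help=_('Only show which settings would be commented/migrated.'),
            )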

View File

@ -417,7 +417,7 @@ class OrganizationAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__inventory__organization=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True
@ -491,7 +491,7 @@ class InventoryAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__inventory=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True
@ -613,7 +613,7 @@ class GroupAccess(BaseAccess):
active_jobs.extend([dict(type="inventory_update", id=o.id)
for o in InventoryUpdate.objects.filter(inventory_source__in=obj.inventory_sources.all(), status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True
@ -890,7 +890,7 @@ class ProjectAccess(BaseAccess):
active_jobs.extend([dict(type="project_update", id=o.id)
for o in ProjectUpdate.objects.filter(project=obj, status__in=ACTIVE_STATES)])
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True
@ -1045,6 +1045,8 @@ class JobTemplateAccess(BaseAccess):
self.check_license(feature='system_tracking')
if obj.survey_enabled:
self.check_license(feature='surveys')
if Instance.objects.active_count() > 1:
self.check_license(feature='ha')
# Super users can start any job
if self.user.is_superuser:
@ -1130,7 +1132,7 @@ class JobTemplateAccess(BaseAccess):
active_jobs = [dict(type="job", id=o.id)
for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True
@ -1170,6 +1172,29 @@ class JobAccess(BaseAccess):
Q(inventory__organization__in=org_access_qs) |
Q(project__organization__in=org_access_qs)).distinct()
def related_orgs(self, obj):
orgs = []
if obj.inventory and obj.inventory.organization:
orgs.append(obj.inventory.organization)
if obj.project and obj.project.organization and obj.project.organization not in orgs:
orgs.append(obj.project.organization)
return orgs
def org_access(self, obj, role_types=['admin_role']):
orgs = self.related_orgs(obj)
for org in orgs:
for role_type in role_types:
role = getattr(org, role_type)
if self.user in role:
return True
return False
@check_superuser
def can_read(self, obj):
if obj.job_template and self.user in obj.job_template.read_role:
return True
return self.org_access(obj, role_types=['auditor_role', 'admin_role'])
def can_add(self, data):
if not data: # So the browseable API will work
return True
@ -1198,12 +1223,7 @@ class JobAccess(BaseAccess):
@check_superuser
def can_delete(self, obj):
if obj.inventory is not None and self.user in obj.inventory.organization.admin_role:
return True
if (obj.project is not None and obj.project.organization is not None and
self.user in obj.project.organization.admin_role):
return True
return False
return self.org_access(obj)
def can_start(self, obj, validate_license=True):
if validate_license:
@ -1481,6 +1501,8 @@ class WorkflowJobTemplateAccess(BaseAccess):
if self.user.is_superuser:
return True
if data is None:
return self.user in obj.admin_role
org_pk = get_pk_from_dict(data, 'organization')
if ('organization' not in data or
@ -1502,7 +1524,7 @@ class WorkflowJobTemplateAccess(BaseAccess):
active_jobs = [dict(type="job", id=o.id)
for o in obj.jobs.filter(status__in=ACTIVE_STATES)]
if len(active_jobs) > 0:
raise StateConflict({"conflict": "Resource is being used by running jobs",
raise StateConflict({"conflict": _("Resource is being used by running jobs"),
"active_jobs": active_jobs})
return True
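JobAccess now centralizes its organization checks: related_orgs() collects the inventory's and project's organizations, org_access() tests the requesting user against the named roles on each, and can_read/can_delete delegate to it instead of repeating the role walks. The behavior, reduced to a standalone sketch:

    def org_access(user, job, role_types=('admin_role',)):
        orgs = []
        if job.inventory and job.inventory.organization:
            orgs.append(job.inventory.organization)
        if job.project and job.project.organization and job.project.organization not in orgs:
            orgs.append(job.project.organization)
        # `user in role` is the Role membership test used throughout access.py.
        return any(user in getattr(org, role_type)
                   for org in orgs for role_type in role_types)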

View File

@ -31,13 +31,14 @@ class InstanceManager(models.Manager):
hostname='localhost',
uuid='00000000-0000-0000-0000-000000000000')
# If we can determine the instance we are on then return
# that, otherwise None which would be the standalone
# case
# TODO: Replace, this doesn't work if the hostname
# is different from the Instance.name
# node = self.filter(hostname=socket.gethostname())
return self.all()[0]
node = self.filter(hostname=settings.CLUSTER_HOST_ID)
if node.exists():
return node[0]
raise RuntimeError("No instance found with the current cluster host id")
def active_count(self):
"""Return count of active Tower nodes for licensing."""
return self.all().count()
def my_role(self):
# NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
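me() now resolves the local node through settings.CLUSTER_HOST_ID instead of assuming a single row, and active_count() is what the new 'ha' license check in access.py consults. Expected usage, assuming CLUSTER_HOST_ID matches a registered Instance hostname:

    from awx.main.models import Instance

    # Raises RuntimeError if no Instance row matches settings.CLUSTER_HOST_ID.
    this_node = Instance.objects.me()
    # Licensing: more than one active node requires the 'ha' feature.
    node_count = Instance.objects.active_count()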

View File

@ -0,0 +1,44 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0040_v310_artifacts'),
]
operations = [
migrations.AddField(
model_name='inventorysource',
name='timeout',
field=models.PositiveIntegerField(default=0, blank=True),
),
migrations.AddField(
model_name='inventoryupdate',
name='timeout',
field=models.PositiveIntegerField(default=0, blank=True),
),
migrations.AddField(
model_name='job',
name='timeout',
field=models.PositiveIntegerField(default=0, blank=True),
),
migrations.AddField(
model_name='jobtemplate',
name='timeout',
field=models.PositiveIntegerField(default=0, blank=True),
),
migrations.AddField(
model_name='project',
name='timeout',
field=models.PositiveIntegerField(default=0, blank=True),
),
migrations.AddField(
model_name='projectupdate',
name='timeout',
field=models.PositiveIntegerField(default=0, blank=True),
),
]
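These timeout columns default to 0, which the task layer treats as "unset": for models whose class name appears in settings.DEFAULT_JOB_TIMEOUTS, a non-zero per-object timeout overrides the global entry, 0 falls back to it, and a resolved value of 0 disables enforcement (see BaseTask.run_pexpect in tasks.py below). A hypothetical settings fragment — the keys are model class names, the values invented for illustration:

    DEFAULT_JOB_TIMEOUTS = {
        'Job': 3600,             # seconds
        'ProjectUpdate': 900,
        'InventoryUpdate': 900,
    }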

View File

@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0041_v310_job_timeout'),
]
operations = [
migrations.AddField(
model_name='unifiedjob',
name='execution_node',
field=models.TextField(default=b'', editable=False, blank=True),
),
]

View File

@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0042_v310_executionnode'),
]
operations = [
migrations.AddField(
model_name='project',
name='scm_revision',
field=models.CharField(default=b'', editable=False, max_length=1024, blank=True, help_text='The last revision fetched by a project update', verbose_name='SCM Revision'),
),
migrations.AddField(
model_name='projectupdate',
name='job_type',
field=models.CharField(default=b'check', max_length=64, choices=[(b'run', 'Run'), (b'check', 'Check')]),
),
migrations.AddField(
model_name='job',
name='scm_revision',
field=models.CharField(default=b'', editable=False, max_length=1024, blank=True, help_text='The SCM Revision from the Project used for this job, if available', verbose_name='SCM Revision'),
),
]

View File

@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import jsonfield.fields
class Migration(migrations.Migration):
dependencies = [
('main', '0043_v310_scm_revision'),
]
operations = [
migrations.AddField(
model_name='project',
name='playbook_files',
field=jsonfield.fields.JSONField(default=[], help_text='List of playbooks found in the project', verbose_name='Playbook Files', editable=False, blank=True),
),
]
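One caution: default=[] attaches a single shared list object to the field definition. Django's usual idiom for mutable defaults is a callable, which yields a fresh list per instance:

    playbook_files = jsonfield.fields.JSONField(
        blank=True,
        default=list,   # callable: a new list per model instance
        editable=False,
    )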

View File

@ -94,14 +94,14 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
def clean_inventory(self):
inv = self.inventory
if not inv:
raise ValidationError('No valid inventory.')
raise ValidationError(_('No valid inventory.'))
return inv
def clean_credential(self):
cred = self.credential
if cred and cred.kind != 'ssh':
raise ValidationError(
'You must provide a machine / SSH credential.',
_('You must provide a machine / SSH credential.'),
)
return cred
@ -112,18 +112,18 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
def clean_module_name(self):
if type(self.module_name) not in (str, unicode):
raise ValidationError("Invalid type for ad hoc command")
raise ValidationError(_("Invalid type for ad hoc command"))
module_name = self.module_name.strip() or 'command'
if module_name not in settings.AD_HOC_COMMANDS:
raise ValidationError('Unsupported module for ad hoc commands.')
raise ValidationError(_('Unsupported module for ad hoc commands.'))
return module_name
def clean_module_args(self):
if type(self.module_args) not in (str, unicode):
raise ValidationError("Invalid type for ad hoc command")
raise ValidationError(_("Invalid type for ad hoc command"))
module_args = self.module_args
if self.module_name in ('command', 'shell') and not module_args:
raise ValidationError('No argument passed to %s module.' % self.module_name)
raise ValidationError(_('No argument passed to %s module.') % self.module_name)
return module_args
@property

View File

@ -29,7 +29,8 @@ __all__ = ['VarsDictProperty', 'BaseModel', 'CreatedModifiedModel',
'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ',
'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY', 'PERM_INVENTORY_SCAN',
'PERM_INVENTORY_CHECK', 'PERM_JOBTEMPLATE_CREATE', 'JOB_TYPE_CHOICES',
'AD_HOC_JOB_TYPE_CHOICES', 'PERMISSION_TYPE_CHOICES', 'CLOUD_INVENTORY_SOURCES',
'AD_HOC_JOB_TYPE_CHOICES', 'PROJECT_UPDATE_JOB_TYPE_CHOICES',
'PERMISSION_TYPE_CHOICES', 'CLOUD_INVENTORY_SOURCES',
'VERBOSITY_CHOICES']
PERM_INVENTORY_ADMIN = 'admin'
@ -51,6 +52,11 @@ AD_HOC_JOB_TYPE_CHOICES = [
(PERM_INVENTORY_CHECK, _('Check')),
]
PROJECT_UPDATE_JOB_TYPE_CHOICES = [
(PERM_INVENTORY_DEPLOY, _('Run')),
(PERM_INVENTORY_CHECK, _('Check')),
]
PERMISSION_TYPE_CHOICES = [
(PERM_INVENTORY_READ, _('Read Inventory')),
(PERM_INVENTORY_WRITE, _('Edit Inventory')),

View File

@ -278,9 +278,9 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
"""
host = self.host or ''
if not host and self.kind == 'vmware':
raise ValidationError('Host required for VMware credential.')
raise ValidationError(_('Host required for VMware credential.'))
if not host and self.kind == 'openstack':
raise ValidationError('Host required for OpenStack credential.')
raise ValidationError(_('Host required for OpenStack credential.'))
return host
def clean_domain(self):
@ -289,32 +289,32 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
def clean_username(self):
username = self.username or ''
if not username and self.kind == 'aws':
raise ValidationError('Access key required for AWS credential.')
raise ValidationError(_('Access key required for AWS credential.'))
if not username and self.kind == 'rax':
raise ValidationError('Username required for Rackspace '
'credential.')
raise ValidationError(_('Username required for Rackspace '
'credential.'))
if not username and self.kind == 'vmware':
raise ValidationError('Username required for VMware credential.')
raise ValidationError(_('Username required for VMware credential.'))
if not username and self.kind == 'openstack':
raise ValidationError('Username required for OpenStack credential.')
raise ValidationError(_('Username required for OpenStack credential.'))
return username
def clean_password(self):
password = self.password or ''
if not password and self.kind == 'aws':
raise ValidationError('Secret key required for AWS credential.')
raise ValidationError(_('Secret key required for AWS credential.'))
if not password and self.kind == 'rax':
raise ValidationError('API key required for Rackspace credential.')
raise ValidationError(_('API key required for Rackspace credential.'))
if not password and self.kind == 'vmware':
raise ValidationError('Password required for VMware credential.')
raise ValidationError(_('Password required for VMware credential.'))
if not password and self.kind == 'openstack':
raise ValidationError('Password or API key required for OpenStack credential.')
raise ValidationError(_('Password or API key required for OpenStack credential.'))
return password
def clean_project(self):
project = self.project or ''
if self.kind == 'openstack' and not project:
raise ValidationError('Project name required for OpenStack credential.')
raise ValidationError(_('Project name required for OpenStack credential.'))
return project
def clean_ssh_key_data(self):
@ -341,13 +341,13 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
def clean_ssh_key_unlock(self):
if self.has_encrypted_ssh_key_data and not self.ssh_key_unlock:
raise ValidationError('SSH key unlock must be set when SSH key '
'is encrypted.')
raise ValidationError(_('SSH key unlock must be set when SSH key '
'is encrypted.'))
return self.ssh_key_unlock
def clean(self):
if self.deprecated_user and self.deprecated_team:
raise ValidationError('Credential cannot be assigned to both a user and team.')
raise ValidationError(_('Credential cannot be assigned to both a user and team.'))
def _password_field_allows_ask(self, field):
return bool(self.kind == 'ssh' and field != 'ssh_key_data')

View File

@ -860,6 +860,10 @@ class InventorySourceOptions(BaseModel):
default=False,
help_text=_('Overwrite local variables from remote inventory source.'),
)
timeout = models.PositiveIntegerField(
blank=True,
default=0,
)
@classmethod
def get_ec2_region_choices(cls):
@ -886,16 +890,16 @@ class InventorySourceOptions(BaseModel):
@classmethod
def get_ec2_group_by_choices(cls):
return [
('availability_zone', 'Availability Zone'),
('ami_id', 'Image ID'),
('instance_id', 'Instance ID'),
('instance_type', 'Instance Type'),
('key_pair', 'Key Name'),
('region', 'Region'),
('security_group', 'Security Group'),
('tag_keys', 'Tags'),
('vpc_id', 'VPC ID'),
('tag_none', 'Tag None'),
('availability_zone', _('Availability Zone')),
('ami_id', _('Image ID')),
('instance_id', _('Instance ID')),
('instance_type', _('Instance Type')),
('key_pair', _('Key Name')),
('region', _('Region')),
('security_group', _('Security Group')),
('tag_keys', _('Tags')),
('vpc_id', _('VPC ID')),
('tag_none', _('Tag None')),
]
@classmethod
@ -966,14 +970,14 @@ class InventorySourceOptions(BaseModel):
# credentials; Rackspace requires Rackspace credentials; etc...)
if self.source.replace('ec2', 'aws') != cred.kind:
raise ValidationError(
'Cloud-based inventory sources (such as %s) require '
'credentials for the matching cloud service.' % self.source
_('Cloud-based inventory sources (such as %s) require '
'credentials for the matching cloud service.') % self.source
)
# Allow an EC2 source to omit the credential. If Tower is running on
# an EC2 instance with an IAM Role assigned, boto will use credentials
# from the instance metadata instead of those explicitly provided.
elif self.source in CLOUD_PROVIDERS and self.source != 'ec2':
raise ValidationError('Credential is required for a cloud source.')
raise ValidationError(_('Credential is required for a cloud source.'))
return cred
def clean_source_regions(self):
@ -998,9 +1002,9 @@ class InventorySourceOptions(BaseModel):
if r not in valid_regions and r not in invalid_regions:
invalid_regions.append(r)
if invalid_regions:
raise ValidationError('Invalid %s region%s: %s' % (self.source,
'' if len(invalid_regions) == 1 else 's',
', '.join(invalid_regions)))
raise ValidationError(_('Invalid %(source)s region%(plural)s: %(region)s') % {
'source': self.source, 'plural': '' if len(invalid_regions) == 1 else 's',
'region': ', '.join(invalid_regions)})
return ','.join(regions)
source_vars_dict = VarsDictProperty('source_vars')
@ -1024,9 +1028,9 @@ class InventorySourceOptions(BaseModel):
if instance_filter_name not in self.INSTANCE_FILTER_NAMES:
invalid_filters.append(instance_filter)
if invalid_filters:
raise ValidationError('Invalid filter expression%s: %s' %
('' if len(invalid_filters) == 1 else 's',
', '.join(invalid_filters)))
raise ValidationError(_('Invalid filter expression%(plural)s: %(filter)s') %
{'plural': '' if len(invalid_filters) == 1 else 's',
'filter': ', '.join(invalid_filters)})
return instance_filters
def clean_group_by(self):
@ -1043,9 +1047,9 @@ class InventorySourceOptions(BaseModel):
if c not in valid_choices and c not in invalid_choices:
invalid_choices.append(c)
if invalid_choices:
raise ValidationError('Invalid group by choice%s: %s' %
('' if len(invalid_choices) == 1 else 's',
', '.join(invalid_choices)))
raise ValidationError(_('Invalid group by choice%(plural)s: %(choice)s') %
{'plural': '' if len(invalid_choices) == 1 else 's',
'choice': ', '.join(invalid_choices)})
return ','.join(choices)
@ -1084,7 +1088,8 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions):
@classmethod
def _get_unified_job_field_names(cls):
return ['name', 'description', 'source', 'source_path', 'source_script', 'source_vars', 'schedule',
'credential', 'source_regions', 'instance_filters', 'group_by', 'overwrite', 'overwrite_vars']
'credential', 'source_regions', 'instance_filters', 'group_by', 'overwrite', 'overwrite_vars',
'timeout']
def save(self, *args, **kwargs):
# If update_fields has been specified, add our field names to it,
@ -1190,7 +1195,7 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions):
existing_sources = qs.exclude(pk=self.pk)
if existing_sources.count():
s = u', '.join([x.group.name for x in existing_sources])
raise ValidationError('Unable to configure this item for cloud sync. It is already managed by %s.' % s)
raise ValidationError(_('Unable to configure this item for cloud sync. It is already managed by %s.') % s)
return source
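The region, filter, and group-by errors also switch from positional %s to named %(...)s placeholders. Once these strings are translated, the mapping lets translators reorder substitutions to match their language's word order, which positional %s cannot express. The shape of the change, on the region message:

    # Before: substitution order is fixed by argument position.
    msg = 'Invalid %s region%s: %s' % (source, plural, regions)
    # After: slots are named, so a translation may reorder them freely.
    msg = _('Invalid %(source)s region%(plural)s: %(region)s') % {
        'source': source, 'plural': plural, 'region': regions}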

View File

@ -143,6 +143,10 @@ class JobOptions(BaseModel):
allow_simultaneous = models.BooleanField(
default=False,
)
timeout = models.PositiveIntegerField(
blank=True,
default=0,
)
extra_vars_dict = VarsDictProperty('extra_vars', True)
@ -150,7 +154,7 @@ class JobOptions(BaseModel):
cred = self.credential
if cred and cred.kind != 'ssh':
raise ValidationError(
'You must provide a machine / SSH credential.',
_('You must provide a machine / SSH credential.'),
)
return cred
@ -158,7 +162,7 @@ class JobOptions(BaseModel):
cred = self.network_credential
if cred and cred.kind != 'net':
raise ValidationError(
'You must provide a network credential.',
_('You must provide a network credential.'),
)
return cred
@ -166,8 +170,8 @@ class JobOptions(BaseModel):
cred = self.cloud_credential
if cred and cred.kind not in CLOUD_PROVIDERS + ('aws',):
raise ValidationError(
'Must provide a credential for a cloud provider, such as '
'Amazon Web Services or Rackspace.',
_('Must provide a credential for a cloud provider, such as '
'Amazon Web Services or Rackspace.'),
)
return cred
@ -253,7 +257,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
'playbook', 'credential', 'cloud_credential', 'network_credential', 'forks', 'schedule',
'limit', 'verbosity', 'job_tags', 'extra_vars', 'launch_type',
'force_handlers', 'skip_tags', 'start_at_task', 'become_enabled',
'labels', 'survey_passwords', 'allow_simultaneous',]
'labels', 'survey_passwords', 'allow_simultaneous', 'timeout']
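# Fields listed here are snapshotted from the template onto each job at
# launch time, so appending 'timeout' is what carries a template's
# timeout onto the spawned Job for run_pexpect to enforce.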
def resource_validation_data(self):
'''
@ -266,19 +270,19 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
if self.inventory is None:
resources_needed_to_start.append('inventory')
if not self.ask_inventory_on_launch:
validation_errors['inventory'] = ["Job Template must provide 'inventory' or allow prompting for it.",]
validation_errors['inventory'] = [_("Job Template must provide 'inventory' or allow prompting for it."),]
if self.credential is None:
resources_needed_to_start.append('credential')
if not self.ask_credential_on_launch:
validation_errors['credential'] = ["Job Template must provide 'credential' or allow prompting for it.",]
validation_errors['credential'] = [_("Job Template must provide 'credential' or allow prompting for it."),]
# Job type dependent checks
if self.job_type == PERM_INVENTORY_SCAN:
if self.inventory is None or self.ask_inventory_on_launch:
validation_errors['inventory'] = ["Scan jobs must be assigned a fixed inventory.",]
validation_errors['inventory'] = [_("Scan jobs must be assigned a fixed inventory."),]
elif self.project is None:
resources_needed_to_start.append('project')
validation_errors['project'] = ["Job types 'run' and 'check' must have assigned a project.",]
validation_errors['project'] = [_("Job types 'run' and 'check' must have assigned a project."),]
return (validation_errors, resources_needed_to_start)
@ -487,10 +491,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
if 'job_type' in data and self.ask_job_type_on_launch:
if ((self.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or
(data['job_type'] == PERM_INVENTORY_SCAN and not self.job_type == PERM_INVENTORY_SCAN)):
errors['job_type'] = 'Can not override job_type to or from a scan job.'
errors['job_type'] = _('Cannot override job_type to or from a scan job.')
if (self.job_type == PERM_INVENTORY_SCAN and ('inventory' in data) and self.ask_inventory_on_launch and
self.inventory != data['inventory']):
errors['inventory'] = 'Inventory can not be changed at runtime for scan jobs.'
errors['inventory'] = _('Inventory cannot be changed at runtime for scan jobs.')
return errors
@property
@ -555,6 +559,15 @@ class Job(UnifiedJob, JobOptions, JobNotificationMixin):
default={},
editable=False,
)
scm_revision = models.CharField(
max_length=1024,
blank=True,
default='',
editable=False,
verbose_name=_('SCM Revision'),
help_text=_('The SCM Revision from the Project used for this job, if available'),
)
@classmethod
def _get_parent_field_name(cls):
@ -1328,6 +1341,7 @@ class SystemJobOptions(BaseModel):
default='',
)
class SystemJobTemplate(UnifiedJobTemplate, SystemJobOptions):
class Meta:

View File

@ -267,7 +267,7 @@ class AuthToken(BaseModel):
def invalidate(self, reason='timeout_reached', save=True):
if not AuthToken.reason_long(reason):
raise ValueError('Invalid reason specified')
raise ValueError(_('Invalid reason specified'))
self.reason = reason
if save:
self.save()

View File

@ -7,6 +7,9 @@ import os
import re
import urlparse
# JSONField
from jsonfield import JSONField
# Django
from django.conf import settings
from django.db import models
@ -106,6 +109,10 @@ class ProjectOptions(models.Model):
default=None,
on_delete=models.SET_NULL,
)
timeout = models.PositiveIntegerField(
blank=True,
default=0,
)
def clean_scm_type(self):
return self.scm_type or ''
@ -118,10 +125,10 @@ class ProjectOptions(models.Model):
scm_url = update_scm_url(self.scm_type, scm_url,
check_special_cases=False)
except ValueError as e:
raise ValidationError((e.args or ('Invalid SCM URL.',))[0])
raise ValidationError((e.args or (_('Invalid SCM URL.'),))[0])
scm_url_parts = urlparse.urlsplit(scm_url)
if self.scm_type and not any(scm_url_parts):
raise ValidationError('SCM URL is required.')
raise ValidationError(_('SCM URL is required.'))
return unicode(self.scm_url or '')
def clean_credential(self):
@ -130,7 +137,7 @@ class ProjectOptions(models.Model):
cred = self.credential
if cred:
if cred.kind != 'scm':
raise ValidationError("Credential kind must be 'scm'.")
raise ValidationError(_("Credential kind must be 'scm'."))
try:
scm_url = update_scm_url(self.scm_type, self.scm_url,
check_special_cases=False)
@ -145,7 +152,7 @@ class ProjectOptions(models.Model):
update_scm_url(self.scm_type, self.scm_url, scm_username,
scm_password)
except ValueError as e:
raise ValidationError((e.args or ('Invalid credential.',))[0])
raise ValidationError((e.args or (_('Invalid credential.'),))[0])
except ValueError:
pass
return cred
@ -223,6 +230,23 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
blank=True,
)
scm_revision = models.CharField(
max_length=1024,
blank=True,
default='',
editable=False,
verbose_name=_('SCM Revision'),
help_text=_('The last revision fetched by a project update'),
)
playbook_files = JSONField(
blank=True,
default=[],
editable=False,
verbose_name=_('Playbook Files'),
help_text=_('List of playbooks found in the project'),
)
admin_role = ImplicitRoleField(parent_role=[
'organization.admin_role',
'singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
@ -251,7 +275,7 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
def _get_unified_job_field_names(cls):
return ['name', 'description', 'local_path', 'scm_type', 'scm_url',
'scm_branch', 'scm_clean', 'scm_delete_on_update',
'credential', 'schedule']
'credential', 'schedule', 'timeout']
def save(self, *args, **kwargs):
new_instance = not bool(self.pk)
@ -294,10 +318,6 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin):
# inherit the child job status on failure
elif self.last_job_failed:
return self.last_job.status
# Even on a successful child run, a missing project path overides
# the successful status
elif not self.get_project_path():
return 'missing'
# Return the successful status
else:
return self.last_job.status
@ -389,6 +409,12 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin):
editable=False,
)
job_type = models.CharField(
max_length=64,
choices=PROJECT_UPDATE_JOB_TYPE_CHOICES,
default='check',
)
@classmethod
def _get_parent_field_name(cls):
return 'project'

View File

@ -62,12 +62,12 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
]
COMMON_STATUS_CHOICES = JOB_STATUS_CHOICES + [
('never updated', 'Never Updated'), # A job has never been run using this template.
('never updated', _('Never Updated')), # A job has never been run using this template.
]
PROJECT_STATUS_CHOICES = COMMON_STATUS_CHOICES + [
('ok', 'OK'), # Project is not configured for SCM and path exists.
('missing', 'Missing'), # Project path does not exist.
('ok', _('OK')), # Project is not configured for SCM and path exists.
('missing', _('Missing')), # Project path does not exist.
]
INVENTORY_SOURCE_STATUS_CHOICES = COMMON_STATUS_CHOICES + [
@ -438,6 +438,11 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
editable=False,
related_name='%(class)s_blocked_jobs+',
)
execution_node = models.TextField(
blank=True,
default='',
editable=False,
)
notifications = models.ManyToManyField(
'Notification',
editable=False,
@ -801,7 +806,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
def pre_start(self, **kwargs):
if not self.can_start:
self.job_explanation = u'%s is not in a startable status: %s, expecting one of %s' % (self._meta.verbose_name, self.status, str(('new', 'waiting')))
self.job_explanation = u'%s is not in a startable state: %s, expecting one of %s' % (self._meta.verbose_name, self.status, str(('new', 'waiting')))
self.save(update_fields=['job_explanation'])
return (False, None)

View File

@ -275,8 +275,7 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, ResourceMixin)
@classmethod
def _get_unified_job_field_names(cls):
# TODO: ADD LABELS
return ['name', 'description', 'extra_vars',]
return ['name', 'description', 'extra_vars', 'labels',]
def get_absolute_url(self):
return reverse('api:workflow_job_template_detail', args=(self.pk,))

View File

@ -5,6 +5,8 @@ import json
from django.utils.encoding import smart_text
from django.core.mail.backends.base import BaseEmailBackend
from django.utils.translation import ugettext_lazy as _
class TowerBaseEmailBackend(BaseEmailBackend):
@ -12,9 +14,8 @@ class TowerBaseEmailBackend(BaseEmailBackend):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
body['id'],
body['status'],
body['url']))
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual

View File

@ -5,6 +5,8 @@ import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
@ -23,9 +25,8 @@ class CustomEmailBackend(EmailBackend):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text("{} #{} had status {} on Ansible Tower, view details at {}\n\n".format(body['friendly_name'],
body['id'],
body['status'],
body['url']))
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual

View File

@ -6,11 +6,12 @@ import logging
import requests
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.hipchat_backend')
class HipChatBackend(TowerBaseEmailBackend):
init_parameters = {"token": {"label": "Token", "type": "password"},
@ -42,8 +43,8 @@ class HipChatBackend(TowerBaseEmailBackend):
"from": m.from_email,
"message_format": "text"})
if r.status_code != 204:
logger.error(smart_text("Error sending messages: {}".format(r.text)))
logger.error(smart_text(_("Error sending messages: {}").format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending message to hipchat: {}".format(r.text)))
raise Exception(smart_text(_("Error sending message to hipchat: {}").format(r.text)))
sent_messages += 1
return sent_messages

View File

@ -8,11 +8,12 @@ import logging
import irc.client
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.irc_backend')
class IrcBackend(TowerBaseEmailBackend):
init_parameters = {"server": {"label": "IRC Server Address", "type": "string"},
@ -50,7 +51,7 @@ class IrcBackend(TowerBaseEmailBackend):
connect_factory=connection_factory,
)
except irc.client.ServerConnectionError as e:
logger.error(smart_text("Exception connecting to irc server: {}".format(e)))
logger.error(smart_text(_("Exception connecting to irc server: {}").format(e)))
if not self.fail_silently:
raise
return True

View File

@ -5,11 +5,12 @@ import logging
import pygerduty
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.pagerduty_backend')
class PagerDutyBackend(TowerBaseEmailBackend):
init_parameters = {"subdomain": {"label": "Pagerduty subdomain", "type": "string"},
@ -35,7 +36,7 @@ class PagerDutyBackend(TowerBaseEmailBackend):
except Exception as e:
if not self.fail_silently:
raise
logger.error(smart_text("Exception connecting to PagerDuty: {}".format(e)))
logger.error(smart_text(_("Exception connecting to PagerDuty: {}").format(e)))
for m in messages:
try:
pager.trigger_incident(m.recipients()[0],
@ -44,7 +45,7 @@ class PagerDutyBackend(TowerBaseEmailBackend):
client=m.from_email)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
logger.error(smart_text(_("Exception sending messages: {}").format(e)))
if not self.fail_silently:
raise
return sent_messages

View File

@ -5,11 +5,12 @@ import logging
from slackclient import SlackClient
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.slack_backend')
class SlackBackend(TowerBaseEmailBackend):
init_parameters = {"token": {"label": "Token", "type": "password"},
@ -48,7 +49,7 @@ class SlackBackend(TowerBaseEmailBackend):
self.connection.rtm_send_message(r, m.subject)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
logger.error(smart_text(_("Exception sending messages: {}").format(e)))
if not self.fail_silently:
raise
return sent_messages

View File

@ -6,11 +6,12 @@ import logging
from twilio.rest import TwilioRestClient
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.twilio_backend')
class TwilioBackend(TowerBaseEmailBackend):
init_parameters = {"account_sid": {"label": "Account SID", "type": "string"},
@ -32,7 +33,7 @@ class TwilioBackend(TowerBaseEmailBackend):
except Exception as e:
if not self.fail_silently:
raise
logger.error(smart_text("Exception connecting to Twilio: {}".format(e)))
logger.error(smart_text(_("Exception connecting to Twilio: {}").format(e)))
for m in messages:
try:
@ -42,7 +43,7 @@ class TwilioBackend(TowerBaseEmailBackend):
body=m.subject)
sent_messages += 1
except Exception as e:
logger.error(smart_text("Exception sending messages: {}".format(e)))
logger.error(smart_text(_("Exception sending messages: {}").format(e)))
if not self.fail_silently:
raise
return sent_messages

View File

@ -5,12 +5,13 @@ import logging
import requests
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
@ -34,8 +35,8 @@ class WebhookBackend(TowerBaseEmailBackend):
json=m.body,
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
logger.error(smart_text(_("Error sending notification webhook: {}").format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
raise Exception(smart_text(_("Error sending notification webhook: {}").format(r.text)))
sent_messages += 1
return sent_messages

View File

@ -42,6 +42,7 @@ from django.utils.timezone import now
from django.utils.encoding import smart_str
from django.core.mail import send_mail
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
# AWX
from awx.main.constants import CLOUD_PROVIDERS
@ -53,10 +54,9 @@ from awx.main.utils import (get_ansible_version, get_ssh_version, decrypt_field,
from awx.main.consumers import emit_channel_notification
__all__ = ['RunJob', 'RunSystemJob', 'RunProjectUpdate', 'RunInventoryUpdate',
'RunAdHocCommand', 'RunWorkflowJob', 'handle_work_error',
'RunAdHocCommand', 'handle_work_error',
'handle_work_success', 'update_inventory_computed_fields',
'send_notifications', 'run_administrative_checks',
'RunJobLaunch']
'send_notifications', 'run_administrative_checks']
HIDDEN_PASSWORD = '**********'
@ -112,12 +112,12 @@ def run_administrative_checks(self):
tower_admin_emails = User.objects.filter(is_superuser=True).values_list('email', flat=True)
if (used_percentage * 100) > 90:
send_mail("Ansible Tower host usage over 90%",
"Ansible Tower host usage over 90%",
_("Ansible Tower host usage over 90%"),
tower_admin_emails,
fail_silently=True)
if validation_info.get('date_warning', False):
send_mail("Ansible Tower license will expire soon",
"Ansible Tower license will expire soon",
_("Ansible Tower license will expire soon"),
tower_admin_emails,
fail_silently=True)
@ -161,12 +161,6 @@ def tower_periodic_scheduler(self):
logger.debug("Last run was: %s", last_run)
write_last_run(run_now)
# Sanity check: If this is a secondary machine, there is nothing
# on the schedule.
# TODO: Fix for clustering/ha
if Instance.objects.my_role() == 'secondary':
return
old_schedules = Schedule.objects.enabled().before(last_run)
for schedule in old_schedules:
schedule.save()
@ -188,7 +182,7 @@ def tower_periodic_scheduler(self):
def _send_notification_templates(instance, status_str):
if status_str not in ['succeeded', 'failed']:
raise ValueError("status_str must be either succeeded or failed")
raise ValueError(_("status_str must be either succeeded or failed"))
notification_templates = instance.get_notification_templates()
if notification_templates:
all_notification_templates = set(notification_templates.get('success', []) + notification_templates.get('any', []))
@ -234,8 +228,9 @@ def handle_work_error(self, task_id, subtasks=None):
if instance.celery_task_id != task_id:
instance.status = 'failed'
instance.failed = True
instance.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % \
(first_instance_type, first_instance.name, first_instance.id)
if not instance.job_explanation:
instance.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % \
(first_instance_type, first_instance.name, first_instance.id)
instance.save()
instance.websocket_emit_status("failed")
@ -501,7 +496,7 @@ class BaseTask(Task):
return OrderedDict()
def run_pexpect(self, instance, args, cwd, env, passwords, stdout_handle,
output_replacements=None):
output_replacements=None, extra_update_fields=None):
'''
Run the given command using pexpect to capture output and provide
passwords when requested.
@ -517,9 +512,17 @@ class BaseTask(Task):
if pexpect_sleep is not None:
logger.info("Suspending Job Execution for QA Work")
time.sleep(pexpect_sleep)
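# Effective timeout resolution: only models listed (by class name) in
# settings.DEFAULT_JOB_TIMEOUTS are subject to one; a non-zero
# per-instance `timeout` overrides the global entry, 0 falls back to it,
# and a final job_timeout of 0 disables the elapsed-time check below.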
global_timeout = getattr(settings, 'DEFAULT_JOB_TIMEOUTS', {})
cls_name = instance.__class__.__name__
if cls_name in global_timeout:
local_timeout = getattr(instance, 'timeout', 0)
job_timeout = global_timeout[cls_name] if local_timeout == 0 else local_timeout
else:
job_timeout = 0
child = pexpect.spawnu(args[0], args[1:], cwd=cwd, env=env)
child.logfile_read = logfile
canceled = False
timed_out = False
last_stdout_update = time.time()
idle_timeout = self.get_idle_timeout()
expect_list = []
@ -530,7 +533,9 @@ class BaseTask(Task):
expect_passwords[n] = passwords.get(item[1], '') or ''
expect_list.extend([pexpect.TIMEOUT, pexpect.EOF])
instance = self.update_model(instance.pk, status='running',
execution_node=settings.CLUSTER_HOST_ID,
output_replacements=output_replacements)
job_start = time.time()
while child.isalive():
result_id = child.expect(expect_list, timeout=pexpect_timeout)
if result_id in expect_passwords:
@ -541,45 +546,65 @@ class BaseTask(Task):
# Refresh model instance from the database (to check cancel flag).
instance = self.update_model(instance.pk)
if instance.cancel_flag:
try:
if settings.AWX_PROOT_ENABLED and self.should_use_proot(instance):
# NOTE: Refactor this once we get a newer psutil across the board
if not psutil:
os.kill(child.pid, signal.SIGKILL)
else:
try:
main_proc = psutil.Process(pid=child.pid)
if hasattr(main_proc, "children"):
child_procs = main_proc.children(recursive=True)
else:
child_procs = main_proc.get_children(recursive=True)
for child_proc in child_procs:
os.kill(child_proc.pid, signal.SIGKILL)
os.kill(main_proc.pid, signal.SIGKILL)
except TypeError:
os.kill(child.pid, signal.SIGKILL)
else:
os.kill(child.pid, signal.SIGTERM)
time.sleep(3)
canceled = True
except OSError:
logger.warn("Attempted to cancel already finished job, ignoring")
canceled = True
elif job_timeout != 0 and (time.time() - job_start) > job_timeout:
timed_out = True
if isinstance(extra_update_fields, dict):
extra_update_fields['job_explanation'] = "Job terminated due to timeout"
if canceled or timed_out:
self._handle_termination(instance, child, is_cancel=canceled)
if idle_timeout and (time.time() - last_stdout_update) > idle_timeout:
child.close(True)
canceled = True
if canceled:
return 'canceled', child.exitstatus
elif child.exitstatus == 0:
elif child.exitstatus == 0 and not timed_out:
return 'successful', child.exitstatus
else:
return 'failed', child.exitstatus
def _handle_termination(self, instance, job, is_cancel=True):
'''Helper function to properly terminate the specified job.
Args:
instance: The corresponding model instance of this task.
job: The pexpect subprocess running the job.
is_cancel: Flag showing whether this termination is caused by instance's
cancel_flag.
Return:
None.
'''
try:
if settings.AWX_PROOT_ENABLED and self.should_use_proot(instance):
# NOTE: Refactor this once we get a newer psutil across the board
if not psutil:
os.kill(job.pid, signal.SIGKILL)
else:
try:
main_proc = psutil.Process(pid=job.pid)
if hasattr(main_proc, "children"):
child_procs = main_proc.children(recursive=True)
else:
child_procs = main_proc.get_children(recursive=True)
for child_proc in child_procs:
os.kill(child_proc.pid, signal.SIGKILL)
os.kill(main_proc.pid, signal.SIGKILL)
except TypeError:
os.kill(job.pid, signal.SIGKILL)
else:
os.kill(job.pid, signal.SIGTERM)
time.sleep(3)
except OSError:
keyword = 'cancel' if is_cancel else 'timeout'
logger.warn("Attempted to %s already finished job, ignoring" % keyword)
def pre_run_hook(self, instance, **kwargs):
'''
Hook for any steps to run before the job/task starts
'''
def post_run_hook(self, instance, **kwargs):
def post_run_hook(self, instance, status, **kwargs):
'''
Hook for any steps to run after job/task is complete.
'''
@ -588,11 +613,12 @@ class BaseTask(Task):
'''
Run the job/task and capture its output.
'''
instance = self.update_model(pk, status='running', celery_task_id=self.request.id)
instance = self.update_model(pk, status='running', celery_task_id='' if self.request.id is None else self.request.id)
instance.websocket_emit_status("running")
status, rc, tb = 'error', None, ''
output_replacements = []
extra_update_fields = {}
try:
self.pre_run_hook(instance, **kwargs)
if instance.cancel_flag:
@ -636,7 +662,8 @@ class BaseTask(Task):
safe_args = self.wrap_args_with_ssh_agent(safe_args, ssh_key_path, ssh_auth_sock)
instance = self.update_model(pk, job_args=json.dumps(safe_args),
job_cwd=cwd, job_env=safe_env, result_stdout_file=stdout_filename)
status, rc = self.run_pexpect(instance, args, cwd, env, kwargs['passwords'], stdout_handle)
status, rc = self.run_pexpect(instance, args, cwd, env, kwargs['passwords'], stdout_handle,
extra_update_fields=extra_update_fields)
except Exception:
if status != 'canceled':
tb = traceback.format_exc()
@ -657,8 +684,9 @@ class BaseTask(Task):
except Exception:
pass
instance = self.update_model(pk, status=status, result_traceback=tb,
output_replacements=output_replacements)
self.post_run_hook(instance, **kwargs)
output_replacements=output_replacements,
**extra_update_fields)
self.post_run_hook(instance, status, **kwargs)
instance.websocket_emit_status(status)
if status != 'successful' and not hasattr(settings, 'CELERY_UNIT_TEST'):
# Raising an exception will mark the job as 'failed' in celery
@ -749,6 +777,8 @@ class RunJob(BaseTask):
# callbacks to work.
env['JOB_ID'] = str(job.pk)
env['INVENTORY_ID'] = str(job.inventory.pk)
if job.project:
env['PROJECT_REVISION'] = job.project.scm_revision
env['ANSIBLE_CALLBACK_PLUGINS'] = plugin_path
env['REST_API_URL'] = settings.INTERNAL_API_URL
env['REST_API_TOKEN'] = job.task_auth_token or ''
@ -882,6 +912,10 @@ class RunJob(BaseTask):
'tower_job_id': job.pk,
'tower_job_launch_type': job.launch_type,
}
if job.project:
extra_vars.update({
'tower_project_revision': job.project.scm_revision,
})
if job.job_template:
extra_vars.update({
'tower_job_template_id': job.job_template.pk,
@ -958,11 +992,28 @@ class RunJob(BaseTask):
'''
return getattr(settings, 'AWX_PROOT_ENABLED', False)
def post_run_hook(self, job, **kwargs):
def pre_run_hook(self, job, **kwargs):
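# Before the playbook starts, run a blocking "run"-type project sync so
# the job executes against a fresh checkout. On success the synced
# revision is copied onto job.scm_revision; on failure the job is marked
# failed with a "Previous Task Failed" explanation and the error re-raised.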
if job.project and job.project.scm_type:
local_project_sync = job.project.create_project_update()
local_project_sync.job_type = 'run'
local_project_sync.save()
project_update_task = local_project_sync._get_task_class()
try:
project_update_task().run(local_project_sync.id)
job.scm_revision = job.project.scm_revision
job.save()
except Exception:
job.status = 'failed'
job.job_explanation = 'Previous Task Failed: {"job_type": "%s", "job_name": "%s", "job_id": "%s"}' % \
('project_update', local_project_sync.name, local_project_sync.id)
job.save()
raise
def post_run_hook(self, job, status, **kwargs):
'''
Hook for actions to run after job/task has completed.
'''
super(RunJob, self).post_run_hook(job, **kwargs)
super(RunJob, self).post_run_hook(job, status, **kwargs)
try:
inventory = job.inventory
except Inventory.DoesNotExist:
@ -1063,7 +1114,10 @@ class RunProjectUpdate(BaseTask):
args.append('-v')
scm_url, extra_vars = self._build_scm_url_extra_vars(project_update,
**kwargs)
scm_branch = project_update.scm_branch or {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
if project_update.project.scm_revision and project_update.job_type == 'run':
scm_branch = project_update.project.scm_revision
else:
scm_branch = project_update.scm_branch or {'hg': 'tip'}.get(project_update.scm_type, 'HEAD')
extra_vars.update({
'project_path': project_update.get_project_path(check_if_exists=False),
'scm_type': project_update.scm_type,
@ -1071,6 +1125,8 @@ class RunProjectUpdate(BaseTask):
'scm_branch': scm_branch,
'scm_clean': project_update.scm_clean,
'scm_delete_on_update': project_update.scm_delete_on_update,
'scm_full_checkout': True if project_update.job_type == 'run' else False,
'scm_revision_output': '/tmp/_{}_syncrev'.format(project_update.id) # TODO: TempFile
})
args.extend(['-e', json.dumps(extra_vars)])
args.append('project_update.yml')
@ -1144,6 +1200,18 @@ class RunProjectUpdate(BaseTask):
'''
return kwargs.get('private_data_files', {}).get('scm_credential', '')
def post_run_hook(self, instance, status, **kwargs):
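# After a successful "check"-type update, read back the revision that
# project_update.yml wrote to the scm_revision_output file
# (/tmp/_<id>_syncrev) and persist it, along with the discovered
# playbooks, onto the Project record.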
if instance.job_type == 'check' and status not in ('failed', 'canceled',):
p = instance.project
fd = open('/tmp/_{}_syncrev'.format(instance.id), 'r')
lines = fd.readlines()
if lines:
p.scm_revision = lines[0].strip()
p.playbook_files = p.playbooks
p.save()
else:
logger.error("Could not find scm revision in check")
class RunInventoryUpdate(BaseTask):
name = 'awx.main.tasks.run_inventory_update'
@ -1638,7 +1706,7 @@ class RunAdHocCommand(BaseTask):
'''
return getattr(settings, 'AWX_PROOT_ENABLED', False)
def post_run_hook(self, ad_hoc_command, **kwargs):
def post_run_hook(self, ad_hoc_command, status, **kwargs):
'''
Hook for actions to run after ad hoc command has completed.
'''
@ -1675,38 +1743,3 @@ class RunSystemJob(BaseTask):
def build_cwd(self, instance, **kwargs):
return settings.BASE_DIR
'''
class RunWorkflowJob(BaseTask):
name = 'awx.main.tasks.run_workflow_job'
model = WorkflowJob
def run(self, pk, **kwargs):
#Run the job/task and capture its output.
instance = self.update_model(pk, status='running', celery_task_id=self.request.id)
instance.websocket_emit_status("running")
# FIXME: Currently, the workflow job busy waits until the graph run is
# complete. Instead, the workflow job should return or never even run,
# because all of the "launch logic" can be done schedule().
# However, other aspects of our system depend on a 1-1 relationship
# between a Job and a Celery Task.
#
# * If we let the workflow job task (RunWorkflowJob.run()) complete
# then how do we trigger the handle_work_error and
# handle_work_success subtasks?
#
# * How do we handle the recovery process? (i.e. there is an entry in
# the database but not in celery).
while True:
dag = WorkflowDAG(instance)
if dag.is_workflow_done():
# TODO: update with accurate finish status (i.e. canceled, error, etc.)
instance = self.update_model(instance.pk, status='successful')
break
time.sleep(1)
instance.websocket_emit_status(instance.status)
# TODO: Handle cancel
'''
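extra_update_fields is threaded through as an out-parameter: BaseTask.run() passes an empty dict into run_pexpect(), the child records extra fields to persist (here, a job_explanation when the run times out), and run() folds them into the final update_model() call. The shape of the flow, as standalone sketch functions:

    def run_child(extra_update_fields):
        # The callee signals extra fields by mutating the dict it was handed,
        # as run_pexpect does when the job times out.
        extra_update_fields['job_explanation'] = 'Job terminated due to timeout'
        return 'failed', 1

    def run():
        extra_update_fields = {}
        status, rc = run_child(extra_update_fields)
        # Folded into the final model update alongside status/traceback,
        # e.g. update_model(pk, status=status, **extra_update_fields).
        return dict(status=status, **extra_update_fields)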

View File

@ -10,7 +10,8 @@ from awx.main.access import (
JobTemplateAccess,
WorkflowJobTemplateAccess,
)
from awx.main.models import Credential, Inventory, Project, Role, Organization
from awx.conf.license import LicenseForbids
from awx.main.models import Credential, Inventory, Project, Role, Organization, Instance
@pytest.fixture
@ -106,6 +107,18 @@ def test_jt_add_scan_job_check(job_template_with_ids, user_unit):
'job_type': 'scan'
})
def mock_raise_license_forbids(self, add_host=False, feature=None, check_expiration=True):
raise LicenseForbids("Feature not enabled")
def mock_raise_none(self, add_host=False, feature=None, check_expiration=True):
return None
def test_jt_can_start_ha(job_template_with_ids):
with mock.patch.object(Instance.objects, 'active_count', return_value=2):
with mock.patch('awx.main.access.BaseAccess.check_license', new=mock_raise_license_forbids):
with pytest.raises(LicenseForbids):
JobTemplateAccess(user_unit).can_start(job_template_with_ids)
def test_jt_can_add_bad_data(user_unit):
"Assure that no server errors are returned if we call JT can_add with bad data"
access = JobTemplateAccess(user_unit)

View File

@ -19,6 +19,9 @@ import tempfile
# Decorator
from decorator import decorator
# Django
from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied
from django.utils.encoding import smart_str
@ -77,7 +80,7 @@ def to_python_boolean(value, allow_none=False):
elif allow_none and value.lower() in ('none', 'null'):
return None
else:
raise ValueError(u'Unable to convert "%s" to boolean' % unicode(value))
raise ValueError(_(u'Unable to convert "%s" to boolean') % unicode(value))
def camelcase_to_underscore(s):
'''
@ -192,7 +195,7 @@ def decrypt_field(instance, field_name, subfield=None):
return value
algo, b64data = value[len('$encrypted$'):].split('$', 1)
if algo != 'AES':
raise ValueError('unsupported algorithm: %s' % algo)
raise ValueError(_('unsupported algorithm: %s') % algo)
encrypted = base64.b64decode(b64data)
key = get_encryption_key(instance, field_name)
cipher = AES.new(key, AES.MODE_ECB)
@ -213,16 +216,16 @@ def update_scm_url(scm_type, url, username=True, password=True,
# hg: http://www.selenic.com/mercurial/hg.1.html#url-paths
# svn: http://svnbook.red-bean.com/en/1.7/svn-book.html#svn.advanced.reposurls
if scm_type not in ('git', 'hg', 'svn'):
raise ValueError('Unsupported SCM type "%s"' % str(scm_type))
raise ValueError(_('Unsupported SCM type "%s"') % str(scm_type))
if not url.strip():
return ''
parts = urlparse.urlsplit(url)
try:
parts.port
except ValueError:
raise ValueError('Invalid %s URL' % scm_type)
raise ValueError(_('Invalid %s URL') % scm_type)
if parts.scheme == 'git+ssh' and not scp_format:
raise ValueError('Unsupported %s URL' % scm_type)
raise ValueError(_('Unsupported %s URL') % scm_type)
if '://' not in url:
# Handle SCP-style URLs for git (e.g. [user@]host.xz:path/to/repo.git/).
@ -232,7 +235,7 @@ def update_scm_url(scm_type, url, username=True, password=True,
else:
userpass, hostpath = '', url
if hostpath.count(':') > 1:
raise ValueError('Invalid %s URL' % scm_type)
raise ValueError(_('Invalid %s URL') % scm_type)
host, path = hostpath.split(':', 1)
#if not path.startswith('/') and not path.startswith('~/'):
# path = '~/%s' % path
@ -251,7 +254,7 @@ def update_scm_url(scm_type, url, username=True, password=True,
else:
parts = urlparse.urlsplit('file://%s' % url)
else:
raise ValueError('Invalid %s URL' % scm_type)
raise ValueError(_('Invalid %s URL') % scm_type)
# Validate that scheme is valid for given scm_type.
scm_type_schemes = {
@ -260,11 +263,11 @@ def update_scm_url(scm_type, url, username=True, password=True,
'svn': ('http', 'https', 'svn', 'svn+ssh', 'file'),
}
if parts.scheme not in scm_type_schemes.get(scm_type, ()):
raise ValueError('Unsupported %s URL' % scm_type)
raise ValueError(_('Unsupported %s URL') % scm_type)
if parts.scheme == 'file' and parts.netloc not in ('', 'localhost'):
raise ValueError('Unsupported host "%s" for file:// URL' % (parts.netloc))
raise ValueError(_('Unsupported host "%s" for file:// URL') % (parts.netloc))
elif parts.scheme != 'file' and not parts.netloc:
raise ValueError('Host is required for %s URL' % parts.scheme)
raise ValueError(_('Host is required for %s URL') % parts.scheme)
if username is True:
netloc_username = parts.username or ''
elif username:
@ -282,13 +285,13 @@ def update_scm_url(scm_type, url, username=True, password=True,
if check_special_cases:
special_git_hosts = ('github.com', 'bitbucket.org', 'altssh.bitbucket.org')
if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_username != 'git':
raise ValueError('Username must be "git" for SSH access to %s.' % parts.hostname)
raise ValueError(_('Username must be "git" for SSH access to %s.') % parts.hostname)
if scm_type == 'git' and parts.scheme.endswith('ssh') and parts.hostname in special_git_hosts and netloc_password:
#raise ValueError('Password not allowed for SSH access to %s.' % parts.hostname)
netloc_password = ''
special_hg_hosts = ('bitbucket.org', 'altssh.bitbucket.org')
if scm_type == 'hg' and parts.scheme == 'ssh' and parts.hostname in special_hg_hosts and netloc_username != 'hg':
raise ValueError('Username must be "hg" for SSH access to %s.' % parts.hostname)
raise ValueError(_('Username must be "hg" for SSH access to %s.') % parts.hostname)
if scm_type == 'hg' and parts.scheme == 'ssh' and netloc_password:
#raise ValueError('Password not supported for SSH with Mercurial.')
netloc_password = ''
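A quick usage example of the special-case checks above, assuming `update_scm_url` is importable from `awx.main.utils`:

```python
from awx.main.utils import update_scm_url  # import path assumed

# SSH access to github.com must use the literal 'git' username:
try:
    update_scm_url('git', 'ssh://deploy@github.com/myorg/myrepo.git')
except ValueError as e:
    print(e)  # Username must be "git" for SSH access to github.com.
```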


@ -188,4 +188,4 @@ def vars_validate_or_raise(vars_str):
return vars_str
except yaml.YAMLError:
pass
raise RestValidationError('Must be valid JSON or YAML.')
raise RestValidationError(_('Must be valid JSON or YAML.'))
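Pieced together from the visible tail, the validator plausibly tries JSON first, falls back to YAML, and only raises (now with a translatable message) when both parsers fail. A sketch under those assumptions:

```python
import json
import yaml
from django.utils.translation import ugettext_lazy as _
from rest_framework.exceptions import ValidationError as RestValidationError  # alias assumed

def vars_validate_or_raise(vars_str):
    try:
        json.loads(vars_str)
        return vars_str
    except (ValueError, TypeError):
        pass
    try:
        yaml.safe_load(vars_str)
        return vars_str
    except yaml.YAMLError:
        pass
    raise RestValidationError(_('Must be valid JSON or YAML.'))
```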


@ -17,28 +17,93 @@
tasks:
- name: delete project directory before update
file: path={{project_path|quote}} state=absent
file:
path: "{{project_path|quote}}"
state: absent
when: scm_delete_on_update|default('')
- name: update project using git and accept hostkey
git: dest={{project_path|quote}} repo={{scm_url|quote}} version={{scm_branch|quote}} force={{scm_clean}} accept_hostkey={{scm_accept_hostkey}}
git:
dest: "{{project_path|quote}}"
repo: "{{scm_url|quote}}"
version: "{{scm_branch|quote}}"
force: "{{scm_clean}}"
accept_hostkey: "{{scm_accept_hostkey}}"
#clone: "{{ scm_full_checkout }}"
#update: "{{ scm_full_checkout }}"
when: scm_type == 'git' and scm_accept_hostkey is defined
register: scm_result
- name: Set the git repository version
set_fact:
scm_version: "{{ scm_result['after'] }}"
when: "'after' in scm_result"
- name: update project using git
git: dest={{project_path|quote}} repo={{scm_url|quote}} version={{scm_branch|quote}} force={{scm_clean}}
git:
dest: "{{project_path|quote}}"
repo: "{{scm_url|quote}}"
version: "{{scm_branch|quote}}"
force: "{{scm_clean}}"
#clone: "{{ scm_full_checkout }}"
#update: "{{ scm_full_checkout }}"
when: scm_type == 'git' and scm_accept_hostkey is not defined
register: scm_result
- name: Set the git repository version
set_fact:
scm_version: "{{ scm_result['after'] }}"
when: "'after' in scm_result"
- name: update project using hg
hg: dest={{project_path|quote}} repo={{scm_url|quote}} revision={{scm_branch|quote}} force={{scm_clean}}
hg:
dest: "{{project_path|quote}}"
repo: "{{scm_url|quote}}"
revision: "{{scm_branch|quote}}"
force: "{{scm_clean}}"
#clone: "{{ scm_full_checkout }}"
#update: "{{ scm_full_checkout }}"
when: scm_type == 'hg'
register: scm_result
- name: Set the hg repository version
set_fact:
scm_version: "{{ scm_result['after'] }}"
when: "'after' in scm_result"
- name: update project using svn
subversion: dest={{project_path|quote}} repo={{scm_url|quote}} revision={{scm_branch|quote}} force={{scm_clean}}
subversion:
dest: "{{project_path|quote}}"
repo: "{{scm_url|quote}}"
revision: "{{scm_branch|quote}}"
force: "{{scm_clean}}"
#checkout: "{{ scm_full_checkout }}"
#update: "{{ scm_full_checkout }}"
when: scm_type == 'svn' and not scm_username|default('')
register: scm_result
- name: Set the svn repository version
set_fact:
scm_version: "{{ scm_result['after'] }}"
when: "'after' in scm_result"
- name: update project using svn with auth
subversion: dest={{project_path|quote}} repo={{scm_url|quote}} revision={{scm_branch|quote}} force={{scm_clean}} username={{scm_username|quote}} password={{scm_password|quote}}
subversion:
dest: "{{project_path|quote}}"
repo: "{{scm_url|quote}}"
revision: "{{scm_branch|quote}}"
force: "{{scm_clean}}"
username: "{{scm_username|quote}}"
password: "{{scm_password|quote}}"
#checkout: "{{ scm_full_checkout }}"
#update: "{{ scm_full_checkout }}"
when: scm_type == 'svn' and scm_username|default('')
register: scm_result
- name: Set the svn repository version
set_fact:
scm_version: "{{ scm_result['after'] }}"
when: "'after' in scm_result"
- name: detect requirements.yml
stat: path={{project_path|quote}}/roles/requirements.yml
@ -48,4 +113,14 @@
command: ansible-galaxy install -r requirements.yml -p {{project_path|quote}}/roles/ --force
args:
chdir: "{{project_path|quote}}/roles"
when: doesRequirementsExist.stat.exists
when: doesRequirementsExist.stat.exists and scm_full_checkout|bool
- name: Repository Version
debug: msg="Repository Version {{ scm_version }}"
when: scm_version is defined
- name: Write Repository Version
copy:
dest: "{{ scm_revision_output }}"
content: "{{ scm_version }}"
when: scm_version is defined and scm_revision_output is defined
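The playbook now records the post-update revision: each SCM task registers `scm_result`, a `set_fact` task copies its `after` field into `scm_version`, and the final task writes it to `scm_revision_output` when the caller provided that path. On the Tower side the task runner presumably reads the file back after the run; a minimal, hypothetical reader:

```python
import os

def read_scm_revision(revision_path):
    # Hypothetical helper: the playbook only writes the file when both
    # scm_version and scm_revision_output are defined, so tolerate absence.
    if not os.path.exists(revision_path):
        return None
    with open(revision_path) as f:
        return f.read().strip()
```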


@ -9,7 +9,6 @@ import djcelery
from datetime import timedelta
from kombu import Queue, Exchange
from kombu.common import Broadcast
# Update this module's local settings from the global settings module.
from django.conf import global_settings
@ -367,11 +366,11 @@ CELERY_QUEUES = (
Queue('jobs', Exchange('jobs'), routing_key='jobs'),
Queue('scheduler', Exchange('scheduler', type='topic'), routing_key='scheduler.job.#', durable=False),
# Projects use a fanout queue; this isn't well supported
Broadcast('projects'),
)
CELERY_ROUTES = {'awx.main.tasks.run_job': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.tasks.run_project_update': {'queue': 'projects'},
'awx.main.tasks.run_project_update': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.tasks.run_inventory_update': {'queue': 'jobs',
'routing_key': 'jobs'},
'awx.main.tasks.run_ad_hoc_command': {'queue': 'jobs',
@ -383,7 +382,7 @@ CELERY_ROUTES = {'awx.main.tasks.run_job': {'queue': 'jobs',
'awx.main.scheduler.tasks.run_job_complete': {'queue': 'scheduler',
'routing_key': 'scheduler.job.complete'},
'awx.main.tasks.cluster_node_heartbeat': {'queue': 'default',
'routing_key': 'cluster.heartbeat'},}
'routing_key': 'cluster.heartbeat'}}
CELERYBEAT_SCHEDULE = {
'tower_scheduler': {
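With the route change above, `run_project_update` is published to the durable `jobs` queue like any other job rather than fanned out over the `projects` broadcast exchange, so a plain `delay()` call is enough; a sketch (import path inferred from the route key):

```python
from awx.main.tasks import run_project_update  # path taken from the route key

# CELERY_ROUTES resolves the destination queue; no explicit routing needed.
async_result = run_project_update.delay(42)  # 42: example ProjectUpdate pk
```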


@ -276,3 +276,10 @@ TEST_OPENSTACK_PROJECT = ''
# Azure credentials.
TEST_AZURE_USERNAME = ''
TEST_AZURE_KEY_DATA = ''
# Example global job timeout settings
# DEFAULT_JOB_TIMEOUTS = {
# 'Job': 10,
# 'InventoryUpdate': 15,
# 'ProjectUpdate': 20,
# }
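Nothing in this hunk shows the consumer of these settings, but a lookup keyed on the job class name, mirroring the commented-out example, might look like this (entirely hypothetical):

```python
from django.conf import settings

def job_timeout_for(unified_job):
    # Hypothetical lookup: fall back to 0 (no timeout) when the class
    # has no entry or the setting is absent entirely.
    timeouts = getattr(settings, 'DEFAULT_JOB_TIMEOUTS', {})
    return timeouts.get(type(unified_job).__name__, 0)
```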


@ -48,7 +48,7 @@ http {
server_name _;
keepalive_timeout 70;
ssl_certificate /etc/tower/tower.crt;
ssl_certificate /etc/tower/tower.cert;
ssl_certificate_key /etc/tower/tower.key;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_ciphers HIGH:!aNULL:!MD5;


@ -21,6 +21,7 @@ It's important to point out a few existing things:
by its needs. Thus we are pretty inflexible to customization beyond what our setup playbook allows. Each Tower node has a
deployment of RabbitMQ that will cluster with the other nodes' RabbitMQ instances.
* Existing old-style HA deployments will be transitioned automatically to the new HA system during the upgrade process.
* Manual projects must be synced to all nodes by the customer.
## Important Changes
@ -168,6 +169,7 @@ When verifying acceptance we should ensure the following statements are true
can communicate with the database.
* Crucially when network partitioning is resolved all nodes should recover into a consistent state
* Upgrade Testing, verify behavior before and after are the same for the end user.
* Project Updates should be thoroughly tested for all SCM types (git, svn, hg) and for manual projects.
## Performance Testing


@ -133,6 +133,6 @@ wrapt==1.10.6
wsgiref==0.1.2
xmltodict==0.9.2
channels==0.17.2
asgi_amqp==0.3
asgi_amqp==0.3.1
uwsgi==2.0.14
daphne==0.15.0


@ -33,7 +33,7 @@ msgpack-python==0.4.7
munch==2.0.4
netaddr==0.7.18
netifaces==0.10.4
os-client-config==1.14.0
os-client-config==1.22.0
os-diskconfig-python-novaclient-ext==0.1.3
os-networksv2-python-novaclient-ext==0.25
os-virtual-interfacesv2-python-novaclient-ext==0.19
@ -72,7 +72,7 @@ rax-default-network-flags-python-novaclient-ext==0.3.2
rax-scheduled-images-python-novaclient-ext==0.3.1
requests==2.11.0
requestsexceptions==1.1.1
shade==1.4.0
shade==1.12.1
simplejson==3.8.1
six==1.9.0
stevedore==1.10.0


@ -12,8 +12,8 @@ services:
ports:
- "8080:8080"
- "5555:5555"
- "8050:8050"
- "8051:8051"
- "8013:8013"
- "8043:8043"
links:
- postgres
- memcached
@ -35,14 +35,6 @@ services:
ports:
- "15672:15672"
nginx:
image: gcr.io/ansible-tower-engineering/tower_nginx:${TAG}
ports:
- "8043:443"
- "8013:80"
links:
- tower
# Source Code Synchronization Container
# sync:
# build:


@ -11,7 +11,7 @@ RUN yum -y update && yum -y install curl epel-release
RUN curl --silent --location https://rpm.nodesource.com/setup_6.x | bash -
RUN yum -y localinstall http://download.postgresql.org/pub/repos/yum/9.4/redhat/rhel-6-x86_64/pgdg-centos94-9.4-3.noarch.rpm
ADD tools/docker-compose/proot.repo /etc/yum.repos.d/proot.repo
RUN yum -y update && yum -y install openssh-server ansible mg vim tmux git mercurial subversion python-devel python-psycopg2 make postgresql postgresql-devel nodejs python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel zeromq-devel proot python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel rabbitmq-server
RUN yum -y update && yum -y install openssh-server ansible mg vim tmux git mercurial subversion python-devel python-psycopg2 make postgresql postgresql-devel nginx nodejs python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel zeromq-devel proot python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel rabbitmq-server
RUN pip install flake8 pytest==2.9.2 pytest-pythonpath pytest-django pytest-cov pytest-mock dateutils django-debug-toolbar==1.4 pyflakes==1.0.0 virtualenv
RUN /usr/bin/ssh-keygen -q -t rsa -N "" -f /root/.ssh/id_rsa
RUN mkdir -p /etc/tower
@ -23,10 +23,14 @@ ADD tools/docker-compose/ansible-tower.egg-link /tmp/ansible-tower.egg-link
ADD tools/docker-compose/tower-manage /usr/local/bin/tower-manage
ADD tools/docker-compose/awx-manage /usr/local/bin/awx-manage
ADD tools/docker-compose/ansible_tower.egg-info /tmp/ansible_tower.egg-info
RUN ln -Ffs /tower_devel/tools/docker-compose/nginx.conf /etc/nginx/nginx.conf
RUN ln -Ffs /tower_devel/tools/docker-compose/nginx.vh.default.conf /etc/nginx/conf.d/nginx.vh.default.conf
RUN ln -s /tower_devel/tools/docker-compose/start_development.sh /start_development.sh
RUN openssl req -nodes -newkey rsa:2048 -keyout /etc/nginx/nginx.key -out /etc/nginx/nginx.csr -subj "/C=US/ST=North Carolina/L=Durham/O=Ansible/OU=Tower Development/CN=tower.localhost"
RUN openssl x509 -req -days 365 -in /etc/nginx/nginx.csr -signkey /etc/nginx/nginx.key -out /etc/nginx/nginx.crt
WORKDIR /tmp
RUN SWIG_FEATURES="-cpperraswarn -includeall -D__`uname -m`__ -I/usr/include/openssl" VENV_BASE="/venv" make requirements_dev
WORKDIR /
EXPOSE 8050 8051 8080 22
EXPOSE 8043 8013 8080 22
ENTRYPOINT ["/usr/bin/dumb-init"]
CMD /start_development.sh


@ -0,0 +1,37 @@
user nginx;
worker_processes 1;
error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}
sendfile on;
#tcp_nopush on;
ssl_session_cache shared:SSL:10m;
ssl_session_timeout 10m;
keepalive_timeout 65;
#gzip on;
include /etc/nginx/conf.d/*.conf;
}


@ -0,0 +1,55 @@
upstream uwsgi {
server localhost:8050;
}
upstream daphne {
server localhost:8051;
}
server {
listen 8013 default_server;
listen 8043 default_server ssl;
# If you have a domain name, this is where to add it
server_name _;
keepalive_timeout 70;
ssl_certificate /etc/nginx/nginx.crt;
ssl_certificate_key /etc/nginx/nginx.key;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_ciphers HIGH:!aNULL:!MD5;
location /static/ {
root /tower_devel;
try_files /awx/ui/$uri /awx/$uri /awx/public/$uri =404;
access_log off;
sendfile off;
}
location /websocket {
# Pass request to the upstream alias
proxy_pass http://daphne;
# Require http version 1.1 to allow for upgrade requests
proxy_http_version 1.1;
# We want proxy_buffering off for proxying to websockets.
proxy_buffering off;
# http://en.wikipedia.org/wiki/X-Forwarded-For
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# enable this if you use HTTPS:
proxy_set_header X-Forwarded-Proto https;
# pass the Host: header from the client for the sake of redirects
proxy_set_header Host $http_host;
# We've set the Host header, so we don't need Nginx to muddle
# about with redirects
proxy_redirect off;
# Depending on the request value, set the Upgrade and
# connection headers
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
}
location / {
include /etc/nginx/uwsgi_params;
uwsgi_pass uwsgi;
}
}
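A quick smoke test of the wiring above, run from the host against the ports the compose file now maps straight through (8013 plain, 8043 TLS with the self-signed development cert), assuming `requests` is available:

```python
import requests

# Plain HTTP through nginx to uwsgi:
print(requests.get('http://localhost:8013/', allow_redirects=False).status_code)

# TLS with the self-signed cert the Dockerfile generates; skip verification:
print(requests.get('https://localhost:8043/', verify=False).status_code)
```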


@ -37,7 +37,12 @@ RUN yum install -y nodejs
WORKDIR "/ansible-tower"
# Copy requirements files
COPY requirements/*.txt requirements/
# NOTE: the requirements files are copied individually because a '*' glob
# would invalidate the Docker layer cache whenever any one of them changed
COPY requirements/requirements.txt requirements/
COPY requirements/requirements_ansible.txt requirements/
COPY requirements/requirements_dev.txt requirements/
COPY requirements/requirements_jenkins.txt requirements/
# Copy __init__.py so the Makefile can retrieve `awx.__version__`
COPY awx/__init__.py awx/
@ -58,7 +63,7 @@ COPY awx/ui/package.json awx/ui/
RUN npm set progress=false
RUN make ui-deps-built
RUN make ui-deps
ENTRYPOINT ["/bin/bash", "-c"]
CMD ["bash"]


@ -5,6 +5,7 @@ services:
build:
context: ../../../
dockerfile: tools/docker-compose/unit-tests/Dockerfile
image: gcr.io/ansible-tower-engineering/unit-test-runner:latest
environment:
SWIG_FEATURES: "-cpperraswarn -includeall -I/usr/include/openssl"
TEST_DIRS: "awx/main/tests/unit"


@ -1,14 +1,5 @@
#!/bin/bash
# Enable needed Software Collections, if installed
for scl in python27 httpd24; do
if [ -f /etc/scl/prefixes/$scl ]; then
if [ -f `cat /etc/scl/prefixes/$scl`/$scl/enable ]; then
. `cat /etc/scl/prefixes/$scl`/$scl/enable
fi
fi
done
# Enable Tower virtualenv
if [ -f /var/lib/awx/venv/tower/bin/activate ]; then
. /var/lib/awx/venv/tower/bin/activate