Merge branch 'release_3.3.0' into 1458-vault-pass-prompt

commit e62a8797ae
Michael Abashian, 2018-04-25 10:59:57 -04:00 (committed by GitHub)
45 changed files with 320 additions and 99 deletions

View File

@ -5,6 +5,7 @@
import copy
import json
import logging
import operator
import re
import six
import urllib
@ -38,7 +39,13 @@ from rest_framework.utils.serializer_helpers import ReturnList
from polymorphic.models import PolymorphicModel
# AWX
from awx.main.constants import SCHEDULEABLE_PROVIDERS, ANSI_SGR_PATTERN, ACTIVE_STATES, TOKEN_CENSOR
from awx.main.constants import (
SCHEDULEABLE_PROVIDERS,
ANSI_SGR_PATTERN,
ACTIVE_STATES,
TOKEN_CENSOR,
CHOICES_PRIVILEGE_ESCALATION_METHODS,
)
from awx.main.models import * # noqa
from awx.main.models.base import NEW_JOB_TYPE_CHOICES
from awx.main.access import get_user_capabilities
@ -2494,6 +2501,9 @@ class CredentialTypeSerializer(BaseSerializer):
field['label'] = _(field['label'])
if 'help_text' in field:
field['help_text'] = _(field['help_text'])
if field['type'] == 'become_method':
field.pop('type')
field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
return value
def filter_field_metadata(self, fields, method):
@ -2663,7 +2673,9 @@ class CredentialSerializer(BaseSerializer):
for field in set(data.keys()) - valid_fields - set(credential_type.defined_fields):
if data.get(field):
raise serializers.ValidationError(
{"detail": _("'%s' is not a valid field for %s") % (field, credential_type.name)}
{"detail": _("'{field_name}' is not a valid field for {credential_type_name}").format(
field_name=field, credential_type_name=credential_type.name
)}
)
value.pop('kind', None)
return value
@ -4575,8 +4587,22 @@ class InstanceGroupSerializer(BaseSerializer):
percent_capacity_remaining = serializers.SerializerMethodField()
jobs_running = serializers.SerializerMethodField()
instances = serializers.SerializerMethodField()
policy_instance_percentage = serializers.IntegerField(min_value=0, max_value=100)
policy_instance_minimum = serializers.IntegerField(min_value=0)
# NOTE: help_text is duplicated from the model field definitions; there is no
# obvious way to both define field details here and reuse the model field's help_text
policy_instance_percentage = serializers.IntegerField(
default=0, min_value=0, max_value=100, required=False, initial=0,
help_text=_("Minimum percentage of all instances that will be automatically assigned to "
"this group when new instances come online.")
)
policy_instance_minimum = serializers.IntegerField(
default=0, min_value=0, required=False, initial=0,
help_text=_("Static minimum number of Instances that will be automatically assigned to "
"this group when new instances come online.")
)
policy_instance_list = serializers.ListField(
child=serializers.CharField(),
help_text=_("List of exact-match Instances that will be assigned to this group")
)
class Meta:
model = InstanceGroup
@ -4593,6 +4619,14 @@ class InstanceGroupSerializer(BaseSerializer):
res['controller'] = self.reverse('api:instance_group_detail', kwargs={'pk': obj.controller_id})
return res
def validate_policy_instance_list(self, value):
for instance_name in value:
if value.count(instance_name) > 1:
raise serializers.ValidationError(_('Duplicate entry {}.').format(instance_name))
if not Instance.objects.filter(hostname=instance_name).exists():
raise serializers.ValidationError(_('{} is not a valid hostname of an existing instance.').format(instance_name))
return value
def get_jobs_qs(self):
# Store running jobs queryset in context, so it will be shared in ListView
if 'running_jobs' not in self.context:
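As a reading aid, the duplicate/existence checks performed by validate_policy_instance_list above can be restated as a small standalone sketch; known_hostnames stands in for the Instance queryset lookup and is an assumption for illustration, not an AWX API:

def validate_policy_instance_list(value, known_hostnames):
    # Reject duplicate entries and hostnames that do not match an existing instance.
    for instance_name in value:
        if value.count(instance_name) > 1:
            raise ValueError('Duplicate entry {}.'.format(instance_name))
        if instance_name not in known_hostnames:
            raise ValueError('{} is not a valid hostname of an existing instance.'.format(instance_name))
    return value

# The first call passes; the commented call would raise because "node-z" is unknown.
validate_policy_instance_list(['node-a', 'node-b'], {'node-a', 'node-b'})
# validate_policy_instance_list(['node-a', 'node-z'], {'node-a', 'node-b'})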

View File

@ -15,7 +15,10 @@ CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'sate
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
PRIVILEGE_ESCALATION_METHODS = [
('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')),
('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas'))]
('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas')),
('enable', _('Enable')), ('doas', _('Doas')),
]
CHOICES_PRIVILEGE_ESCALATION_METHODS = [('', _('None'))] + PRIVILEGE_ESCALATION_METHODS
ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
ACTIVE_STATES = CAN_CANCEL
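The serializer and field changes elsewhere in this commit reduce these (value, label) pairs to bare values with operator.itemgetter(0); a quick self-contained sketch of the result, with the ugettext wrappers omitted as a simplification:

import operator

CHOICES_PRIVILEGE_ESCALATION_METHODS = [('', 'None')] + [
    ('sudo', 'Sudo'), ('su', 'Su'), ('pbrun', 'Pbrun'), ('pfexec', 'Pfexec'),
    ('dzdo', 'DZDO'), ('pmrun', 'Pmrun'), ('runas', 'Runas'),
    ('enable', 'Enable'), ('doas', 'Doas'),
]

values = list(map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS))
# ['', 'sudo', 'su', 'pbrun', 'pfexec', 'dzdo', 'pmrun', 'runas', 'enable', 'doas']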

View File

@ -4,6 +4,7 @@
# Python
import copy
import json
import operator
import re
import six
import urllib
@ -45,6 +46,7 @@ from awx.main.utils.filters import SmartFilter
from awx.main.utils.encryption import encrypt_value, decrypt_value, get_encryption_key
from awx.main.validators import validate_ssh_private_key
from awx.main.models.rbac import batch_role_ancestor_rebuilding, Role
from awx.main.constants import CHOICES_PRIVILEGE_ESCALATION_METHODS
from awx.main import utils
@ -57,7 +59,8 @@ __all__ = ['AutoOneToOneField', 'ImplicitRoleField', 'JSONField',
def __enum_validate__(validator, enums, instance, schema):
if instance not in enums:
yield jsonschema.exceptions.ValidationError(
_("'%s' is not one of ['%s']") % (instance, "', '".join(enums))
_("'{value}' is not one of ['{allowed_values}']").format(
value=instance, allowed_values="', '".join(enums))
)
@ -506,6 +509,9 @@ class CredentialInputField(JSONSchemaField):
properties = {}
for field in model_instance.credential_type.inputs.get('fields', []):
field = field.copy()
if field['type'] == 'become_method':
field.pop('type')
field['choices'] = map(operator.itemgetter(0), CHOICES_PRIVILEGE_ESCALATION_METHODS)
properties[field['id']] = field
if field.get('choices', []):
field['enum'] = field['choices'][:]
@ -649,7 +655,7 @@ class CredentialTypeInputField(JSONSchemaField):
'items': {
'type': 'object',
'properties': {
'type': {'enum': ['string', 'boolean']},
'type': {'enum': ['string', 'boolean', 'become_method']},
'format': {'enum': ['ssh_private_key']},
'choices': {
'type': 'array',
@ -710,10 +716,22 @@ class CredentialTypeInputField(JSONSchemaField):
# If no type is specified, default to string
field['type'] = 'string'
if field['type'] == 'become_method':
if not model_instance.managed_by_tower:
raise django_exceptions.ValidationError(
_('become_method is a reserved type name'),
code='invalid',
params={'value': value},
)
else:
field.pop('type')
field['choices'] = CHOICES_PRIVILEGE_ESCALATION_METHODS
for key in ('choices', 'multiline', 'format', 'secret',):
if key in field and field['type'] != 'string':
raise django_exceptions.ValidationError(
_('%s not allowed for %s type (%s)' % (key, field['type'], field['id'])),
_('{sub_key} not allowed for {element_type} type ({element_id})').format(
sub_key=key, element_type=field['type'], element_id=field['id']),
code='invalid',
params={'value': value},
)
@ -810,13 +828,15 @@ class CredentialTypeInjectorField(JSONSchemaField):
).from_string(tmpl).render(valid_namespace)
except UndefinedError as e:
raise django_exceptions.ValidationError(
_('%s uses an undefined field (%s)') % (key, e),
_('{sub_key} uses an undefined field ({error_msg})').format(
sub_key=key, error_msg=e),
code='invalid',
params={'value': value},
)
except TemplateSyntaxError as e:
raise django_exceptions.ValidationError(
_('Syntax error rendering template for %s inside of %s (%s)') % (key, type_, e),
_('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(
sub_key=key, type=type_, error_msg=e),
code='invalid',
params={'value': value},
)
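A minimal standalone sketch of the become_method rule added above: custom (non-managed) credential types may not declare the reserved type, while the managed ssh type has it expanded into a choices list. expand_become_method and CHOICES are illustrative names, not AWX APIs:

CHOICES = ['', 'sudo', 'su', 'pbrun', 'pfexec', 'dzdo', 'pmrun', 'runas', 'enable', 'doas']

def expand_become_method(field, managed_by_tower):
    # Mimic the validation above: 'become_method' is reserved for managed types.
    if field.get('type') == 'become_method':
        if not managed_by_tower:
            raise ValueError('become_method is a reserved type name')
        field = dict(field, choices=CHOICES)
        field.pop('type')
    return field

# Managed ssh type: the field becomes a plain choices field.
expand_become_method({'id': 'become_method', 'type': 'become_method'}, managed_by_tower=True)
# A custom type declaring the same field would raise ValueError.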

View File

@ -178,8 +178,6 @@ class InstanceGroupManager(models.Manager):
if t.status == 'waiting' or not t.execution_node:
# Subtract capacity from any peer groups that share instances
if not t.instance_group:
logger.warning('Excluded %s from capacity algorithm '
'(missing instance_group).', t.log_format)
impacted_groups = []
elif t.instance_group.name not in ig_ig_mapping:
# Waiting job in group with 0 capacity has no collateral impact

View File

@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# AWX
from awx.main.migrations import _credentialtypes as credentialtypes
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0035_v330_more_oauth2_help_text'),
]
operations = [
migrations.RunPython(credentialtypes.remove_become_methods),
]

View File

@ -197,3 +197,9 @@ def add_azure_cloud_environment_field(apps, schema_editor):
name='Microsoft Azure Resource Manager')
azure_rm_credtype.inputs = CredentialType.defaults.get('azure_rm')().inputs
azure_rm_credtype.save()
def remove_become_methods(apps, schema_editor):
become_credtype = CredentialType.objects.filter(kind='ssh', managed_by_tower=True).first()
become_credtype.inputs = CredentialType.defaults.get('ssh')().inputs
become_credtype.save()

View File

@ -256,6 +256,7 @@ class PrimordialModel(CreatedModifiedModel):
def save(self, *args, **kwargs):
update_fields = kwargs.get('update_fields', [])
fields_are_specified = bool(update_fields)
user = get_current_user()
if user and not user.id:
user = None
@ -263,9 +264,14 @@ class PrimordialModel(CreatedModifiedModel):
self.created_by = user
if 'created_by' not in update_fields:
update_fields.append('created_by')
self.modified_by = user
if 'modified_by' not in update_fields:
update_fields.append('modified_by')
# Update modified_by if not called with update_fields, or if any
# editable fields are present in update_fields
if (
(not fields_are_specified) or
any(getattr(self._meta.get_field(name), 'editable', True) for name in update_fields)):
self.modified_by = user
if 'modified_by' not in update_fields:
update_fields.append('modified_by')
super(PrimordialModel, self).save(*args, **kwargs)
def clean_description(self):
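The intent of the new modified_by guard can be restated as a small standalone predicate; the editable_by_name mapping and the field names in the example are assumptions for illustration (the real code reads editability from self._meta.get_field):

def should_update_modified_by(update_fields, editable_by_name):
    # True when save() was called without update_fields, or when at least one of
    # the named fields is editable; pure bookkeeping updates (e.g. a parent's
    # current_job pointer) should not bump modified_by.
    if not update_fields:
        return True
    return any(editable_by_name.get(name, True) for name in update_fields)

should_update_modified_by([], {})                                    # True
should_update_modified_by(['current_job'], {'current_job': False})   # False
should_update_modified_by(['description'], {'description': True})    # True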

View File

@ -4,7 +4,6 @@ from collections import OrderedDict
import functools
import json
import logging
import operator
import os
import re
import stat
@ -22,7 +21,6 @@ from django.utils.encoding import force_text
# AWX
from awx.api.versioning import reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.fields import (ImplicitRoleField, CredentialInputField,
CredentialTypeInputField,
CredentialTypeInjectorField)
@ -35,6 +33,7 @@ from awx.main.models.rbac import (
ROLE_SINGLETON_SYSTEM_AUDITOR,
)
from awx.main.utils import encrypt_field
from awx.main.constants import CHOICES_PRIVILEGE_ESCALATION_METHODS
from . import injectors as builtin_injectors
__all__ = ['Credential', 'CredentialType', 'V1Credential', 'build_safe_env']
@ -165,7 +164,7 @@ class V1Credential(object):
max_length=32,
blank=True,
default='',
choices=[('', _('None'))] + PRIVILEGE_ESCALATION_METHODS,
choices=CHOICES_PRIVILEGE_ESCALATION_METHODS,
help_text=_('Privilege escalation method.')
),
'become_username': models.CharField(
@ -516,7 +515,7 @@ class CredentialType(CommonModelNameNotUnique):
if field['id'] == field_id:
if 'choices' in field:
return field['choices'][0]
return {'string': '', 'boolean': False}[field['type']]
return {'string': '', 'boolean': False, 'become_method': ''}[field['type']]
@classmethod
def default(cls, f):
@ -708,8 +707,7 @@ def ssh(cls):
}, {
'id': 'become_method',
'label': 'Privilege Escalation Method',
'choices': map(operator.itemgetter(0),
V1Credential.FIELDS['become_method'].choices),
'type': 'become_method',
'help_text': ('Specify a method for "become" operations. This is '
'equivalent to specifying the --become-method '
'Ansible parameter.')

View File

@ -2,7 +2,7 @@ import datetime
import logging
from django.conf import settings
from django.db import models
from django.db import models, DatabaseError
from django.utils.dateparse import parse_datetime
from django.utils.timezone import utc
from django.utils.translation import ugettext_lazy as _
@ -15,6 +15,8 @@ from awx.main.utils import ignore_inventory_computed_fields
analytics_logger = logging.getLogger('awx.analytics.job_events')
logger = logging.getLogger('awx.main.models.events')
__all__ = ['JobEvent', 'ProjectUpdateEvent', 'AdHocCommandEvent',
'InventoryUpdateEvent', 'SystemJobEvent']
@ -323,7 +325,10 @@ class BasePlaybookEvent(CreatedModifiedModel):
hostnames = self._hostnames()
self._update_host_summary_from_stats(hostnames)
self.job.inventory.update_computed_fields()
try:
self.job.inventory.update_computed_fields()
except DatabaseError:
logger.exception('Computed fields database error saving event {}'.format(self.pk))
@ -441,6 +446,9 @@ class JobEvent(BasePlaybookEvent):
def _update_host_summary_from_stats(self, hostnames):
with ignore_inventory_computed_fields():
if not self.job or not self.job.inventory:
logger.info('Event {} missing job or inventory, host summaries not updated'.format(self.pk))
return
qs = self.job.inventory.hosts.filter(name__in=hostnames)
job = self.job
for host in hostnames:

View File

@ -192,9 +192,8 @@ class JobOrigin(models.Model):
@receiver(post_save, sender=InstanceGroup)
def on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs):
if created:
from awx.main.tasks import apply_cluster_membership_policies
connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
from awx.main.tasks import apply_cluster_membership_policies
connection.on_commit(lambda: apply_cluster_membership_policies.apply_async())
@receiver(post_save, sender=Instance)

View File

@ -538,7 +538,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
for virtualenv in (
self.job_template.custom_virtualenv if self.job_template else None,
self.project.custom_virtualenv,
self.project.organization.custom_virtualenv
self.project.organization.custom_virtualenv if self.project.organization else None
):
if virtualenv:
return virtualenv

View File

@ -263,14 +263,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
if field not in update_fields:
update_fields.append(field)
# Do the actual save.
try:
super(UnifiedJobTemplate, self).save(*args, **kwargs)
except ValueError:
# A fix for https://trello.com/c/S4rU1F21
# Does not resolve the root cause. Tis merely a bandaid.
if 'scm_delete_on_next_update' in update_fields:
update_fields.remove('scm_delete_on_next_update')
super(UnifiedJobTemplate, self).save(*args, **kwargs)
super(UnifiedJobTemplate, self).save(*args, **kwargs)
def _get_current_status(self):
@ -722,7 +715,10 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
def _get_parent_instance(self):
return getattr(self, self._get_parent_field_name(), None)
def _update_parent_instance_no_save(self, parent_instance, update_fields=[]):
def _update_parent_instance_no_save(self, parent_instance, update_fields=None):
if update_fields is None:
update_fields = []
def parent_instance_set(key, val):
setattr(parent_instance, key, val)
if key not in update_fields:

View File

@ -474,7 +474,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
@property
def preferred_instance_groups(self):
return self.global_instance_groups
return []
'''
A WorkflowJob is a virtual job. It doesn't result in a celery task.

View File

@ -259,7 +259,7 @@ class TaskManager():
else:
if type(task) is WorkflowJob:
task.status = 'running'
if not task.supports_isolation() and rampart_group.controller_id:
elif not task.supports_isolation() and rampart_group.controller_id:
# non-Ansible jobs on isolated instances run on controller
task.instance_group = rampart_group.controller
logger.info('Submitting isolated %s to queue %s via %s.',
@ -271,7 +271,8 @@ class TaskManager():
task.celery_task_id = str(uuid.uuid4())
task.save()
self.consume_capacity(task, rampart_group.name)
if rampart_group is not None:
self.consume_capacity(task, rampart_group.name)
def post_commit():
task.websocket_emit_status(task.status)
@ -281,7 +282,7 @@ class TaskManager():
connection.on_commit(post_commit)
def process_running_tasks(self, running_tasks):
map(lambda task: self.graph[task.instance_group.name]['graph'].add_job(task), running_tasks)
map(lambda task: self.graph[task.instance_group.name]['graph'].add_job(task) if task.instance_group else None, running_tasks)
def create_project_update(self, task):
project_task = Project.objects.get(id=task.project_id).create_project_update(
@ -447,6 +448,9 @@ class TaskManager():
continue
preferred_instance_groups = task.preferred_instance_groups
found_acceptable_queue = False
if isinstance(task, WorkflowJob):
self.start_task(task, None, task.get_jobs_fail_chain())
continue
for rampart_group in preferred_instance_groups:
remaining_capacity = self.get_remaining_capacity(rampart_group.name)
if remaining_capacity <= 0:
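Condensed, the scheduler changes above say: workflow jobs are virtual, so they are started with no instance group, and capacity is only consumed when a real group was chosen. A sketch under those assumptions, with start_task and consume_capacity passed in as stand-ins for the TaskManager methods:

def dispatch(task, preferred_groups, is_workflow_job, start_task, consume_capacity):
    if is_workflow_job:
        start_task(task, None)          # no instance group, no capacity consumed
        return
    for group in preferred_groups:
        start_task(task, group)
        consume_capacity(task, group)   # only when a group was actually chosen
        return

started = []
dispatch('wf-job', ['tower'], True,
         start_task=lambda t, g: started.append((t, g)),
         consume_capacity=lambda t, g: None)
# started == [('wf-job', None)]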

View File

@ -1,6 +1,7 @@
import pytest
from awx.main.models import JobTemplate, Job
from crum import impersonate
@pytest.mark.django_db
@ -49,3 +50,18 @@ def test_awx_custom_virtualenv_without_jt(project):
job = Job.objects.get(pk=job.id)
assert job.ansible_virtualenv_path == '/venv/fancy-proj'
@pytest.mark.django_db
def test_update_parent_instance(job_template, alice):
# jobs are launched as a particular user, user not saved as modified_by
with impersonate(alice):
assert job_template.current_job is None
assert job_template.status == 'never updated'
assert job_template.modified_by is None
job = job_template.jobs.create(status='new')
job.status = 'pending'
job.save()
assert job_template.current_job == job
assert job_template.status == 'pending'
assert job_template.modified_by is None

View File

@ -2,7 +2,7 @@ import pytest
import mock
from datetime import timedelta
from awx.main.scheduler import TaskManager
from awx.main.models import InstanceGroup
from awx.main.models import InstanceGroup, WorkflowJob
from awx.main.tasks import apply_cluster_membership_policies
@ -77,6 +77,18 @@ def test_multi_group_with_shared_dependency(instance_factory, default_instance_g
assert TaskManager.start_task.call_count == 2
@pytest.mark.django_db
def test_workflow_job_no_instancegroup(workflow_job_template_factory, default_instance_group, mocker):
wfjt = workflow_job_template_factory('anicedayforawalk').workflow_job_template
wfj = WorkflowJob.objects.create(workflow_job_template=wfjt)
wfj.status = "pending"
wfj.save()
with mocker.patch("awx.main.scheduler.TaskManager.start_task"):
TaskManager().schedule()
TaskManager.start_task.assert_called_once_with(wfj, None, [])
assert wfj.instance_group is None
@pytest.mark.django_db
def test_overcapacity_blocking_other_groups_unaffected(instance_factory, default_instance_group, mocker,
instance_group_factory, job_template_factory):

View File

@ -60,6 +60,21 @@ def test_policy_instance_few_instances(mock, instance_factory, instance_group_fa
assert i2 in ig_4.instances.all()
@pytest.mark.django_db
@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
def test_policy_instance_distribution_round_up(mock, instance_factory, instance_group_factory):
i1 = instance_factory("i1")
i2 = instance_factory("i2")
i3 = instance_factory("i3")
i4 = instance_factory("i4")
i5 = instance_factory("i5")
ig_1 = instance_group_factory("ig1", percentage=79)
apply_cluster_membership_policies()
assert len(ig_1.instances.all()) == 4
assert set([i1, i2, i3, i4]) == set(ig_1.instances.all())
assert i5 not in ig_1.instances.all()
@pytest.mark.django_db
@mock.patch('awx.main.tasks.handle_ha_toplogy_changes', return_value=None)
def test_policy_instance_distribution_uneven(mock, instance_factory, instance_group_factory):

View File

@ -80,7 +80,7 @@ class NetworkingEvents(object):
type='device_type',
id='cid',
host_id='host_id'), device)
logger.info("Device %s", device)
logger.info("Device created %s", device)
d, _ = Device.objects.get_or_create(topology_id=topology_id, cid=device['cid'], defaults=device)
d.x = device['x']
d.y = device['y']
@ -92,6 +92,7 @@ class NetworkingEvents(object):
.update(device_id_seq=device['cid']))
def onDeviceDestroy(self, device, topology_id, client_id):
logger.info("Device removed %s", device)
Device.objects.filter(topology_id=topology_id, cid=device['id']).delete()
def onDeviceMove(self, device, topology_id, client_id):
@ -101,6 +102,7 @@ class NetworkingEvents(object):
Device.objects.filter(topology_id=topology_id, cid=device['id']).update(host_id=device['host_id'])
def onDeviceLabelEdit(self, device, topology_id, client_id):
logger.debug("Device label edited %s", device)
Device.objects.filter(topology_id=topology_id, cid=device['id']).update(name=device['name'])
def onInterfaceLabelEdit(self, interface, topology_id, client_id):
@ -111,6 +113,7 @@ class NetworkingEvents(object):
.update(name=interface['name']))
def onLinkLabelEdit(self, link, topology_id, client_id):
logger.debug("Link label edited %s", link)
Link.objects.filter(from_device__topology_id=topology_id, cid=link['id']).update(name=link['name'])
def onInterfaceCreate(self, interface, topology_id, client_id):
@ -125,6 +128,7 @@ class NetworkingEvents(object):
.update(interface_id_seq=interface['id']))
def onLinkCreate(self, link, topology_id, client_id):
logger.debug("Link created %s", link)
device_map = dict(Device.objects
.filter(topology_id=topology_id, cid__in=[link['from_device_id'], link['to_device_id']])
.values_list('cid', 'pk'))
@ -141,6 +145,7 @@ class NetworkingEvents(object):
.update(link_id_seq=link['id']))
def onLinkDestroy(self, link, topology_id, client_id):
logger.debug("Link deleted %s", link)
device_map = dict(Device.objects
.filter(topology_id=topology_id, cid__in=[link['from_device_id'], link['to_device_id']])
.values_list('cid', 'pk'))

View File

@ -287,11 +287,23 @@ class VMWareInventory(object):
self.debugl('lower keys is %s' % self.lowerkeys)
self.skip_keys = list(config.get('vmware', 'skip_keys').split(','))
self.debugl('skip keys is %s' % self.skip_keys)
self.host_filters = list(config.get('vmware', 'host_filters').split(','))
temp_host_filters = list(config.get('vmware', 'host_filters').split('}},'))
for host_filter in temp_host_filters:
host_filter = host_filter.rstrip()
if host_filter != "":
if not host_filter.endswith("}}"):
host_filter += "}}"
self.host_filters.append(host_filter)
self.debugl('host filters are %s' % self.host_filters)
self.groupby_patterns = list(config.get('vmware', 'groupby_patterns').split(','))
self.debugl('groupby patterns are %s' % self.groupby_patterns)
temp_groupby_patterns = list(config.get('vmware', 'groupby_patterns').split('}},'))
for groupby_pattern in temp_groupby_patterns:
groupby_pattern = groupby_pattern.rstrip()
if groupby_pattern != "":
if not groupby_pattern.endswith("}}"):
groupby_pattern += "}}"
self.groupby_patterns.append(groupby_pattern)
self.debugl('groupby patterns are %s' % self.groupby_patterns)
# Special feature to disable the brute force serialization of the
# virtual machine objects. The key name for these properties does not
# matter because the values are just items for a larger list.
@ -491,7 +503,7 @@ class VMWareInventory(object):
keylist = map(lambda x: x.strip(), tv['value'].split(','))
for kl in keylist:
try:
newkey = self.config.get('vmware', 'custom_field_group_prefix') + field_name + '_' + kl
newkey = self.config.get('vmware', 'custom_field_group_prefix') + str(field_name) + '_' + kl
newkey = newkey.strip()
except Exception as e:
self.debugl(e)
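The host_filters/groupby_patterns change above exists because Jinja expressions can legitimately contain commas, so the script now splits on the '}},' boundary instead of on every comma. A self-contained sketch of that parsing, with a made-up filter string for illustration:

def split_jinja_list(raw):
    # Split a comma-separated list of Jinja expressions on the '}}' boundary so
    # commas inside an individual expression are preserved.
    results = []
    for chunk in raw.split('}},'):
        chunk = chunk.rstrip()
        if chunk != "":
            if not chunk.endswith("}}"):
                chunk += "}}"
            results.append(chunk)
    return results

# Yields two filters even though the first expression itself contains a comma:
split_jinja_list('{{ config.name in ["a", "b"] }},{{ guest.gueststate == "running" }}')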

View File

@ -13,6 +13,7 @@ function JobsStrings (BaseString) {
ROW_ITEM_LABEL_INVENTORY: t.s('Inventory'),
ROW_ITEM_LABEL_PROJECT: t.s('Project'),
ROW_ITEM_LABEL_CREDENTIALS: t.s('Credentials'),
NO_RUNNING: t.s('There are no running jobs.')
};
}

View File

@ -44,6 +44,10 @@ function ListJobsController (
});
});
if ($state.includes('instanceGroups')) {
vm.emptyListReason = strings.get('list.NO_RUNNING');
}
vm.jobTypes = mapChoices(unifiedJob
.options('actions.GET.type.choices'));

View File

@ -12,7 +12,7 @@
query-set="querySet">
</smart-search>
</div>
<at-list results="jobs">
<at-list results="jobs" empty-list-reason="{{ vm.emptyListReason }}">
<!-- TODO: implement resources are missing red indicator as present in mockup -->
<at-row ng-repeat="job in jobs" job-id="{{ job.id }}">
<div class="at-Row-items">

View File

@ -16,7 +16,8 @@ export default {
job_search: {
value: {
page_size: '10',
order_by: '-finished'
order_by: '-id',
status: 'running'
},
dynamic: true
}

View File

@ -26,7 +26,7 @@ function HostEventsController (
$scope.module_name = 'No result found';
}
if (_.has(hostEvent.event_data, 'res.result.stdout')) {
if (_.has(hostEvent.event_data, 'res.stdout')) {
if (hostEvent.event_data.res.stdout === '') {
$scope.stdout = ' ';
} else {
@ -34,7 +34,7 @@ function HostEventsController (
}
}
if (_.has(hostEvent.event_data, 'res.result.stderr')) {
if (_.has(hostEvent.event_data, 'res.stderr')) {
if (hostEvent.event_data.res.stderr === '') {
$scope.stderr = ' ';
} else {

View File

@ -14,7 +14,7 @@ function exit () {
}
function HostEventResolve (HostEventService, $stateParams) {
return HostEventService.getRelatedJobEvents($stateParams.id, {
return HostEventService.getRelatedJobEvents($stateParams.id, $stateParams.type, {
id: $stateParams.eventId
}).then((response) => response.data.results[0]);
}

View File

@ -4,13 +4,22 @@ function HostEventService (
GetBasePath,
$rootScope
) {
this.getUrl = (id, type, params) => {
let url;
if (type === 'playbook') {
url = `${GetBasePath('jobs')}${id}/job_events/?${this.stringifyParams(params)}`;
} else if (type === 'command') {
url = `${GetBasePath('ad_hoc_commands')}${id}/events/?${this.stringifyParams(params)}`;
}
return url;
};
// GET events related to a job run
// e.g.
// ?event=playbook_on_stats
// ?parent=206&event__startswith=runner&page_size=200&order=host_name,counter
this.getRelatedJobEvents = (id, params) => {
let url = GetBasePath('jobs');
url = `${url}${id}/job_events/?${this.stringifyParams(params)}`;
this.getRelatedJobEvents = (id, type, params) => {
const url = this.getUrl(id, type, params);
Rest.setUrl(url);
return Rest.get()
.then(response => response)

View File

@ -169,7 +169,7 @@ function JobRenderService ($q, $sce, $window) {
}
if (current.isHost) {
tdEvent = `<td class="at-Stdout-event--host" ui-sref="jobz.host-event.json({eventId: ${current.id}, taskUuid: '${current.uuid}' })">${content}</td>`;
tdEvent = `<td class="at-Stdout-event--host" ui-sref="jobz.host-event.json({eventId: ${current.id}, taskUuid: '${current.uuid}' })"><span ng-non-bindable>${content}</span></td>`;
}
if (current.time && current.line === ln) {

View File

@ -77,11 +77,11 @@
</at-row-item>
<at-row-item
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_MODIFIED') }}"
value="{{ vm.getModified(template) }}">
value-bind-html="{{ vm.getModified(template) }}">
</at-row-item>
<at-row-item
label-value="{{:: vm.strings.get('list.ROW_ITEM_LABEL_RAN') }}"
value="{{ vm.getLastRan(template) }}">
value-bind-html="{{ vm.getLastRan(template) }}">
</at-row-item>
<labels-list class="LabelList" show-delete="false" is-row-item="true" state="template">
</labels-list>

View File

@ -1,6 +1,6 @@
const templateUrl = require('~components/layout/layout.partial.html');
function AtLayoutController ($scope, strings, $transitions) {
function AtLayoutController ($scope, $http, strings, ProcessErrors, $transitions) {
const vm = this || {};
$transitions.onSuccess({}, (transition) => {
@ -9,10 +9,14 @@ function AtLayoutController ($scope, strings, $transitions) {
$scope.$watch('$root.current_user', (val) => {
vm.isLoggedIn = val && val.username;
if (val) {
if (!_.isEmpty(val)) {
vm.isSuperUser = $scope.$root.user_is_superuser || $scope.$root.user_is_system_auditor;
vm.currentUsername = val.username;
vm.currentUserId = val.id;
if (!vm.isSuperUser) {
checkOrgAdmin();
}
}
});
@ -32,9 +36,27 @@ function AtLayoutController ($scope, strings, $transitions) {
return strings.get(string);
}
};
function checkOrgAdmin () {
const usersPath = `/api/v2/users/${vm.currentUserId}/admin_of_organizations/`;
$http.get(usersPath)
.then(({ data }) => {
if (data.count > 0) {
vm.isOrgAdmin = true;
} else {
vm.isOrgAdmin = false;
}
})
.catch(({ data, status }) => {
ProcessErrors(null, data, status, null, {
hdr: strings.get('error.HEADER'),
msg: strings.get('error.CALL', { path: usersPath, action: 'GET', status })
});
});
}
}
AtLayoutController.$inject = ['$scope', 'ComponentsStrings', '$transitions'];
AtLayoutController.$inject = ['$scope', '$http', 'ComponentsStrings', 'ProcessErrors', '$transitions'];
function atLayout () {
return {

View File

@ -85,7 +85,7 @@
system-admin-only="true">
</at-side-nav-item>
<at-side-nav-item icon-class="fa-server" route="instanceGroups" name="INSTANCE_GROUPS"
system-admin-only="true">
ng-show="$parent.layoutVm.isSuperUser || $parent.layoutVm.isOrgAdmin">
</at-side-nav-item>
<at-side-nav-item icon-class="fa-cubes" route="applications" name="APPLICATIONS"
system-admin-only="true">

View File

@ -4,7 +4,7 @@ function atSideNavItemLink (scope, element, attrs, ctrl) {
[scope.navVm, scope.layoutVm] = ctrl;
}
function AtSideNavItemController ($state, $scope, strings) {
function AtSideNavItemController ($scope, strings) {
const vm = this || {};
$scope.$watch('layoutVm.currentState', current => {
@ -21,10 +21,6 @@ function AtSideNavItemController ($state, $scope, strings) {
}
});
vm.go = () => {
$state.go($scope.route, {}, { reload: true });
};
vm.tooltip = {
popover: {
text: strings.get(`layout.${$scope.name}`),
@ -36,7 +32,7 @@ function AtSideNavItemController ($state, $scope, strings) {
};
}
AtSideNavItemController.$inject = ['$state', '$scope', 'ComponentsStrings'];
AtSideNavItemController.$inject = ['$scope', 'ComponentsStrings'];
function atSideNavItem () {
return {

View File

@ -1,4 +1,4 @@
<div class="at-Layout-sideNavItem" ng-click="vm.go()" ng-class="{'is-active': vm.isRoute}"
<a class="at-Layout-sideNavItem" ui-sref="{{ route }}" ng-class="{'is-active': vm.isRoute}"
ng-show="(!systemAdminOnly || layoutVm.isSuperUser) && layoutVm.isLoggedIn &&
!layoutVm.licenseIsMissing">
<at-popover state="vm.tooltip" ng-if="!navVm.isExpanded"></at-popover>
@ -7,4 +7,4 @@
<span class="at-Layout-sideNavItemName" ng-show="navVm.isExpanded">
{{ layoutVm.getString(name) }}
</span>
</div>
</a>

View File

@ -20,6 +20,7 @@ function atList () {
templateUrl,
scope: {
results: '=',
emptyListReason: '@'
},
link: atListLink,
controller: AtListController,

View File

@ -19,6 +19,7 @@ function atRowItem () {
labelState: '@',
value: '@',
valueLink: '@',
valueBindHtml: '@',
smartStatus: '=?',
tagValues: '=?',
// TODO: add see more for tags if applicable

View File

@ -1,5 +1,5 @@
<div class="at-RowItem" ng-class="{'at-RowItem--isHeader': headerValue, 'at-RowItem--inline': inline}"
ng-show="status || headerValue || value || (smartStatus && smartStatus.summary_fields.recent_jobs.length) || (tagValues && tagValues.length)">
ng-show="status || headerValue || value || valueBindHtml || (smartStatus && smartStatus.summary_fields.recent_jobs.length) || (tagValues && tagValues.length)">
<div class="at-RowItem-status" ng-if="status">
<a ng-if="headerLink" ng-href="{{ headerLink }}"
aw-tool-tip="{{ statusTip }}" aw-tip-watch="statusTip"
@ -32,6 +32,9 @@
<div class="at-RowItem-value" ng-class="{'at-RowItem-badge': badge}" ng-if="value && !valueLink"
ng-bind-html="value">
</div>
<div class="at-RowItem-value" ng-class="{'at-RowItem-badge': badge}" ng-if="valueBindHtml"
ng-bind-html="valueBindHtml">
</div>
<aw-smart-status jobs="smartStatus.summary_fields.recent_jobs"
template-type="smartStatus.type" ng-if="smartStatus">
</aw-smart-status>

View File

@ -25,10 +25,8 @@ angular.module('credentialTypes', [
function($stateProvider, stateDefinitionsProvider) {
let stateDefinitions = stateDefinitionsProvider.$get();
$stateProvider.state({
name: 'credentialTypes.**',
url: '/credential_type',
lazyLoad: () => stateDefinitions.generateTree({
function generateStateTree() {
let credentialTypesTree = stateDefinitions.generateTree({
parent: 'credentialTypes',
modes: ['add', 'edit'],
list: 'CredentialTypesList',
@ -45,7 +43,22 @@ angular.module('credentialTypes', [
ncyBreadcrumb: {
label: N_('CREDENTIAL TYPES')
}
})
});
});
return Promise.all([
credentialTypesTree
]).then((generated) => {
return {
states: _.reduce(generated, (result, definition) => {
return result.concat(definition.states);
}, [])
};
});
}
let stateTree = {
name: 'credentialTypes.**',
url: '/credential_types',
lazyLoad: () => generateStateTree()
};
$stateProvider.state(stateTree);
}
]);

View File

@ -28,6 +28,7 @@ function EditController ($rootScope, $state, models, strings) {
vm.form.disabled = !instanceGroup.has('options', 'actions.PUT');
vm.form.name._disabled = instanceGroup.get('name') === 'tower';
vm.form.policy_instance_list._lookupTags = true;
vm.form.policy_instance_list._model = instance;
vm.form.policy_instance_list._placeholder = "Policy Instance List";

View File

@ -25,10 +25,8 @@ angular.module('inventoryScripts', [
function($stateProvider, stateDefinitionsProvider) {
let stateDefinitions = stateDefinitionsProvider.$get();
$stateProvider.state({
name: 'inventoryScripts.**',
url: '/inventory_script',
lazyLoad: () => stateDefinitions.generateTree({
function generateStateTree() {
let inventoryScriptTree = stateDefinitions.generateTree({
parent: 'inventoryScripts',
modes: ['add', 'edit'],
list: 'InventoryScriptsList',
@ -66,7 +64,23 @@ angular.module('inventoryScripts', [
ncyBreadcrumb: {
label: N_('INVENTORY SCRIPTS')
}
})
});
});
return Promise.all([
inventoryScriptTree
]).then((generated) => {
return {
states: _.reduce(generated, (result, definition) => {
return result.concat(definition.states);
}, [])
};
});
}
let stateTree = {
name: 'inventoryScripts.**',
url: '/inventory_scripts',
lazyLoad: () => generateStateTree()
};
$stateProvider.state(stateTree);
}
]);

View File

@ -32,7 +32,7 @@
<div class="Prompt-previewRowTitle">{{:: vm.strings.get('prompt.LIMIT') }}</div>
<div class="Prompt-previewRowValue" ng-bind="promptData.prompts.limit.value"></div>
</div>
<div class="Prompt-previewRow--flex" ng-if="promptData.prompts.inventory.value.label">
<div class="Prompt-previewRow--flex" ng-if="promptData.prompts.verbosity.value.label">
<div class="Prompt-previewRowTitle">{{:: vm.strings.get('prompt.VERBOSITY') }}</div>
<div class="Prompt-previewRowValue" ng-bind="promptData.prompts.verbosity.value.label"></div>
</div>

View File

@ -190,9 +190,6 @@ module.exports = {
credentials.section.navigation.expect.element('@credentials').enabled;
credentials.section.navigation.click('@credentials');
credentials.waitForElementVisible('div.spinny');
credentials.waitForElementNotVisible('div.spinny');
credentials.section.list.waitForElementVisible('@add');
credentials.section.list.expect.element('@add').enabled;
credentials.section.list.click('@add');
@ -219,7 +216,6 @@ module.exports = {
credentials.section.navigation.expect.element('@credentials').enabled;
credentials.section.navigation.click('@credentials');
credentials.waitForElementVisible('div.spinny');
credentials.waitForElementNotVisible('div.spinny');
credentials.section.list.waitForElementVisible('@add');

View File

@ -1,6 +1,7 @@
describe('Components | Layout', () => {
let $compile;
let $rootScope;
let $httpBackend;
let element;
let scope;
@ -10,11 +11,14 @@ describe('Components | Layout', () => {
angular.mock.module('ui.router');
angular.mock.module('at.lib.services');
angular.mock.module('at.lib.components');
angular.mock.module('Utilities');
angular.mock.module('ngCookies');
});
beforeEach(angular.mock.inject((_$compile_, _$rootScope_) => {
beforeEach(angular.mock.inject((_$compile_, _$rootScope_, _$httpBackend_) => {
$compile = _$compile_;
$rootScope = _$rootScope_;
$httpBackend = _$httpBackend_;
scope = $rootScope.$new();
element = angular.element('<at-layout></at-layout>');
@ -26,7 +30,15 @@ describe('Components | Layout', () => {
let controller;
beforeEach(() => {
const mockResponse = {
data: {
count: 3
}
};
controller = element.controller('atLayout');
$httpBackend.when('GET', /admin_of_organizations/)
.respond(mockResponse);
});
xit('$scope.$on($stateChangeSuccess) should assign toState name to currentState', () => {

View File

@ -10,6 +10,8 @@ describe('Components | Side Nav Item', () => {
angular.mock.module('ui.router');
angular.mock.module('at.lib.services');
angular.mock.module('at.lib.components');
angular.mock.module('Utilities');
angular.mock.module('ngCookies');
});
beforeEach(angular.mock.inject((_$compile_, _$rootScope_) => {
@ -44,13 +46,6 @@ describe('Components | Side Nav Item', () => {
expect(SideNavItemCtrl.isRoute).toBe(false);
});
it('go() should call $state.go()', angular.mock.inject((_$state_) => {
spyOn(_$state_, 'go');
SideNavItemCtrl.go();
expect(_$state_.go).toHaveBeenCalled();
expect(_$state_.go).toHaveBeenCalledWith('dashboard', jasmine.any(Object), jasmine.any(Object));
}));
it('should load name, icon, and route from scope', () => {
expect(SideNavItem.isolateScope().name).toBeDefined();
expect(SideNavItem.isolateScope().iconClass).toBeDefined();

View File

@ -15,6 +15,8 @@ describe('Components | Side Nav', () => {
angular.mock.module('at.lib.components', ($provide) => {
$provide.value('$window', windowMock);
});
angular.mock.module('Utilities');
angular.mock.module('ngCookies');
});
beforeEach(angular.mock.inject((_$compile_, _$rootScope_) => {

View File

@ -5,8 +5,7 @@ asgiref==1.1.2
azure==3.0.0
backports.ssl-match-hostname==3.5.0.1
boto==2.47.0
boto3==1.6.2
botocore<1.9.8 # botocore 1.9.8 pinned python-dateutil < 2.7.0 (our TZID fixes) https://github.com/boto/botocore/pull/1402
boto3==1.7.6
channels==1.1.8
celery==3.1.25
daphne==1.3.0 # Last before backwards-incompatible channels 2 upgrade

View File

@ -95,9 +95,9 @@ backports.functools-lru-cache==1.5 # via jaraco.functools
backports.ssl-match-hostname==3.5.0.1
baron==0.6.6 # via redbaron
billiard==3.3.0.23 # via celery
boto3==1.6.2
boto3==1.7.6
boto==2.47.0
botocore==1.9.7
botocore==1.10.6
celery==3.1.25
certifi==2018.1.18 # via msrest
cffi==1.11.5 # via azure-datalake-store, cryptography