Mirror of https://github.com/ansible/awx.git (synced 2026-01-11 10:00:01 -03:30)
Merge branch 'release_3.0.0' into devel
* release_3.0.0: (270 commits)
  Inventory Manage > copy/move groups disable copy option where impossible, add to Root Group target, resolves #1749 (#2218)
  fixes access issue for InventoryScript.admin_role
  Make sure project team list is filtered for access
  Fix up the project teams list
  fix api test
  fix tests with refreshes
  adjusting Credential model and migrations
  adjusting API for new Credential.organization
  Fix Openstack inventory on Ubuntu 12 (#2318)
  Attach labels instead of erroring on creation if label already exists
  Fix system-tracking typo
  update test_rbac_api to new object_roles naming
  Fixing Credential access issue
  Fix an issue calling build_env for system jobs
  remove dead fields from Groups > Add manual source type, resovles #2288 (#2305)
  fixes regression on license expiresOn display, resolves #2277 (#2287)
  fix edit action in Jobs > Schedules tab view, resolves #2258 (#2292)
  Fixed several bugs with adding permissions where checkboxes weren't checked properly or were disappearing when paging was involved.
  specify playbook vars in a way that works with 1.9
  Change ldap and other sso defaults to remove from team/admin
  ...
parent bbc84568a7
commit 22437f80ed
@@ -18,7 +18,6 @@ include tools/scripts/request_tower_configuration.sh
include tools/scripts/request_tower_configuration.ps1
include tools/scripts/ansible-tower-service
include tools/scripts/tower-python
include tools/munin_monitors/*
include tools/sosreport/*
include COPYING
include Makefile

Makefile (12 lines changed)
@@ -258,7 +258,9 @@ virtualenv_ansible:
mkdir $(VENV_BASE); \
fi; \
if [ ! -d "$(VENV_BASE)/ansible" ]; then \
virtualenv --system-site-packages $(VENV_BASE)/ansible; \
virtualenv --system-site-packages --setuptools $(VENV_BASE)/ansible && \
$(VENV_BASE)/ansible/bin/pip install -I setuptools==23.0.0 && \
$(VENV_BASE)/ansible/bin/pip install -I pip==8.1.1; \
fi; \
fi

@@ -268,17 +270,17 @@ virtualenv_tower:
mkdir $(VENV_BASE); \
fi; \
if [ ! -d "$(VENV_BASE)/tower" ]; then \
virtualenv --system-site-packages $(VENV_BASE)/tower; \
virtualenv --system-site-packages --setuptools $(VENV_BASE)/tower && \
$(VENV_BASE)/tower/bin/pip install -I setuptools==23.0.0 && \
$(VENV_BASE)/tower/bin/pip install -I pip==8.1.1; \
fi; \
fi

requirements_ansible: virtualenv_ansible
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/ansible/bin/activate; \
$(VENV_BASE)/ansible/bin/pip install -U pip==8.1.1; \
$(VENV_BASE)/ansible/bin/pip install -r requirements/requirements_ansible.txt ;\
else \
pip install -U pip==8.1.1; \
pip install -r requirements/requirements_ansible.txt ; \
fi

@@ -286,10 +288,8 @@ requirements_ansible: virtualenv_ansible
requirements_tower: virtualenv_tower
if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/tower/bin/activate; \
$(VENV_BASE)/tower/bin/pip install -U pip==8.1.1; \
$(VENV_BASE)/tower/bin/pip install -r requirements/requirements.txt ;\
else \
pip install -U pip==8.1.1; \
pip install -r requirements/requirements.txt ; \
fi
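The recipe above boils down to: create the virtualenv once, then pin setuptools and pip inside it. A rough Python equivalent of those shell steps (illustrative only, not part of the change; it assumes the virtualenv CLI is installed):

import os
import subprocess

def bootstrap_venv(venv_base, name):
    """Create <venv_base>/<name> and pin setuptools/pip, mirroring the Makefile recipe."""
    path = os.path.join(venv_base, name)
    if not os.path.isdir(path):
        subprocess.check_call(["virtualenv", "--system-site-packages", "--setuptools", path])
        pip = os.path.join(path, "bin", "pip")
        subprocess.check_call([pip, "install", "-I", "setuptools==23.0.0"])
        subprocess.check_call([pip, "install", "-I", "pip==8.1.1"])

# Example (hypothetical base path):
# bootstrap_venv("/var/lib/awx/venv", "ansible")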
@@ -219,7 +219,7 @@ class FieldLookupBackend(BaseFilterBackend):
else:
q = Q(**{k:v})
queryset = queryset.filter(q)
queryset = queryset.filter(*args)
queryset = queryset.filter(*args).distinct()
return queryset
except (FieldError, FieldDoesNotExist, ValueError), e:
raise ParseError(e.args[0])
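The added .distinct() matters because filtering across a to-many relation can return the same row once per matching related object. A plain-Python illustration of the idea (not the project's code):

# Illustrative only: a "join" that yields one result per matching
# related item, the way an SQL join across a to-many relation does.
hosts = [
    {"id": 1, "groups": ["web", "staging"]},
    {"id": 2, "groups": ["db"]},
]

# Naive filter: host 1 appears twice because two of its groups match.
matches = [h["id"] for h in hosts for g in h["groups"] if g in ("web", "staging")]
assert matches == [1, 1, 2]

# De-duplicating (what .distinct() does at the SQL level) keeps each row once.
distinct_matches = sorted(set(matches))
assert distinct_matches == [1, 2]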
@@ -360,6 +360,13 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
# Base class for a sublist view that allows for creating subobjects and
# attaching/detaching them from the parent.

def get_description_context(self):
d = super(SubListCreateAttachDetachAPIView, self).get_description_context()
d.update({
"has_attach": True,
})
return d

def attach(self, request, *args, **kwargs):
created = False
parent = self.get_parent_object()
@@ -23,6 +23,7 @@ from django.db import models
# from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_text
from django.utils.text import capfirst
from django.forms.models import model_to_dict

# Django REST Framework
from rest_framework.exceptions import ValidationError

@@ -39,7 +40,6 @@ from awx.main.constants import SCHEDULEABLE_PROVIDERS
from awx.main.models import * # noqa
from awx.main.fields import ImplicitRoleField
from awx.main.utils import get_type_for_model, get_model_for_type, build_url, timestamp_apiformat, camelcase_to_underscore, getattrd
from awx.main.redact import REPLACE_STR
from awx.main.conf import tower_settings

from awx.api.license import feature_enabled

@@ -331,7 +331,7 @@ class BaseSerializer(serializers.ModelSerializer):
'description': role.description,
}
if len(roles) > 0:
summary_fields['roles'] = roles
summary_fields['object_roles'] = roles
return summary_fields

def get_created(self, obj):
@@ -679,13 +679,14 @@ class UserSerializer(BaseSerializer):
password = serializers.CharField(required=False, default='', write_only=True,
help_text='Write-only field used to change the password.')
ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
external_account = serializers.SerializerMethodField(help_text='Set if the account is managed by an external service')
is_system_auditor = serializers.BooleanField(default=False)

class Meta:
model = User
fields = ('*', '-name', '-description', '-modified',
'-summary_fields', 'username', 'first_name', 'last_name',
'email', 'is_superuser', 'is_system_auditor', 'password', 'ldap_dn')
'email', 'is_superuser', 'is_system_auditor', 'password', 'ldap_dn', 'external_account')

def to_representation(self, obj):
ret = super(UserSerializer, self).to_representation(obj)

@@ -719,6 +720,8 @@ class UserSerializer(BaseSerializer):
getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)) and obj.social_auth.all():
new_password = None
if obj.pk and getattr(settings, 'RADIUS_SERVER', '') and not obj.has_usable_password():
new_password = None
if new_password:
obj.set_password(new_password)
obj.save(update_fields=['password'])

@@ -726,6 +729,24 @@ class UserSerializer(BaseSerializer):
obj.set_unusable_password()
obj.save(update_fields=['password'])

def get_external_account(self, obj):
account_type = None
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None) and feature_enabled('ldap'):
try:
if obj.pk and obj.profile.ldap_dn and not obj.has_usable_password():
account_type = "ldap"
except AttributeError:
pass
if (getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None) or
getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)) and obj.social_auth.all():
account_type = "social"
if obj.pk and getattr(settings, 'RADIUS_SERVER', '') and not obj.has_usable_password():
account_type = "radius"
return account_type

def create(self, validated_data):
new_password = validated_data.pop('password', None)
obj = super(UserSerializer, self).create(validated_data)

@@ -747,6 +768,7 @@ class UserSerializer(BaseSerializer):
projects = reverse('api:user_projects_list', args=(obj.pk,)),
credentials = reverse('api:user_credentials_list', args=(obj.pk,)),
roles = reverse('api:user_roles_list', args=(obj.pk,)),
object_roles = reverse('api:user_object_roles_list', args=(obj.pk,)),
activity_stream = reverse('api:user_activity_stream_list', args=(obj.pk,)),
access_list = reverse('api:user_access_list', args=(obj.pk,)),
))
@@ -804,7 +826,7 @@ class OrganizationSerializer(BaseSerializer):
notification_templates_any = reverse('api:organization_notification_templates_any_list', args=(obj.pk,)),
notification_templates_success = reverse('api:organization_notification_templates_success_list', args=(obj.pk,)),
notification_templates_error = reverse('api:organization_notification_templates_error_list', args=(obj.pk,)),
roles = reverse('api:organization_roles_list', args=(obj.pk,)),
object_roles = reverse('api:organization_object_roles_list', args=(obj.pk,)),
access_list = reverse('api:organization_access_list', args=(obj.pk,)),
))
return res

@@ -890,7 +912,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
notification_templates_success = reverse('api:project_notification_templates_success_list', args=(obj.pk,)),
notification_templates_error = reverse('api:project_notification_templates_error_list', args=(obj.pk,)),
access_list = reverse('api:project_access_list', args=(obj.pk,)),
roles = reverse('api:project_roles_list', args=(obj.pk,)),
object_roles = reverse('api:project_object_roles_list', args=(obj.pk,)),
))
if obj.organization:
res['organization'] = reverse('api:organization_detail',

@@ -994,7 +1016,7 @@ class InventorySerializer(BaseSerializerWithVariables):
scan_job_templates = reverse('api:inventory_scan_job_template_list', args=(obj.pk,)),
ad_hoc_commands = reverse('api:inventory_ad_hoc_commands_list', args=(obj.pk,)),
access_list = reverse('api:inventory_access_list', args=(obj.pk,)),
roles = reverse('api:inventory_roles_list', args=(obj.pk,)),
object_roles = reverse('api:inventory_object_roles_list', args=(obj.pk,)),
#single_fact = reverse('api:inventory_single_fact_view', args=(obj.pk,)),
))
if obj.organization:

@@ -1166,7 +1188,7 @@ class GroupSerializer(BaseSerializerWithVariables):
inventory_sources = reverse('api:group_inventory_sources_list', args=(obj.pk,)),
ad_hoc_commands = reverse('api:group_ad_hoc_commands_list', args=(obj.pk,)),
access_list = reverse('api:group_access_list', args=(obj.pk,)),
roles = reverse('api:group_roles_list', args=(obj.pk,)),
object_roles = reverse('api:group_object_roles_list', args=(obj.pk,)),
#single_fact = reverse('api:group_single_fact_view', args=(obj.pk,)),
))
if obj.inventory:

@@ -1189,7 +1211,7 @@ class GroupSerializer(BaseSerializerWithVariables):

class GroupTreeSerializer(GroupSerializer):

children = serializers.SerializerMethodField('get_children')
children = serializers.SerializerMethodField()

class Meta:
model = Group
@@ -1265,7 +1287,7 @@ class CustomInventoryScriptSerializer(BaseSerializer):
def get_related(self, obj):
res = super(CustomInventoryScriptSerializer, self).get_related(obj)
res.update(dict(
roles = reverse('api:inventory_script_roles_list', args=(obj.pk,)),
object_roles = reverse('api:inventory_script_object_roles_list', args=(obj.pk,)),
))

if obj.organization:

@@ -1434,6 +1456,7 @@ class TeamSerializer(BaseSerializer):
users = reverse('api:team_users_list', args=(obj.pk,)),
credentials = reverse('api:team_credentials_list', args=(obj.pk,)),
roles = reverse('api:team_roles_list', args=(obj.pk,)),
object_roles = reverse('api:team_object_roles_list', args=(obj.pk,)),
activity_stream = reverse('api:team_activity_stream_list', args=(obj.pk,)),
access_list = reverse('api:team_access_list', args=(obj.pk,)),
))

@@ -1588,8 +1611,6 @@ class ResourceAccessListElementSerializer(UserSerializer):
return ret

class CredentialSerializer(BaseSerializer):

# FIXME: may want to make some fields filtered based on user accessing

@@ -1598,7 +1619,7 @@ class CredentialSerializer(BaseSerializer):
model = Credential
fields = ('*', 'kind', 'cloud', 'host', 'username',
'password', 'security_token', 'project', 'domain',
'ssh_key_data', 'ssh_key_unlock',
'ssh_key_data', 'ssh_key_unlock', 'organization',
'become_method', 'become_username', 'become_password',
'vault_password', 'subscription', 'tenant', 'secret', 'client',
'authorize', 'authorize_password')

@@ -1613,10 +1634,16 @@ class CredentialSerializer(BaseSerializer):

def get_related(self, obj):
res = super(CredentialSerializer, self).get_related(obj)

if obj.organization:
res['organization'] = reverse('api:organization_detail', args=(obj.organization.pk,))

res.update(dict(
activity_stream = reverse('api:credential_activity_stream_list', args=(obj.pk,)),
access_list = reverse('api:credential_access_list', args=(obj.pk,)),
roles = reverse('api:credential_roles_list', args=(obj.pk,)),
object_roles = reverse('api:credential_object_roles_list', args=(obj.pk,)),
owner_users = reverse('api:credential_owner_users_list', args=(obj.pk,)),
owner_teams = reverse('api:credential_owner_teams_list', args=(obj.pk,)),
))

parents = obj.owner_role.parents.exclude(object_id__isnull=True)

@@ -1635,6 +1662,7 @@ class CredentialSerializer(BaseSerializer):
for user in obj.owner_role.members.all():
summary_dict['owners'].append({
'id': user.pk,
'type': 'user',
'name': user.username,
'description': ' '.join([user.first_name, user.last_name]),
'url': reverse('api:user_detail', args=(user.pk,)),

@@ -1643,6 +1671,7 @@ class CredentialSerializer(BaseSerializer):
for parent in obj.owner_role.parents.exclude(object_id__isnull=True).all():
summary_dict['owners'].append({
'id': parent.content_object.pk,
'type': camelcase_to_underscore(parent.content_object.__class__.__name__),
'name': parent.content_object.name,
'description': parent.content_object.description,
'url': parent.content_object.get_absolute_url(),
@@ -1754,7 +1783,7 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
model = JobTemplate
fields = ('*', 'host_config_key', 'ask_variables_on_launch', 'ask_limit_on_launch',
'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_inventory_on_launch',
'ask_credential_on_launch', 'survey_enabled', 'become_enabled')
'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'allow_simultaneous')

def get_related(self, obj):
res = super(JobTemplateSerializer, self).get_related(obj)

@@ -1766,10 +1795,10 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
notification_templates_any = reverse('api:job_template_notification_templates_any_list', args=(obj.pk,)),
notification_templates_success = reverse('api:job_template_notification_templates_success_list', args=(obj.pk,)),
notification_templates_error = reverse('api:job_template_notification_templates_error_list', args=(obj.pk,)),
access_list = reverse('api:job_template_access_list', args=(obj.pk,)),
access_list = reverse('api:job_template_access_list', args=(obj.pk,)),
survey_spec = reverse('api:job_template_survey_spec', args=(obj.pk,)),
labels = reverse('api:job_template_label_list', args=(obj.pk,)),
roles = reverse('api:job_template_roles_list', args=(obj.pk,)),
object_roles = reverse('api:job_template_object_roles_list', args=(obj.pk,)),
))
if obj.host_config_key:
res['callback'] = reverse('api:job_template_callback', args=(obj.pk,))

@@ -1783,25 +1812,36 @@ class JobTemplateSerializer(UnifiedJobTemplateSerializer, JobOptionsSerializer):
if obj.survey_spec is not None and ('name' in obj.survey_spec and 'description' in obj.survey_spec):
d['survey'] = dict(title=obj.survey_spec['name'], description=obj.survey_spec['description'])
request = self.context.get('request', None)
if request is not None and request.user is not None and obj.inventory is not None and obj.project is not None:
d['can_copy'] = request.user.can_access(JobTemplate, 'add',
{'inventory': obj.inventory.pk,
'project': obj.project.pk})
d['can_edit'] = request.user.can_access(JobTemplate, 'change', obj,
{'inventory': obj.inventory.pk,
'project': obj.project.pk})
elif request is not None and request.user is not None and request.user.is_superuser:
d['can_copy'] = True
d['can_edit'] = True
else:

# Check for conditions that would create a validation error if coppied
validation_errors, resources_needed_to_start = obj.resource_validation_data()

if request is None or request.user is None:
d['can_copy'] = False
d['can_edit'] = False
elif request.user.is_superuser:
d['can_copy'] = not validation_errors
d['can_edit'] = True
else:
jt_data = model_to_dict(obj)
d['can_copy'] = (not validation_errors) and request.user.can_access(JobTemplate, 'add', jt_data)
d['can_edit'] = request.user.can_access(JobTemplate, 'change', obj, jt_data)

d['recent_jobs'] = self._recent_jobs(obj)
return d

def validate(self, attrs):
survey_enabled = attrs.get('survey_enabled', self.instance and self.instance.survey_enabled or False)
job_type = attrs.get('job_type', self.instance and self.instance.job_type or None)
inventory = attrs.get('inventory', self.instance and self.instance.inventory or None)
project = attrs.get('project', self.instance and self.instance.project or None)

if job_type == "scan":
if inventory is None or attrs.get('ask_inventory_on_launch', False):
raise serializers.ValidationError({'inventory': 'Scan jobs must be assigned a fixed inventory.'})
elif project is None:
raise serializers.ValidationError({'project': "Job types 'run' and 'check' must have assigned a project."})

if survey_enabled and job_type == PERM_INVENTORY_SCAN:
raise serializers.ValidationError({'survey_enabled': 'Survey Enabled can not be used with scan jobs.'})
@@ -1899,17 +1939,8 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
return ret
if 'job_template' in ret and not obj.job_template:
ret['job_template'] = None

if obj.job_template and obj.job_template.survey_enabled:
if 'extra_vars' in ret:
try:
extra_vars = json.loads(ret['extra_vars'])
for key in obj.job_template.survey_password_variables():
if key in extra_vars:
extra_vars[key] = REPLACE_STR
ret['extra_vars'] = json.dumps(extra_vars)
except ValueError:
pass
if obj.job_template and obj.job_template.survey_enabled and 'extra_vars' in ret:
ret['extra_vars'] = obj.display_extra_vars()
return ret

@@ -2259,12 +2290,14 @@ class JobLaunchSerializer(BaseSerializer):
obj = self.context.get('obj')
data = self.context.get('data')

for field in obj.resources_needed_to_start:
if not (field in attrs and obj._ask_for_vars_dict().get(field, False)):
errors[field] = "Job Template '%s' is missing or undefined." % field

if (not obj.ask_credential_on_launch) or (not attrs.get('credential', None)):
credential = obj.credential
else:
credential = attrs.get('credential', None)
if not credential:
errors['credential'] = 'Credential not provided'

# fill passwords dict with request data passwords
if credential and credential.passwords_needed:

@@ -2295,11 +2328,6 @@ class JobLaunchSerializer(BaseSerializer):
if validation_errors:
errors['variables_needed_to_start'] = validation_errors

if obj.job_type != PERM_INVENTORY_SCAN and (obj.project is None):
errors['project'] = 'Job Template Project is missing or undefined.'
if (obj.inventory is None) and not attrs.get('inventory', None):
errors['inventory'] = 'Job Template Inventory is missing or undefined.'

# Special prohibited cases for scan jobs
if 'job_type' in data and obj.ask_job_type_on_launch:
if ((obj.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or

@@ -2369,12 +2397,13 @@ class NotificationTemplateSerializer(BaseSerializer):
return d

def validate(self, attrs):
from awx.api.views import NotificationTemplateDetail
notification_class = NotificationTemplate.CLASS_FOR_NOTIFICATION_TYPE[attrs['notification_type']]
missing_fields = []
incorrect_type_fields = []
if 'notification_configuration' not in attrs:
return attrs
if self.context['view'].kwargs:
if self.context['view'].kwargs and isinstance(self.context['view'], NotificationTemplateDetail):
object_actual = self.context['view'].get_object()
else:
object_actual = None
@@ -3,7 +3,7 @@ POST requests to this resource should include the full specification for a Job T
Here is an example survey specification:

{
"name": "Simple Surveny",
"name": "Simple Survey",
"description": "Description of the simple survey",
"spec": [
{

@@ -23,6 +23,7 @@ list of survey items.
Within each survey item `type` must be one of:

* text: For survey questions expecting a textual answer
* password: For survey questions expecting a password or other sensitive information
* integer: For survey questions expecting a whole number answer
* float: For survey questions expecting a decimal number
* multiplechoice: For survey questions where one option from a list is required

@@ -116,4 +117,4 @@ Here is a more comprehensive example showing the various question types and thei
"default": ""
}
]
}
}
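As a rough illustration of how a text-style survey item like the ones documented above gets enforced at launch time (a sketch only, not the project's validation code; field names follow the spec format shown above):

def validate_text_answer(survey_item, answers):
    """Check one 'text'/'password' survey item against submitted answers."""
    errors = []
    var = survey_item["variable"]
    if survey_item.get("required") and var not in answers:
        errors.append("'%s' value missing" % var)
        return errors
    value = answers.get(var, "")
    # For text-style questions, min/max are length bounds.
    if survey_item.get("min") not in ("", None) and len(value) < int(survey_item["min"]):
        errors.append("'%s' is too short (minimum length %s)." % (var, survey_item["min"]))
    if survey_item.get("max") not in ("", None) and len(value) > int(survey_item["max"]):
        errors.append("'%s' is too long (maximum length %s)." % (var, survey_item["max"]))
    return errors

# Example: a required text question with a minimum length of 5.
item = {"variable": "greeting", "type": "text", "required": True, "min": 5, "max": 30}
print(validate_text_answer(item, {"greeting": "hi"}))     # length check fails
print(validate_text_answer(item, {"greeting": "hello"}))  # []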
@@ -12,7 +12,7 @@ fields to create a new {{ model_verbose_name }} associated with this

{% block post_create %}{% endblock %}

{% if view.attach %}
{% if has_attach|default:False %}
{% if parent_key %}
# Remove {{ parent_model_verbose_name|title }} {{ model_verbose_name_plural|title }}:
@@ -25,7 +25,7 @@ organization_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'organization_notification_templates_any_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'organization_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'organization_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'organization_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'organization_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'organization_access_list'),
)

@@ -38,6 +38,7 @@ user_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/projects/$', 'user_projects_list'),
url(r'^(?P<pk>[0-9]+)/credentials/$', 'user_credentials_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'user_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'user_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'user_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'user_access_list'),

@@ -55,7 +56,7 @@ project_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/notification_templates_any/$', 'project_notification_templates_any_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'project_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'project_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'project_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'project_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'project_access_list'),
)

@@ -73,6 +74,7 @@ team_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/users/$', 'team_users_list'),
url(r'^(?P<pk>[0-9]+)/credentials/$', 'team_credentials_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'team_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'team_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'team_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'team_access_list'),
)

@@ -92,7 +94,7 @@ inventory_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/scan_job_templates/$', 'inventory_scan_job_template_list'),
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'inventory_ad_hoc_commands_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'inventory_access_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'inventory_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_object_roles_list'),
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'inventory_single_fact_view'),
)

@@ -127,7 +129,7 @@ group_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/inventory_sources/$', 'group_inventory_sources_list'),
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'group_ad_hoc_commands_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'group_access_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'group_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'group_object_roles_list'),
#url(r'^(?P<pk>[0-9]+)/single_fact/$', 'group_single_fact_view'),
)

@@ -155,7 +157,7 @@ inventory_update_urls = patterns('awx.api.views',
inventory_script_urls = patterns('awx.api.views',
url(r'^$', 'inventory_script_list'),
url(r'^(?P<pk>[0-9]+)/$', 'inventory_script_detail'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'inventory_script_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_script_object_roles_list'),
)

credential_urls = patterns('awx.api.views',

@@ -163,7 +165,9 @@ credential_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'credential_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/$', 'credential_detail'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'credential_access_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'credential_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'credential_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/owner/users/$', 'credential_owner_users_list'),
url(r'^(?P<pk>[0-9]+)/owner/teams/$', 'credential_owner_teams_list'),
# See also credentials resources on users/teams.
)

@@ -189,7 +193,7 @@ job_template_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/notification_templates_error/$', 'job_template_notification_templates_error_list'),
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'job_template_notification_templates_success_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'job_template_access_list'),
url(r'^(?P<pk>[0-9]+)/roles/$', 'job_template_roles_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'job_template_object_roles_list'),
url(r'^(?P<pk>[0-9]+)/labels/$', 'job_template_label_list'),
)

awx/api/views.py (102 lines changed)
@@ -820,7 +820,7 @@ class OrganizationAccessList(ResourceAccessList):
resource_model = Organization
new_in_300 = True

class OrganizationRolesList(SubListAPIView):
class OrganizationObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer

@@ -877,6 +877,18 @@ class TeamRolesList(SubListCreateAttachDetachAPIView):
return Response(data, status=status.HTTP_400_BAD_REQUEST)
return super(TeamRolesList, self).post(request, *args, **kwargs)

class TeamObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer
parent_model = Team
new_in_300 = True

def get_queryset(self):
po = self.get_parent_object()
content_type = ContentType.objects.get_for_model(self.parent_model)
return Role.objects.filter(content_type=content_type, object_id=po.pk)

class TeamProjectsList(SubListAPIView):

model = Project

@@ -967,12 +979,19 @@ class ProjectPlaybooks(RetrieveAPIView):
model = Project
serializer_class = ProjectPlaybooksSerializer

class ProjectTeamsList(SubListCreateAttachDetachAPIView):
class ProjectTeamsList(ListAPIView):

model = Team
serializer_class = TeamSerializer
parent_model = Project
relationship = 'teams'

def get_queryset(self):
p = get_object_or_404(Project, pk=self.kwargs['pk'])
if not self.request.user.can_access(Project, 'read', p):
raise PermissionDenied()
project_ct = ContentType.objects.get_for_model(Project)
team_ct = ContentType.objects.get_for_model(self.model)
all_roles = Role.objects.filter(Q(descendents__content_type=project_ct) & Q(descendents__object_id=p.pk), content_type=team_ct)
return self.model.accessible_objects(self.request.user, 'read_role').filter(pk__in=[t.content_object.pk for t in all_roles])

class ProjectSchedulesList(SubListCreateAttachDetachAPIView):
@@ -1097,7 +1116,7 @@ class ProjectAccessList(ResourceAccessList):
resource_model = Project
new_in_300 = True

class ProjectRolesList(SubListAPIView):
class ProjectObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer

@@ -1182,6 +1201,17 @@ class UserRolesList(SubListCreateAttachDetachAPIView):
# We hide roles that shouldn't be seen in our queryset
return True

class UserObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer
parent_model = User
new_in_300 = True

def get_queryset(self):
po = self.get_parent_object()
content_type = ContentType.objects.get_for_model(self.parent_model)
return Role.objects.filter(content_type=content_type, object_id=po.pk)

class UserProjectsList(SubListAPIView):

@@ -1335,10 +1365,37 @@ class CredentialList(ListCreateAPIView):
if 'team' in request.data:
credential.owner_role.parents.add(team.member_role)
if 'organization' in request.data:
credential.owner_role.parents.add(organization.admin_role)
credential.organization = organization
credential.save()

return ret

class CredentialOwnerUsersList(SubListAPIView):
model = User
serializer_class = UserSerializer
parent_model = Credential
relationship = 'owner_role.members'
new_in_300 = True

class CredentialOwnerTeamsList(SubListAPIView):
model = Team
serializer_class = TeamSerializer
parent_model = Credential
new_in_300 = True

def get_queryset(self):
credential = get_object_or_404(self.parent_model, pk=self.kwargs['pk'])
if not self.request.user.can_access(Credential, 'read', None):
raise PermissionDenied()

content_type = ContentType.objects.get_for_model(self.model)
teams = [c.content_object.pk for c in credential.owner_role.parents.filter(content_type=content_type)]

return self.model.objects.filter(pk__in=teams)

class UserCredentialsList(CredentialList):

model = Credential
@@ -1432,7 +1489,7 @@ class CredentialAccessList(ResourceAccessList):
resource_model = Credential
new_in_300 = True

class CredentialRolesList(SubListAPIView):
class CredentialObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer

@@ -1464,7 +1521,7 @@ class InventoryScriptDetail(RetrieveUpdateDestroyAPIView):
inv_src.save()
return super(InventoryScriptDetail, self).destroy(request, *args, **kwargs)

class InventoryScriptRolesList(SubListAPIView):
class InventoryScriptObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer

@@ -1526,7 +1583,7 @@ class InventoryAccessList(ResourceAccessList):
resource_model = Inventory
new_in_300 = True

class InventoryRolesList(SubListAPIView):
class InventoryObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer

@@ -1873,7 +1930,7 @@ class GroupAccessList(ResourceAccessList):
resource_model = Group
new_in_300 = True

class GroupRolesList(SubListAPIView):
class GroupObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer

@@ -2212,6 +2269,13 @@ class JobTemplateList(ListCreateAPIView):
serializer_class = JobTemplateSerializer
always_allow_superuser = False

def post(self, request, *args, **kwargs):
ret = super(JobTemplateList, self).post(request, *args, **kwargs)
if ret.status_code == 201:
job_template = JobTemplate.objects.get(id=ret.data['id'])
job_template.admin_role.members.add(request.user)
return ret

class JobTemplateDetail(RetrieveUpdateDestroyAPIView):

model = JobTemplate

@@ -2318,8 +2382,6 @@ class JobTemplateSurveySpec(GenericAPIView):

def get(self, request, *args, **kwargs):
obj = self.get_object()
# Sanity check: Are surveys available on this license?
# If not, do not allow them to be used.
if not feature_enabled('surveys'):
raise LicenseForbids('Your license does not allow '
'adding surveys.')

@@ -2425,7 +2487,18 @@ class JobTemplateLabelList(SubListCreateAttachDetachAPIView, DeleteLastUnattachL
serializer_class = LabelSerializer
parent_model = JobTemplate
relationship = 'labels'
parent_key = 'job_template'

def post(self, request, *args, **kwargs):
# If a label already exists in the database, attach it instead of erroring out
# that it already exists
if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
existing = Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
if existing.exists():
existing = existing[0]
request.data['id'] = existing.id
del request.data['name']
del request.data['organization']
return super(JobTemplateLabelList, self).post(request, *args, **kwargs)

class JobTemplateCallback(GenericAPIView):

@@ -2603,7 +2676,7 @@ class JobTemplateAccessList(ResourceAccessList):
resource_model = JobTemplate
new_in_300 = True

class JobTemplateRolesList(SubListAPIView):
class JobTemplateObjectRolesList(SubListAPIView):

model = Role
serializer_class = RoleSerializer

@@ -3745,7 +3818,6 @@ class RoleChildrenList(SubListAPIView):

# Create view functions for all of the class-based views to simplify inclusion
# in URL patterns and reverse URL lookups, converting CamelCase names to
# lowercase_with_underscore (e.g. MyView.as_view() becomes my_view).
@@ -588,15 +588,22 @@ class CredentialAccess(BaseAccess):
return check_user_access(self.user, Organization, 'change', organization_obj, None)
return False

@check_superuser
def can_use(self, obj):
return self.user in obj.use_role

@check_superuser
def can_change(self, obj, data):
if not self.can_add(data):
return False
if data is not None:
keys = data.keys()
if 'user' in keys or 'team' in keys or 'organization' in keys:
if not self.can_add(data):
return False

if obj.organization:
if self.user in obj.organization.admin_role:
return True

return self.user in obj.owner_role

def can_delete(self, obj):

@@ -773,7 +780,9 @@ class JobTemplateAccess(BaseAccess):
inventory_pk = get_pk_from_dict(data, 'inventory')
inventory = Inventory.objects.filter(id=inventory_pk)
if not inventory.exists() and not data.get('ask_inventory_on_launch', False):
return False # Does this make sense? Maybe should check read access
return False
if inventory.exists() and self.user not in inventory[0].use_role:
return False

project_pk = get_pk_from_dict(data, 'project')
if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:

@@ -786,10 +795,8 @@ class JobTemplateAccess(BaseAccess):
# If the user has admin access to the project (as an org admin), should
# be able to proceed without additional checks.
project = get_object_or_400(Project, pk=project_pk)
if self.user in project.admin_role:
return True

return self.user in project.admin_role and self.user in inventory.read_role
return self.user in project.use_role

def can_start(self, obj, validate_license=True):
# Check license.
@@ -814,20 +821,80 @@ class JobTemplateAccess(BaseAccess):

def can_change(self, obj, data):
data_for_change = data
if self.user not in obj.admin_role:
if self.user not in obj.admin_role and not self.user.is_superuser:
return False
if data is not None:
data_for_change = dict(data)
data = dict(data)

if self.changes_are_non_sensitive(obj, data):
if 'job_type' in data and obj.job_type != data['job_type'] and data['job_type'] == PERM_INVENTORY_SCAN:
self.check_license(feature='system_tracking')

if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']:
self.check_license(feature='surveys')
return True

for required_field in ('credential', 'cloud_credential', 'inventory', 'project'):
required_obj = getattr(obj, required_field, None)
if required_field not in data_for_change and required_obj is not None:
data_for_change[required_field] = required_obj.pk
return self.can_read(obj) and self.can_add(data_for_change)

def changes_are_non_sensitive(self, obj, data):
'''
Returne true if the changes being made are considered nonsensitive, and
thus can be made by a job template administrator which may not have access
to the any inventory, project, or credentials associated with the template.
'''
# We are white listing fields that can
field_whitelist = [
'name', 'description', 'forks', 'limit', 'verbosity', 'extra_vars',
'job_tags', 'force_handlers', 'skip_tags', 'ask_variables_on_launch',
'ask_tags_on_launch', 'ask_job_type_on_launch', 'ask_inventory_on_launch',
'ask_credential_on_launch', 'survey_enabled'
]

for k, v in data.items():
if hasattr(obj, k) and getattr(obj, k) != v:
if k not in field_whitelist:
return False
return True
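The whitelist check above boils down to: any field that actually changes must be on the allowed list. A standalone sketch of that idea (illustrative names, not the project's API):

FIELD_WHITELIST = {"name", "description", "forks", "limit", "extra_vars"}

def changes_are_non_sensitive(current, proposed):
    """Return True when every field that would change is whitelisted."""
    for key, value in proposed.items():
        if key in current and current[key] != value and key not in FIELD_WHITELIST:
            return False
    return True

current = {"name": "demo", "project": 1, "limit": ""}
print(changes_are_non_sensitive(current, {"limit": "webservers"}))  # True
print(changes_are_non_sensitive(current, {"project": 2}))           # False: sensitive field changed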
def can_update_sensitive_fields(self, obj, data):
project_id = data.get('project', obj.project.id if obj.project else None)
inventory_id = data.get('inventory', obj.inventory.id if obj.inventory else None)
credential_id = data.get('credential', obj.credential.id if obj.credential else None)
cloud_credential_id = data.get('cloud_credential', obj.cloud_credential.id if obj.cloud_credential else None)
network_credential_id = data.get('network_credential', obj.network_credential.id if obj.network_credential else None)

if project_id and self.user not in Project.objects.get(pk=project_id).use_role:
return False
if inventory_id and self.user not in Inventory.objects.get(pk=inventory_id).use_role:
return False
if credential_id and self.user not in Credential.objects.get(pk=credential_id).use_role:
return False
if cloud_credential_id and self.user not in Credential.objects.get(pk=cloud_credential_id).use_role:
return False
if network_credential_id and self.user not in Credential.objects.get(pk=network_credential_id).use_role:
return False

return True

@check_superuser
def can_delete(self, obj):
return self.user in obj.admin_role

class JobAccess(BaseAccess):
'''
I can see jobs when:
- I am a superuser.
- I can see its job template
- I am an admin or auditor of the organization which contains its inventory
- I am an admin or auditor of the organization which contains its project
I can delete jobs when:
- I am an admin of the organization which contains its inventory
- I am an admin of the organization which contains its project
'''

model = Job
@@ -839,10 +906,20 @@ class JobAccess(BaseAccess):
if self.user.is_superuser:
return qs.all()

return qs.filter(
qs_jt = qs.filter(
job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')
)

org_access_qs = Organization.objects.filter(
Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
if not org_access_qs.exists():
return qs_jt

return qs.filter(
Q(job_template__in=JobTemplate.accessible_objects(self.user, 'read_role')) |
Q(inventory__organization__in=org_access_qs) |
Q(project__organization__in=org_access_qs)).distinct()

def can_add(self, data):
if not data or '_method' in data: # So the browseable API will work?
return True

@@ -871,7 +948,11 @@ class JobAccess(BaseAccess):

@check_superuser
def can_delete(self, obj):
return self.user in obj.inventory.admin_role
if obj.inventory is not None and self.user in obj.inventory.organization.admin_role:
return True
if obj.project is not None and self.user in obj.project.organization.admin_role:
return True
return False

def can_start(self, obj):
self.check_license()

@@ -1393,6 +1474,10 @@ class CustomInventoryScriptAccess(BaseAccess):
def can_admin(self, obj):
return self.user in obj.admin_role

@check_superuser
def can_change(self, obj, data):
return self.can_admin(obj)

@check_superuser
def can_read(self, obj):
return self.user in obj.read_role
@@ -857,7 +857,7 @@ class Command(NoArgsCommand):
del_pks = del_host_pks[offset:(offset + self._batch_size)]
for db_host in db_hosts.filter(pk__in=del_pks):
group_host_count += 1
if db_host not in db_group.hosts:
if db_host not in db_group.hosts.all():
continue
db_group.hosts.remove(db_host)
self.logger.info('Host "%s" removed from group "%s"',
@@ -86,7 +86,11 @@ class Migration(migrations.Migration):
name='credential',
unique_together=set([]),
),

migrations.AddField(
model_name='credential',
name='organization',
field=models.ForeignKey(related_name='credentials', default=None, blank=True, to='main.Organization', null=True),
),

#
# New RBAC models and fields
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from awx.main.migrations import _rbac as rbac
from awx.main.migrations import _ask_for_variables as ask_for_variables
from awx.main.migrations import _migration_utils as migration_utils
from django.db import migrations

@@ -15,4 +16,5 @@ class Migration(migrations.Migration):
operations = [
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
migrations.RunPython(ask_for_variables.migrate_credential),
migrations.RunPython(rbac.rebuild_role_hierarchy),
]

awx/main/migrations/0024_v300_jobtemplate_allow_simul.py (new file, 19 lines)
@@ -0,0 +1,19 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models

class Migration(migrations.Migration):

dependencies = [
('main', '0023_v300_activity_stream_ordering'),
]

operations = [
migrations.AddField(
model_name='jobtemplate',
name='allow_simultaneous',
field=models.BooleanField(default=False),
),
]
@@ -123,7 +123,7 @@ def attrfunc(attr_path):
return attr

def _update_credential_parents(org, cred):
org.admin_role.children.add(cred.owner_role)
cred.organization = org
cred.save()

def _discover_credentials(instances, cred, orgfunc):

@@ -164,13 +164,12 @@ def _discover_credentials(instances, cred, orgfunc):
cred.pk = None
cred.save()

# Unlink the old information from the new credential
cred.owner_role, cred.use_role = None, None
cred.save()
cred.owner_role, cred.use_role, cred.organization = None, None, None

for i in orgs[org]:
i.credential = cred
i.save()

_update_credential_parents(org, cred)

@log_migration

@@ -215,7 +214,7 @@ def migrate_inventory(apps, schema_editor):
Inventory = apps.get_model('main', 'Inventory')
Permission = apps.get_model('main', 'Permission')

def role_from_permission():
def role_from_permission(perm):
if perm.permission_type == 'admin':
return inventory.admin_role
elif perm.permission_type == 'read':

@@ -233,7 +232,7 @@ def migrate_inventory(apps, schema_editor):
role = None
execrole = None

role = role_from_permission()
role = role_from_permission(perm)
if role is None:
raise Exception(smart_text(u'Unhandled permission type for inventory: {}'.format( perm.permission_type)))
@@ -292,10 +291,13 @@ def migrate_projects(apps, schema_editor):
else:
new_prj = Project.objects.create(
created = project.created,
modified = project.modified,
polymorphic_ctype_id = project.polymorphic_ctype_id,
description = project.description,
name = smart_text(u'{} - {}'.format(org.name, original_project_name)),
old_pk = project.old_pk,
created_by_id = project.created_by_id,
modified_by_id = project.modified_by_id,
scm_type = project.scm_type,
scm_url = project.scm_url,
scm_branch = project.scm_branch,

@@ -307,11 +309,31 @@ def migrate_projects(apps, schema_editor):
credential = project.credential,
organization = org
)
if project.scm_type == "":
new_prj.local_path = project.local_path
new_prj.save()
for team in project.deprecated_teams.iterator():
new_prj.deprecated_teams.add(team)
logger.warning(smart_text(u'cloning Project({}) onto {} as Project({})'.format(original_project_name, org, new_prj)))
job_templates = JobTemplate.objects.filter(inventory__organization=org).all()
job_templates = JobTemplate.objects.filter(project=project, inventory__organization=org).all()
for jt in job_templates:
jt.project = new_prj
jt.save()
for perm in Permission.objects.filter(project=project):
Permission.objects.create(
created = perm.created,
modified = perm.modified,
created_by = perm.created_by,
modified_by = perm.modified_by,
description = perm.description,
name = perm.name,
user = perm.user,
team = perm.team,
project = new_prj,
inventory = perm.inventory,
permission_type = perm.permission_type,
run_ad_hoc_commands = perm.run_ad_hoc_commands,
)

# Migrate permissions
for project in Project.objects.iterator():

@@ -320,24 +342,30 @@ def migrate_projects(apps, schema_editor):
logger.warn(smart_text(u'adding Project({}) admin: {}'.format(project.name, project.created_by.username)))

for team in project.deprecated_teams.all():
team.member_role.children.add(project.use_role)
team.member_role.children.add(project.read_role)
logger.info(smart_text(u'adding Team({}) access for Project({})'.format(team.name, project.name)))

if project.organization is not None:
for user in project.organization.deprecated_users.all():
project.use_role.members.add(user)
logger.info(smart_text(u'adding Organization({}) member access to Project({})'.format(project.organization.name, project.name)))

for perm in Permission.objects.filter(project=project):
# All perms at this level just imply a user or team can read
if perm.permission_type == 'create':
role = project.use_role
else:
role = project.read_role

if perm.team:
perm.team.member_role.children.add(project.use_role)
perm.team.member_role.children.add(role)
logger.info(smart_text(u'adding Team({}) access for Project({})'.format(perm.team.name, project.name)))

if perm.user:
project.use_role.members.add(perm.user)
role.members.add(perm.user)
logger.info(smart_text(u'adding User({}) access for Project({})'.format(perm.user.username, project.name)))

if project.organization is not None:
for user in project.organization.deprecated_users.all():
if not (project.use_role.members.filter(pk=user.id).exists() or project.admin_role.members.filter(pk=user.id).exists()):
project.read_role.members.add(user)
logger.info(smart_text(u'adding Organization({}) member access to Project({})'.format(project.organization.name, project.name)))

@log_migration
def migrate_job_templates(apps, schema_editor):
@@ -403,7 +431,7 @@ def migrate_job_templates(apps, schema_editor):

team_create_permissions = set(
jt_permission_qs
.filter(permission_type__in=['create'] if jt.job_type == 'check' else ['create'])
.filter(permission_type__in=['create'])
.values_list('team__id', flat=True)
)
team_run_permissions = set(

@@ -413,12 +441,12 @@ def migrate_job_templates(apps, schema_editor):
)
user_create_permissions = set(
jt_permission_qs
.filter(permission_type__in=['create'] if jt.job_type == 'check' else ['run'])
.filter(permission_type__in=['create'])
.values_list('user__id', flat=True)
)
user_run_permissions = set(
jt_permission_qs
.filter(permission_type__in=['check', 'run'] if jt.job_type == 'check' else ['create'])
.filter(permission_type__in=['check', 'run'] if jt.job_type == 'check' else ['run'])
.values_list('user__id', flat=True)
)

@@ -446,17 +474,20 @@ def migrate_job_templates(apps, schema_editor):
logger.info(smart_text(u'transfering execute access on JobTemplate({}) to Team({})'.format(jt.name, team.name)))

for user in User.objects.filter(id__in=user_create_permissions).iterator():
cred = jt.credential or jt.cloud_credential
if (jt.inventory.id in user_inv_permissions[user.id] or
any([jt.inventory.id in team_inv_permissions[team.id] for team in user.deprecated_teams.all()])) and \
((not jt.credential and not jt.cloud_credential) or
Credential.objects.filter(Q(deprecated_user=user) | Q(deprecated_team__deprecated_users=user), jobtemplates=jt).exists()):
(not cred or cred.deprecated_user == user or
(cred.deprecated_team and cred.deprecated_team.deprecated_users.filter(pk=user.id).exists())):
jt.admin_role.members.add(user)
logger.info(smart_text(u'transfering admin access on JobTemplate({}) to User({})'.format(jt.name, user.username)))
for user in User.objects.filter(id__in=user_run_permissions).iterator():
cred = jt.credential or jt.cloud_credential

if (jt.inventory.id in user_inv_permissions[user.id] or
any([jt.inventory.id in team_inv_permissions[team.id] for team in user.deprecated_teams.all()])) and \
((not jt.credential and not jt.cloud_credential) or
Credential.objects.filter(Q(deprecated_user=user) | Q(deprecated_team__deprecated_users=user), jobtemplates=jt).exists()):
(not cred or cred.deprecated_user == user or
(cred.deprecated_team and cred.deprecated_team.deprecated_users.filter(pk=user.id).exists())):
jt.execute_role.members.add(user)
logger.info(smart_text(u'transfering execute access on JobTemplate({}) to User({})'.format(jt.name, user.username)))

@@ -468,8 +499,6 @@ def rebuild_role_hierarchy(apps, schema_editor):
start = time()
roots = Role.objects \
.all() \
.exclude(pk__in=Role.parents.through.objects.all()
.values_list('from_role_id', flat=True).distinct()) \
.values_list('id', flat=True)
stop = time()
logger.info('Found %d roots in %f seconds, rebuilding ancestry map' % (len(roots), stop - start))
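rebuild_role_hierarchy starts from the roles that have no parent edge and walks down from there. A tiny standalone sketch of that "find the roots" step (plain Python, illustrative only):

# Roles are nodes; (parent_id, child_id) pairs are the hierarchy edges.
edges = [(1, 2), (1, 3), (2, 4), (5, 4)]
all_roles = {1, 2, 3, 4, 5}

# A root is a role with no parents: it never appears on the child side of an edge.
children = {child for _, child in edges}
roots = sorted(all_roles - children)
print(roots)  # [1, 5] -- ancestry is rebuilt starting from these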
@@ -78,6 +78,14 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
on_delete=models.CASCADE,
related_name='deprecated_credentials',
)
organization = models.ForeignKey(
'Organization',
null=True,
default=None,
blank=True,
on_delete=models.CASCADE,
related_name='credentials',
)
kind = models.CharField(
max_length=32,
choices=KIND_CHOICES,

@@ -209,7 +217,10 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
],
)
use_role = ImplicitRoleField(
parent_role=['owner_role']
parent_role=[
'organization.admin_role',
'owner_role',
]
)
read_role = ImplicitRoleField(parent_role=[
'singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR,
@@ -26,7 +26,7 @@ from awx.main.models.unified_jobs import * # noqa
from awx.main.models.notifications import NotificationTemplate
from awx.main.utils import decrypt_field, ignore_inventory_computed_fields
from awx.main.utils import emit_websocket_notification
from awx.main.redact import PlainTextCleaner
from awx.main.redact import PlainTextCleaner, REPLACE_STR
from awx.main.conf import tower_settings
from awx.main.fields import ImplicitRoleField
from awx.main.models.mixins import ResourceMixin

@@ -229,6 +229,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
read_role = ImplicitRoleField(
parent_role=['project.organization.auditor_role', 'inventory.organization.auditor_role', 'execute_role', 'admin_role'],
)
allow_simultaneous = models.BooleanField(
default=False,
)

@classmethod
def _get_unified_job_class(cls):
@@ -242,14 +246,37 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
'force_handlers', 'skip_tags', 'start_at_task', 'become_enabled',
'labels',]

def clean(self):
if self.job_type == 'scan' and (self.inventory is None or self.ask_inventory_on_launch):
raise ValidationError({"inventory": ["Scan jobs must be assigned a fixed inventory.",]})
if (not self.ask_inventory_on_launch) and self.inventory is None:
raise ValidationError({"inventory": ["Job Template must provide 'inventory' or allow prompting for it.",]})
if (not self.ask_credential_on_launch) and self.credential is None:
raise ValidationError({"credential": ["Job Template must provide 'credential' or allow prompting for it.",]})
return super(JobTemplate, self).clean()
def resource_validation_data(self):
'''
Process consistency errors and need-for-launch related fields.
'''
resources_needed_to_start = []
validation_errors = {}

# Inventory and Credential related checks
if self.inventory is None:
resources_needed_to_start.append('inventory')
if not self.ask_inventory_on_launch:
validation_errors['inventory'] = ["Job Template must provide 'inventory' or allow prompting for it.",]
if self.credential is None:
resources_needed_to_start.append('credential')
if not self.ask_credential_on_launch:
validation_errors['credential'] = ["Job Template must provide 'credential' or allow prompting for it.",]

# Job type dependent checks
if self.job_type == 'scan':
if self.inventory is None or self.ask_inventory_on_launch:
validation_errors['inventory'] = ["Scan jobs must be assigned a fixed inventory.",]
elif self.project is None:
resources_needed_to_start.append('project')
validation_errors['project'] = ["Job types 'run' and 'check' must have assigned a project.",]

return (validation_errors, resources_needed_to_start)

@property
def resources_needed_to_start(self):
validation_errors, resources_needed_to_start = self.resource_validation_data()
return resources_needed_to_start

def create_job(self, **kwargs):
'''
@ -265,9 +292,9 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
|
||||
Return whether job template can be used to start a new job without
|
||||
requiring any user input.
|
||||
'''
|
||||
return bool(self.credential and not len(self.passwords_needed_to_start) and
|
||||
not len(self.variables_needed_to_start) and
|
||||
self.inventory)
|
||||
return (not self.resources_needed_to_start and
|
||||
not self.passwords_needed_to_start and
|
||||
not self.variables_needed_to_start)
|
||||
|
||||
@property
|
||||
def variables_needed_to_start(self):
|
||||
@ -301,20 +328,20 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
|
||||
errors.append("'%s' value missing" % survey_element['variable'])
|
||||
elif survey_element['type'] in ["textarea", "text", "password"]:
|
||||
if survey_element['variable'] in data:
|
||||
if 'min' in survey_element and survey_element['min'] not in ["", None] and len(data[survey_element['variable']]) < survey_element['min']:
|
||||
errors.append("'%s' value %s is too small (must be at least %s)." %
|
||||
(survey_element['variable'], data[survey_element['variable']], survey_element['min']))
|
||||
if 'max' in survey_element and survey_element['max'] not in ["", None] and len(data[survey_element['variable']]) > survey_element['max']:
|
||||
if 'min' in survey_element and survey_element['min'] not in ["", None] and len(data[survey_element['variable']]) < int(survey_element['min']):
|
||||
errors.append("'%s' value %s is too small (length is %s must be at least %s)." %
|
||||
(survey_element['variable'], data[survey_element['variable']], len(data[survey_element['variable']]), survey_element['min']))
|
||||
if 'max' in survey_element and survey_element['max'] not in ["", None] and len(data[survey_element['variable']]) > int(survey_element['max']):
|
||||
errors.append("'%s' value %s is too large (must be no more than %s)." %
|
||||
(survey_element['variable'], data[survey_element['variable']], survey_element['max']))
|
||||
elif survey_element['type'] == 'integer':
|
||||
if survey_element['variable'] in data:
|
||||
if 'min' in survey_element and survey_element['min'] not in ["", None] and survey_element['variable'] in data and \
|
||||
data[survey_element['variable']] < survey_element['min']:
|
||||
data[survey_element['variable']] < int(survey_element['min']):
|
||||
errors.append("'%s' value %s is too small (must be at least %s)." %
|
||||
(survey_element['variable'], data[survey_element['variable']], survey_element['min']))
|
||||
if 'max' in survey_element and survey_element['max'] not in ["", None] and survey_element['variable'] in data and \
|
||||
data[survey_element['variable']] > survey_element['max']:
|
||||
data[survey_element['variable']] > int(survey_element['max']):
|
||||
errors.append("'%s' value %s is too large (must be no more than %s)." %
|
||||
(survey_element['variable'], data[survey_element['variable']], survey_element['max']))
|
||||
if type(data[survey_element['variable']]) != int:
|
||||
@ -322,10 +349,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
|
||||
survey_element['variable']))
|
||||
elif survey_element['type'] == 'float':
|
||||
if survey_element['variable'] in data:
|
||||
if 'min' in survey_element and survey_element['min'] not in ["", None] and data[survey_element['variable']] < survey_element['min']:
|
||||
if 'min' in survey_element and survey_element['min'] not in ["", None] and data[survey_element['variable']] < float(survey_element['min']):
|
||||
errors.append("'%s' value %s is too small (must be at least %s)." %
|
||||
(survey_element['variable'], data[survey_element['variable']], survey_element['min']))
|
||||
if 'max' in survey_element and survey_element['max'] not in ["", None] and data[survey_element['variable']] > survey_element['max']:
|
||||
if 'max' in survey_element and survey_element['max'] not in ["", None] and data[survey_element['variable']] > float(survey_element['max']):
|
||||
errors.append("'%s' value %s is too large (must be no more than %s)." %
|
||||
(survey_element['variable'], data[survey_element['variable']], survey_element['max']))
|
||||
if type(data[survey_element['variable']]) not in (float, int):
|
||||
@ -408,9 +435,9 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, ResourceMixin):
|
||||
if ask_for_vars_dict[field]:
|
||||
prompted_fields[field] = kwargs[field]
|
||||
else:
|
||||
if field == 'extra_vars' and self.survey_enabled:
|
||||
if field == 'extra_vars' and self.survey_enabled and self.survey_spec:
|
||||
# Accept vars defined in the survey and no others
|
||||
survey_vars = [question['variable'] for question in self.survey_spec['spec']]
|
||||
survey_vars = [question['variable'] for question in self.survey_spec.get('spec', [])]
|
||||
for key in kwargs[field]:
|
||||
if key in survey_vars:
|
||||
prompted_fields[field][key] = kwargs[field][key]
|
||||
@ -551,6 +578,8 @@ class Job(UnifiedJob, JobOptions):
|
||||
if obj.job_template is not None and obj.inventory is not None:
|
||||
if obj.job_template == self.job_template and \
|
||||
obj.inventory == self.inventory:
|
||||
if self.job_template.allow_simultaneous:
|
||||
return False
|
||||
if obj.launch_type == 'callback' and self.launch_type == 'callback' and \
|
||||
obj.limit != self.limit:
|
||||
return False
|
||||
@ -606,14 +635,12 @@ class Job(UnifiedJob, JobOptions):
|
||||
|
||||
def generate_dependencies(self, active_tasks):
|
||||
from awx.main.models import InventoryUpdate, ProjectUpdate
|
||||
if self.inventory is None or self.project is None:
|
||||
return []
|
||||
inventory_sources = self.inventory.inventory_sources.filter( update_on_launch=True)
|
||||
inventory_sources = self.inventory.inventory_sources.filter(update_on_launch=True)
|
||||
project_found = False
|
||||
inventory_sources_found = []
|
||||
dependencies = []
|
||||
for obj in active_tasks:
|
||||
if type(obj) == ProjectUpdate:
|
||||
if type(obj) == ProjectUpdate and self.project is not None:
|
||||
if obj.project == self.project:
|
||||
project_found = True
|
||||
if type(obj) == InventoryUpdate:
|
||||
@ -631,7 +658,7 @@ class Job(UnifiedJob, JobOptions):
|
||||
for source in inventory_sources.filter(pk__in=inventory_sources_already_updated):
|
||||
if source not in inventory_sources_found:
|
||||
inventory_sources_found.append(source)
|
||||
if not project_found and self.project.needs_update_on_launch:
|
||||
if not project_found and self.project is not None and self.project.needs_update_on_launch:
|
||||
dependencies.append(self.project.create_project_update(launch_type='dependency'))
|
||||
if inventory_sources.count(): # and not has_setup_failures? Probably handled as an error scenario in the task runner
|
||||
for source in inventory_sources:
|
||||
@ -651,7 +678,7 @@ class Job(UnifiedJob, JobOptions):
|
||||
processed=h.processed,
|
||||
skipped=h.skipped)
|
||||
data.update(dict(inventory=self.inventory.name,
|
||||
project=self.project.name,
|
||||
project=self.project.name if self.project else None,
|
||||
playbook=self.playbook,
|
||||
credential=self.credential.name,
|
||||
limit=self.limit,
|
||||
@ -676,6 +703,21 @@ class Job(UnifiedJob, JobOptions):
|
||||
evars.update(extra_vars)
|
||||
self.update_fields(extra_vars=json.dumps(evars))
|
||||
|
||||
def display_extra_vars(self):
|
||||
'''
|
||||
Hides fields marked as passwords in survey.
|
||||
'''
|
||||
if self.extra_vars and self.job_template and self.job_template.survey_enabled:
|
||||
try:
|
||||
extra_vars = json.loads(self.extra_vars)
|
||||
for key in self.job_template.survey_password_variables():
|
||||
if key in extra_vars:
|
||||
extra_vars[key] = REPLACE_STR
|
||||
return json.dumps(extra_vars)
|
||||
except ValueError:
|
||||
pass
|
||||
return self.extra_vars
|
||||
|
||||
def _survey_search_and_replace(self, content):
|
||||
# Use job template survey spec to identify password fields.
|
||||
# Then lookup password fields in extra_vars and save the values
|
||||
|
||||
@ -754,8 +754,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
name=self.name,
|
||||
url=self.get_ui_url(),
|
||||
created_by=smart_text(self.created_by),
|
||||
started=self.started.isoformat(),
|
||||
finished=self.finished.isoformat(),
|
||||
started=self.started.isoformat() if self.started is not None else None,
|
||||
finished=self.finished.isoformat() if self.finished is not None else None,
|
||||
status=self.status,
|
||||
traceback=self.result_traceback)
|
||||
|
||||
|
||||
@ -318,10 +318,15 @@ def activity_stream_create(sender, instance, created, **kwargs):
|
||||
return
|
||||
# TODO: Rethink details of the new instance
|
||||
object1 = camelcase_to_underscore(instance.__class__.__name__)
|
||||
changes = model_to_dict(instance, model_serializer_mapping)
|
||||
# Special case where Job survey password variables need to be hidden
|
||||
if type(instance) == Job:
|
||||
if 'extra_vars' in changes:
|
||||
changes['extra_vars'] = instance.display_extra_vars()
|
||||
activity_entry = ActivityStream(
|
||||
operation='create',
|
||||
object1=object1,
|
||||
changes=json.dumps(model_to_dict(instance, model_serializer_mapping)))
|
||||
changes=json.dumps(changes))
|
||||
activity_entry.save()
|
||||
#TODO: Weird situation where cascade SETNULL doesn't work
|
||||
# it might actually be a good idea to remove all of these FK references since
|
||||
@ -379,11 +384,16 @@ def activity_stream_associate(sender, instance, **kwargs):
|
||||
obj1 = instance
|
||||
object1=camelcase_to_underscore(obj1.__class__.__name__)
|
||||
obj_rel = sender.__module__ + "." + sender.__name__
|
||||
|
||||
for entity_acted in kwargs['pk_set']:
|
||||
obj2 = kwargs['model']
|
||||
obj2_id = entity_acted
|
||||
obj2_actual = obj2.objects.get(id=obj2_id)
|
||||
object2 = camelcase_to_underscore(obj2.__name__)
|
||||
if isinstance(obj2_actual, Role) and obj2_actual.content_object is not None:
|
||||
obj2_actual = obj2_actual.content_object
|
||||
object2 = camelcase_to_underscore(obj2_actual.__class__.__name__)
|
||||
else:
|
||||
object2 = camelcase_to_underscore(obj2.__name__)
|
||||
# Skip recording any inventory source, or system job template changes here.
|
||||
if isinstance(obj1, InventorySource) or isinstance(obj2_actual, InventorySource):
|
||||
continue
|
||||
@ -409,7 +419,7 @@ def activity_stream_associate(sender, instance, **kwargs):
|
||||
# If the m2m is from the User side we need to
|
||||
# set the content_object of the Role for our entry.
|
||||
if type(instance) == User and role.content_object is not None:
|
||||
getattr(activity_entry, role.content_type.name).add(role.content_object)
|
||||
getattr(activity_entry, role.content_type.name.replace(' ', '_')).add(role.content_object)
|
||||
|
||||
activity_entry.role.add(role)
|
||||
activity_entry.object_relationship_type = obj_rel
|
||||
|
||||
@ -423,6 +423,24 @@ class BaseTask(Task):
|
||||
'': '',
|
||||
}
|
||||
|
||||
def add_ansible_venv(self, env):
|
||||
if settings.ANSIBLE_USE_VENV:
|
||||
env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
|
||||
env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
|
||||
venv_libdir = os.path.join(settings.ANSIBLE_VENV_PATH, "lib")
|
||||
env.pop('PYTHONPATH', None) # default to none if no python_ver matches
|
||||
for python_ver in ["python2.7", "python2.6"]:
|
||||
if os.path.isdir(os.path.join(venv_libdir, python_ver)):
|
||||
env['PYTHONPATH'] = os.path.join(venv_libdir, python_ver, "site-packages") + ":"
|
||||
break
|
||||
return env
|
||||
|
||||
def add_tower_venv(self, env):
|
||||
if settings.TOWER_USE_VENV:
|
||||
env['VIRTUAL_ENV'] = settings.TOWER_VENV_PATH
|
||||
env['PATH'] = os.path.join(settings.TOWER_VENV_PATH, "bin") + ":" + env['PATH']
|
||||
return env
|
||||
|
||||
def build_env(self, instance, **kwargs):
|
||||
'''
|
||||
Build environment dictionary for ansible-playbook.
|
||||
@ -438,10 +456,8 @@ class BaseTask(Task):
|
||||
# Set environment variables needed for inventory and job event
|
||||
# callbacks to work.
|
||||
# Update PYTHONPATH to use local site-packages.
|
||||
if settings.ANSIBLE_USE_VENV:
|
||||
env['VIRTUAL_ENV'] = settings.ANSIBLE_VENV_PATH
|
||||
env['PATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "bin") + ":" + env['PATH']
|
||||
env['PYTHONPATH'] = os.path.join(settings.ANSIBLE_VENV_PATH, "lib/python2.7/site-packages/") + ":"
|
||||
# NOTE:
|
||||
# Derived class should call add_ansible_venv() or add_tower_venv()
|
||||
if self.should_use_proot(instance, **kwargs):
|
||||
env['PROOT_TMP_DIR'] = tower_settings.AWX_PROOT_BASE_PATH
|
||||
return env
|
||||
@ -756,6 +772,7 @@ class RunJob(BaseTask):
|
||||
plugin_dirs.append(tower_settings.AWX_ANSIBLE_CALLBACK_PLUGINS)
|
||||
plugin_path = ':'.join(plugin_dirs)
|
||||
env = super(RunJob, self).build_env(job, **kwargs)
|
||||
env = self.add_ansible_venv(env)
|
||||
# Set environment variables needed for inventory and job event
|
||||
# callbacks to work.
|
||||
env['JOB_ID'] = str(job.pk)
|
||||
@ -790,6 +807,7 @@ class RunJob(BaseTask):
|
||||
elif cloud_cred and cloud_cred.kind == 'rax':
|
||||
env['RAX_USERNAME'] = cloud_cred.username
|
||||
env['RAX_API_KEY'] = decrypt_field(cloud_cred, 'password')
|
||||
env['CLOUD_VERIFY_SSL'] = str(True)
|
||||
elif cloud_cred and cloud_cred.kind == 'gce':
|
||||
env['GCE_EMAIL'] = cloud_cred.username
|
||||
env['GCE_PROJECT'] = cloud_cred.project
|
||||
@ -915,7 +933,10 @@ class RunJob(BaseTask):
|
||||
'tower_user_name': job.created_by.username,
|
||||
})
|
||||
if job.extra_vars_dict:
|
||||
extra_vars.update(job.extra_vars_dict)
|
||||
if kwargs.get('display', False) and job.job_template and job.job_template.survey_enabled:
|
||||
extra_vars.update(json.loads(job.display_extra_vars()))
|
||||
else:
|
||||
extra_vars.update(job.extra_vars_dict)
|
||||
args.extend(['-e', json.dumps(extra_vars)])
|
||||
|
||||
# Add path to playbook (relative to project.local_path).
|
||||
@ -925,6 +946,9 @@ class RunJob(BaseTask):
|
||||
args.append(job.playbook)
|
||||
return args
|
||||
|
||||
def build_safe_args(self, job, **kwargs):
|
||||
return self.build_args(job, display=True, **kwargs)
|
||||
|
||||
def build_cwd(self, job, **kwargs):
|
||||
if job.project is None and job.job_type == PERM_INVENTORY_SCAN:
|
||||
return self.get_path_to('..', 'playbooks')
|
||||
@ -1026,6 +1050,7 @@ class RunProjectUpdate(BaseTask):
|
||||
Build environment dictionary for ansible-playbook.
|
||||
'''
|
||||
env = super(RunProjectUpdate, self).build_env(project_update, **kwargs)
|
||||
env = self.add_ansible_venv(env)
|
||||
env['ANSIBLE_ASK_PASS'] = str(False)
|
||||
env['ANSIBLE_ASK_SUDO_PASS'] = str(False)
|
||||
env['DISPLAY'] = '' # Prevent stupid password popup when running tests.
|
||||
@ -1326,9 +1351,7 @@ class RunInventoryUpdate(BaseTask):
|
||||
"""
|
||||
env = super(RunInventoryUpdate, self).build_env(inventory_update,
|
||||
**kwargs)
|
||||
if settings.TOWER_USE_VENV:
|
||||
env['VIRTUAL_ENV'] = settings.TOWER_VENV_PATH
|
||||
env['PATH'] = os.path.join(settings.TOWER_VENV_PATH, "bin") + ":" + env['PATH']
|
||||
env = self.add_tower_venv(env)
|
||||
# Pass inventory source ID to inventory script.
|
||||
env['INVENTORY_SOURCE_ID'] = str(inventory_update.inventory_source_id)
|
||||
env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk)
|
||||
@ -1531,6 +1554,7 @@ class RunAdHocCommand(BaseTask):
|
||||
'''
|
||||
plugin_dir = self.get_path_to('..', 'plugins', 'callback')
|
||||
env = super(RunAdHocCommand, self).build_env(ad_hoc_command, **kwargs)
|
||||
env = self.add_ansible_venv(env)
|
||||
# Set environment variables needed for inventory and ad hoc event
|
||||
# callbacks to work.
|
||||
env['AD_HOC_COMMAND_ID'] = str(ad_hoc_command.pk)
|
||||
@ -1687,5 +1711,11 @@ class RunSystemJob(BaseTask):
|
||||
logger.error("Failed to parse system job: " + str(e))
|
||||
return args
|
||||
|
||||
def build_env(self, instance, **kwargs):
|
||||
env = super(RunSystemJob, self).build_env(instance,
|
||||
**kwargs)
|
||||
env = self.add_tower_venv(env)
|
||||
return env
|
||||
|
||||
def build_cwd(self, instance, **kwargs):
|
||||
return settings.BASE_DIR
|
||||
|
||||
41
awx/main/tests/conftest.py
Normal file
41
awx/main/tests/conftest.py
Normal file
@ -0,0 +1,41 @@

# Python
import pytest

from awx.main.tests.factories import (
    create_organization,
    create_job_template,
    create_notification_template,
    create_survey_spec,
)

@pytest.fixture
def job_template_factory():
    return create_job_template

@pytest.fixture
def organization_factory():
    return create_organization

@pytest.fixture
def notification_template_factory():
    return create_notification_template

@pytest.fixture
def survey_spec_factory():
    return create_survey_spec

@pytest.fixture
def job_with_secret_key_factory(job_template_factory):
    def rf(persisted):
        "Returns job with linked JT survey with password survey questions"
        objects = job_template_factory('jt', organization='org1', survey=[
            {'variable': 'submitter_email', 'type': 'text', 'default': 'foobar@redhat.com'},
            {'variable': 'secret_key', 'default': '6kQngg3h8lgiSTvIEb21', 'type': 'password'},
            {'variable': 'SSN', 'type': 'password'}], jobs=[1], persisted=persisted)
        return objects.jobs[1]
    return rf

@pytest.fixture
def job_with_secret_key_unit(job_with_secret_key_factory):
    return job_with_secret_key_factory(persisted=False)
65
awx/main/tests/factories/README.md
Normal file
65
awx/main/tests/factories/README.md
Normal file
@ -0,0 +1,65 @@
factories
=========

This is a module for defining stand-alone factories and fixtures. Ideally a fixture will implement a single item.
DO NOT decorate fixtures in this module with @pytest.fixture. These fixtures are to be combined
with fixture factories and composition using the `conftest.py` convention. Those composed fixtures
will be decorated for usage and discovery.

Use the fixtures directly in factory methods to build up the desired set of components and relationships.
Each fixture should create exactly one object and should support the option for that object to be persisted
or not.

A factory should create at a minimum a single object for that factory type. The creation of any
associated objects should be explicit. For example, the `create_organization` factory, when given only
a `name` parameter, will create an Organization but will not implicitly create any other objects.

teams
-----

There is some special handling for users when adding teams. There is a shorthand that allows you to
assign a user to the member\_role of a team using the string notation `team_name:user_name`. There is
no shortcut for adding a user to the admin\_role of a team. See the roles section for more information
about how to do that.

roles
-----

The roles helper allows you to pass roles in to a factory. These role assignments happen after
the objects are created. Using the roles parameter requires that persisted=True (the default).

You can use the string notation `object_name.role_name:user` OR `object_name.role_name:object_name.child_role`:

    obj.parent_role:user  # This will make the user a member of parent_role
    obj1.role:obj2.role   # This will make obj2 a child role of obj1

    team1.admin_role:joe
    team1.admin_role:project1.admin_role
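
Putting the team shorthand and the roles notation together, a call might look roughly like
this (a sketch only; it assumes the `create_organization` factory described in this module
accepts `teams`, `users`, and `roles` keyword arguments):

    objects = create_organization('org1', teams=['team1'], users=['team1:joe'],
                                  roles=['team1.admin_role:joe'])
    assert objects.users.joe in objects.teams.team1.admin_role.members.all()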

examples
--------

    objects = create_organization('test-org')
    assert objects.organization.name == 'test-org'

    objects = create_organization('test-org', projects=['test-proj'])
    assert objects.projects.test_proj.organization == objects.organization

    objects = create_organization('test-org', persisted=False)
    assert not objects.organization.pk

patterns
--------

`mk` functions are single-object fixtures. They should create only a single object with the minimum deps.
They should also accept a `persisted` flag; if they must be persisted to work, they raise an error when persisted=False.

`generate` and `apply` functions are helpers that build up the various parts of a `create` function's objects. These
should be useful for more than one create function and should explicitly accept all of the values needed
to execute. These functions should also be robust and have very specific error reporting about constraints and/or
bad values.

`create` functions compose many of the `mk` and `generate` functions to make different object
factories. When given the minimum set of arguments, these functions should only produce a
single artifact (or the minimum needed for that object). These should be wrapped by discoverable
fixtures in various conftest.py files.
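
As a loose illustration of that wrapping, a test might consume one of the composed fixtures from
`conftest.py` along these lines (a sketch only; exact arguments and assertions depend on the models):

    @pytest.mark.django_db
    def test_factory_wiring(job_template_factory):
        objects = job_template_factory('jt', organization='org1',
                                       inventory='inv1', credential='cred1')
        assert objects.job_template.inventory == objects.inventory
        assert objects.job_template.credential == objects.credential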
18
awx/main/tests/factories/__init__.py
Normal file
18
awx/main/tests/factories/__init__.py
Normal file
@ -0,0 +1,18 @@
from .tower import (
    create_organization,
    create_job_template,
    create_notification_template,
    create_survey_spec,
)

from .exc import (
    NotUnique,
)

__all__ = [
    'create_organization',
    'create_job_template',
    'create_notification_template',
    'create_survey_spec',
    'NotUnique',
]
5
awx/main/tests/factories/exc.py
Normal file
5
awx/main/tests/factories/exc.py
Normal file
@ -0,0 +1,5 @@
class NotUnique(Exception):
    def __init__(self, name, objects):
        msg = '{} is not a unique key, found {}={}'.format(name, name, objects[name])
        super(Exception, self).__init__(msg)

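For context, NotUnique above is the error surfaced when two objects handed to a factory share a name,
since the role-object map requires unique names; for example (hypothetical names, assuming the
create_organization factory from this module):

    create_organization('org1', teams=['dup'], projects=['dup'])   # raises NotUnique for 'dup'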
150
awx/main/tests/factories/fixtures.py
Normal file
150
awx/main/tests/factories/fixtures.py
Normal file
@ -0,0 +1,150 @@
|
||||
import json
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
from awx.main.models import (
|
||||
Organization,
|
||||
Project,
|
||||
Team,
|
||||
Instance,
|
||||
JobTemplate,
|
||||
Job,
|
||||
NotificationTemplate,
|
||||
Credential,
|
||||
Inventory,
|
||||
Label,
|
||||
)
|
||||
|
||||
# mk methods should create only a single object of a single type.
|
||||
# they should also have the option of being persisted or not.
|
||||
# if the object must be persisted an error should be raised when
|
||||
# persisted=False
|
||||
#
|
||||
|
||||
def mk_instance(persisted=True):
|
||||
if not persisted:
|
||||
raise RuntimeError('creating an Instance requires persisted=True')
|
||||
from django.conf import settings
|
||||
return Instance.objects.get_or_create(uuid=settings.SYSTEM_UUID, primary=True, hostname="instance.example.org")
|
||||
|
||||
|
||||
def mk_organization(name, description=None, persisted=True):
|
||||
description = description or '{}-description'.format(name)
|
||||
org = Organization(name=name, description=description)
|
||||
if persisted:
|
||||
mk_instance(persisted)
|
||||
org.save()
|
||||
return org
|
||||
|
||||
|
||||
def mk_label(name, organization=None, description=None, persisted=True):
|
||||
description = description or '{}-description'.format(name)
|
||||
label = Label(name=name, description=description)
|
||||
if organization is not None:
|
||||
label.organization = organization
|
||||
if persisted:
|
||||
label.save()
|
||||
return label
|
||||
|
||||
|
||||
def mk_team(name, organization=None, persisted=True):
|
||||
team = Team(name=name)
|
||||
if organization is not None:
|
||||
team.organization = organization
|
||||
if persisted:
|
||||
mk_instance(persisted)
|
||||
team.save()
|
||||
return team
|
||||
|
||||
|
||||
def mk_user(name, is_superuser=False, organization=None, team=None, persisted=True):
|
||||
user = User(username=name, is_superuser=is_superuser)
|
||||
if persisted:
|
||||
user.save()
|
||||
if organization is not None:
|
||||
organization.member_role.members.add(user)
|
||||
if team is not None:
|
||||
team.member_role.members.add(user)
|
||||
return user
|
||||
|
||||
|
||||
def mk_project(name, organization=None, description=None, persisted=True):
|
||||
description = description or '{}-description'.format(name)
|
||||
project = Project(name=name, description=description)
|
||||
if organization is not None:
|
||||
project.organization = organization
|
||||
if persisted:
|
||||
project.save()
|
||||
return project
|
||||
|
||||
|
||||
def mk_credential(name, cloud=False, kind='ssh', persisted=True):
|
||||
cred = Credential(name=name, cloud=cloud, kind=kind)
|
||||
if persisted:
|
||||
cred.save()
|
||||
return cred
|
||||
|
||||
|
||||
def mk_notification_template(name, notification_type='webhook', configuration=None, organization=None, persisted=True):
|
||||
nt = NotificationTemplate(name=name)
|
||||
nt.notification_type = notification_type
|
||||
nt.notification_configuration = configuration or dict(url="http://localhost", headers={"Test": "Header"})
|
||||
|
||||
if organization is not None:
|
||||
nt.organization = organization
|
||||
if persisted:
|
||||
nt.save()
|
||||
return nt
|
||||
|
||||
|
||||
def mk_inventory(name, organization=None, persisted=True):
|
||||
inv = Inventory(name=name)
|
||||
if organization is not None:
|
||||
inv.organization = organization
|
||||
if persisted:
|
||||
inv.save()
|
||||
return inv
|
||||
|
||||
|
||||
def mk_job(job_type='run', status='new', job_template=None, inventory=None,
|
||||
credential=None, project=None, extra_vars={},
|
||||
persisted=True):
|
||||
job = Job(job_type=job_type, status=status, extra_vars=json.dumps(extra_vars))
|
||||
|
||||
job.job_template = job_template
|
||||
job.inventory = inventory
|
||||
job.credential = credential
|
||||
job.project = project
|
||||
|
||||
if persisted:
|
||||
job.save()
|
||||
return job
|
||||
|
||||
|
||||
def mk_job_template(name, job_type='run',
|
||||
organization=None, inventory=None,
|
||||
credential=None, persisted=True, extra_vars='',
|
||||
project=None, spec=None):
|
||||
if extra_vars:
|
||||
extra_vars = json.dumps(extra_vars)
|
||||
|
||||
jt = JobTemplate(name=name, job_type=job_type, extra_vars=extra_vars,
|
||||
playbook='mocked')
|
||||
|
||||
jt.inventory = inventory
|
||||
if jt.inventory is None:
|
||||
jt.ask_inventory_on_launch = True
|
||||
|
||||
jt.credential = credential
|
||||
if jt.credential is None:
|
||||
jt.ask_credential_on_launch = True
|
||||
|
||||
jt.project = project
|
||||
|
||||
jt.survey_spec = spec
|
||||
if jt.survey_spec is not None:
|
||||
jt.survey_enabled = True
|
||||
|
||||
if persisted:
|
||||
jt.save()
|
||||
return jt
|
||||
59
awx/main/tests/factories/objects.py
Normal file
59
awx/main/tests/factories/objects.py
Normal file
@ -0,0 +1,59 @@
|
||||
from collections import namedtuple
|
||||
|
||||
from .exc import NotUnique
|
||||
|
||||
def generate_objects(artifacts, kwargs):
|
||||
'''generate_objects takes a list of artifacts that are supported by
|
||||
a create function and compares it to the kwargs passed in to the create
|
||||
function. If a kwarg is found that is not in the artifacts list a RuntimeError
|
||||
is raised.
|
||||
'''
|
||||
for k in kwargs.keys():
|
||||
if k not in artifacts:
|
||||
raise RuntimeError('{} is not a valid argument'.format(k))
|
||||
return namedtuple("Objects", ",".join(artifacts))
|
||||
|
||||
|
||||
def generate_role_objects(objects):
|
||||
'''generate_role_objects assembles a dictionary of all possible objects by name.
|
||||
It will raise an exception if any of the objects share a name due to the fact that
|
||||
it is to be used with apply_roles, which expects unique object names.
|
||||
|
||||
roles share a common name e.g. admin_role, member_role. This ensures that the
|
||||
roles short hand used for mapping Roles and Users in apply_roles will function as desired.
|
||||
'''
|
||||
combined_objects = {}
|
||||
for o in objects:
|
||||
if type(o) is dict:
|
||||
for k,v in o.iteritems():
|
||||
if combined_objects.get(k) is not None:
|
||||
raise NotUnique(k, combined_objects)
|
||||
combined_objects[k] = v
|
||||
elif hasattr(o, 'name'):
|
||||
if combined_objects.get(o.name) is not None:
|
||||
raise NotUnique(o.name, combined_objects)
|
||||
combined_objects[o.name] = o
|
||||
else:
|
||||
if o is not None:
|
||||
raise RuntimeError('expected a list of dict or list of list, got a type {}'.format(type(o)))
|
||||
return combined_objects
|
||||
|
||||
|
||||
class _Mapped(object):
|
||||
'''_Mapped is a helper class that replaces spaces and dashes
|
||||
in the name of an object and assigns the object as an attribute
|
||||
|
||||
input: {'my org': Organization}
|
||||
output: instance.my_org = Organization
|
||||
'''
|
||||
def __init__(self, d):
|
||||
self.d = d
|
||||
for k,v in d.items():
|
||||
k = k.replace(' ', '_')
|
||||
k = k.replace('-', '_')
|
||||
|
||||
setattr(self, k.replace(' ','_'), v)
|
||||
|
||||
def all(self):
|
||||
return self.d.values()
|
||||
|
||||
332
awx/main/tests/factories/tower.py
Normal file
332
awx/main/tests/factories/tower.py
Normal file
@ -0,0 +1,332 @@
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
from awx.main.models import (
|
||||
Organization,
|
||||
Project,
|
||||
Team,
|
||||
NotificationTemplate,
|
||||
Credential,
|
||||
Inventory,
|
||||
Job,
|
||||
Label,
|
||||
)
|
||||
|
||||
from .objects import (
|
||||
generate_objects,
|
||||
generate_role_objects,
|
||||
_Mapped,
|
||||
)
|
||||
|
||||
from .fixtures import (
|
||||
mk_organization,
|
||||
mk_team,
|
||||
mk_user,
|
||||
mk_job_template,
|
||||
mk_job,
|
||||
mk_credential,
|
||||
mk_inventory,
|
||||
mk_project,
|
||||
mk_label,
|
||||
mk_notification_template,
|
||||
)
|
||||
|
||||
|
||||
def apply_roles(roles, objects, persisted):
|
||||
'''apply_roles evaluates a list of Role relationships represented as strings.
|
||||
The format of this string is 'role:[user|role]'. When a user is provided, they will be
|
||||
made a member of the role on the LHS. When a role is provided that role will be added to
|
||||
the children of the role on the LHS.
|
||||
|
||||
This function assumes that objects is a dictionary that contains a unique set of key to value
|
||||
mappings for all possible "Role objects". See the example below:
|
||||
|
||||
Mapping Users
|
||||
-------------
|
||||
roles = ['org1.admin_role:user1', 'team1.admin_role:user1']
|
||||
objects = {'org1': Organization, 'team1': Team, 'user1': User}
|
||||
|
||||
Mapping Roles
|
||||
-------------
|
||||
roles = ['org1.admin_role:team1.admin_role']
|
||||
objects = {'org1': Organization, 'team1': Team}
|
||||
|
||||
Invalid Mapping
|
||||
---------------
|
||||
roles = ['org1.admin_role:team1.admin_role']
|
||||
objects = {'org1': Organization, 'user1': User} # Exception, no team1 entry
|
||||
'''
|
||||
if roles is None:
|
||||
return None
|
||||
|
||||
if not persisted:
|
||||
raise RuntimeError('roles can not be used when persisted=False')
|
||||
|
||||
for role in roles:
|
||||
obj_role, sep, member_role = role.partition(':')
|
||||
if not member_role:
|
||||
raise RuntimeError('you must provide an assignment role, got None')
|
||||
|
||||
obj_str, o_role_str = obj_role.split('.')
|
||||
member_str, m_sep, m_role_str = member_role.partition('.')
|
||||
|
||||
obj = objects[obj_str]
|
||||
obj_role = getattr(obj, o_role_str)
|
||||
|
||||
member = objects[member_str]
|
||||
if m_role_str:
|
||||
if hasattr(member, m_role_str):
|
||||
member_role = getattr(member, m_role_str)
|
||||
obj_role.children.add(member_role)
|
||||
else:
|
||||
raise RuntimeError('unable to find {} role for {}'.format(m_role_str, member_str))
|
||||
else:
|
||||
if type(member) is User:
|
||||
obj_role.members.add(member)
|
||||
else:
|
||||
raise RuntimeError('unable to add non-user {} for members list of {}'.format(member_str, obj_str))
|
||||
|
||||
def generate_users(organization, teams, superuser, persisted, **kwargs):
|
||||
'''generate_users evaluates a mixed list of User objects and strings.
|
||||
If a string is encountered a user with that username is created and added to the lookup dict.
|
||||
If a User object is encountered the User.username is used as a key for the lookup dict.
|
||||
|
||||
A short hand for assigning a user to a team is available in the following format: "team_name:username".
|
||||
If a string in that format is encountered, an attempt to look up the team by the key team_name from the teams
argument is made; a KeyError will be thrown if the team does not exist in the dict. The teams argument should
|
||||
be a dict of {Team.name:Team}
|
||||
'''
|
||||
users = {}
|
||||
key = 'superusers' if superuser else 'users'
|
||||
if key in kwargs and kwargs.get(key) is not None:
|
||||
for u in kwargs[key]:
|
||||
if type(u) is User:
|
||||
users[u.username] = u
|
||||
else:
|
||||
p1, sep, p2 = u.partition(':')
|
||||
if p2:
|
||||
t = teams[p1]
|
||||
users[p2] = mk_user(p2, organization=organization, team=t, is_superuser=superuser, persisted=persisted)
|
||||
else:
|
||||
users[p1] = mk_user(p1, organization=organization, team=None, is_superuser=superuser, persisted=persisted)
|
||||
return users
|
||||
|
||||
def generate_teams(organization, persisted, **kwargs):
|
||||
'''generate_teams evaluates a mixed list of Team objects and strings.
If a string is encountered, a team with that name is created and added to the lookup dict.
If a Team object is encountered, the Team.name is used as a key for the lookup dict.
|
||||
'''
|
||||
teams = {}
|
||||
if 'teams' in kwargs and kwargs.get('teams') is not None:
|
||||
for t in kwargs['teams']:
|
||||
if type(t) is Team:
|
||||
teams[t.name] = t
|
||||
else:
|
||||
teams[t] = mk_team(t, organization=organization, persisted=persisted)
|
||||
return teams
|
||||
|
||||
def create_survey_spec(variables=None, default_type='integer', required=True):
|
||||
'''
|
||||
Returns a valid survey spec for a job template, based on the input
|
||||
argument specifying variable name(s)
|
||||
'''
|
||||
if isinstance(variables, list):
|
||||
name = "%s survey" % variables[0]
|
||||
description = "A survey that starts with %s." % variables[0]
|
||||
vars_list = variables
|
||||
else:
|
||||
name = "%s survey" % variables
|
||||
description = "A survey about %s." % variables
|
||||
vars_list = [variables]
|
||||
|
||||
spec = []
|
||||
index = 0
|
||||
for var in vars_list:
|
||||
spec_item = {}
|
||||
spec_item['index'] = index
|
||||
index += 1
|
||||
spec_item['required'] = required
|
||||
spec_item['choices'] = ''
|
||||
spec_item['type'] = default_type
|
||||
if isinstance(var, dict):
|
||||
spec_item.update(var)
|
||||
var_name = spec_item.get('variable', 'variable')
|
||||
else:
|
||||
var_name = var
|
||||
spec_item.setdefault('variable', var_name)
|
||||
spec_item.setdefault('question_name', "Enter a value for %s." % var_name)
|
||||
spec_item.setdefault('question_description', "A question about %s." % var_name)
|
||||
if spec_item['type'] == 'integer':
|
||||
spec_item.setdefault('default', 0)
|
||||
spec_item.setdefault('max', spec_item['default'] + 100)
|
||||
spec_item.setdefault('min', spec_item['default'] - 100)
|
||||
else:
|
||||
spec_item.setdefault('default', '')
|
||||
spec.append(spec_item)
|
||||
|
||||
survey_spec = {}
|
||||
survey_spec['spec'] = spec
|
||||
survey_spec['name'] = name
|
||||
survey_spec['description'] = description
|
||||
return survey_spec
|
||||
|
||||
|
||||
# create methods are intended to be called directly as needed
|
||||
# or encapsulated by specific factory fixtures in a conftest
|
||||
#
|
||||
|
||||
def create_job_template(name, roles=None, persisted=True, **kwargs):
|
||||
Objects = generate_objects(["job_template", "jobs",
|
||||
"organization",
|
||||
"inventory",
|
||||
"project",
|
||||
"credential",
|
||||
"job_type",
|
||||
"survey",], kwargs)
|
||||
|
||||
org = None
|
||||
proj = None
|
||||
inv = None
|
||||
cred = None
|
||||
spec = None
|
||||
jobs = {}
|
||||
job_type = kwargs.get('job_type', 'run')
|
||||
extra_vars = kwargs.get('extra_vars', '')
|
||||
|
||||
if 'organization' in kwargs:
|
||||
org = kwargs['organization']
|
||||
if type(org) is not Organization:
|
||||
org = mk_organization(org, '{}-desc'.format(org), persisted=persisted)
|
||||
|
||||
if 'credential' in kwargs:
|
||||
cred = kwargs['credential']
|
||||
if type(cred) is not Credential:
|
||||
cred = mk_credential(cred, persisted=persisted)
|
||||
|
||||
if 'project' in kwargs:
|
||||
proj = kwargs['project']
|
||||
if type(proj) is not Project:
|
||||
proj = mk_project(proj, organization=org, persisted=persisted)
|
||||
|
||||
if 'inventory' in kwargs:
|
||||
inv = kwargs['inventory']
|
||||
if type(inv) is not Inventory:
|
||||
inv = mk_inventory(inv, organization=org, persisted=persisted)
|
||||
|
||||
if 'survey' in kwargs:
|
||||
spec = create_survey_spec(kwargs['survey'])
|
||||
|
||||
jt = mk_job_template(name, project=proj,
|
||||
inventory=inv, credential=cred,
|
||||
job_type=job_type, spec=spec, extra_vars=extra_vars,
|
||||
persisted=persisted)
|
||||
|
||||
if 'jobs' in kwargs:
|
||||
for i in kwargs['jobs']:
|
||||
if type(i) is Job:
|
||||
jobs[i.pk] = i
|
||||
else:
|
||||
# Fill in default survey answers
|
||||
job_extra_vars = {}
|
||||
for question in spec['spec']:
|
||||
job_extra_vars[question['variable']] = question['default']
|
||||
jobs[i] = mk_job(job_template=jt, project=proj, inventory=inv, credential=cred,
|
||||
extra_vars=job_extra_vars,
|
||||
job_type=job_type, persisted=persisted)
|
||||
|
||||
role_objects = generate_role_objects([org, proj, inv, cred])
|
||||
apply_roles(roles, role_objects, persisted)
|
||||
|
||||
return Objects(job_template=jt,
|
||||
jobs=jobs,
|
||||
project=proj,
|
||||
inventory=inv,
|
||||
credential=cred,
|
||||
job_type=job_type,
|
||||
organization=org,
|
||||
survey=spec,)
|
||||
|
||||
def create_organization(name, roles=None, persisted=True, **kwargs):
|
||||
Objects = generate_objects(["organization",
|
||||
"teams", "users",
|
||||
"superusers",
|
||||
"projects",
|
||||
"labels",
|
||||
"notification_templates",
|
||||
"inventories",], kwargs)
|
||||
|
||||
projects = {}
|
||||
inventories = {}
|
||||
labels = {}
|
||||
notification_templates = {}
|
||||
|
||||
org = mk_organization(name, '{}-desc'.format(name), persisted=persisted)
|
||||
|
||||
if 'inventories' in kwargs:
|
||||
for i in kwargs['inventories']:
|
||||
if type(i) is Inventory:
|
||||
inventories[i.name] = i
|
||||
else:
|
||||
inventories[i] = mk_inventory(i, organization=org, persisted=persisted)
|
||||
|
||||
if 'projects' in kwargs:
|
||||
for p in kwargs['projects']:
|
||||
if type(p) is Project:
|
||||
projects[p.name] = p
|
||||
else:
|
||||
projects[p] = mk_project(p, organization=org, persisted=persisted)
|
||||
|
||||
teams = generate_teams(org, persisted, teams=kwargs.get('teams'))
|
||||
superusers = generate_users(org, teams, True, persisted, superusers=kwargs.get('superusers'))
|
||||
users = generate_users(org, teams, False, persisted, users=kwargs.get('users'))
|
||||
|
||||
if 'labels' in kwargs:
|
||||
for l in kwargs['labels']:
|
||||
if type(l) is Label:
|
||||
labels[l.name] = l
|
||||
else:
|
||||
labels[l] = mk_label(l, organization=org, persisted=persisted)
|
||||
|
||||
if 'notification_templates' in kwargs:
|
||||
for nt in kwargs['notification_templates']:
|
||||
if type(nt) is NotificationTemplate:
|
||||
notification_templates[nt.name] = nt
|
||||
else:
|
||||
notification_templates[nt] = mk_notification_template(nt, organization=org, persisted=persisted)
|
||||
|
||||
role_objects = generate_role_objects([org, superusers, users, teams, projects, labels, notification_templates])
|
||||
apply_roles(roles, role_objects, persisted)
|
||||
return Objects(organization=org,
|
||||
superusers=_Mapped(superusers),
|
||||
users=_Mapped(users),
|
||||
teams=_Mapped(teams),
|
||||
projects=_Mapped(projects),
|
||||
labels=_Mapped(labels),
|
||||
notification_templates=_Mapped(notification_templates),
|
||||
inventories=_Mapped(inventories))
|
||||
|
||||
def create_notification_template(name, roles=None, persisted=True, **kwargs):
|
||||
Objects = generate_objects(["notification_template",
|
||||
"organization",
|
||||
"users",
|
||||
"superusers",
|
||||
"teams",], kwargs)
|
||||
|
||||
organization = None
|
||||
|
||||
if 'organization' in kwargs:
|
||||
org = kwargs['organization']
|
||||
organization = mk_organization(org, '{}-desc'.format(org), persisted=persisted)
|
||||
|
||||
notification_template = mk_notification_template(name, organization=organization, persisted=persisted)
|
||||
|
||||
teams = generate_teams(organization, persisted, teams=kwargs.get('teams'))
|
||||
superusers = generate_users(organization, teams, True, persisted, superusers=kwargs.get('superusers'))
|
||||
users = generate_users(organization, teams, False, persisted, users=kwargs.get('users'))
|
||||
|
||||
role_objects = generate_role_objects([organization, notification_template])
|
||||
apply_roles(roles, role_objects, persisted)
|
||||
return Objects(notification_template=notification_template,
|
||||
organization=organization,
|
||||
users=_Mapped(users),
|
||||
superusers=_Mapped(superusers),
|
||||
teams=teams)
|
||||
@ -131,3 +131,24 @@ def test_stream_queryset_hides_shows_items(
|
||||
assert queryset.filter(host__pk=host.pk, operation='create').count() == 1
|
||||
assert queryset.filter(team__pk=team.pk, operation='create').count() == 1
|
||||
assert queryset.filter(notification_template__pk=notification_template.pk, operation='create').count() == 1
|
||||
|
||||
@pytest.mark.django_db
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
def test_stream_user_direct_role_updates(get, post, organization_factory):
|
||||
objects = organization_factory('test_org',
|
||||
superusers=['admin'],
|
||||
users=['test'],
|
||||
inventories=['inv1'])
|
||||
|
||||
url = reverse('api:user_roles_list', args=(objects.users.test.pk,))
|
||||
post(url, dict(id=objects.inventories.inv1.read_role.pk), objects.superusers.admin)
|
||||
|
||||
activity_stream = ActivityStream.objects.filter(
|
||||
inventory__pk=objects.inventories.inv1.pk,
|
||||
user__pk=objects.users.test.pk,
|
||||
role__pk=objects.inventories.inv1.read_role.pk).first()
|
||||
url = reverse('api:activity_stream_detail', args=(activity_stream.pk,))
|
||||
response = get(url, objects.users.test)
|
||||
|
||||
assert response.data['object1'] == 'user'
|
||||
assert response.data['object2'] == 'inventory'
|
||||
|
||||
148
awx/main/tests/functional/api/test_adhoc.py
Normal file
148
awx/main/tests/functional/api/test_adhoc.py
Normal file
@ -0,0 +1,148 @@
|
||||
import mock # noqa
|
||||
import pytest
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
|
||||
|
||||
"""
|
||||
def run_test_ad_hoc_command(self, **kwargs):
|
||||
# Post to list to start a new ad hoc command.
|
||||
expect = kwargs.pop('expect', 201)
|
||||
url = kwargs.pop('url', reverse('api:ad_hoc_command_list'))
|
||||
data = {
|
||||
'inventory': self.inventory.pk,
|
||||
'credential': self.credential.pk,
|
||||
'module_name': 'command',
|
||||
'module_args': 'uptime',
|
||||
}
|
||||
data.update(kwargs)
|
||||
for k,v in data.items():
|
||||
if v is None:
|
||||
del data[k]
|
||||
return self.post(url, data, expect=expect)
|
||||
"""
|
||||
|
||||
@pytest.fixture
|
||||
def post_adhoc(post, inventory, machine_credential):
|
||||
def f(url, data, user, expect=201):
|
||||
if not url:
|
||||
url = reverse('api:ad_hoc_command_list')
|
||||
|
||||
if 'module_name' not in data:
|
||||
data['module_name'] = 'command'
|
||||
if 'module_args' not in data:
|
||||
data['module_args'] = 'uptime'
|
||||
if 'inventory' not in data:
|
||||
data['inventory'] = inventory.id
|
||||
if 'credential' not in data:
|
||||
data['credential'] = machine_credential.id
|
||||
|
||||
for k,v in data.items():
|
||||
if v is None:
|
||||
del data[k]
|
||||
|
||||
return post(url, data, user, expect=expect)
|
||||
return f
|
||||
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_admin_post_ad_hoc_command_list(admin, post_adhoc, inventory, machine_credential):
|
||||
res = post_adhoc(reverse('api:ad_hoc_command_list'), {}, admin, expect=201)
|
||||
assert res.data['job_type'] == 'run'
|
||||
assert res.data['inventory'] == inventory.id
|
||||
assert res.data['credential'] == machine_credential.id
|
||||
assert res.data['module_name'] == 'command'
|
||||
assert res.data['module_args'] == 'uptime'
|
||||
assert res.data['limit'] == ''
|
||||
assert res.data['forks'] == 0
|
||||
assert res.data['verbosity'] == 0
|
||||
assert res.data['become_enabled'] is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_empty_post_400(admin, post):
|
||||
post(reverse('api:ad_hoc_command_list'), {}, admin, expect=400)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_empty_put_405(admin, put):
|
||||
put(reverse('api:ad_hoc_command_list'), {}, admin, expect=405)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_empty_patch_405(admin, patch):
|
||||
patch(reverse('api:ad_hoc_command_list'), {}, admin, expect=405)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_empty_delete_405(admin, delete):
|
||||
delete(reverse('api:ad_hoc_command_list'), admin, expect=405)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_post_ad_hoc_command_list(alice, post_adhoc, inventory, machine_credential):
|
||||
inventory.adhoc_role.members.add(alice)
|
||||
machine_credential.use_role.members.add(alice)
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=201)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_post_ad_hoc_command_list_xfail(alice, post_adhoc, inventory, machine_credential):
|
||||
inventory.read_role.members.add(alice) # just read access? no dice.
|
||||
machine_credential.use_role.members.add(alice)
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_post_ad_hoc_command_list_without_creds(alice, post_adhoc, inventory, machine_credential):
|
||||
inventory.adhoc_role.members.add(alice)
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_post_ad_hoc_command_list_without_inventory(alice, post_adhoc, inventory, machine_credential):
|
||||
machine_credential.use_role.members.add(alice)
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {}, alice, expect=403)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_admin_post_inventory_ad_hoc_command_list(admin, post_adhoc, inventory):
|
||||
post_adhoc(reverse('api:inventory_ad_hoc_commands_list', args=(inventory.id,)), {'inventory': None}, admin, expect=201)
|
||||
post_adhoc(reverse('api:inventory_ad_hoc_commands_list', args=(inventory.id,)), {}, admin, expect=201)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_get_inventory_ad_hoc_command_list(admin, alice, post_adhoc, get, inventory_factory, machine_credential):
|
||||
inv1 = inventory_factory('inv1')
|
||||
inv2 = inventory_factory('inv2')
|
||||
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {'inventory': inv1.id}, admin, expect=201)
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {'inventory': inv2.id}, admin, expect=201)
|
||||
res = get(reverse('api:ad_hoc_command_list'), admin, expect=200)
|
||||
assert res.data['count'] == 2
|
||||
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv1.id,)), admin, expect=200)
|
||||
assert res.data['count'] == 1
|
||||
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv2.id,)), admin, expect=200)
|
||||
assert res.data['count'] == 1
|
||||
|
||||
inv1.adhoc_role.members.add(alice)
|
||||
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv1.id,)), alice, expect=200)
|
||||
assert res.data['count'] == 0
|
||||
|
||||
machine_credential.use_role.members.add(alice)
|
||||
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv1.id,)), alice, expect=200)
|
||||
assert res.data['count'] == 1
|
||||
res = get(reverse('api:inventory_ad_hoc_commands_list', args=(inv2.id,)), alice, expect=403)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bad_data1(admin, post_adhoc):
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {'module_name': 'command', 'module_args': None}, admin, expect=400)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bad_data2(admin, post_adhoc):
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {'job_type': 'baddata'}, admin, expect=400)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bad_data3(admin, post_adhoc):
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {'verbosity': -1}, admin, expect=400)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bad_data4(admin, post_adhoc):
|
||||
post_adhoc(reverse('api:ad_hoc_command_list'), {'forks': -1}, admin, expect=400)
|
||||
|
||||
@ -147,8 +147,7 @@ def test_credential_detail(post, get, organization, org_admin):
|
||||
response = get(reverse('api:credential_detail', args=(response.data['id'],)), org_admin)
|
||||
assert response.status_code == 200
|
||||
summary_fields = response.data['summary_fields']
|
||||
assert 'owners' in summary_fields
|
||||
assert summary_fields['owners'][0]['id'] == organization.id
|
||||
assert 'organization' in summary_fields
|
||||
related_fields = response.data['related']
|
||||
assert 'organization' in related_fields
|
||||
|
||||
|
||||
@ -70,7 +70,7 @@ def bad_scan_JT(job_template_prompts):
|
||||
# End of setup, tests start here
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, user, mocker):
|
||||
def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, admin_user, mocker):
|
||||
job_template = job_template_prompts(False)
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||
@ -78,8 +78,7 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, us
|
||||
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
runtime_data, user('admin', True))
|
||||
assert response.status_code == 201
|
||||
runtime_data, admin_user, expect=201)
|
||||
|
||||
# Check that job is serialized correctly
|
||||
job_id = response.data['job']
|
||||
@ -99,7 +98,7 @@ def test_job_ignore_unprompted_vars(runtime_data, job_template_prompts, post, us
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, user, mocker):
|
||||
def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, admin_user, mocker):
|
||||
job_template = job_template_prompts(True)
|
||||
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, **runtime_data)
|
||||
@ -107,9 +106,8 @@ def test_job_accept_prompted_vars(runtime_data, job_template_prompts, post, user
|
||||
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
runtime_data, user('admin', True))
|
||||
runtime_data, admin_user, expect=201)
|
||||
|
||||
assert response.status_code == 201
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
|
||||
@ -134,51 +132,47 @@ def test_job_accept_prompted_vars_null(runtime_data, job_template_prompts_null,
|
||||
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
runtime_data, rando)
|
||||
runtime_data, rando, expect=201)
|
||||
|
||||
assert response.status_code == 201
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
mock_job.signal_start.assert_called_once_with(**runtime_data)
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, post, user):
|
||||
def test_job_reject_invalid_prompted_vars(runtime_data, job_template_prompts, post, admin_user):
|
||||
job_template = job_template_prompts(True)
|
||||
|
||||
response = post(
|
||||
reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
dict(job_type='foobicate', # foobicate is not a valid job type
|
||||
inventory=87865, credential=48474), user('admin', True))
|
||||
inventory=87865, credential=48474), admin_user, expect=400)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert response.data['job_type'] == [u'"foobicate" is not a valid choice.']
|
||||
assert response.data['inventory'] == [u'Invalid pk "87865" - object does not exist.']
|
||||
assert response.data['credential'] == [u'Invalid pk "48474" - object does not exist.']
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_reject_invalid_prompted_extra_vars(runtime_data, job_template_prompts, post, user):
|
||||
def test_job_reject_invalid_prompted_extra_vars(runtime_data, job_template_prompts, post, admin_user):
|
||||
job_template = job_template_prompts(True)
|
||||
|
||||
response = post(
|
||||
reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
dict(extra_vars='{"unbalanced brackets":'), user('admin', True))
|
||||
dict(extra_vars='{"unbalanced brackets":'), admin_user, expect=400)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert response.data['extra_vars'] == ['Must be a valid JSON or YAML dictionary.']
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_launch_fails_without_inventory(deploy_jobtemplate, post, user):
|
||||
def test_job_launch_fails_without_inventory(deploy_jobtemplate, post, admin_user):
|
||||
deploy_jobtemplate.inventory = None
|
||||
deploy_jobtemplate.save()
|
||||
|
||||
response = post(reverse('api:job_template_launch',
|
||||
args=[deploy_jobtemplate.pk]), {}, user('admin', True))
|
||||
args=[deploy_jobtemplate.pk]), {}, admin_user, expect=400)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert response.data['inventory'] == ['Job Template Inventory is missing or undefined.']
|
||||
assert response.data['inventory'] == ["Job Template 'inventory' is missing or undefined."]
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
@ -188,9 +182,8 @@ def test_job_launch_fails_without_inventory_access(job_template_prompts, runtime
|
||||
|
||||
# Assure that giving an inventory without access to the inventory blocks the launch
|
||||
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
dict(inventory=runtime_data['inventory']), rando)
|
||||
dict(inventory=runtime_data['inventory']), rando, expect=403)
|
||||
|
||||
assert response.status_code == 403
|
||||
assert response.data['detail'] == u'You do not have permission to perform this action.'
|
||||
|
||||
@pytest.mark.django_db
|
||||
@ -201,9 +194,8 @@ def test_job_launch_fails_without_credential_access(job_template_prompts, runtim
|
||||
|
||||
# Assure that giving a credential without access blocks the launch
|
||||
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
dict(credential=runtime_data['credential']), rando)
|
||||
dict(credential=runtime_data['credential']), rando, expect=403)
|
||||
|
||||
assert response.status_code == 403
|
||||
assert response.data['detail'] == u'You do not have permission to perform this action.'
|
||||
|
||||
@pytest.mark.django_db
|
||||
@ -213,20 +205,19 @@ def test_job_block_scan_job_type_change(job_template_prompts, post, admin_user):
|
||||
|
||||
# Assure that changing the type of a scan job blocks the launch
|
||||
response = post(reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
dict(job_type='scan'), admin_user)
|
||||
dict(job_type='scan'), admin_user, expect=400)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert 'job_type' in response.data
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_block_scan_job_inv_change(mocker, bad_scan_JT, runtime_data, post, admin_user):
|
||||
# Assure that giving a new inventory for a scan job blocks the launch
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license', return_value=True):
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
||||
response = post(reverse('api:job_template_launch', args=[bad_scan_JT.pk]),
|
||||
dict(inventory=runtime_data['inventory']), admin_user)
|
||||
dict(inventory=runtime_data['inventory']), admin_user,
|
||||
expect=400)
|
||||
|
||||
assert response.status_code == 400
|
||||
assert 'inventory' in response.data
|
||||
|
||||
@pytest.mark.django_db
|
||||
@ -286,41 +277,23 @@ def test_job_launch_JT_with_validation(machine_credential, deploy_jobtemplate):
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_runtime_vars
|
||||
def test_job_launch_unprompted_vars_with_survey(mocker, job_template_prompts, post, user):
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license', return_value=False):
|
||||
job_template = job_template_prompts(False)
|
||||
job_template.survey_enabled = True
|
||||
job_template.survey_spec = {
|
||||
"spec": [
|
||||
{
|
||||
"index": 0,
|
||||
"question_name": "survey_var",
|
||||
"min": 0,
|
||||
"default": "",
|
||||
"max": 100,
|
||||
"question_description": "A survey question",
|
||||
"required": True,
|
||||
"variable": "survey_var",
|
||||
"choices": "",
|
||||
"type": "integer"
|
||||
}
|
||||
],
|
||||
"description": "",
|
||||
"name": ""
|
||||
}
|
||||
job_template.save()
|
||||
def test_job_launch_unprompted_vars_with_survey(mocker, survey_spec_factory, job_template_prompts, post, admin_user):
|
||||
job_template = job_template_prompts(False)
|
||||
job_template.survey_enabled = True
|
||||
job_template.survey_spec = survey_spec_factory('survey_var')
|
||||
job_template.save()
|
||||
|
||||
with mocker.patch('awx.main.access.BaseAccess.check_license'):
|
||||
mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4})
|
||||
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job', return_value=mock_job):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation'):
|
||||
with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}):
|
||||
response = post(
|
||||
reverse('api:job_template_launch', args=[job_template.pk]),
|
||||
dict(extra_vars={"job_launch_var": 3, "survey_var": 4}),
|
||||
user('admin', True))
|
||||
assert response.status_code == 201
|
||||
admin_user, expect=201)
|
||||
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
job_id = response.data['job']
|
||||
assert job_id == 968
|
||||
|
||||
# Check that the survey variable is accepted and the job variable isn't
|
||||
mock_job.signal_start.assert_called_once_with(extra_vars={"survey_var": 4})
|
||||
# Check that the survey variable is accepted and the job variable isn't
|
||||
mock_job.signal_start.assert_called_once_with(extra_vars={"survey_var": 4})
|
||||
|
||||
335 awx/main/tests/functional/api/test_job_template.py Normal file
@ -0,0 +1,335 @@
|
||||
import pytest
|
||||
import mock
|
||||
|
||||
# AWX
|
||||
from awx.api.serializers import JobTemplateSerializer, JobLaunchSerializer
|
||||
from awx.main.models.jobs import JobTemplate
|
||||
from awx.main.models.projects import ProjectOptions
|
||||
|
||||
# Django
|
||||
from django.test.client import RequestFactory
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
@property
|
||||
def project_playbooks(self):
|
||||
return ['mocked', 'mocked.yml', 'alt-mocked.yml']
|
||||
|
||||
@pytest.mark.django_db
|
||||
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
|
||||
@pytest.mark.parametrize(
|
||||
"grant_project, grant_credential, grant_inventory, expect", [
|
||||
(True, True, True, 201),
|
||||
(True, True, False, 403),
|
||||
(True, False, True, 403),
|
||||
(False, True, True, 403),
|
||||
]
|
||||
)
|
||||
def test_create(post, project, machine_credential, inventory, alice, grant_project, grant_credential, grant_inventory, expect):
|
||||
if grant_project:
|
||||
project.use_role.members.add(alice)
|
||||
if grant_credential:
|
||||
machine_credential.use_role.members.add(alice)
|
||||
if grant_inventory:
|
||||
inventory.use_role.members.add(alice)
|
||||
|
||||
post(reverse('api:job_template_list'), {
|
||||
'name': 'Some name',
|
||||
'project': project.id,
|
||||
'credential': machine_credential.id,
|
||||
'inventory': inventory.id,
|
||||
'playbook': 'mocked.yml',
|
||||
}, alice, expect=expect)
|
||||
|
||||
@pytest.mark.django_db
|
||||
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
|
||||
@pytest.mark.parametrize(
|
||||
"grant_project, grant_credential, grant_inventory, expect", [
|
||||
(True, True, True, 200),
|
||||
(True, True, False, 403),
|
||||
(True, False, True, 403),
|
||||
(False, True, True, 403),
|
||||
]
|
||||
)
|
||||
def test_edit_sensitive_fields(patch, job_template_factory, alice, grant_project, grant_credential, grant_inventory, expect):
|
||||
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
|
||||
objs.job_template.admin_role.members.add(alice)
|
||||
|
||||
if grant_project:
|
||||
objs.project.use_role.members.add(alice)
|
||||
if grant_credential:
|
||||
objs.credential.use_role.members.add(alice)
|
||||
if grant_inventory:
|
||||
objs.inventory.use_role.members.add(alice)
|
||||
|
||||
patch(reverse('api:job_template_detail', args=(objs.job_template.id,)), {
|
||||
'name': 'Some name',
|
||||
'project': objs.project.id,
|
||||
'credential': objs.credential.id,
|
||||
'inventory': objs.inventory.id,
|
||||
'playbook': 'alt-mocked.yml',
|
||||
}, alice, expect=expect)
|
||||
|
||||
@pytest.mark.django_db
|
||||
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
|
||||
def test_edit_playbook(patch, job_template_factory, alice):
|
||||
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
|
||||
objs.job_template.admin_role.members.add(alice)
|
||||
objs.project.use_role.members.add(alice)
|
||||
objs.credential.use_role.members.add(alice)
|
||||
objs.inventory.use_role.members.add(alice)
|
||||
|
||||
patch(reverse('api:job_template_detail', args=(objs.job_template.id,)), {
|
||||
'playbook': 'alt-mocked.yml',
|
||||
}, alice, expect=200)
|
||||
|
||||
objs.inventory.use_role.members.remove(alice)
|
||||
patch(reverse('api:job_template_detail', args=(objs.job_template.id,)), {
|
||||
'playbook': 'mocked.yml',
|
||||
}, alice, expect=403)
|
||||
|
||||
@pytest.mark.django_db
|
||||
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
|
||||
def test_edit_nonsenstive(patch, job_template_factory, alice):
|
||||
objs = job_template_factory('jt', organization='org1', project='prj', inventory='inv', credential='cred')
|
||||
jt = objs.job_template
|
||||
jt.admin_role.members.add(alice)
|
||||
|
||||
res = patch(reverse('api:job_template_detail', args=(jt.id,)), {
|
||||
'name': 'updated',
|
||||
'description': 'bar',
|
||||
'forks': 14,
|
||||
'limit': 'something',
|
||||
'verbosity': 5,
|
||||
'extra_vars': '--',
|
||||
'job_tags': 'sometags',
|
||||
'force_handlers': True,
|
||||
'skip_tags': True,
|
||||
'ask_variables_on_launch':True,
|
||||
'ask_tags_on_launch':True,
|
||||
'ask_job_type_on_launch':True,
|
||||
'ask_inventory_on_launch':True,
|
||||
'ask_credential_on_launch': True,
|
||||
}, alice, expect=200)
|
||||
print(res.data)
|
||||
assert res.data['name'] == 'updated'
|
||||
@pytest.fixture
|
||||
def jt_copy_edit(job_template_factory, project):
|
||||
objects = job_template_factory(
|
||||
'copy-edit-job-template',
|
||||
project=project)
|
||||
return objects.job_template
|
||||
|
||||
@property
|
||||
def project_playbooks(self):
|
||||
return ['mocked', 'mocked.yml', 'alt-mocked.yml']
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_template_role_user(post, organization_factory, job_template_factory):
|
||||
objects = organization_factory("org",
|
||||
superusers=['admin'],
|
||||
users=['test'])
|
||||
|
||||
jt_objects = job_template_factory("jt",
|
||||
organization=objects.organization,
|
||||
inventory='test_inv',
|
||||
project='test_proj')
|
||||
|
||||
url = reverse('api:user_roles_list', args=(objects.users.test.pk,))
|
||||
response = post(url, dict(id=jt_objects.job_template.execute_role.pk), objects.superusers.admin)
|
||||
assert response.status_code == 204
|
||||
|
||||
# Test protection against limited set of validation problems
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_bad_data_copy_edit(admin_user, project):
|
||||
"""
|
||||
If a required resource (inventory here) was deleted, copying is not allowed
|
||||
because doing so would cause a validation error
|
||||
"""
|
||||
|
||||
jt_res = JobTemplate.objects.create(
|
||||
job_type='run',
|
||||
project=project,
|
||||
inventory=None, ask_inventory_on_launch=False, # not allowed
|
||||
credential=None, ask_credential_on_launch=True,
|
||||
name='deploy-job-template'
|
||||
)
|
||||
serializer = JobTemplateSerializer(jt_res)
|
||||
request = RequestFactory().get('/api/v1/job_templates/12/')
|
||||
request.user = admin_user
|
||||
serializer.context['request'] = request
|
||||
response = serializer.to_representation(jt_res)
|
||||
assert not response['summary_fields']['can_copy']
|
||||
assert response['summary_fields']['can_edit']
|
||||
|
||||
# Tests for correspondence between view info and actual access
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_admin_copy_edit(jt_copy_edit, admin_user):
|
||||
"Absent a validation error, system admins can do everything"
|
||||
|
||||
# Serializer can_copy/can_edit fields
|
||||
serializer = JobTemplateSerializer(jt_copy_edit)
|
||||
request = RequestFactory().get('/api/v1/job_templates/12/')
|
||||
request.user = admin_user
|
||||
serializer.context['request'] = request
|
||||
response = serializer.to_representation(jt_copy_edit)
|
||||
assert response['summary_fields']['can_copy']
|
||||
assert response['summary_fields']['can_edit']
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_admin_copy_edit(jt_copy_edit, org_admin):
|
||||
"Organization admins SHOULD be able to copy a JT firmly in their org"
|
||||
|
||||
# Serializer can_copy/can_edit fields
|
||||
serializer = JobTemplateSerializer(jt_copy_edit)
|
||||
request = RequestFactory().get('/api/v1/job_templates/12/')
|
||||
request.user = org_admin
|
||||
serializer.context['request'] = request
|
||||
response = serializer.to_representation(jt_copy_edit)
|
||||
assert response['summary_fields']['can_copy']
|
||||
assert response['summary_fields']['can_edit']
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_admin_foreign_cred_no_copy_edit(jt_copy_edit, org_admin, machine_credential):
|
||||
"""
|
||||
Organization admins without access to the 3 related resources:
|
||||
SHOULD NOT be able to copy JT
|
||||
SHOULD NOT be able to edit that job template
|
||||
"""
|
||||
|
||||
# Attach credential to JT that org admin can not use
|
||||
jt_copy_edit.credential = machine_credential
|
||||
jt_copy_edit.save()
|
||||
|
||||
# Serializer can_copy/can_edit fields
|
||||
serializer = JobTemplateSerializer(jt_copy_edit)
|
||||
request = RequestFactory().get('/api/v1/job_templates/12/')
|
||||
request.user = org_admin
|
||||
serializer.context['request'] = request
|
||||
response = serializer.to_representation(jt_copy_edit)
|
||||
assert not response['summary_fields']['can_copy']
|
||||
assert not response['summary_fields']['can_edit']
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_jt_admin_copy_edit(jt_copy_edit, rando):
|
||||
"JT admins wihout access to associated resources SHOULD NOT be able to copy"
|
||||
|
||||
# random user given JT admin access only
|
||||
jt_copy_edit.admin_role.members.add(rando)
|
||||
jt_copy_edit.save()
|
||||
|
||||
# Serializer can_copy/can_edit fields
|
||||
serializer = JobTemplateSerializer(jt_copy_edit)
|
||||
request = RequestFactory().get('/api/v1/job_templates/12/')
|
||||
request.user = rando
|
||||
serializer.context['request'] = request
|
||||
response = serializer.to_representation(jt_copy_edit)
|
||||
assert not response['summary_fields']['can_copy']
|
||||
assert not response['summary_fields']['can_edit']
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_proj_jt_admin_copy_edit(jt_copy_edit, rando):
|
||||
"JT admins with access to associated resources SHOULD be able to copy"
|
||||
|
||||
# random user given JT and project admin abilities
|
||||
jt_copy_edit.admin_role.members.add(rando)
|
||||
jt_copy_edit.save()
|
||||
jt_copy_edit.project.admin_role.members.add(rando)
|
||||
jt_copy_edit.project.save()
|
||||
|
||||
# Serializer can_copy/can_edit fields
|
||||
serializer = JobTemplateSerializer(jt_copy_edit)
|
||||
request = RequestFactory().get('/api/v1/job_templates/12/')
|
||||
request.user = rando
|
||||
serializer.context['request'] = request
|
||||
response = serializer.to_representation(jt_copy_edit)
|
||||
assert response['summary_fields']['can_copy']
|
||||
assert response['summary_fields']['can_edit']
|
||||
|
||||
# Functional tests - create new JT with all returned fields, as the UI does
|
||||
|
||||
@pytest.mark.django_db
|
||||
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
|
||||
def test_org_admin_copy_edit_functional(jt_copy_edit, org_admin, get, post):
|
||||
get_response = get(reverse('api:job_template_detail', args=[jt_copy_edit.pk]), user=org_admin)
|
||||
assert get_response.status_code == 200
|
||||
assert get_response.data['summary_fields']['can_copy']
|
||||
|
||||
post_data = get_response.data
|
||||
post_data['name'] = '%s @ 12:19:47 pm' % post_data['name']
|
||||
post_response = post(reverse('api:job_template_list', args=[]), user=org_admin, data=post_data)
|
||||
assert post_response.status_code == 201
|
||||
assert post_response.data['name'] == 'copy-edit-job-template @ 12:19:47 pm'
|
||||
|
||||
@pytest.mark.django_db
|
||||
@mock.patch.object(ProjectOptions, "playbooks", project_playbooks)
|
||||
def test_jt_admin_copy_edit_functional(jt_copy_edit, rando, get, post):
|
||||
|
||||
# Grant random user JT admin access only
|
||||
jt_copy_edit.admin_role.members.add(rando)
|
||||
jt_copy_edit.save()
|
||||
|
||||
get_response = get(reverse('api:job_template_detail', args=[jt_copy_edit.pk]), user=rando)
|
||||
assert get_response.status_code == 200
|
||||
assert not get_response.data['summary_fields']['can_copy']
|
||||
|
||||
post_data = get_response.data
|
||||
post_data['name'] = '%s @ 12:19:47 pm' % post_data['name']
|
||||
post_response = post(reverse('api:job_template_list', args=[]), user=rando, data=post_data)
|
||||
assert post_response.status_code == 403
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_scan_jt_no_inventory(job_template_factory):
|
||||
# A user should be able to create a scan job without a project, but an inventory is required
|
||||
objects = job_template_factory('jt',
|
||||
credential='c',
|
||||
job_type="scan",
|
||||
project='p',
|
||||
inventory='i',
|
||||
organization='o')
|
||||
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
|
||||
"project": None, "inventory": objects.inventory.pk})
|
||||
assert serializer.is_valid()
|
||||
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
|
||||
"project": None, "inventory": None})
|
||||
assert not serializer.is_valid()
|
||||
assert "inventory" in serializer.errors
|
||||
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
|
||||
"project": None, "inventory": None,
|
||||
"ask_inventory_on_launch": True})
|
||||
assert not serializer.is_valid()
|
||||
assert "inventory" in serializer.errors
|
||||
|
||||
# A user shouldn't be able to launch a scan job template which is missing an inventory
|
||||
obj_jt = objects.job_template
|
||||
obj_jt.inventory = None
|
||||
serializer = JobLaunchSerializer(instance=obj_jt,
|
||||
context={'obj': obj_jt,
|
||||
"data": {}},
|
||||
data={})
|
||||
assert not serializer.is_valid()
|
||||
assert 'inventory' in serializer.errors
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_scan_jt_surveys(inventory):
|
||||
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
|
||||
"project": None, "inventory": inventory.pk,
|
||||
"survey_enabled": True})
|
||||
assert not serializer.is_valid()
|
||||
assert "survey_enabled" in serializer.errors
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_jt_without_project(inventory):
|
||||
data = dict(name="Test", job_type="run",
|
||||
inventory=inventory.pk, project=None)
|
||||
serializer = JobTemplateSerializer(data=data)
|
||||
assert not serializer.is_valid()
|
||||
assert "project" in serializer.errors
|
||||
data["job_type"] = "check"
|
||||
serializer = JobTemplateSerializer(data=data)
|
||||
assert not serializer.is_valid()
|
||||
assert "project" in serializer.errors
|
||||
data["job_type"] = "scan"
|
||||
serializer = JobTemplateSerializer(data=data)
|
||||
assert serializer.is_valid()
|
||||
200 awx/main/tests/functional/api/test_survey_spec.py Normal file
@ -0,0 +1,200 @@
|
||||
import mock
|
||||
import pytest
|
||||
import json
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
from awx.main.models.jobs import JobTemplate, Job
|
||||
from awx.main.models.activity_stream import ActivityStream
|
||||
from awx.api.license import LicenseForbids
|
||||
from awx.main.access import JobTemplateAccess
|
||||
|
||||
|
||||
def mock_no_surveys(self, add_host=False, feature=None, check_expiration=True):
|
||||
if feature == 'surveys':
|
||||
raise LicenseForbids("Feature %s is not enabled in the active license." % feature)
|
||||
else:
|
||||
pass
|
||||
|
||||
@pytest.fixture
|
||||
def job_template_with_survey(job_template_factory):
|
||||
objects = job_template_factory('jt', project='prj', survey='submitted_email')
|
||||
return objects.job_template
|
||||
|
||||
# Survey license-based denial tests
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: False)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_view_denied(job_template_with_survey, get, admin_user):
|
||||
# TODO: Test non-enterprise license
|
||||
response = get(reverse('api:job_template_survey_spec',
|
||||
args=(job_template_with_survey.id,)), admin_user, expect=402)
|
||||
assert response.data['detail'] == 'Your license does not allow adding surveys.'
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_deny_enabling_survey(deploy_jobtemplate, patch, admin_user):
|
||||
response = patch(url=deploy_jobtemplate.get_absolute_url(),
|
||||
data=dict(survey_enabled=True), user=admin_user, expect=402)
|
||||
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_job_start_blocked_without_survey_license(job_template_with_survey, admin_user):
|
||||
"""Check that user can't start a job with surveys without a survey license."""
|
||||
access = JobTemplateAccess(admin_user)
|
||||
with pytest.raises(LicenseForbids):
|
||||
access.can_start(job_template_with_survey)
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_deny_creating_with_survey(project, post, admin_user):
|
||||
response = post(
|
||||
url=reverse('api:job_template_list'),
|
||||
data=dict(
|
||||
name = 'JT with survey',
|
||||
job_type = 'run',
|
||||
project = project.pk,
|
||||
playbook = 'helloworld.yml',
|
||||
ask_credential_on_launch = True,
|
||||
ask_inventory_on_launch = True,
|
||||
survey_enabled = True),
|
||||
user=admin_user, expect=402)
|
||||
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
|
||||
|
||||
# Test normal operations with survey license work
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_view_allowed(deploy_jobtemplate, get, admin_user):
|
||||
get(reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,)),
|
||||
admin_user, expect=200)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_sucessful_creation(survey_spec_factory, job_template, post, admin_user):
|
||||
survey_input_data = survey_spec_factory('new_question')
|
||||
post(url=reverse('api:job_template_survey_spec', args=(job_template.id,)),
|
||||
data=survey_input_data, user=admin_user, expect=200)
|
||||
updated_jt = JobTemplate.objects.get(pk=job_template.pk)
|
||||
assert updated_jt.survey_spec == survey_input_data
|
||||
|
||||
# Tests related to survey content validation
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_non_dict_error(deploy_jobtemplate, post, admin_user):
|
||||
"""When a question doesn't follow the standard format, verify error thrown."""
|
||||
response = post(
|
||||
url=reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,)),
|
||||
data={"description": "Email of the submitter",
|
||||
"spec": ["What is your email?"], "name": "Email survey"},
|
||||
user=admin_user, expect=400)
|
||||
assert response.data['error'] == "Survey question 0 is not a json object."
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: True)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_dual_names_error(survey_spec_factory, deploy_jobtemplate, post, user):
|
||||
response = post(
|
||||
url=reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,)),
|
||||
data=survey_spec_factory(['submitter_email', 'submitter_email']),
|
||||
user=user('admin', True), expect=400)
|
||||
assert response.data['error'] == "'variable' 'submitter_email' duplicated in survey question 1."
|
||||
|
||||
# Test actions that should be allowed with non-survey license
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_disable_survey_access_without_license(job_template_with_survey, admin_user):
|
||||
"""Assure that user can disable a JT survey after downgrading license."""
|
||||
access = JobTemplateAccess(admin_user)
|
||||
assert access.can_change(job_template_with_survey, dict(survey_enabled=False))
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_delete_survey_access_without_license(job_template_with_survey, admin_user):
|
||||
"""Assure that access.py allows deleting surveys after downgrading license."""
|
||||
access = JobTemplateAccess(admin_user)
|
||||
assert access.can_change(job_template_with_survey, dict(survey_spec=None))
|
||||
assert access.can_change(job_template_with_survey, dict(survey_spec={}))
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_job_start_allowed_with_survey_spec(job_template_factory, admin_user):
|
||||
"""After user downgrades survey license and disables survey on the JT,
|
||||
check that jobs still launch even if the survey_spec data persists."""
|
||||
objects = job_template_factory('jt', project='prj', survey='submitter_email')
|
||||
obj = objects.job_template
|
||||
obj.survey_enabled = False
|
||||
obj.save()
|
||||
access = JobTemplateAccess(admin_user)
|
||||
assert access.can_start(obj, {})
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_job_template_delete_access_with_survey(job_template_with_survey, admin_user):
|
||||
"""The survey_spec view relies on JT `can_delete` to determine permission
|
||||
to delete the survey. This checks that system admins can delete the survey on a JT."""
|
||||
access = JobTemplateAccess(admin_user)
|
||||
assert access.can_delete(job_template_with_survey)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', lambda feature: False)
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_delete_survey_spec_without_license(job_template_with_survey, delete, admin_user):
|
||||
"""Functional delete test through the survey_spec view."""
|
||||
delete(reverse('api:job_template_survey_spec', args=[job_template_with_survey.pk]),
|
||||
admin_user, expect=200)
|
||||
new_jt = JobTemplate.objects.get(pk=job_template_with_survey.pk)
|
||||
assert new_jt.survey_spec == {}
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', lambda self, **kwargs: True)
|
||||
@mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job',
|
||||
lambda self, extra_vars: mock.MagicMock(spec=Job, id=968))
|
||||
@mock.patch('awx.api.serializers.JobSerializer.to_representation', lambda self, obj: {})
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_launch_survey_enabled_but_no_survey_spec(job_template_factory, post, admin_user):
|
||||
"""False-ish values for survey_spec are interpreted as a survey with 0 questions."""
|
||||
objects = job_template_factory('jt', organization='org1', project='prj',
|
||||
inventory='inv', credential='cred')
|
||||
obj = objects.job_template
|
||||
obj.survey_enabled = True
|
||||
obj.save()
|
||||
response = post(reverse('api:job_template_launch', args=[obj.pk]),
|
||||
dict(extra_vars=dict(survey_var=7)), admin_user, expect=201)
|
||||
assert 'survey_var' in response.data['ignored_fields']['extra_vars']
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', new=mock_no_surveys)
|
||||
@mock.patch('awx.main.models.unified_jobs.UnifiedJobTemplate.create_unified_job',
|
||||
lambda self: mock.MagicMock(spec=Job, id=968))
|
||||
@mock.patch('awx.api.serializers.JobSerializer.to_representation', lambda self, obj: {})
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_launch_with_non_empty_survey_spec_no_license(job_template_factory, post, admin_user):
|
||||
"""Assure jobs can still be launched from JTs with a survey_spec
|
||||
when the survey is disabled."""
|
||||
objects = job_template_factory('jt', organization='org1', project='prj',
|
||||
inventory='inv', credential='cred',
|
||||
survey='survey_var')
|
||||
obj = objects.job_template
|
||||
obj.survey_enabled = False
|
||||
obj.save()
|
||||
post(reverse('api:job_template_launch', args=[obj.pk]), {}, admin_user, expect=201)
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_redact_survey_passwords_in_activity_stream(job_with_secret_key):
|
||||
AS_record = ActivityStream.objects.filter(object1='job').all()[0]
|
||||
changes_dict = json.loads(AS_record.changes)
|
||||
extra_vars = json.loads(changes_dict['extra_vars'])
|
||||
assert extra_vars['secret_key'] == '$encrypted$'
|
||||
@ -1,135 +0,0 @@
|
||||
import mock
|
||||
import pytest
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
from awx.main.models.jobs import JobTemplate
|
||||
from awx.api.license import LicenseForbids
|
||||
|
||||
def mock_feature_enabled(feature, bypass_database=None):
|
||||
return True
|
||||
|
||||
def mock_feature_disabled(feature, bypass_database=None):
|
||||
return False
|
||||
|
||||
def mock_check_license(self, add_host=False, feature=None, check_expiration=True):
|
||||
raise LicenseForbids("Feature %s is not enabled in the active license." % feature)
|
||||
|
||||
@pytest.fixture
|
||||
def survey_jobtemplate(project, inventory, credential):
|
||||
return JobTemplate.objects.create(
|
||||
job_type='run',
|
||||
project=project,
|
||||
inventory=inventory,
|
||||
credential=credential,
|
||||
name='deploy-job-template'
|
||||
)
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_disabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_view_denied(deploy_jobtemplate, get, user):
|
||||
# TODO: Test non-enterprise license
|
||||
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
|
||||
response = get(spec_url, user('admin', True))
|
||||
|
||||
assert response.status_code == 402
|
||||
assert response.data['detail'] == 'Your license does not allow adding surveys.'
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', mock_check_license)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_deny_enabling_survey(deploy_jobtemplate, patch, user):
|
||||
JT_url = reverse('api:job_template_detail', args=(deploy_jobtemplate.id,))
|
||||
response = patch(url=JT_url, data=dict(survey_enabled=True), user=user('admin', True))
|
||||
assert response.status_code == 402
|
||||
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
|
||||
|
||||
@mock.patch('awx.main.access.BaseAccess.check_license', mock_check_license)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_deny_creating_with_survey(machine_credential, project, inventory, post, user):
|
||||
JT_url = reverse('api:job_template_list')
|
||||
JT_data = dict(
|
||||
name = 'JT with survey',
|
||||
job_type = 'run',
|
||||
inventory = inventory.pk,
|
||||
project = project.pk,
|
||||
playbook = 'hiworld.yml',
|
||||
credential = machine_credential.pk,
|
||||
survey_enabled = True,
|
||||
)
|
||||
response = post(url=JT_url, data=JT_data, user=user('admin', True))
|
||||
|
||||
assert response.status_code == 402
|
||||
assert response.data['detail'] == 'Feature surveys is not enabled in the active license.'
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_view_allowed(deploy_jobtemplate, get, user):
|
||||
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
|
||||
response = get(spec_url, user('admin', True))
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_sucessful_creation(deploy_jobtemplate, post, user):
|
||||
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
|
||||
response = post(
|
||||
url=spec_url,
|
||||
data={
|
||||
"description": "Email of the submitter",
|
||||
"spec": [{
|
||||
"variable": "submitter_email",
|
||||
"question_name": "Enter your email",
|
||||
"type": "text",
|
||||
"required": False
|
||||
}],
|
||||
"name": "Email survey"
|
||||
},
|
||||
user=user('admin', True))
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_non_dict_error(deploy_jobtemplate, post, user):
|
||||
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
|
||||
response = post(
|
||||
url=spec_url,
|
||||
data={"description": "Email of the submitter",
|
||||
"spec": ["What is your email?"], "name": "Email survey"},
|
||||
user=user('admin', True))
|
||||
|
||||
assert response.status_code == 400
|
||||
assert response.data['error'] == "Survey question 0 is not a json object."
|
||||
|
||||
@mock.patch('awx.api.views.feature_enabled', new=mock_feature_enabled)
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.survey
|
||||
def test_survey_spec_dual_names_error(deploy_jobtemplate, post, user):
|
||||
spec_url = reverse('api:job_template_survey_spec', args=(deploy_jobtemplate.id,))
|
||||
response = post(
|
||||
url=spec_url,
|
||||
data={
|
||||
"description": "Email of the submitter",
|
||||
"spec": [{
|
||||
"variable": "submitter_email",
|
||||
"question_name": "Enter your email",
|
||||
"type": "text",
|
||||
"required": False
|
||||
}, {
|
||||
"variable": "submitter_email",
|
||||
"question_name": "Same variable as last question",
|
||||
"type": "integer",
|
||||
"required": False
|
||||
}],
|
||||
"name": "Email survey"
|
||||
},
|
||||
user=user('admin', True))
|
||||
|
||||
assert response.status_code == 400
|
||||
assert response.data['error'] == "'variable' 'submitter_email' duplicated in survey question 1."
|
||||
@ -147,18 +147,6 @@ def instance(settings):
|
||||
def organization(instance):
|
||||
return Organization.objects.create(name="test-org", description="test-org-desc")
|
||||
|
||||
@pytest.fixture
|
||||
def organization_factory(instance):
|
||||
def factory(name):
|
||||
try:
|
||||
org = Organization.objects.get(name=name)
|
||||
except Organization.DoesNotExist:
|
||||
org = Organization.objects.create(name=name,
|
||||
description="description for " + name,
|
||||
)
|
||||
return org
|
||||
return factory
|
||||
|
||||
@pytest.fixture
|
||||
def credential():
|
||||
return Credential.objects.create(kind='aws', name='test-cred')
|
||||
@ -197,6 +185,11 @@ def notification_template(organization):
|
||||
notification_type="webhook",
|
||||
notification_configuration=dict(url="http://localhost",
|
||||
headers={"Test": "Header"}))
|
||||
|
||||
@pytest.fixture
|
||||
def job_with_secret_key(job_with_secret_key_factory):
|
||||
return job_with_secret_key_factory(persisted=True)
|
||||
|
||||
@pytest.fixture
|
||||
def admin(user):
|
||||
return user('admin', True)
|
||||
@ -221,6 +214,13 @@ def org_admin(user, organization):
|
||||
organization.member_role.members.add(ret)
|
||||
return ret
|
||||
|
||||
@pytest.fixture
|
||||
def org_auditor(user, organization):
|
||||
ret = user('org-auditor', False)
|
||||
organization.auditor_role.members.add(ret)
|
||||
organization.member_role.members.add(ret)
|
||||
return ret
|
||||
|
||||
@pytest.fixture
|
||||
def org_member(user, organization):
|
||||
ret = user('org-member', False)
|
||||
@ -282,24 +282,9 @@ def permissions():
|
||||
'update':False, 'delete':False, 'scm_update':False, 'execute':False, 'use':True,},
|
||||
}
|
||||
|
||||
@pytest.fixture
|
||||
def notification_template_factory(organization):
|
||||
def n(name="test-notification_template"):
|
||||
try:
|
||||
notification_template = NotificationTemplate.objects.get(name=name)
|
||||
except NotificationTemplate.DoesNotExist:
|
||||
notification_template = NotificationTemplate(name=name,
|
||||
organization=organization,
|
||||
notification_type="webhook",
|
||||
notification_configuration=dict(url="http://localhost",
|
||||
headers={"Test": "Header"}))
|
||||
notification_template.save()
|
||||
return notification_template
|
||||
return n
|
||||
|
||||
@pytest.fixture
|
||||
def post():
|
||||
def rf(url, data, user=None, middleware=None, **kwargs):
|
||||
def rf(url, data, user=None, middleware=None, expect=None, **kwargs):
|
||||
view, view_args, view_kwargs = resolve(urlparse(url)[2])
|
||||
if 'format' not in kwargs:
|
||||
kwargs['format'] = 'json'
|
||||
@ -311,12 +296,16 @@ def post():
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
if middleware:
|
||||
middleware.process_response(request, response)
|
||||
if expect:
|
||||
if response.status_code != expect:
|
||||
print(response.data)
|
||||
assert response.status_code == expect
|
||||
return response
|
||||
return rf
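The hunks above thread an optional `expect` argument through each request fixture: when a test passes `expect=<status>`, the fixture asserts the response status itself and prints `response.data` on mismatch. A minimal illustrative sketch of the resulting test style, with fixture and URL names assumed from tests elsewhere in this diff:

# Illustrative sketch only; not part of this commit.
from django.core.urlresolvers import reverse

def test_launch_returns_job_id(post, admin_user, deploy_jobtemplate):
    # post() performs the request and, because expect=201 is given, asserts
    # response.status_code == 201 for us (printing response.data on mismatch),
    # so the test body only checks the payload it cares about.
    response = post(reverse('api:job_template_launch', args=[deploy_jobtemplate.pk]),
                    {}, admin_user, expect=201)
    assert 'job' in response.data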
|
||||
|
||||
@pytest.fixture
|
||||
def get():
|
||||
def rf(url, user=None, middleware=None, **kwargs):
|
||||
def rf(url, user=None, middleware=None, expect=None, **kwargs):
|
||||
view, view_args, view_kwargs = resolve(urlparse(url)[2])
|
||||
if 'format' not in kwargs:
|
||||
kwargs['format'] = 'json'
|
||||
@ -328,12 +317,16 @@ def get():
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
if middleware:
|
||||
middleware.process_response(request, response)
|
||||
if expect:
|
||||
if response.status_code != expect:
|
||||
print(response.data)
|
||||
assert response.status_code == expect
|
||||
return response
|
||||
return rf
|
||||
|
||||
@pytest.fixture
|
||||
def put():
|
||||
def rf(url, data, user=None, middleware=None, **kwargs):
|
||||
def rf(url, data, user=None, middleware=None, expect=None, **kwargs):
|
||||
view, view_args, view_kwargs = resolve(urlparse(url)[2])
|
||||
if 'format' not in kwargs:
|
||||
kwargs['format'] = 'json'
|
||||
@ -345,12 +338,16 @@ def put():
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
if middleware:
|
||||
middleware.process_response(request, response)
|
||||
if expect:
|
||||
if response.status_code != expect:
|
||||
print(response.data)
|
||||
assert response.status_code == expect
|
||||
return response
|
||||
return rf
|
||||
|
||||
@pytest.fixture
|
||||
def patch():
|
||||
def rf(url, data, user=None, middleware=None, **kwargs):
|
||||
def rf(url, data, user=None, middleware=None, expect=None, **kwargs):
|
||||
view, view_args, view_kwargs = resolve(urlparse(url)[2])
|
||||
if 'format' not in kwargs:
|
||||
kwargs['format'] = 'json'
|
||||
@ -362,12 +359,16 @@ def patch():
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
if middleware:
|
||||
middleware.process_response(request, response)
|
||||
if expect:
|
||||
if response.status_code != expect:
|
||||
print(response.data)
|
||||
assert response.status_code == expect
|
||||
return response
|
||||
return rf
|
||||
|
||||
@pytest.fixture
|
||||
def delete():
|
||||
def rf(url, user=None, middleware=None, **kwargs):
|
||||
def rf(url, user=None, middleware=None, expect=None, **kwargs):
|
||||
view, view_args, view_kwargs = resolve(urlparse(url)[2])
|
||||
if 'format' not in kwargs:
|
||||
kwargs['format'] = 'json'
|
||||
@ -379,12 +380,16 @@ def delete():
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
if middleware:
|
||||
middleware.process_response(request, response)
|
||||
if expect:
|
||||
if response.status_code != expect:
|
||||
print(response.data)
|
||||
assert response.status_code == expect
|
||||
return response
|
||||
return rf
|
||||
|
||||
@pytest.fixture
|
||||
def head():
|
||||
def rf(url, user=None, middleware=None, **kwargs):
|
||||
def rf(url, user=None, middleware=None, expect=None, **kwargs):
|
||||
view, view_args, view_kwargs = resolve(urlparse(url)[2])
|
||||
if 'format' not in kwargs:
|
||||
kwargs['format'] = 'json'
|
||||
@ -396,12 +401,16 @@ def head():
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
if middleware:
|
||||
middleware.process_response(request, response)
|
||||
if expect:
|
||||
if response.status_code != expect:
|
||||
print(response.data)
|
||||
assert response.status_code == expect
|
||||
return response
|
||||
return rf
|
||||
|
||||
@pytest.fixture
|
||||
def options():
|
||||
def rf(url, data, user=None, middleware=None, **kwargs):
|
||||
def rf(url, data, user=None, middleware=None, expect=None, **kwargs):
|
||||
view, view_args, view_kwargs = resolve(urlparse(url)[2])
|
||||
if 'format' not in kwargs:
|
||||
kwargs['format'] = 'json'
|
||||
@ -413,6 +422,10 @@ def options():
|
||||
response = view(request, *view_args, **view_kwargs)
|
||||
if middleware:
|
||||
middleware.process_response(request, response)
|
||||
if expect:
|
||||
if response.status_code != expect:
|
||||
print(response.data)
|
||||
assert response.status_code == expect
|
||||
return response
|
||||
return rf
|
||||
|
||||
@ -474,3 +487,4 @@ def job_template_labels(organization, job_template):
|
||||
job_template.labels.create(name="label-2", organization=organization)
|
||||
|
||||
return job_template
|
||||
|
||||
|
||||
106 awx/main/tests/functional/test_fixture_factories.py Normal file
@ -0,0 +1,106 @@
|
||||
import pytest
|
||||
|
||||
from awx.main.tests.factories import NotUnique
|
||||
|
||||
def test_roles_exc_not_persisted(organization_factory):
|
||||
with pytest.raises(RuntimeError) as exc:
|
||||
organization_factory('test-org', roles=['test-org.admin_role:user1'], persisted=False)
|
||||
assert 'persisted=False' in str(exc.value)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_roles_exc_bad_object(organization_factory):
|
||||
with pytest.raises(KeyError):
|
||||
organization_factory('test-org', roles=['test-project.admin_role:user'])
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_roles_exc_not_unique(organization_factory):
|
||||
with pytest.raises(NotUnique) as exc:
|
||||
organization_factory('test-org', projects=['foo'], teams=['foo'], roles=['foo.admin_role:user'])
|
||||
assert 'not a unique key' in str(exc.value)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_roles_exc_not_assignment(organization_factory):
|
||||
with pytest.raises(RuntimeError) as exc:
|
||||
organization_factory('test-org', projects=['foo'], roles=['foo.admin_role'])
|
||||
assert 'provide an assignment' in str(exc.value)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_roles_exc_not_found(organization_factory):
|
||||
with pytest.raises(RuntimeError) as exc:
|
||||
organization_factory('test-org', users=['user'], projects=['foo'], roles=['foo.admin_role:user.bad_role'])
|
||||
assert 'unable to find' in str(exc.value)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_roles_exc_not_user(organization_factory):
|
||||
with pytest.raises(RuntimeError) as exc:
|
||||
organization_factory('test-org', projects=['foo'], roles=['foo.admin_role:foo'])
|
||||
assert 'unable to add non-user' in str(exc.value)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_factory_roles(organization_factory):
|
||||
objects = organization_factory('org_roles_test',
|
||||
teams=['team1', 'team2'],
|
||||
users=['team1:foo', 'bar'],
|
||||
projects=['baz', 'bang'],
|
||||
roles=['team2.member_role:foo',
|
||||
'team1.admin_role:bar',
|
||||
'team1.admin_role:team2.admin_role',
|
||||
'baz.admin_role:foo'])
|
||||
|
||||
assert objects.users.bar in objects.teams.team2.admin_role
|
||||
assert objects.users.foo in objects.projects.baz.admin_role
|
||||
assert objects.users.foo in objects.teams.team1.member_role
|
||||
assert objects.teams.team2.admin_role in objects.teams.team1.admin_role.children.all()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_org_factory(organization_factory):
|
||||
objects = organization_factory('organization1',
|
||||
teams=['team1'],
|
||||
superusers=['superuser'],
|
||||
users=['admin', 'alice', 'team1:bob'],
|
||||
projects=['proj1'])
|
||||
assert hasattr(objects.users, 'admin')
|
||||
assert hasattr(objects.users, 'alice')
|
||||
assert hasattr(objects.superusers, 'superuser')
|
||||
assert objects.users.bob in objects.teams.team1.member_role.members.all()
|
||||
assert objects.projects.proj1.organization == objects.organization
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_template_factory(job_template_factory):
|
||||
jt_objects = job_template_factory('testJT', organization='org1',
|
||||
project='proj1', inventory='inventory1',
|
||||
credential='cred1', survey='test-survey',
|
||||
jobs=[1])
|
||||
assert jt_objects.job_template.name == 'testJT'
|
||||
assert jt_objects.project.name == 'proj1'
|
||||
assert jt_objects.inventory.name == 'inventory1'
|
||||
assert jt_objects.credential.name == 'cred1'
|
||||
assert jt_objects.inventory.organization.name == 'org1'
|
||||
assert jt_objects.job_template.survey_enabled is True
|
||||
assert jt_objects.job_template.survey_spec is not None
|
||||
assert 'test-survey' in jt_objects.jobs[1].extra_vars
|
||||
|
||||
def test_survey_spec_generator_simple(survey_spec_factory):
|
||||
survey_spec = survey_spec_factory('survey_variable')
|
||||
assert 'name' in survey_spec
|
||||
assert 'spec' in survey_spec
|
||||
assert type(survey_spec['spec']) is list
|
||||
assert type(survey_spec['spec'][0]) is dict
|
||||
assert survey_spec['spec'][0]['type'] == 'integer'
|
||||
|
||||
def test_survey_spec_generator_mixed(survey_spec_factory):
|
||||
survey_spec = survey_spec_factory(
|
||||
[{'variable': 'question1', 'type': 'integer', 'max': 87},
|
||||
{'variable': 'question2', 'type': 'str'},
|
||||
'some_variable'])
|
||||
assert len(survey_spec['spec']) == 3
|
||||
assert [spec_item['type'] for spec_item in survey_spec['spec']] == ['integer', 'str', 'integer']
|
||||
assert survey_spec['spec'][0]['max'] == 87
|
||||
@ -22,3 +22,16 @@ def test_job_blocking(get, post, job_template, inventory, inventory_factory):
|
||||
assert j_callback_1.is_blocked_by(j_callback_2)
|
||||
j_callback_2.limit = 'b'
|
||||
assert not j_callback_1.is_blocked_by(j_callback_2)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_blocking_allow_simul(get, post, job_template, inventory):
|
||||
job_template.allow_simultaneous = True
|
||||
j1 = Job.objects.create(job_template=job_template,
|
||||
inventory=inventory)
|
||||
j2 = Job.objects.create(job_template=job_template,
|
||||
inventory=inventory)
|
||||
assert not j1.is_blocked_by(j2)
|
||||
assert not j2.is_blocked_by(j1)
|
||||
job_template.allow_simultaneous = False
|
||||
assert j1.is_blocked_by(j2)
|
||||
assert j2.is_blocked_by(j1)
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
import mock # noqa
|
||||
import pytest
|
||||
|
||||
from django.db import transaction
|
||||
from django.core.urlresolvers import reverse
|
||||
from awx.main.models import Project
|
||||
|
||||
@ -9,62 +8,55 @@ from awx.main.models import Project
|
||||
#
|
||||
# Project listing and visibility tests
|
||||
#
|
||||
@pytest.fixture
|
||||
def team_project_list(organization_factory):
|
||||
objects = organization_factory('org-test',
|
||||
superusers=['admin'],
|
||||
users=['team1:alice', 'team2:bob'],
|
||||
teams=['team1', 'team2'],
|
||||
projects=['pteam1', 'pteam2', 'pshared'],
|
||||
roles=['team1.member_role:pteam1.admin_role',
|
||||
'team2.member_role:pteam2.admin_role',
|
||||
'team1.member_role:pshared.admin_role',
|
||||
'team2.member_role:pshared.admin_role'])
|
||||
return objects
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_project_list(get, project_factory, organization, admin, alice, bob):
|
||||
def test_user_project_list(get, organization_factory):
|
||||
'List of projects a user has access to, filtered by projects you can also see'
|
||||
|
||||
organization.member_role.members.add(alice, bob)
|
||||
objects = organization_factory('org1',
|
||||
projects=['alice project', 'bob project', 'shared project'],
|
||||
superusers=['admin'],
|
||||
users=['alice', 'bob'],
|
||||
roles=['alice project.admin_role:alice',
|
||||
'bob project.admin_role:bob',
|
||||
'shared project.admin_role:bob',
|
||||
'shared project.admin_role:alice'])
|
||||
|
||||
alice_project = project_factory('alice project')
|
||||
alice_project.admin_role.members.add(alice)
|
||||
|
||||
bob_project = project_factory('bob project')
|
||||
bob_project.admin_role.members.add(bob)
|
||||
|
||||
shared_project = project_factory('shared project')
|
||||
shared_project.admin_role.members.add(alice)
|
||||
shared_project.admin_role.members.add(bob)
|
||||
|
||||
# admins can see all projects
|
||||
assert get(reverse('api:user_projects_list', args=(admin.pk,)), admin).data['count'] == 3
|
||||
assert get(reverse('api:user_projects_list', args=(objects.superusers.admin.pk,)), objects.superusers.admin).data['count'] == 3
|
||||
|
||||
# admins can see everyone's projects
|
||||
assert get(reverse('api:user_projects_list', args=(alice.pk,)), admin).data['count'] == 2
|
||||
assert get(reverse('api:user_projects_list', args=(bob.pk,)), admin).data['count'] == 2
|
||||
assert get(reverse('api:user_projects_list', args=(objects.users.alice.pk,)), objects.superusers.admin).data['count'] == 2
|
||||
assert get(reverse('api:user_projects_list', args=(objects.users.bob.pk,)), objects.superusers.admin).data['count'] == 2
|
||||
|
||||
# users can see their own projects
|
||||
assert get(reverse('api:user_projects_list', args=(alice.pk,)), alice).data['count'] == 2
|
||||
assert get(reverse('api:user_projects_list', args=(objects.users.alice.pk,)), objects.users.alice).data['count'] == 2
|
||||
|
||||
# alice should only be able to see the shared project when looking at bob's projects
|
||||
assert get(reverse('api:user_projects_list', args=(bob.pk,)), alice).data['count'] == 1
|
||||
assert get(reverse('api:user_projects_list', args=(objects.users.bob.pk,)), objects.users.alice).data['count'] == 1
|
||||
|
||||
# alice should see all projects they can see when viewing an admin
|
||||
assert get(reverse('api:user_projects_list', args=(admin.pk,)), alice).data['count'] == 2
|
||||
assert get(reverse('api:user_projects_list', args=(objects.superusers.admin.pk,)), objects.users.alice).data['count'] == 2
|
||||
|
||||
|
||||
def setup_test_team_project_list(project_factory, team_factory, admin, alice, bob):
|
||||
team1 = team_factory('team1')
|
||||
team2 = team_factory('team2')
|
||||
|
||||
team1_project = project_factory('team1 project')
|
||||
team1_project.admin_role.parents.add(team1.member_role)
|
||||
|
||||
team2_project = project_factory('team2 project')
|
||||
team2_project.admin_role.parents.add(team2.member_role)
|
||||
|
||||
shared_project = project_factory('shared project')
|
||||
shared_project.admin_role.parents.add(team1.member_role)
|
||||
shared_project.admin_role.parents.add(team2.member_role)
|
||||
|
||||
team1.member_role.members.add(alice)
|
||||
team2.member_role.members.add(bob)
|
||||
return team1, team2
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_team_project_list(get, project_factory, team_factory, admin, alice, bob):
|
||||
'List of projects a team has access to, filtered by projects you can also see'
|
||||
team1, team2 = setup_test_team_project_list(project_factory, team_factory, admin, alice, bob)
|
||||
def test_team_project_list(get, team_project_list):
|
||||
objects = team_project_list
|
||||
|
||||
team1, team2 = objects.teams.team1, objects.teams.team2
|
||||
alice, bob, admin = objects.users.alice, objects.users.bob, objects.superusers.admin
|
||||
|
||||
# admins can see all projects on a team
|
||||
assert get(reverse('api:team_projects_list', args=(team1.pk,)), admin).data['count'] == 2
|
||||
@ -78,12 +70,6 @@ def test_team_project_list(get, project_factory, team_factory, admin, alice, bob
|
||||
assert get(reverse('api:team_projects_list', args=(team2.pk,)), alice).data['count'] == 1
|
||||
team2.read_role.members.remove(alice)
|
||||
|
||||
# Test user endpoints first, very similar tests to test_user_project_list
|
||||
# but permissions are being derived from team membership instead.
|
||||
with transaction.atomic():
|
||||
res = get(reverse('api:user_projects_list', args=(bob.pk,)), alice)
|
||||
assert res.status_code == 403
|
||||
|
||||
# admins can see all projects
|
||||
assert get(reverse('api:user_projects_list', args=(admin.pk,)), admin).data['count'] == 3
|
||||
|
||||
@ -98,17 +84,11 @@ def test_team_project_list(get, project_factory, team_factory, admin, alice, bob
|
||||
assert get(reverse('api:user_projects_list', args=(admin.pk,)), alice).data['count'] == 2
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_team_project_list_fail1(get, project_factory, team_factory, admin, alice, bob):
|
||||
# alice should not be able to see team2 projects because she doesn't have access to team2
|
||||
team1, team2 = setup_test_team_project_list(project_factory, team_factory, admin, alice, bob)
|
||||
res = get(reverse('api:team_projects_list', args=(team2.pk,)), alice)
|
||||
def test_team_project_list_fail1(get, team_project_list):
|
||||
objects = team_project_list
|
||||
res = get(reverse('api:team_projects_list', args=(objects.teams.team2.pk,)), objects.users.alice)
|
||||
assert res.status_code == 403
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_team_project_list_fail2(get, project_factory, team_factory, admin, alice, bob):
|
||||
team1, team2 = setup_test_team_project_list(project_factory, team_factory, admin, alice, bob)
|
||||
# alice should not be able to see bob
|
||||
|
||||
@pytest.mark.parametrize("u,expected_status_code", [
|
||||
('rando', 403),
|
||||
('org_member', 403),
|
||||
|
||||
@ -406,9 +406,9 @@ def test_ensure_rbac_fields_are_present(organization, get, admin):
|
||||
org = response.data
|
||||
|
||||
assert 'summary_fields' in org
|
||||
assert 'roles' in org['summary_fields']
|
||||
assert 'object_roles' in org['summary_fields']
|
||||
|
||||
role_pk = org['summary_fields']['roles']['admin_role']['id']
|
||||
role_pk = org['summary_fields']['object_roles']['admin_role']['id']
|
||||
role_url = reverse('api:role_detail', args=(role_pk,))
|
||||
org_role_response = get(role_url, admin)
|
||||
|
||||
@ -435,5 +435,5 @@ def test_ensure_role_summary_is_present(organization, get, user):
|
||||
org = response.data
|
||||
|
||||
assert 'summary_fields' in org
|
||||
assert 'roles' in org['summary_fields']
|
||||
assert org['summary_fields']['roles']['admin_role']['id'] > 0
|
||||
assert 'object_roles' in org['summary_fields']
|
||||
assert org['summary_fields']['object_roles']['admin_role']['id'] > 0
|
||||
|
||||
@@ -118,6 +118,9 @@ def test_cred_job_template(user, team, deploy_jobtemplate):

    access = CredentialAccess(a)
    rbac.migrate_credential(apps, None)

    cred.refresh_from_db()

    assert access.can_change(cred, {'organization': org.pk})

    org.admin_role.members.remove(a)
@@ -135,6 +138,8 @@ def test_cred_multi_job_template_single_org_xfail(user, deploy_jobtemplate):

    access = CredentialAccess(a)
    rbac.migrate_credential(apps, None)
    cred.refresh_from_db()

    assert not access.can_change(cred, {'organization': org.pk})

@pytest.mark.django_db
@@ -149,6 +154,8 @@ def test_cred_multi_job_template_single_org(user, team, deploy_jobtemplate):

    access = CredentialAccess(a)
    rbac.migrate_credential(apps, None)
    cred.refresh_from_db()

    assert access.can_change(cred, {'organization': org.pk})

    org.admin_role.members.remove(a)
@@ -180,6 +187,7 @@ def test_single_cred_multi_job_template_multi_org(user, organizations, credentia

    for jt in jts:
        jt.refresh_from_db()
    credential.refresh_from_db()

    assert jts[0].credential != jts[1].credential
    assert access.can_change(jts[0].credential, {'organization': org.pk})
72
awx/main/tests/functional/test_rbac_job.py
Normal file
@@ -0,0 +1,72 @@
import pytest

from awx.main.access import JobAccess
from awx.main.models import Job


@pytest.fixture
def normal_job(deploy_jobtemplate):
    return Job.objects.create(
        job_template=deploy_jobtemplate,
        project=deploy_jobtemplate.project,
        inventory=deploy_jobtemplate.inventory
    )

# Read permissions testing
@pytest.mark.django_db
def test_superuser_sees_orphans(normal_job, admin_user):
    normal_job.job_template = None
    access = JobAccess(admin_user)
    assert access.can_read(normal_job)

@pytest.mark.django_db
def test_org_member_does_not_see_orphans(normal_job, org_member, project):
    normal_job.job_template = None
    # Check that privledged access to project still does not grant access
    project.admin_role.members.add(org_member)
    access = JobAccess(org_member)
    assert not access.can_read(normal_job)

@pytest.mark.django_db
def test_org_admin_sees_orphans(normal_job, org_admin):
    normal_job.job_template = None
    access = JobAccess(org_admin)
    assert access.can_read(normal_job)

@pytest.mark.django_db
def test_org_auditor_sees_orphans(normal_job, org_auditor):
    normal_job.job_template = None
    access = JobAccess(org_auditor)
    assert access.can_read(normal_job)

# Delete permissions testing
@pytest.mark.django_db
def test_JT_admin_delete_denied(normal_job, rando):
    normal_job.job_template.admin_role.members.add(rando)
    access = JobAccess(rando)
    assert not access.can_delete(normal_job)

@pytest.mark.django_db
def test_inventory_admin_delete_denied(normal_job, rando):
    normal_job.job_template.inventory.admin_role.members.add(rando)
    access = JobAccess(rando)
    assert not access.can_delete(normal_job)

@pytest.mark.django_db
def test_null_related_delete_denied(normal_job, rando):
    normal_job.project = None
    normal_job.inventory = None
    access = JobAccess(rando)
    assert not access.can_delete(normal_job)

@pytest.mark.django_db
def test_inventory_org_admin_delete_allowed(normal_job, org_admin):
    normal_job.project = None # do this so we test job->inventory->org->admin connection
    access = JobAccess(org_admin)
    assert access.can_delete(normal_job)

@pytest.mark.django_db
def test_project_org_admin_delete_allowed(normal_job, org_admin):
    normal_job.inventory = None # do this so we test job->project->org->admin connection
    access = JobAccess(org_admin)
    assert access.can_delete(normal_job)
@ -7,8 +7,11 @@ from awx.main.access import (
|
||||
)
|
||||
from awx.main.migrations import _rbac as rbac
|
||||
from awx.main.models import Permission
|
||||
from awx.main.models.jobs import JobTemplate
|
||||
from django.apps import apps
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_job_template_migration_check(credential, deploy_jobtemplate, check_jobtemplate, user):
|
||||
@ -155,3 +158,26 @@ def test_job_template_access_superuser(check_license, user, deploy_jobtemplate):
|
||||
# THEN all access checks should pass
|
||||
assert access.can_read(deploy_jobtemplate)
|
||||
assert access.can_add({})
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.job_permissions
|
||||
def test_job_template_creator_access(project, rando, post):
|
||||
|
||||
project.admin_role.members.add(rando)
|
||||
with mock.patch(
|
||||
'awx.main.models.projects.ProjectOptions.playbooks',
|
||||
new_callable=mock.PropertyMock(return_value=['helloworld.yml'])):
|
||||
response = post(reverse('api:job_template_list', args=[]), dict(
|
||||
name='newly-created-jt',
|
||||
job_type='run',
|
||||
ask_inventory_on_launch=True,
|
||||
ask_credential_on_launch=True,
|
||||
project=project.pk,
|
||||
playbook='helloworld.yml'
|
||||
), rando)
|
||||
|
||||
assert response.status_code == 201
|
||||
jt_pk = response.data['id']
|
||||
jt_obj = JobTemplate.objects.get(pk=jt_pk)
|
||||
# Creating a JT should place the creator in the admin role
|
||||
assert rando in jt_obj.admin_role
|
||||
|
||||
@ -31,20 +31,22 @@ def test_label_access_superuser(label, user):
|
||||
assert access.can_delete(label)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_label_access_admin(label, user, organization_factory):
|
||||
def test_label_access_admin(organization_factory):
|
||||
'''can_change because I am an admin of that org'''
|
||||
a = user('admin', False)
|
||||
org_no_members = organization_factory("no_members")
|
||||
org_members = organization_factory("has_members")
|
||||
no_members = organization_factory("no_members")
|
||||
members = organization_factory("has_members",
|
||||
users=['admin'],
|
||||
labels=['test'])
|
||||
|
||||
label.organization.admin_role.members.add(a)
|
||||
org_members.admin_role.members.add(a)
|
||||
label = members.labels.test
|
||||
admin = members.users.admin
|
||||
members.organization.admin_role.members.add(admin)
|
||||
|
||||
access = LabelAccess(user('admin', False))
|
||||
assert not access.can_change(label, {'organization': org_no_members.id})
|
||||
access = LabelAccess(admin)
|
||||
assert not access.can_change(label, {'organization': no_members.organization.id})
|
||||
assert access.can_read(label)
|
||||
assert access.can_change(label, None)
|
||||
assert access.can_change(label, {'organization': org_members.id})
|
||||
assert access.can_change(label, {'organization': members.organization.id})
|
||||
assert access.can_delete(label)
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@ -25,35 +25,44 @@ def test_notification_template_get_queryset_orgadmin(notification_template, user
|
||||
assert access.get_queryset().count() == 1
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notification_template_access_superuser(notification_template, user, notification_template_factory):
|
||||
access = NotificationTemplateAccess(user('admin', True))
|
||||
assert access.can_read(notification_template)
|
||||
assert access.can_change(notification_template, None)
|
||||
assert access.can_delete(notification_template)
|
||||
nf = notification_template_factory("test-orphaned")
|
||||
def test_notification_template_access_superuser(notification_template_factory):
|
||||
nf_objects = notification_template_factory('test-orphaned', organization='test', superusers=['admin'])
|
||||
admin = nf_objects.superusers.admin
|
||||
nf = nf_objects.notification_template
|
||||
|
||||
access = NotificationTemplateAccess(admin)
|
||||
assert access.can_read(nf)
|
||||
assert access.can_change(nf, None)
|
||||
assert access.can_delete(nf)
|
||||
|
||||
nf.organization = None
|
||||
nf.save()
|
||||
|
||||
assert access.can_read(nf)
|
||||
assert access.can_change(nf, None)
|
||||
assert access.can_delete(nf)
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notification_template_access_admin(notification_template, user, organization_factory, notification_template_factory):
|
||||
adm = user('admin', False)
|
||||
other_org = organization_factory('other')
|
||||
present_org = organization_factory('present')
|
||||
notification_template.organization.admin_role.members.add(adm)
|
||||
present_org.admin_role.members.add(adm)
|
||||
def test_notification_template_access_admin(organization_factory, notification_template_factory):
|
||||
other_objects = organization_factory('other')
|
||||
present_objects = organization_factory('present',
|
||||
users=['admin'],
|
||||
notification_templates=['test-notification'],
|
||||
roles=['present.admin_role:admin'])
|
||||
|
||||
access = NotificationTemplateAccess(user('admin', False))
|
||||
notification_template = present_objects.notification_templates.test_notification
|
||||
other_org = other_objects.organization
|
||||
present_org = present_objects.organization
|
||||
admin = present_objects.users.admin
|
||||
|
||||
access = NotificationTemplateAccess(admin)
|
||||
assert not access.can_change(notification_template, {'organization': other_org.id})
|
||||
assert access.can_read(notification_template)
|
||||
assert access.can_change(notification_template, None)
|
||||
assert access.can_change(notification_template, {'organization': present_org.id})
|
||||
assert access.can_delete(notification_template)
|
||||
|
||||
nf = notification_template_factory("test-orphaned")
|
||||
nf.organization = None
|
||||
nf.save()
|
||||
assert not access.can_read(nf)
|
||||
assert not access.can_change(nf, None)
|
||||
assert not access.can_delete(nf)
|
||||
|
||||
@ -404,164 +404,6 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
|
||||
del data[k]
|
||||
return self.post(url, data, expect=expect)
|
||||
|
||||
@mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
|
||||
def test_ad_hoc_command_list(self, ignore):
|
||||
url = reverse('api:ad_hoc_command_list')
|
||||
|
||||
# Retrieve the empty list of ad hoc commands.
|
||||
qs = AdHocCommand.objects.none()
|
||||
self.check_get_list(url, 'admin', qs)
|
||||
self.check_get_list(url, 'normal', qs)
|
||||
self.check_get_list(url, 'other', qs)
|
||||
self.check_get_list(url, 'nobody', qs)
|
||||
self.check_get_list(url, None, qs, expect=401)
|
||||
|
||||
# Start a new ad hoc command. Only admin and normal user (org admin)
|
||||
# can run commands by default.
|
||||
with self.current_user('admin'):
|
||||
response = self.run_test_ad_hoc_command()
|
||||
self.assertEqual(response['job_type'], 'run')
|
||||
self.assertEqual(response['inventory'], self.inventory.pk)
|
||||
self.assertEqual(response['credential'], self.credential.pk)
|
||||
self.assertEqual(response['module_name'], 'command')
|
||||
self.assertEqual(response['module_args'], 'uptime')
|
||||
self.assertEqual(response['limit'], '')
|
||||
self.assertEqual(response['forks'], 0)
|
||||
self.assertEqual(response['verbosity'], 0)
|
||||
self.assertEqual(response['become_enabled'], False)
|
||||
self.put(url, {}, expect=405)
|
||||
self.patch(url, {}, expect=405)
|
||||
self.delete(url, expect=405)
|
||||
with self.current_user('normal'):
|
||||
self.run_test_ad_hoc_command()
|
||||
self.put(url, {}, expect=405)
|
||||
self.patch(url, {}, expect=405)
|
||||
self.delete(url, expect=405)
|
||||
with self.current_user('other'):
|
||||
self.run_test_ad_hoc_command(expect=403)
|
||||
self.put(url, {}, expect=405)
|
||||
self.patch(url, {}, expect=405)
|
||||
self.delete(url, expect=405)
|
||||
with self.current_user('nobody'):
|
||||
self.run_test_ad_hoc_command(expect=403)
|
||||
self.put(url, {}, expect=405)
|
||||
self.patch(url, {}, expect=405)
|
||||
self.delete(url, expect=405)
|
||||
with self.current_user(None):
|
||||
self.run_test_ad_hoc_command(expect=401)
|
||||
self.put(url, {}, expect=401)
|
||||
self.patch(url, {}, expect=401)
|
||||
self.delete(url, expect=401)
|
||||
|
||||
# Retrieve the list of ad hoc commands (only admin/normal can see by default).
|
||||
qs = AdHocCommand.objects.all()
|
||||
self.assertEqual(qs.count(), 2)
|
||||
self.check_get_list(url, 'admin', qs)
|
||||
self.check_get_list(url, 'normal', qs)
|
||||
qs = AdHocCommand.objects.none()
|
||||
self.check_get_list(url, 'other', qs)
|
||||
self.check_get_list(url, 'nobody', qs)
|
||||
self.check_get_list(url, None, qs, expect=401)
|
||||
|
||||
# Explicitly give other user updater permission on the inventory (still
|
||||
# not allowed to run ad hoc commands).
|
||||
user_roles_list_url = reverse('api:user_roles_list', args=(self.other_django_user.pk,))
|
||||
with self.current_user('admin'):
|
||||
response = self.post(user_roles_list_url, {"id": self.inventory.update_role.id}, expect=204)
|
||||
with self.current_user('other'):
|
||||
self.run_test_ad_hoc_command(expect=403)
|
||||
self.check_get_list(url, 'other', qs)
|
||||
|
||||
# Add executor role permissions to other. Fails
|
||||
# when other user can't read credential.
|
||||
with self.current_user('admin'):
|
||||
response = self.post(user_roles_list_url, {"id": self.inventory.execute_role.id}, expect=204)
|
||||
with self.current_user('other'):
|
||||
self.run_test_ad_hoc_command(expect=403)
|
||||
|
||||
# Succeeds once other user has a readable credential. Other user can
|
||||
# only see his own ad hoc command (because of credential permissions).
|
||||
other_cred = self.create_test_credential(user=self.other_django_user)
|
||||
with self.current_user('other'):
|
||||
self.run_test_ad_hoc_command(credential=other_cred.pk)
|
||||
qs = AdHocCommand.objects.filter(created_by=self.other_django_user)
|
||||
self.assertEqual(qs.count(), 1)
|
||||
self.check_get_list(url, 'other', qs)
|
||||
|
||||
# Explicitly give nobody user read permission on the inventory.
|
||||
nobody_roles_list_url = reverse('api:user_roles_list', args=(self.nobody_django_user.pk,))
|
||||
with self.current_user('admin'):
|
||||
response = self.post(nobody_roles_list_url, {"id": self.inventory.read_role.id}, expect=204)
|
||||
with self.current_user('nobody'):
|
||||
self.run_test_ad_hoc_command(credential=other_cred.pk, expect=403)
|
||||
self.check_get_list(url, 'other', qs)
|
||||
|
||||
# Create a cred for the nobody user, run an ad hoc command as the admin
|
||||
# user with that cred. Nobody user can still not see the ad hoc command
|
||||
# without the run_ad_hoc_commands permission flag.
|
||||
nobody_cred = self.create_test_credential(user=self.nobody_django_user)
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(credential=nobody_cred.pk)
|
||||
qs = AdHocCommand.objects.none()
|
||||
self.check_get_list(url, 'nobody', qs)
|
||||
|
||||
# Give the nobody user the run_ad_hoc_commands flag, and can now see
|
||||
# the one ad hoc command previously run.
|
||||
with self.current_user('admin'):
|
||||
response = self.post(nobody_roles_list_url, {"id": self.inventory.execute_role.id}, expect=204)
|
||||
qs = AdHocCommand.objects.filter(credential_id=nobody_cred.pk)
|
||||
self.assertEqual(qs.count(), 1)
|
||||
self.check_get_list(url, 'nobody', qs)
|
||||
|
||||
# Post without inventory (should fail).
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(inventory=None, expect=400)
|
||||
|
||||
# Post without credential (should fail).
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(credential=None, expect=400)
|
||||
|
||||
# Post with empty or unsupported module name (empty defaults to command).
|
||||
with self.current_user('admin'):
|
||||
response = self.run_test_ad_hoc_command(module_name=None)
|
||||
self.assertEqual(response['module_name'], 'command')
|
||||
with self.current_user('admin'):
|
||||
response = self.run_test_ad_hoc_command(module_name='')
|
||||
self.assertEqual(response['module_name'], 'command')
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(module_name='transcombobulator', expect=400)
|
||||
|
||||
# Post with empty module args for shell/command modules (should fail),
|
||||
# empty args for other modules ok.
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(module_args=None, expect=400)
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(module_name='shell', module_args=None, expect=400)
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(module_name='shell', module_args='', expect=400)
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(module_name='ping', module_args=None)
|
||||
|
||||
# Post with invalid values for other parameters.
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(job_type='something', expect=400)
|
||||
with self.current_user('admin'):
|
||||
response = self.run_test_ad_hoc_command(job_type='check')
|
||||
self.assertEqual(response['job_type'], 'check')
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(verbosity=-1, expect=400)
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(forks=-1, expect=400)
|
||||
with self.current_user('admin'):
|
||||
response = self.run_test_ad_hoc_command(become_enabled=True)
|
||||
self.assertEqual(response['become_enabled'], True)
|
||||
|
||||
# Try to run with expired license.
|
||||
self.create_expired_license_file()
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(expect=403)
|
||||
with self.current_user('normal'):
|
||||
self.run_test_ad_hoc_command(expect=403)
|
||||
|
||||
@mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
|
||||
def test_ad_hoc_command_detail(self, ignore):
|
||||
@ -953,98 +795,6 @@ class AdHocCommandApiTest(BaseAdHocCommandTest):
|
||||
self.patch(url, {}, expect=401)
|
||||
self.delete(url, expect=401)
|
||||
|
||||
@mock.patch('awx.main.tasks.BaseTask.run_pexpect', side_effect=run_pexpect_mock)
|
||||
def test_inventory_ad_hoc_commands_list(self, ignore):
|
||||
with self.current_user('admin'):
|
||||
response = self.run_test_ad_hoc_command()
|
||||
response = self.run_test_ad_hoc_command(inventory=self.inventory2.pk)
|
||||
|
||||
# Test the ad hoc commands list for an inventory. Should only return
|
||||
# the ad hoc command(s) run against that inventory. Posting should
|
||||
# start a new ad hoc command and always set the inventory from the URL.
|
||||
url = reverse('api:inventory_ad_hoc_commands_list', args=(self.inventory.pk,))
|
||||
inventory_url = reverse('api:inventory_detail', args=(self.inventory.pk,))
|
||||
with self.current_user('admin'):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertEqual(response['count'], 1)
|
||||
response = self.run_test_ad_hoc_command(url=url, inventory=None, expect=201)
|
||||
self.assertEqual(response['inventory'], self.inventory.pk)
|
||||
response = self.run_test_ad_hoc_command(url=url, inventory=self.inventory2.pk, expect=201)
|
||||
self.assertEqual(response['inventory'], self.inventory.pk)
|
||||
self.put(url, {}, expect=405)
|
||||
self.patch(url, {}, expect=405)
|
||||
self.delete(url, expect=405)
|
||||
response = self.get(inventory_url, expect=200)
|
||||
self.assertTrue(response['can_run_ad_hoc_commands'])
|
||||
with self.current_user('normal'):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertEqual(response['count'], 3)
|
||||
response = self.run_test_ad_hoc_command(url=url, inventory=None, expect=201)
|
||||
self.assertEqual(response['inventory'], self.inventory.pk)
|
||||
self.put(url, {}, expect=405)
|
||||
self.patch(url, {}, expect=405)
|
||||
self.delete(url, expect=405)
|
||||
response = self.get(inventory_url, expect=200)
|
||||
self.assertTrue(response['can_run_ad_hoc_commands'])
|
||||
with self.current_user('other'):
|
||||
self.get(url, expect=403)
|
||||
self.post(url, {}, expect=403)
|
||||
self.put(url, {}, expect=405)
|
||||
self.patch(url, {}, expect=405)
|
||||
self.delete(url, expect=405)
|
||||
with self.current_user('nobody'):
|
||||
self.get(url, expect=403)
|
||||
self.post(url, {}, expect=403)
|
||||
self.put(url, {}, expect=405)
|
||||
self.patch(url, {}, expect=405)
|
||||
self.delete(url, expect=405)
|
||||
with self.current_user(None):
|
||||
self.get(url, expect=401)
|
||||
self.post(url, {}, expect=401)
|
||||
self.put(url, {}, expect=401)
|
||||
self.patch(url, {}, expect=401)
|
||||
self.delete(url, expect=401)
|
||||
|
||||
# Create another unrelated inventory permission with run_ad_hoc_commands
|
||||
# set; this tests an edge case in the RBAC query where we'll return
|
||||
# can_run_ad_hoc_commands = True when we shouldn't.
|
||||
nobody_roles_list_url = reverse('api:user_roles_list', args=(self.nobody_django_user.pk,))
|
||||
with self.current_user('admin'):
|
||||
response = self.post(nobody_roles_list_url, {"id": self.inventory.execute_role.id}, expect=204)
|
||||
|
||||
# Create a credential for the other user and explicitly give other
|
||||
# user admin permission on the inventory (still not allowed to run ad
|
||||
# hoc commands; can get the list but can't see any items).
|
||||
other_cred = self.create_test_credential(user=self.other_django_user)
|
||||
user_roles_list_url = reverse('api:user_roles_list', args=(self.other_django_user.pk,))
|
||||
with self.current_user('admin'):
|
||||
response = self.post(user_roles_list_url, {"id": self.inventory.update_role.id}, expect=204)
|
||||
with self.current_user('other'):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertEqual(response['count'], 0)
|
||||
response = self.get(inventory_url, expect=200)
|
||||
self.assertFalse(response['can_run_ad_hoc_commands'])
|
||||
self.run_test_ad_hoc_command(url=url, inventory=None, credential=other_cred.pk, expect=403)
|
||||
|
||||
# Update permission to allow other user to run ad hoc commands. Can
|
||||
# only see his own ad hoc commands (because of credential permission).
|
||||
with self.current_user('admin'):
|
||||
response = self.post(user_roles_list_url, {"id": self.inventory.adhoc_role.id}, expect=204)
|
||||
with self.current_user('other'):
|
||||
response = self.get(url, expect=200)
|
||||
self.assertEqual(response['count'], 0)
|
||||
self.run_test_ad_hoc_command(url=url, inventory=None, credential=other_cred.pk, expect=201)
|
||||
response = self.get(url, expect=200)
|
||||
self.assertEqual(response['count'], 1)
|
||||
response = self.get(inventory_url, expect=200)
|
||||
self.assertTrue(response['can_run_ad_hoc_commands'])
|
||||
|
||||
# Try to run with expired license.
|
||||
self.create_expired_license_file()
|
||||
with self.current_user('admin'):
|
||||
self.run_test_ad_hoc_command(url=url, expect=403)
|
||||
with self.current_user('normal'):
|
||||
self.run_test_ad_hoc_command(url=url, expect=403)
|
||||
|
||||
def test_host_ad_hoc_commands_list(self):
|
||||
# TODO: Figure out why this test needs pexpect
|
||||
|
||||
@ -1770,6 +1770,7 @@ class InventoryUpdatesTest(BaseTransactionTest):
|
||||
self.assertFalse(inventory_update.name.endswith(inventory_update.inventory_source.name), inventory_update.name)
|
||||
|
||||
def test_update_from_rax(self):
|
||||
self.skipTest('Skipping until we can resolve the CERTIFICATE_VERIFY_FAILED issue: #1706')
|
||||
source_username = getattr(settings, 'TEST_RACKSPACE_USERNAME', '')
|
||||
source_password = getattr(settings, 'TEST_RACKSPACE_API_KEY', '')
|
||||
source_regions = getattr(settings, 'TEST_RACKSPACE_REGIONS', '')
|
||||
|
||||
@ -961,7 +961,7 @@ class JobTemplateCallbackTest(BaseJobTestMixin, django.test.LiveServerTestCase):
|
||||
self.assertEqual(jobs_qs.count(), 7)
|
||||
job = jobs_qs[0]
|
||||
self.assertEqual(job.launch_type, 'callback')
|
||||
self.assertEqual(job.limit, ':&'.join([job_template.limit, host.name]))
|
||||
self.assertEqual(job.limit, host.name)
|
||||
self.assertEqual(job.hosts.count(), 1)
|
||||
self.assertEqual(job.hosts.all()[0], host)
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
# Python
import pytest
import mock
import json

# AWX
from awx.api.serializers import JobTemplateSerializer, JobSerializer, JobOptionsSerializer
@@ -9,9 +10,14 @@ from awx.main.models import Label, Job
#DRF
from rest_framework import serializers

def mock_JT_resource_data():
    return ({}, [])

@pytest.fixture
def job_template(mocker):
    return mocker.MagicMock(pk=5)
    mock_jt = mocker.MagicMock(pk=5)
    mock_jt.resource_validation_data = mock_JT_resource_data
    return mock_jt

@pytest.fixture
def job(mocker, job_template):
@@ -140,6 +146,21 @@ class TestJobSerializerGetRelated(GetRelatedMixin):
        assert 'job_template' in related
        assert related['job_template'] == '/api/v1/%s/%d/' % ('job_templates', job.job_template.pk)

@mock.patch('awx.api.serializers.BaseSerializer.to_representation', lambda self,obj: {
    'extra_vars': obj.extra_vars})
class TestJobSerializerSubstitution():

    def test_survey_password_hide(self, mocker):
        job = mocker.MagicMock(**{
            'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
            'extra_vars.return_value': '{\"secret_key\": \"my_password\"}'})
        serializer = JobSerializer(job)
        rep = serializer.to_representation(job)
        extra_vars = json.loads(rep['extra_vars'])
        assert extra_vars['secret_key'] == '$encrypted$'
        job.display_extra_vars.assert_called_once_with()
        assert 'my_password' not in extra_vars

@mock.patch('awx.api.serializers.BaseSerializer.get_summary_fields', lambda x,y: {})
class TestJobOptionsSerializerGetSummaryFields(GetSummaryFieldsMixin):
    def test__summary_field_labels_10_max(self, mocker, job_template, labels):
40
awx/main/tests/unit/models/test_job_template_unit.py
Normal file
@@ -0,0 +1,40 @@
import pytest


def test_missing_project_error(job_template_factory):
    objects = job_template_factory(
        'missing-project-jt',
        organization='org1',
        inventory='inventory1',
        credential='cred1',
        persisted=False)
    obj = objects.job_template
    assert 'project' in obj.resources_needed_to_start
    validation_errors, resources_needed_to_start = obj.resource_validation_data()
    assert 'project' in validation_errors

def test_inventory_credential_need_to_start(job_template_factory):
    objects = job_template_factory(
        'job-template-few-resources',
        project='project1',
        persisted=False)
    obj = objects.job_template
    assert 'inventory' in obj.resources_needed_to_start
    assert 'credential' in obj.resources_needed_to_start

def test_inventory_credential_contradictions(job_template_factory):
    objects = job_template_factory(
        'job-template-paradox',
        project='project1',
        persisted=False)
    obj = objects.job_template
    obj.ask_inventory_on_launch = False
    obj.ask_credential_on_launch = False
    validation_errors, resources_needed_to_start = obj.resource_validation_data()
    assert 'inventory' in validation_errors
    assert 'credential' in validation_errors

@pytest.mark.survey
def test_survey_password_list(job_with_secret_key_unit):
    """Verify that survey_password_variables method gives a list of survey passwords"""
    assert job_with_secret_key_unit.job_template.survey_password_variables() == ['secret_key', 'SSN']
39
awx/main/tests/unit/models/test_job_unit.py
Normal file
@@ -0,0 +1,39 @@
import pytest
import json

from awx.main.tasks import RunJob


@pytest.fixture
def job(mocker):
    return mocker.MagicMock(**{
        'display_extra_vars.return_value': '{\"secret_key\": \"$encrypted$\"}',
        'extra_vars_dict': {"secret_key": "my_password"},
        'pk': 1, 'job_template.pk': 1, 'job_template.name': '',
        'created_by.pk': 1, 'created_by.username': 'admin',
        'launch_type': 'manual'})

@pytest.mark.survey
def test_job_redacted_extra_vars(job_with_secret_key_unit):
    """Verify that this method redacts vars marked as passwords in a survey"""
    assert json.loads(job_with_secret_key_unit.display_extra_vars()) == {
        'submitter_email': 'foobar@redhat.com',
        'secret_key': '$encrypted$',
        'SSN': '$encrypted$'}

def test_job_safe_args_redacted_passwords(job):
    """Verify that safe_args hides passwords in the job extra_vars"""
    kwargs = {'ansible_version': '2.1'}
    run_job = RunJob()
    safe_args = run_job.build_safe_args(job, **kwargs)
    ev_index = safe_args.index('-e') + 1
    extra_vars = json.loads(safe_args[ev_index])
    assert extra_vars['secret_key'] == '$encrypted$'

def test_job_args_unredacted_passwords(job):
    kwargs = {'ansible_version': '2.1'}
    run_job = RunJob()
    args = run_job.build_args(job, **kwargs)
    ev_index = args.index('-e') + 1
    extra_vars = json.loads(args[ev_index])
    assert extra_vars['secret_key'] == 'my_password'
@ -141,6 +141,7 @@ except ImportError:
|
||||
|
||||
|
||||
class Ec2Inventory(object):
|
||||
|
||||
def _empty_inventory(self):
|
||||
return {"_meta" : {"hostvars" : {}}}
|
||||
|
||||
@ -157,6 +158,9 @@ class Ec2Inventory(object):
|
||||
# Boto profile to use (if any)
|
||||
self.boto_profile = None
|
||||
|
||||
# AWS credentials.
|
||||
self.credentials = {}
|
||||
|
||||
# Read settings and parse CLI arguments
|
||||
self.parse_cli_args()
|
||||
self.read_settings()
|
||||
@ -224,7 +228,7 @@ class Ec2Inventory(object):
|
||||
configRegions_exclude = config.get('ec2', 'regions_exclude')
|
||||
if (configRegions == 'all'):
|
||||
if self.eucalyptus_host:
|
||||
self.regions.append(boto.connect_euca(host=self.eucalyptus_host).region.name)
|
||||
self.regions.append(boto.connect_euca(host=self.eucalyptus_host).region.name, **self.credentials)
|
||||
else:
|
||||
for regionInfo in ec2.regions():
|
||||
if regionInfo.name not in configRegions_exclude:
|
||||
@ -236,6 +240,19 @@ class Ec2Inventory(object):
|
||||
self.destination_variable = config.get('ec2', 'destination_variable')
|
||||
self.vpc_destination_variable = config.get('ec2', 'vpc_destination_variable')
|
||||
|
||||
if config.has_option('ec2', 'hostname_variable'):
|
||||
self.hostname_variable = config.get('ec2', 'hostname_variable')
|
||||
else:
|
||||
self.hostname_variable = None
|
||||
|
||||
if config.has_option('ec2', 'destination_format') and \
|
||||
config.has_option('ec2', 'destination_format_tags'):
|
||||
self.destination_format = config.get('ec2', 'destination_format')
|
||||
self.destination_format_tags = config.get('ec2', 'destination_format_tags').split(',')
|
||||
else:
|
||||
self.destination_format = None
|
||||
self.destination_format_tags = None
|
||||
|
||||
# Route53
|
||||
self.route53_enabled = config.getboolean('ec2', 'route53')
|
||||
self.route53_excluded_zones = []
|
||||
@ -310,6 +327,29 @@ class Ec2Inventory(object):
|
||||
if config.has_option('ec2', 'boto_profile') and not self.boto_profile:
|
||||
self.boto_profile = config.get('ec2', 'boto_profile')
|
||||
|
||||
# AWS credentials (prefer environment variables)
|
||||
if not (self.boto_profile or os.environ.get('AWS_ACCESS_KEY_ID') or
|
||||
os.environ.get('AWS_PROFILE')):
|
||||
if config.has_option('credentials', 'aws_access_key_id'):
|
||||
aws_access_key_id = config.get('credentials', 'aws_access_key_id')
|
||||
else:
|
||||
aws_access_key_id = None
|
||||
if config.has_option('credentials', 'aws_secret_access_key'):
|
||||
aws_secret_access_key = config.get('credentials', 'aws_secret_access_key')
|
||||
else:
|
||||
aws_secret_access_key = None
|
||||
if config.has_option('credentials', 'aws_security_token'):
|
||||
aws_security_token = config.get('credentials', 'aws_security_token')
|
||||
else:
|
||||
aws_security_token = None
|
||||
if aws_access_key_id:
|
||||
self.credentials = {
|
||||
'aws_access_key_id': aws_access_key_id,
|
||||
'aws_secret_access_key': aws_secret_access_key
|
||||
}
|
||||
if aws_security_token:
|
||||
self.credentials['security_token'] = aws_security_token
|
||||
|
||||
# Cache related
|
||||
cache_dir = os.path.expanduser(config.get('ec2', 'cache_path'))
|
||||
if self.boto_profile:
|
||||
@ -317,10 +357,22 @@ class Ec2Inventory(object):
|
||||
if not os.path.exists(cache_dir):
|
||||
os.makedirs(cache_dir)
|
||||
|
||||
self.cache_path_cache = cache_dir + "/ansible-ec2.cache"
|
||||
self.cache_path_index = cache_dir + "/ansible-ec2.index"
|
||||
cache_name = 'ansible-ec2'
|
||||
aws_profile = lambda: (self.boto_profile or
|
||||
os.environ.get('AWS_PROFILE') or
|
||||
os.environ.get('AWS_ACCESS_KEY_ID') or
|
||||
self.credentials.get('aws_access_key_id', None))
|
||||
if aws_profile():
|
||||
cache_name = '%s-%s' % (cache_name, aws_profile())
|
||||
self.cache_path_cache = cache_dir + "/%s.cache" % cache_name
|
||||
self.cache_path_index = cache_dir + "/%s.index" % cache_name
|
||||
self.cache_max_age = config.getint('ec2', 'cache_max_age')
|
||||
|
||||
if config.has_option('ec2', 'expand_csv_tags'):
|
||||
self.expand_csv_tags = config.getboolean('ec2', 'expand_csv_tags')
|
||||
else:
|
||||
self.expand_csv_tags = False
|
||||
|
||||
# Configure nested groups instead of flat namespace.
|
||||
if config.has_option('ec2', 'nested_groups'):
|
||||
self.nested_groups = config.getboolean('ec2', 'nested_groups')
|
||||
@ -382,7 +434,10 @@ class Ec2Inventory(object):
|
||||
# Instance filters (see boto and EC2 API docs). Ignore invalid filters.
|
||||
self.ec2_instance_filters = defaultdict(list)
|
||||
if config.has_option('ec2', 'instance_filters'):
|
||||
for instance_filter in config.get('ec2', 'instance_filters', '').split(','):
|
||||
|
||||
filters = [f for f in config.get('ec2', 'instance_filters').split(',') if f]
|
||||
|
||||
for instance_filter in filters:
|
||||
instance_filter = instance_filter.strip()
|
||||
if not instance_filter or '=' not in instance_filter:
|
||||
continue
|
||||
@ -401,7 +456,7 @@ class Ec2Inventory(object):
|
||||
help='Get all the variables about a specific instance')
|
||||
parser.add_argument('--refresh-cache', action='store_true', default=False,
|
||||
help='Force refresh of cache by making API requests to EC2 (default: False - use cache files)')
|
||||
parser.add_argument('--boto-profile', action='store',
|
||||
parser.add_argument('--profile', '--boto-profile', action='store', dest='boto_profile',
|
||||
help='Use boto profile for connections to EC2')
|
||||
self.args = parser.parse_args()
|
||||
|
||||
@ -426,7 +481,7 @@ class Ec2Inventory(object):
|
||||
def connect(self, region):
|
||||
''' create connection to api server'''
|
||||
if self.eucalyptus:
|
||||
conn = boto.connect_euca(host=self.eucalyptus_host)
|
||||
conn = boto.connect_euca(host=self.eucalyptus_host, **self.credentials)
|
||||
conn.APIVersion = '2010-08-31'
|
||||
else:
|
||||
conn = self.connect_to_aws(ec2, region)
|
||||
@ -440,7 +495,7 @@ class Ec2Inventory(object):
|
||||
return connect_args
|
||||
|
||||
def connect_to_aws(self, module, region):
|
||||
connect_args = {}
|
||||
connect_args = self.credentials
|
||||
|
||||
# only pass the profile name if it's set (as it is not supported by older boto versions)
|
||||
if self.boto_profile:
|
||||
@ -466,15 +521,27 @@ class Ec2Inventory(object):
|
||||
else:
|
||||
reservations = conn.get_all_instances()
|
||||
|
||||
# Pull the tags back in a second step
|
||||
# AWS are on record as saying that the tags fetched in the first `get_all_instances` request are not
|
||||
# reliable and may be missing, and the only way to guarantee they are there is by calling `get_all_tags`
|
||||
instance_ids = []
|
||||
for reservation in reservations:
|
||||
instance_ids.extend([instance.id for instance in reservation.instances])
|
||||
tags = conn.get_all_tags(filters={'resource-type': 'instance', 'resource-id': instance_ids})
|
||||
tags_by_instance_id = defaultdict(dict)
|
||||
for tag in tags:
|
||||
tags_by_instance_id[tag.res_id][tag.name] = tag.value
|
||||
|
||||
for reservation in reservations:
|
||||
for instance in reservation.instances:
|
||||
instance.tags = tags_by_instance_id[instance.id]
|
||||
self.add_instance(instance, region)
|
||||
|
||||
except boto.exception.BotoServerError as e:
|
||||
if e.error_code == 'AuthFailure':
|
||||
error = self.get_auth_error_message()
|
||||
else:
|
||||
backend = 'Eucalyptus' if self.eucalyptus else 'AWS'
|
||||
backend = 'Eucalyptus' if self.eucalyptus else 'AWS'
|
||||
error = "Error connecting to %s backend.\n%s" % (backend, e.message)
|
||||
self.fail_with_error(error, 'getting EC2 instances')
|
||||
|
||||
@ -485,9 +552,14 @@ class Ec2Inventory(object):
|
||||
try:
|
||||
conn = self.connect_to_aws(rds, region)
|
||||
if conn:
|
||||
instances = conn.get_all_dbinstances()
|
||||
for instance in instances:
|
||||
self.add_rds_instance(instance, region)
|
||||
marker = None
|
||||
while True:
|
||||
instances = conn.get_all_dbinstances(marker=marker)
|
||||
marker = instances.marker
|
||||
for instance in instances:
|
||||
self.add_rds_instance(instance, region)
|
||||
if not marker:
|
||||
break
|
||||
except boto.exception.BotoServerError as e:
|
||||
error = e.reason
|
||||
|
||||
@ -609,7 +681,9 @@ class Ec2Inventory(object):
|
||||
return
|
||||
|
||||
# Select the best destination address
|
||||
if instance.subnet_id:
|
||||
if self.destination_format and self.destination_format_tags:
|
||||
dest = self.destination_format.format(*[ getattr(instance, 'tags').get(tag, '') for tag in self.destination_format_tags ])
|
||||
elif instance.subnet_id:
|
||||
dest = getattr(instance, self.vpc_destination_variable, None)
|
||||
if dest is None:
|
||||
dest = getattr(instance, 'tags').get(self.vpc_destination_variable, None)
|
||||
@ -622,32 +696,46 @@ class Ec2Inventory(object):
|
||||
# Skip instances we cannot address (e.g. private VPC subnet)
|
||||
return
|
||||
|
||||
# Set the inventory name
|
||||
hostname = None
|
||||
if self.hostname_variable:
|
||||
if self.hostname_variable.startswith('tag_'):
|
||||
hostname = instance.tags.get(self.hostname_variable[4:], None)
|
||||
else:
|
||||
hostname = getattr(instance, self.hostname_variable)
|
||||
|
||||
# If we can't get a nice hostname, use the destination address
|
||||
if not hostname:
|
||||
hostname = dest
|
||||
else:
|
||||
hostname = self.to_safe(hostname).lower()
|
||||
|
||||
# if we only want to include hosts that match a pattern, skip those that don't
|
||||
if self.pattern_include and not self.pattern_include.match(dest):
|
||||
if self.pattern_include and not self.pattern_include.match(hostname):
|
||||
return
|
||||
|
||||
# if we need to exclude hosts that match a pattern, skip those
|
||||
if self.pattern_exclude and self.pattern_exclude.match(dest):
|
||||
if self.pattern_exclude and self.pattern_exclude.match(hostname):
|
||||
return
|
||||
|
||||
# Add to index
|
||||
self.index[dest] = [region, instance.id]
|
||||
self.index[hostname] = [region, instance.id]
|
||||
|
||||
# Inventory: Group by instance ID (always a group of 1)
|
||||
if self.group_by_instance_id:
|
||||
self.inventory[instance.id] = [dest]
|
||||
self.inventory[instance.id] = [hostname]
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'instances', instance.id)
|
||||
|
||||
# Inventory: Group by region
|
||||
if self.group_by_region:
|
||||
self.push(self.inventory, region, dest)
|
||||
self.push(self.inventory, region, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'regions', region)
|
||||
|
||||
# Inventory: Group by availability zone
|
||||
if self.group_by_availability_zone:
|
||||
self.push(self.inventory, instance.placement, dest)
|
||||
self.push(self.inventory, instance.placement, hostname)
|
||||
if self.nested_groups:
|
||||
if self.group_by_region:
|
||||
self.push_group(self.inventory, region, instance.placement)
|
||||
@ -656,28 +744,28 @@ class Ec2Inventory(object):
|
||||
# Inventory: Group by Amazon Machine Image (AMI) ID
|
||||
if self.group_by_ami_id:
|
||||
ami_id = self.to_safe(instance.image_id)
|
||||
self.push(self.inventory, ami_id, dest)
|
||||
self.push(self.inventory, ami_id, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'images', ami_id)
|
||||
|
||||
# Inventory: Group by instance type
|
||||
if self.group_by_instance_type:
|
||||
type_name = self.to_safe('type_' + instance.instance_type)
|
||||
self.push(self.inventory, type_name, dest)
|
||||
self.push(self.inventory, type_name, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'types', type_name)
|
||||
|
||||
# Inventory: Group by key pair
|
||||
if self.group_by_key_pair and instance.key_name:
|
||||
key_name = self.to_safe('key_' + instance.key_name)
|
||||
self.push(self.inventory, key_name, dest)
|
||||
self.push(self.inventory, key_name, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'keys', key_name)
|
||||
|
||||
# Inventory: Group by VPC
|
||||
if self.group_by_vpc_id and instance.vpc_id:
|
||||
vpc_id_name = self.to_safe('vpc_id_' + instance.vpc_id)
|
||||
self.push(self.inventory, vpc_id_name, dest)
|
||||
self.push(self.inventory, vpc_id_name, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'vpcs', vpc_id_name)
|
||||
|
||||
@ -686,44 +774,51 @@ class Ec2Inventory(object):
|
||||
try:
|
||||
for group in instance.groups:
|
||||
key = self.to_safe("security_group_" + group.name)
|
||||
self.push(self.inventory, key, dest)
|
||||
self.push(self.inventory, key, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'security_groups', key)
|
||||
except AttributeError:
|
||||
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
|
||||
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
|
||||
'Please upgrade boto >= 2.3.0.']))
|
||||
|
||||
# Inventory: Group by tag keys
|
||||
if self.group_by_tag_keys:
|
||||
for k, v in instance.tags.items():
|
||||
if v:
|
||||
key = self.to_safe("tag_" + k + "=" + v)
|
||||
if self.expand_csv_tags and v and ',' in v:
|
||||
values = map(lambda x: x.strip(), v.split(','))
|
||||
else:
|
||||
key = self.to_safe("tag_" + k)
|
||||
self.push(self.inventory, key, dest)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))
|
||||
values = [v]
|
||||
|
||||
for v in values:
|
||||
if v:
|
||||
self.push_group(self.inventory, self.to_safe("tag_" + k), key)
|
||||
key = self.to_safe("tag_" + k + "=" + v)
|
||||
else:
|
||||
key = self.to_safe("tag_" + k)
|
||||
self.push(self.inventory, key, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))
|
||||
if v:
|
||||
self.push_group(self.inventory, self.to_safe("tag_" + k), key)
|
||||
|
||||
# Inventory: Group by Route53 domain names if enabled
|
||||
if self.route53_enabled and self.group_by_route53_names:
|
||||
route53_names = self.get_instance_route53_names(instance)
|
||||
for name in route53_names:
|
||||
self.push(self.inventory, name, dest)
|
||||
self.push(self.inventory, name, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'route53', name)
|
||||
|
||||
# Global Tag: instances without tags
|
||||
if self.group_by_tag_none and len(instance.tags) == 0:
|
||||
self.push(self.inventory, 'tag_none', dest)
|
||||
self.push(self.inventory, 'tag_none', hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'tags', 'tag_none')
|
||||
|
||||
# Global Tag: tag all EC2 instances
|
||||
self.push(self.inventory, 'ec2', dest)
|
||||
self.push(self.inventory, 'ec2', hostname)
|
||||
|
||||
self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_instance(instance)
|
||||
self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)
|
||||
self.inventory["_meta"]["hostvars"][hostname]['ansible_ssh_host'] = dest
|
||||
|
||||
|
||||
def add_rds_instance(self, instance, region):
|
||||
@ -741,24 +836,38 @@ class Ec2Inventory(object):
|
||||
# Skip instances we cannot address (e.g. private VPC subnet)
|
||||
return
|
||||
|
||||
# Set the inventory name
|
||||
hostname = None
|
||||
if self.hostname_variable:
|
||||
if self.hostname_variable.startswith('tag_'):
|
||||
hostname = instance.tags.get(self.hostname_variable[4:], None)
|
||||
else:
|
||||
hostname = getattr(instance, self.hostname_variable)
|
||||
|
||||
# If we can't get a nice hostname, use the destination address
|
||||
if not hostname:
|
||||
hostname = dest
|
||||
|
||||
hostname = self.to_safe(hostname).lower()
|
||||
|
||||
# Add to index
|
||||
self.index[dest] = [region, instance.id]
|
||||
self.index[hostname] = [region, instance.id]
|
||||
|
||||
# Inventory: Group by instance ID (always a group of 1)
|
||||
if self.group_by_instance_id:
|
||||
self.inventory[instance.id] = [dest]
|
||||
self.inventory[instance.id] = [hostname]
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'instances', instance.id)
|
||||
|
||||
# Inventory: Group by region
|
||||
if self.group_by_region:
|
||||
self.push(self.inventory, region, dest)
|
||||
self.push(self.inventory, region, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'regions', region)
|
||||
|
||||
# Inventory: Group by availability zone
|
||||
if self.group_by_availability_zone:
|
||||
self.push(self.inventory, instance.availability_zone, dest)
|
||||
self.push(self.inventory, instance.availability_zone, hostname)
|
||||
if self.nested_groups:
|
||||
if self.group_by_region:
|
||||
self.push_group(self.inventory, region, instance.availability_zone)
|
||||
@ -767,14 +876,14 @@ class Ec2Inventory(object):
|
||||
# Inventory: Group by instance type
|
||||
if self.group_by_instance_type:
|
||||
type_name = self.to_safe('type_' + instance.instance_class)
|
||||
self.push(self.inventory, type_name, dest)
|
||||
self.push(self.inventory, type_name, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'types', type_name)
|
||||
|
||||
# Inventory: Group by VPC
|
||||
if self.group_by_vpc_id and instance.subnet_group and instance.subnet_group.vpc_id:
|
||||
vpc_id_name = self.to_safe('vpc_id_' + instance.subnet_group.vpc_id)
|
||||
self.push(self.inventory, vpc_id_name, dest)
|
||||
self.push(self.inventory, vpc_id_name, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'vpcs', vpc_id_name)
|
||||
|
||||
@ -783,31 +892,32 @@ class Ec2Inventory(object):
|
||||
try:
|
||||
if instance.security_group:
|
||||
key = self.to_safe("security_group_" + instance.security_group.name)
|
||||
self.push(self.inventory, key, dest)
|
||||
self.push(self.inventory, key, hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'security_groups', key)
|
||||
|
||||
except AttributeError:
|
||||
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
|
||||
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
|
||||
'Please upgrade boto >= 2.3.0.']))
|
||||
|
||||
|
||||
# Inventory: Group by engine
|
||||
if self.group_by_rds_engine:
|
||||
self.push(self.inventory, self.to_safe("rds_" + instance.engine), dest)
|
||||
self.push(self.inventory, self.to_safe("rds_" + instance.engine), hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'rds_engines', self.to_safe("rds_" + instance.engine))
|
||||
|
||||
# Inventory: Group by parameter group
|
||||
if self.group_by_rds_parameter_group:
|
||||
self.push(self.inventory, self.to_safe("rds_parameter_group_" + instance.parameter_group.name), dest)
|
||||
self.push(self.inventory, self.to_safe("rds_parameter_group_" + instance.parameter_group.name), hostname)
|
||||
if self.nested_groups:
|
||||
self.push_group(self.inventory, 'rds_parameter_groups', self.to_safe("rds_parameter_group_" + instance.parameter_group.name))
|
||||
|
||||
# Global Tag: all RDS instances
|
||||
self.push(self.inventory, 'rds', dest)
|
||||
self.push(self.inventory, 'rds', hostname)
|
||||
|
||||
self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_instance(instance)
|
||||
self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)
|
||||
self.inventory["_meta"]["hostvars"][hostname]['ansible_ssh_host'] = dest
|
||||
|
||||
def add_elasticache_cluster(self, cluster, region):
|
||||
''' Adds an ElastiCache cluster to the inventory and index, as long as
|
||||
@ -1120,6 +1230,8 @@ class Ec2Inventory(object):
|
||||
instance_vars['ec2_placement'] = value.zone
|
||||
elif key == 'ec2_tags':
|
||||
for k, v in value.items():
|
||||
if self.expand_csv_tags and ',' in v:
|
||||
v = map(lambda x: x.strip(), v.split(','))
|
||||
key = self.to_safe('ec2_tag_' + k)
|
||||
instance_vars[key] = v
|
||||
elif key == 'ec2_groups':
|
||||
@ -1294,7 +1406,7 @@ class Ec2Inventory(object):
|
||||
def to_safe(self, word):
|
||||
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
|
||||
regex = "[^A-Za-z0-9\_"
|
||||
if self.replace_dash_in_groups:
|
||||
if not self.replace_dash_in_groups:
|
||||
regex += "\-"
|
||||
return re.sub(regex + "]", "_", word)
|
||||
|
||||
@ -1310,4 +1422,3 @@ class Ec2Inventory(object):
|
||||
|
||||
# Run the script
|
||||
Ec2Inventory()
|
||||
|
||||
|
||||
@ -540,6 +540,7 @@ EC2_REGION_NAMES = {
|
||||
'ap-southeast-1': 'Asia Pacific (Singapore)',
|
||||
'ap-southeast-2': 'Asia Pacific (Sydney)',
|
||||
'ap-northeast-1': 'Asia Pacific (Tokyo)',
|
||||
'ap-northeast-2': 'Asia Pacific (Seoul)',
|
||||
'sa-east-1': 'South America (Sao Paulo)',
|
||||
'us-gov-west-1': 'US West (GovCloud)',
|
||||
'cn-north-1': 'China (Beijing)',
|
||||
|
||||
@@ -6,6 +6,7 @@ import logging

# Django
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.conf import settings as django_settings

# django-auth-ldap
@@ -104,6 +105,18 @@ class RADIUSBackend(BaseRADIUSBackend):
            return None
        return super(RADIUSBackend, self).get_user(user_id)

    def get_django_user(self, username, password=None):
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            user = User(username=username)

        if password is not None:
            user.set_unusable_password()
            user.save()

        return user


class TowerSAMLIdentityProvider(BaseSAMLIdentityProvider):
    '''
@@ -163,7 +176,7 @@ class SAMLAuth(BaseSAMLAuth):
        return super(SAMLAuth, self).get_user(user_id)


def _update_m2m_from_groups(user, ldap_user, rel, opts, remove=False):
def _update_m2m_from_groups(user, ldap_user, rel, opts, remove=True):
    '''
    Hepler function to update m2m relationship based on LDAP group membership.
    '''
@@ -207,7 +220,7 @@ def on_populate_user(sender, **kwargs):
    org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
    for org_name, org_opts in org_map.items():
        org, created = Organization.objects.get_or_create(name=org_name)
        remove = bool(org_opts.get('remove', False))
        remove = bool(org_opts.get('remove', True))
        admins_opts = org_opts.get('admins', None)
        remove_admins = bool(org_opts.get('remove_admins', remove))
        _update_m2m_from_groups(user, ldap_user, org.admin_role.members, admins_opts,
@@ -225,7 +238,7 @@ def on_populate_user(sender, **kwargs):
        org, created = Organization.objects.get_or_create(name=team_opts['organization'])
        team, created = Team.objects.get_or_create(name=team_name, organization=org)
        users_opts = team_opts.get('users', None)
        remove = bool(team_opts.get('remove', False))
        remove = bool(team_opts.get('remove', True))
        _update_m2m_from_groups(user, ldap_user, team.member_role.users, users_opts,
                                remove)


@@ -44,7 +44,7 @@ def prevent_inactive_login(backend, details, user=None, *args, **kwargs):
        raise AuthInactive(backend)


def _update_m2m_from_expression(user, rel, expr, remove=False):
def _update_m2m_from_expression(user, rel, expr, remove=True):
    '''
    Helper function to update m2m relationship based on user matching one or
    more expressions.
@@ -95,7 +95,7 @@ def update_user_orgs(backend, details, user=None, *args, **kwargs):
            continue

        # Update org admins from expression(s).
        remove = bool(org_opts.get('remove', False))
        remove = bool(org_opts.get('remove', True))
        admins_expr = org_opts.get('admins', None)
        remove_admins = bool(org_opts.get('remove_admins', remove))
        _update_m2m_from_expression(user, org.admin_role.members, admins_expr, remove_admins)
@@ -133,5 +133,5 @@ def update_user_teams(backend, details, user=None, *args, **kwargs):
        # Update team members from expression(s).
        team = Team.objects.get_or_create(name=team_name, organization=org)[0]
        users_expr = team_opts.get('users', None)
        remove = bool(team_opts.get('remove', False))
        remove = bool(team_opts.get('remove', True))
        _update_m2m_from_expression(user, team.member_role.members, users_expr, remove)
@ -37,13 +37,13 @@ body .navbar {
|
||||
border-color: #E8E8E8;
|
||||
}
|
||||
body .navbar .navbar-brand {
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
padding: 0;
|
||||
font-size: 14px;
|
||||
}
|
||||
body .navbar .navbar-brand:focus,
|
||||
body .navbar .navbar-brand:hover {
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
}
|
||||
body .navbar .navbar-brand img {
|
||||
display: inline-block;
|
||||
@ -60,7 +60,7 @@ body .navbar .navbar-brand > span {
|
||||
body .navbar .navbar-title {
|
||||
float: left;
|
||||
height: 50px;
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
padding: 0;
|
||||
font-size: 14px;
|
||||
display: none;
|
||||
@ -74,19 +74,19 @@ body.show-title .navbar .navbar-title {
|
||||
display: inline-block;
|
||||
}
|
||||
body .navbar .navbar-nav > li > a {
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
body .navbar .navbar-nav > li > a:focus,
|
||||
body .navbar .navbar-nav > li > a:hover {
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
}
|
||||
body .navbar .navbar-nav > li > a > span.glyphicon {
|
||||
font-size: 20px;
|
||||
padding-right: 5px;
|
||||
padding-left: 5px;
|
||||
color: #B7B7B7;
|
||||
color: #848992;
|
||||
}
|
||||
|
||||
body .page-header {
|
||||
@ -110,7 +110,7 @@ body .description .hide-description span.glyphicon {
|
||||
font-size: 20px;
|
||||
}
|
||||
body .description .hide-description:hover span.glyphicon {
|
||||
color: #B7B7B7;
|
||||
color: #848992;
|
||||
}
|
||||
body ul.breadcrumb,
|
||||
body .description,
|
||||
@ -167,7 +167,7 @@ body .form-actions button {
|
||||
body .form-horizontal .control-label {
|
||||
text-transform: uppercase;
|
||||
font-weight: normal;
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
}
|
||||
body textarea.form-control {
|
||||
font-family: Menlo, Monaco, Consolas, "Courier New", monospace;
|
||||
@ -182,22 +182,22 @@ body .description pre {
|
||||
}
|
||||
|
||||
body .tooltip.bottom .tooltip-arrow {
|
||||
border-bottom-color: #848992;
|
||||
border-bottom-color: #707070;
|
||||
}
|
||||
body .tooltip.top .tooltip-arrow {
|
||||
border-top-color: #848992;
|
||||
border-top-color: #707070;
|
||||
}
|
||||
body .tooltip.left .tooltip-arrow {
|
||||
border-left-color: #848992;
|
||||
border-left-color: #707070;
|
||||
}
|
||||
body .tooltip.right .tooltip-arrow {
|
||||
border-right-color: #848992;
|
||||
border-right-color: #707070;
|
||||
}
|
||||
body .tooltip.in {
|
||||
opacity: 1;
|
||||
}
|
||||
body .tooltip-inner {
|
||||
background-color: #848992;
|
||||
background-color: #707070;
|
||||
}
|
||||
|
||||
body .btn {
|
||||
@ -205,7 +205,7 @@ body .btn {
|
||||
}
|
||||
.btn-primary {
|
||||
background-color: #FFFFFF;
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
border: 1px solid #E8E8E8;
|
||||
}
|
||||
.btn-primary:hover,
|
||||
@ -224,14 +224,14 @@ body .btn {
|
||||
.open>.dropdown-toggle.btn-primary:hover,
|
||||
.open>.dropdown-toggle.btn-primary {
|
||||
background-color: #FAFAFA;
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
border: 1px solid #E8E8E8;
|
||||
}
|
||||
|
||||
body .tab-content .btn-primary {
|
||||
background-color: #3CB878;
|
||||
background-color: #5cb85c;
|
||||
color: #FFFFFF;
|
||||
border: 1px solid #3CB878;
|
||||
border: 1px solid #5cb85c;
|
||||
}
|
||||
body .tab-content .btn-primary:hover,
|
||||
body .tab-content .btn-primary.focus,
|
||||
@ -244,9 +244,9 @@ body .tab-content .btn-primary.active:hover,
|
||||
body .tab-content .btn-primary:active.focus,
|
||||
body .tab-content .btn-primary:active:focus,
|
||||
body .tab-content .btn-primary:active:hover {
|
||||
background-color: #60D66F;
|
||||
background-color: #449d44;
|
||||
color: #FFFFFF;
|
||||
border: 1px solid #60D66F;
|
||||
border: 1px solid #449d44;
|
||||
}
|
||||
|
||||
.btn-danger {
|
||||
@ -265,8 +265,8 @@ body .tab-content .btn-primary:active:hover {
|
||||
.btn-danger:active.focus,
|
||||
.btn-danger:active:focus,
|
||||
.btn-danger:active:hover {
|
||||
background-color: #F0AD4E;
|
||||
border-color: #F0AD4E;
|
||||
background-color: #c9302c;
|
||||
border-color: #c9302c;
|
||||
}
|
||||
|
||||
body .wrapper {
|
||||
@ -283,7 +283,7 @@ body #footer {
|
||||
overflow: hidden;
|
||||
margin-bottom: 0;
|
||||
height: 40px;
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
}
|
||||
body #footer .footer-logo {
|
||||
text-align: left;
|
||||
@ -302,7 +302,7 @@ body #footer .footer-copyright {
|
||||
padding-top: 10px;
|
||||
}
|
||||
body #footer .footer-copyright a {
|
||||
color: #848992;
|
||||
color: #707070;
|
||||
}
|
||||
|
||||
@media screen and (min-width: 768px) {
|
||||
@ -329,7 +329,7 @@ body #footer .footer-copyright a {
|
||||
border-color: #E8E8E8;
|
||||
}
|
||||
body .navbar .navbar-toggle .icon-bar {
|
||||
background-color: #B7B7B7;
|
||||
background-color: #848992;
|
||||
}
|
||||
body .navbar .tooltip {
|
||||
visibility: hidden;
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 6.4 KiB After Width: | Height: | Size: 15 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 6.4 KiB After Width: | Height: | Size: 15 KiB |
@ -962,6 +962,10 @@ input[type="checkbox"].checkbox-no-label {
|
||||
.checkbox-inline, .radio-inline {
|
||||
margin-right: 10px;
|
||||
}
|
||||
|
||||
.checkbox-inline.stack-inline {
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
|
||||
.checkbox-options {
|
||||
@ -972,6 +976,7 @@ input[type="checkbox"].checkbox-no-label {
|
||||
/* Display list actions next to search widget */
|
||||
.list-actions {
|
||||
text-align: right;
|
||||
margin-bottom: 20px;
|
||||
|
||||
.fa-lg {
|
||||
vertical-align: -8%;
|
||||
@ -1966,7 +1971,6 @@ tr td button i {
|
||||
}
|
||||
|
||||
.list-actions {
|
||||
margin-bottom: 20px;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
@ -2235,3 +2239,11 @@ a:hover {
|
||||
.dropdown-menu>li>a {
|
||||
padding: 3px 10px;
|
||||
}
|
||||
|
||||
#scheduled-jobs-tab .List-header {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.ui-widget {
|
||||
font-family: 'Open Sans';
|
||||
}
|
||||
|
||||
@ -173,12 +173,26 @@
|
||||
}
|
||||
|
||||
.Form-formGroup--fullWidth {
|
||||
max-width: none;
|
||||
max-width: none !important;
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
.Form-formGroup--checkbox{
|
||||
display: flex;
|
||||
align-items: flex-end;
|
||||
align-items: flex-start;
|
||||
}
|
||||
|
||||
.Form-checkbox--stacked {
|
||||
label {
|
||||
display: block;
|
||||
}
|
||||
}
|
||||
|
||||
.Form-textUneditable {
|
||||
.Form-textInput {
|
||||
border: none;
|
||||
padding: 0;
|
||||
}
|
||||
}
|
||||
|
||||
.Form-subForm {
|
||||
@ -220,14 +234,12 @@
|
||||
}
|
||||
|
||||
.Form-textArea{
|
||||
background-color: @field-secondary-bg!important;
|
||||
background-color: @field-secondary-bg!important;
|
||||
border-radius: 5px;
|
||||
color: @field-input-text;
|
||||
}
|
||||
|
||||
.Form-textInput{
|
||||
height: 30px!important;
|
||||
height: 30px;
|
||||
background-color: @field-secondary-bg!important;
|
||||
border-radius: 5px;
|
||||
border:1px solid @field-border;
|
||||
@ -242,6 +254,19 @@
|
||||
font-family: Menlo,Monaco,Consolas,"Courier New",monospace!important;
|
||||
}
|
||||
|
||||
.Form-alertblock {
|
||||
margin: 20px 0;
|
||||
font-size: 12px;
|
||||
width: 100%;
|
||||
padding: 15px;
|
||||
padding-top: 10px;
|
||||
margin-bottom: 15px;
|
||||
border-radius: 4px;
|
||||
border: 1px solid @login-notice-border;
|
||||
background-color: @login-notice-bg;
|
||||
color: @login-notice-text;
|
||||
}
|
||||
|
||||
.ui-spinner{
|
||||
height: 30px;
|
||||
background-color: @field-secondary-bg;
|
||||
@ -282,7 +307,7 @@
|
||||
}
|
||||
|
||||
.Form-dropDown {
|
||||
height: 30px !important;
|
||||
min-height: 30px !important;
|
||||
border-radius: 5px !important;
|
||||
border:1px solid @field-border!important;
|
||||
color: @field-input-text!important;
|
||||
@ -324,6 +349,12 @@
|
||||
|
||||
.select2-dropdown{
|
||||
border:1px solid @field-border;
|
||||
|
||||
}
|
||||
|
||||
.select2-container--open .select2-dropdown--below {
|
||||
margin-top: -1px;
|
||||
border-top: 1px solid @field-border;
|
||||
}
|
||||
|
||||
.Form-dropDown:focus{
|
||||
@ -427,6 +458,10 @@ input[type='radio']:checked:before {
|
||||
outline:none;
|
||||
}
|
||||
|
||||
.Form-inputLabelContainer {
|
||||
width: 100%;
|
||||
display: block !important;
|
||||
}
|
||||
.Form-inputLabel{
|
||||
text-transform: uppercase;
|
||||
color: @default-interface-txt;
|
||||
@ -437,6 +472,16 @@ input[type='radio']:checked:before {
|
||||
.noselect;
|
||||
}
|
||||
|
||||
.Form-labelAction {
|
||||
text-transform: uppercase;
|
||||
font-weight: normal;
|
||||
font-size: 0.8em;
|
||||
padding-left:5px;
|
||||
float: right;
|
||||
margin-top: 3px;
|
||||
.noselect;
|
||||
}
|
||||
|
||||
.Form-buttons{
|
||||
height: 30px;
|
||||
display: flex;
|
||||
@ -494,7 +539,6 @@ input[type='radio']:checked:before {
|
||||
|
||||
.Form-surveyButton {
|
||||
background-color: @default-link;
|
||||
margin-right: 20px;
|
||||
color: @default-bg;
|
||||
text-transform: uppercase;
|
||||
padding-left:15px;
|
||||
@ -538,3 +582,8 @@ input[type='radio']:checked:before {
|
||||
padding-right: 50px;
|
||||
}
|
||||
}
|
||||
|
||||
.action_column {
|
||||
float: right;
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
@ -214,7 +214,7 @@
|
||||
}
|
||||
|
||||
#job-detail-container {
|
||||
|
||||
|
||||
.well {
|
||||
overflow: hidden;
|
||||
}
|
||||
@ -276,6 +276,8 @@
|
||||
overflow-x: hidden;
|
||||
overflow-y: auto;
|
||||
background-color: @white;
|
||||
min-height: 40px;
|
||||
|
||||
.row {
|
||||
border-top: 1px solid @grey;
|
||||
}
|
||||
@ -318,7 +320,7 @@
|
||||
|
||||
#play-section {
|
||||
.table-detail {
|
||||
height: 150px;
|
||||
min-height: 40px;
|
||||
}
|
||||
}
|
||||
|
||||
@ -421,7 +423,6 @@
|
||||
table-layout: fixed;
|
||||
}
|
||||
#hosts-table-detail {
|
||||
height: 150px;
|
||||
background-color: @white;
|
||||
}
|
||||
#hosts-table-detail table {
|
||||
|
||||
32
awx/ui/client/lib/lrInfiniteScroll/.bower.json
Normal file
@ -0,0 +1,32 @@
{
  "name": "lrInfiniteScroll",
  "main": "lrInfiniteScroll.js",
  "version": "1.0.0",
  "homepage": "https://github.com/lorenzofox3/lrInfiniteScroll",
  "authors": [
    "lorenzofox3 <laurent34azerty@gmail.com>"
  ],
  "description": "angular directive to handle element scroll",
  "keywords": [
    "angular",
    "scroll",
    "inifinite"
  ],
  "license": "MIT",
  "ignore": [
    "**/.*",
    "node_modules",
    "bower_components",
    "test",
    "tests"
  ],
  "_release": "1.0.0",
  "_resolution": {
    "type": "version",
    "tag": "1.0.0",
    "commit": "c833e9d8ff56d6c66e2a21ed7f27ad840f159a8b"
  },
  "_source": "https://github.com/lorenzofox3/lrInfiniteScroll.git",
  "_target": "~1.0.0",
  "_originalSource": "lrInfiniteScroll"
}
2
awx/ui/client/lib/lrInfiniteScroll/index.js
Normal file
@ -0,0 +1,2 @@
require('./lrInfiniteScroll');
module.exports = 'lrInfiniteScroll';
@ -2,13 +2,12 @@
|
||||
'use strict';
|
||||
var module = ng.module('lrInfiniteScroll', []);
|
||||
|
||||
module.directive('lrInfiniteScroll', ['$log', '$timeout', function ($log, timeout) {
|
||||
module.directive('lrInfiniteScroll', ['$timeout', function (timeout) {
|
||||
return{
|
||||
link: function (scope, element, attr) {
|
||||
var
|
||||
lengthThreshold = attr.scrollThreshold || 50,
|
||||
timeThreshold = attr.timeThreshold || 400,
|
||||
direction = attr.direction || 'down',
|
||||
handler = scope.$eval(attr.lrInfiniteScroll),
|
||||
promise = null,
|
||||
lastRemaining = 9999;
|
||||
@ -20,14 +19,14 @@
|
||||
handler = ng.noop;
|
||||
}
|
||||
|
||||
$log.debug('lrInfiniteScroll: ' + attr.lrInfiniteScroll);
|
||||
|
||||
element.bind('scroll', function () {
|
||||
var remaining = (direction === 'down') ? element[0].scrollHeight - (element[0].clientHeight + element[0].scrollTop) : element[0].scrollTop;
|
||||
// if we have reached the threshold and we scroll down
|
||||
if ((direction === 'down' && remaining < lengthThreshold && (remaining - lastRemaining) < 0) ||
|
||||
direction === 'up' && remaining < lengthThreshold) {
|
||||
//if there is already a timer running which has not expired yet we have to cancel it and restart the timer
|
||||
var
|
||||
remaining = element[0].scrollHeight - (element[0].clientHeight + element[0].scrollTop);
|
||||
|
||||
//if we have reached the threshold and we scroll down
|
||||
if (remaining < lengthThreshold && (remaining - lastRemaining) < 0) {
|
||||
|
||||
//if there is already a timer running which has no expired yet we have to cancel it and restart the timer
|
||||
if (promise !== null) {
|
||||
timeout.cancel(promise);
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
export default
|
||||
['$scope', '$state', 'CheckLicense', function($scope, $state, CheckLicense){
|
||||
['$scope', '$state', 'ConfigService', function($scope, $state, ConfigService){
|
||||
var processVersion = function(version){
|
||||
// prettify version & calculate padding
|
||||
// e,g 3.0.0-0.git201602191743/ -> 3.0.0
|
||||
@ -16,10 +16,10 @@ export default
|
||||
return paddedStr;
|
||||
};
|
||||
var init = function(){
|
||||
CheckLicense.get()
|
||||
.then(function(res){
|
||||
$scope.subscription = res.data.license_info.subscription_name;
|
||||
$scope.version = processVersion(res.data.version);
|
||||
ConfigService.getConfig()
|
||||
.then(function(config){
|
||||
$scope.subscription = config.license_info.subscription_name;
|
||||
$scope.version = processVersion(config.version);
|
||||
$('#about-modal').modal('show');
|
||||
});
|
||||
};
|
||||
|
||||
@ -30,25 +30,25 @@ export default ['$rootScope', '$scope', 'GetBasePath', 'Rest', '$q', 'Wait', 'Pr
|
||||
|
||||
// array for all possible roles for the object
|
||||
scope.roles = Object
|
||||
.keys(scope.object.summary_fields.roles)
|
||||
.keys(scope.object.summary_fields.object_roles)
|
||||
.map(function(key) {
|
||||
return {
|
||||
value: scope.object.summary_fields
|
||||
.roles[key].id,
|
||||
.object_roles[key].id,
|
||||
label: scope.object.summary_fields
|
||||
.roles[key].name };
|
||||
.object_roles[key].name };
|
||||
});
|
||||
|
||||
// TODO: get working with api
|
||||
// array w roles and descriptions for key
|
||||
scope.roleKey = Object
|
||||
.keys(scope.object.summary_fields.roles)
|
||||
.keys(scope.object.summary_fields.object_roles)
|
||||
.map(function(key) {
|
||||
return {
|
||||
name: scope.object.summary_fields
|
||||
.roles[key].name,
|
||||
.object_roles[key].name,
|
||||
description: scope.object.summary_fields
|
||||
.roles[key].description };
|
||||
.object_roles[key].description };
|
||||
});
|
||||
|
||||
scope.showKeyPane = false;
|
||||
|
||||
@ -41,11 +41,11 @@
|
||||
</div>
|
||||
|
||||
<div class="AddPermissions-list" ng-show="usersSelected">
|
||||
<add-permissions-list type="users">
|
||||
<add-permissions-list all-selected="allSelected" type="users">
|
||||
</add-permissions-list>
|
||||
</div>
|
||||
<div class="AddPermissions-list" ng-show="teamsSelected">
|
||||
<add-permissions-list type="teams">
|
||||
<add-permissions-list all-selected="allSelected" type="teams">
|
||||
</add-permissions-list>
|
||||
</div>
|
||||
|
||||
|
||||
@ -13,6 +13,7 @@ export default
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
allSelected: '='
|
||||
},
|
||||
template: "<div class='addPermissionsList-inner'></div>",
|
||||
link: function(scope, element, attrs, ctrl) {
|
||||
@ -50,6 +51,23 @@ export default
|
||||
PaginateInit({ scope: scope,
|
||||
list: list, url: url, pageSize: 5 });
|
||||
|
||||
if (scope.removePostRefresh) {
|
||||
scope.removePostRefresh();
|
||||
}
|
||||
scope.removePostRefresh = scope.$on('PostRefresh', function () {
|
||||
if(scope.allSelected && scope.allSelected.length > 0) {
|
||||
// We need to check to see if any of the selected items are now in our list!
|
||||
for(var i=0; i<scope.allSelected.length; i++) {
|
||||
for(var j=0; j<scope[set].length; j++) {
|
||||
if(scope.allSelected[i].id === scope[set][j].id && scope.allSelected[i].type === scope[set][j].type) {
|
||||
// If so, let's go ahead and mark it as selected so that select-list-item knows to check the box
|
||||
scope[set][j].isSelected = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
scope.search(list.iterator);
|
||||
});
|
||||
}
|
||||
|
||||
@ -7,7 +7,7 @@
|
||||
|
||||
export default function() {
|
||||
return {
|
||||
|
||||
searchSize: 'col-lg-12 col-md-12 col-sm-12 col-xs-12',
|
||||
name: 'teams',
|
||||
iterator: 'team',
|
||||
listTitleBadge: false,
|
||||
@ -15,13 +15,13 @@
|
||||
multiSelectExtended: true,
|
||||
index: false,
|
||||
hover: true,
|
||||
|
||||
emptyListText : 'No Teams exist',
|
||||
fields: {
|
||||
name: {
|
||||
key: true,
|
||||
label: 'name'
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
@ -7,7 +7,7 @@
|
||||
|
||||
export default function() {
|
||||
return {
|
||||
|
||||
searchSize: 'col-lg-12 col-md-12 col-sm-12 col-xs-12',
|
||||
name: 'users',
|
||||
iterator: 'user',
|
||||
title: false,
|
||||
@ -16,6 +16,7 @@
|
||||
multiSelectExtended: true,
|
||||
index: false,
|
||||
hover: true,
|
||||
emptyListText : 'No Users exist',
|
||||
|
||||
fields: {
|
||||
first_name: {
|
||||
|
||||
@ -37,7 +37,7 @@
|
||||
border-top-left-radius: 0px;
|
||||
border-bottom-left-radius: 0px;
|
||||
border-right: 0;
|
||||
max-wdith: ~"calc(100% - 23px)";
|
||||
max-width: ~"calc(100% - 23px)";
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
|
||||
@ -23,7 +23,7 @@ export default
|
||||
return i.role;
|
||||
}))
|
||||
.filter((role) => {
|
||||
return !!attrs.teamRoleList == !!role.team_id;
|
||||
return Boolean(attrs.teamRoleList) === Boolean(role.team_id);
|
||||
})
|
||||
.sort((a, b) => {
|
||||
if (a.name
|
||||
|
||||
@ -18,25 +18,26 @@ export default {
|
||||
label: "ACTIVITY STREAM"
|
||||
},
|
||||
resolve: {
|
||||
features: ['FeaturesService', 'ProcessErrors', '$state', function(FeaturesService, ProcessErrors, $state) {
|
||||
FeaturesService.get()
|
||||
.then(function(features) {
|
||||
features: ['FeaturesService', 'ProcessErrors', '$state', '$rootScope',
|
||||
function(FeaturesService, ProcessErrors, $state, $rootScope) {
|
||||
var features = FeaturesService.get();
|
||||
if(features){
|
||||
if(FeaturesService.featureEnabled('activity_streams')) {
|
||||
// Good to go - pass the features along to the controller.
|
||||
return features;
|
||||
}
|
||||
else {
|
||||
// The activity stream feature isn't enabled. Take the user
|
||||
// back to the dashboard
|
||||
$state.go('dashboard');
|
||||
}
|
||||
})
|
||||
.catch(function (response) {
|
||||
ProcessErrors(null, response.data, response.status, null, {
|
||||
hdr: 'Error!',
|
||||
msg: 'Failed to get feature info. GET returned status: ' +
|
||||
response.status
|
||||
});
|
||||
}
|
||||
$rootScope.featuresConfigured.promise.then(function(features){
|
||||
if(features){
|
||||
if(FeaturesService.featureEnabled('activity_streams')) {
|
||||
return features;
|
||||
}
|
||||
else {
|
||||
$state.go('dashboard');
|
||||
}
|
||||
}
|
||||
});
|
||||
}],
|
||||
subTitle:
|
||||
|
||||
@ -21,7 +21,7 @@ export default ['templateUrl', function(templateUrl) {
|
||||
{label: 'Inventories', value: 'inventory'},
|
||||
{label: 'Inventory Scripts', value: 'inventory_script'},
|
||||
{label: 'Job Templates', value: 'job_template'},
|
||||
{label: 'Management Jobs', value: 'management_job'},
|
||||
{label: 'Jobs', value: 'job'},
|
||||
{label: 'Organizations', value: 'organization'},
|
||||
{label: 'Projects', value: 'project'},
|
||||
{label: 'Schedules', value: 'schedule'},
|
||||
|
||||
@ -1,14 +0,0 @@
|
||||
/*************************************************
|
||||
* Copyright (c) 2015 Ansible, Inc.
|
||||
*
|
||||
* All Rights Reserved
|
||||
*************************************************/
|
||||
|
||||
import {templateUrl} from '../shared/template-url/template-url.factory';
|
||||
|
||||
export default {
|
||||
route: '/adhoc',
|
||||
name: 'inventoryManage.adhoc',
|
||||
templateUrl: templateUrl('adhoc/adhoc'),
|
||||
controller: 'adhocController'
|
||||
};
|
||||
@ -1,10 +0,0 @@
|
||||
import route from './adhoc.route';
|
||||
import adhocController from './adhoc.controller';
|
||||
import form from './adhoc.form';
|
||||
|
||||
export default angular.module('adhoc', [])
|
||||
.controller('adhocController', adhocController)
|
||||
.run(['$stateExtender', function($stateExtender) {
|
||||
$stateExtender.addState(route);
|
||||
}])
|
||||
.factory('adhocForm', form);
|
||||
@ -47,12 +47,12 @@ import browserData from './browser-data/main';
|
||||
import dashboard from './dashboard/main';
|
||||
import moment from './shared/moment/main';
|
||||
import templateUrl from './shared/template-url/main';
|
||||
import adhoc from './adhoc/main';
|
||||
import login from './login/main';
|
||||
import activityStream from './activity-stream/main';
|
||||
import standardOut from './standard-out/main';
|
||||
import JobTemplates from './job-templates/main';
|
||||
import search from './search/main';
|
||||
import credentials from './credentials/main';
|
||||
import {ProjectsList, ProjectsAdd, ProjectsEdit} from './controllers/Projects';
|
||||
import OrganizationsList from './organizations/list/organizations-list.controller';
|
||||
import OrganizationsAdd from './organizations/add/organizations-add.controller';
|
||||
@ -67,9 +67,9 @@ import './shared/Modal';
|
||||
import './shared/prompt-dialog';
|
||||
import './shared/directives';
|
||||
import './shared/filters';
|
||||
import './shared/InventoryTree';
|
||||
import './shared/Socket';
|
||||
import './shared/features/main';
|
||||
import config from './shared/config/main';
|
||||
import './login/authenticationServices/pendo/ng-pendo';
|
||||
import footer from './footer/main';
|
||||
import scheduler from './scheduler/main';
|
||||
@ -81,6 +81,7 @@ __deferLoadIfEnabled();
|
||||
|
||||
var tower = angular.module('Tower', [
|
||||
//'ngAnimate',
|
||||
'lrInfiniteScroll',
|
||||
'ngSanitize',
|
||||
'ngCookies',
|
||||
about.name,
|
||||
@ -99,7 +100,6 @@ var tower = angular.module('Tower', [
|
||||
dashboard.name,
|
||||
moment.name,
|
||||
templateUrl.name,
|
||||
adhoc.name,
|
||||
login.name,
|
||||
activityStream.name,
|
||||
footer.name,
|
||||
@ -111,6 +111,8 @@ var tower = angular.module('Tower', [
|
||||
JobTemplates.name,
|
||||
portalMode.name,
|
||||
search.name,
|
||||
config.name,
|
||||
credentials.name,
|
||||
'ngToast',
|
||||
'templates',
|
||||
'Utilities',
|
||||
@ -169,7 +171,6 @@ var tower = angular.module('Tower', [
|
||||
'StreamWidget',
|
||||
'JobsHelper',
|
||||
'InventoryGroupsHelpDefinition',
|
||||
'InventoryTree',
|
||||
'CredentialsHelper',
|
||||
'StreamListDefinition',
|
||||
'HomeGroupListDefinition',
|
||||
@ -215,8 +216,10 @@ var tower = angular.module('Tower', [
|
||||
timeout: 4000
|
||||
});
|
||||
}])
|
||||
.config(['$stateProvider', '$urlRouterProvider', '$breadcrumbProvider', '$urlMatcherFactoryProvider',
|
||||
function ($stateProvider, $urlRouterProvider, $breadcrumbProvider, $urlMatcherFactoryProvider) {
|
||||
.config(['$stateProvider', '$urlRouterProvider', '$breadcrumbProvider',
|
||||
'$urlMatcherFactoryProvider',
|
||||
function ($stateProvider, $urlRouterProvider, $breadcrumbProvider,
|
||||
$urlMatcherFactoryProvider) {
|
||||
$urlMatcherFactoryProvider.strictMode(false);
|
||||
$breadcrumbProvider.setOptions({
|
||||
templateUrl: urlPrefix + 'partials/breadcrumb.html'
|
||||
@ -224,6 +227,7 @@ var tower = angular.module('Tower', [
|
||||
|
||||
// route to the details pane of /job/:id/host-event/:eventId if no other child specified
|
||||
$urlRouterProvider.when('/jobs/*/host-event/*', '/jobs/*/host-event/*/details');
|
||||
|
||||
// $urlRouterProvider.otherwise("/home");
|
||||
$urlRouterProvider.otherwise(function($injector){
|
||||
var $state = $injector.get("$state");
|
||||
@ -243,10 +247,9 @@ var tower = angular.module('Tower', [
|
||||
label: "DASHBOARD"
|
||||
},
|
||||
resolve: {
|
||||
graphData: ['$q', 'jobStatusGraphData', 'FeaturesService', function($q, jobStatusGraphData, FeaturesService) {
|
||||
graphData: ['$q', 'jobStatusGraphData', function($q, jobStatusGraphData) {
|
||||
return $q.all({
|
||||
jobStatus: jobStatusGraphData.get("month", "all"),
|
||||
features: FeaturesService.get()
|
||||
});
|
||||
}]
|
||||
}
|
||||
@ -272,7 +275,7 @@ var tower = angular.module('Tower', [
|
||||
}).
|
||||
|
||||
state('projects', {
|
||||
url: '/projects',
|
||||
url: '/projects?{status}',
|
||||
templateUrl: urlPrefix + 'partials/projects.html',
|
||||
controller: ProjectsList,
|
||||
data: {
|
||||
@ -300,8 +303,13 @@ var tower = angular.module('Tower', [
|
||||
controller: ProjectsEdit,
|
||||
data: {
|
||||
activityStreamId: 'id'
|
||||
},
|
||||
ncyBreadcrumb: {
|
||||
parent: 'projects',
|
||||
label: '{{name}}'
|
||||
}
|
||||
}).
|
||||
|
||||
state('projectOrganizations', {
|
||||
url: '/projects/:project_id/organizations',
|
||||
templateUrl: urlPrefix + 'partials/projects.html',
|
||||
@ -313,6 +321,7 @@ var tower = angular.module('Tower', [
|
||||
templateUrl: urlPrefix + 'partials/projects.html',
|
||||
controller: OrganizationsAdd
|
||||
}).
|
||||
|
||||
state('teams', {
|
||||
url: '/teams',
|
||||
templateUrl: urlPrefix + 'partials/teams.html',
|
||||
@ -343,6 +352,10 @@ var tower = angular.module('Tower', [
|
||||
controller: TeamsEdit,
|
||||
data: {
|
||||
activityStreamId: 'team_id'
|
||||
},
|
||||
ncyBreadcrumb: {
|
||||
parent: "teams",
|
||||
label: "{{team_obj.name}}"
|
||||
}
|
||||
}).
|
||||
|
||||
@ -427,7 +440,7 @@ var tower = angular.module('Tower', [
|
||||
},
|
||||
ncyBreadcrumb: {
|
||||
parent: "credentials",
|
||||
label: "EDIT CREDENTIAL"
|
||||
label: "{{credential_obj.name}}"
|
||||
}
|
||||
}).
|
||||
|
||||
@ -461,6 +474,10 @@ var tower = angular.module('Tower', [
|
||||
controller: UsersEdit,
|
||||
data: {
|
||||
activityStreamId: 'user_id'
|
||||
},
|
||||
ncyBreadcrumb: {
|
||||
parent: "users",
|
||||
label: "{{user_obj.username}}"
|
||||
}
|
||||
}).
|
||||
|
||||
@ -506,10 +523,16 @@ var tower = angular.module('Tower', [
|
||||
}]);
|
||||
}])
|
||||
|
||||
.run(['$q', '$compile', '$cookieStore', '$rootScope', '$log', 'CheckLicense', '$location', 'Authorization', 'LoadBasePaths', 'Timer', 'ClearScope', 'Socket',
|
||||
'LoadConfig', 'Store', 'ShowSocketHelp', 'pendoService', 'Prompt', 'Rest', 'Wait', 'ProcessErrors', '$state', 'GetBasePath',
|
||||
function ($q, $compile, $cookieStore, $rootScope, $log, CheckLicense, $location, Authorization, LoadBasePaths, Timer, ClearScope, Socket,
|
||||
LoadConfig, Store, ShowSocketHelp, pendoService, Prompt, Rest, Wait, ProcessErrors, $state, GetBasePath) {
|
||||
.run(['$q', '$compile', '$cookieStore', '$rootScope', '$log',
|
||||
'CheckLicense', '$location', 'Authorization', 'LoadBasePaths', 'Timer',
|
||||
'ClearScope', 'Socket', 'LoadConfig', 'Store',
|
||||
'ShowSocketHelp', 'pendoService', 'Prompt', 'Rest', 'Wait',
|
||||
'ProcessErrors', '$state', 'GetBasePath', 'ConfigService',
|
||||
'FeaturesService',
|
||||
function ($q, $compile, $cookieStore, $rootScope, $log, CheckLicense,
|
||||
$location, Authorization, LoadBasePaths, Timer, ClearScope, Socket,
|
||||
LoadConfig, Store, ShowSocketHelp, pendoService, Prompt, Rest, Wait,
|
||||
ProcessErrors, $state, GetBasePath, ConfigService, FeaturesService) {
|
||||
var sock;
|
||||
$rootScope.addPermission = function (scope) {
|
||||
$compile("<add-permissions class='AddPermissions'></add-permissions>")(scope);
|
||||
@ -577,11 +600,11 @@ var tower = angular.module('Tower', [
|
||||
Prompt({
|
||||
hdr: `Remove role`,
|
||||
body: `
|
||||
<div class="Prompt-bodyQuery">
|
||||
Confirm the removal of the ${roleType}
|
||||
<span class="Prompt-emphasis"> ${roleName} </span>
|
||||
role associated with ${userName}.
|
||||
</div>
|
||||
<div class="Prompt-bodyQuery">
|
||||
Confirm the removal of the ${roleType}
|
||||
<span class="Prompt-emphasis"> ${roleName} </span>
|
||||
role associated with ${userName}.
|
||||
</div>
|
||||
`,
|
||||
action: action,
|
||||
actionText: 'REMOVE'
|
||||
@ -607,11 +630,11 @@ var tower = angular.module('Tower', [
|
||||
Prompt({
|
||||
hdr: `Remove role`,
|
||||
body: `
|
||||
<div class="Prompt-bodyQuery">
|
||||
Confirm the removal of the ${roleType}
|
||||
<span class="Prompt-emphasis"> ${roleName} </span>
|
||||
role associated with the ${teamName} team.
|
||||
</div>
|
||||
<div class="Prompt-bodyQuery">
|
||||
Confirm the removal of the ${roleType}
|
||||
<span class="Prompt-emphasis"> ${roleName} </span>
|
||||
role associated with the ${teamName} team.
|
||||
</div>
|
||||
`,
|
||||
action: action,
|
||||
actionText: 'REMOVE'
|
||||
@ -737,7 +760,7 @@ var tower = angular.module('Tower', [
|
||||
control_socket.on("limit_reached", function(data) {
|
||||
$log.debug(data.reason);
|
||||
$rootScope.sessionTimer.expireSession('session_limit');
|
||||
$location.url('/login');
|
||||
$state.go('signOut');
|
||||
});
|
||||
}
|
||||
openSocket();
|
||||
@ -752,9 +775,7 @@ var tower = angular.module('Tower', [
|
||||
|
||||
|
||||
$rootScope.$on("$stateChangeStart", function (event, next, nextParams, prev) {
|
||||
if (next.name !== 'signOut'){
|
||||
CheckLicense.notify();
|
||||
}
|
||||
|
||||
$rootScope.$broadcast("closePermissionsModal");
|
||||
$rootScope.$broadcast("closeUsersModal");
|
||||
// this line removes the query params attached to a route
|
||||
@ -805,15 +826,15 @@ var tower = angular.module('Tower', [
|
||||
if ($rootScope.current_user === undefined || $rootScope.current_user === null) {
|
||||
Authorization.restoreUserInfo(); //user must have hit browser refresh
|
||||
}
|
||||
if (next && (next.name !== "signIn" && next.name !== "signOut" && next.name !== "license")) {
|
||||
// if not headed to /login or /logout, then check the license
|
||||
CheckLicense.test(event);
|
||||
}
|
||||
}
|
||||
activateTab();
|
||||
});
|
||||
|
||||
$rootScope.$on('$stateChangeSuccess', function(event, toState, toParams, fromState) {
|
||||
// catch license expiration notifications immediately after user logs in, redirect
|
||||
if (fromState.name === 'signIn'){
|
||||
CheckLicense.notify();
|
||||
}
|
||||
|
||||
if(fromState.name === 'license' && toParams.hasOwnProperty('licenseMissing')){
|
||||
$rootScope.licenseMissing = toParams.licenseMissing;
|
||||
@ -851,17 +872,27 @@ var tower = angular.module('Tower', [
|
||||
// User not authenticated, redirect to login page
|
||||
$rootScope.sessionExpired = false;
|
||||
$cookieStore.put('sessionExpired', false);
|
||||
$rootScope.configReady = true;
|
||||
$location.path('/login');
|
||||
} else {
|
||||
// If browser refresh, set the user_is_superuser value
|
||||
$rootScope.user_is_superuser = Authorization.getUserInfo('is_superuser');
|
||||
// state the user refreshes we want to open the socket, except if the user is on the login page, which should happen after the user logs in (see the AuthService module for that call to OpenSocket)
|
||||
if(!_.contains($location.$$url, '/login')){
|
||||
Timer.init().then(function(timer){
|
||||
$rootScope.sessionTimer = timer;
|
||||
$rootScope.$emit('OpenSocket');
|
||||
pendoService.issuePendoIdentity();
|
||||
CheckLicense.notify();
|
||||
ConfigService.getConfig().then(function(){
|
||||
Timer.init().then(function(timer){
|
||||
$rootScope.sessionTimer = timer;
|
||||
$rootScope.$emit('OpenSocket');
|
||||
pendoService.issuePendoIdentity();
|
||||
CheckLicense.test();
|
||||
FeaturesService.get();
|
||||
if($location.$$path === "/home" && $state.current && $state.current.name === ""){
|
||||
$state.go('dashboard');
|
||||
}
|
||||
else if($location.$$path === "/portal" && $state.current && $state.current.name === ""){
|
||||
$state.go('portalMode');
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -896,7 +927,11 @@ var tower = angular.module('Tower', [
|
||||
// create a promise that will resolve state $AnsibleConfig is loaded
|
||||
$rootScope.loginConfig = $q.defer();
|
||||
}
|
||||
|
||||
if (!$rootScope.featuresConfigured) {
|
||||
// create a promise that will resolve when features are loaded
|
||||
$rootScope.featuresConfigured = $q.defer();
|
||||
}
|
||||
$rootScope.licenseMissing = true;
|
||||
//the authorization controller redirects to the home page automatcially if there is no last path defined. in order to override
|
||||
// this, set the last path to /portal for instances where portal is visited for the first time.
|
||||
$rootScope.lastPath = ($location.path() === "/portal") ? 'portal' : undefined;
|
||||
|
||||
@ -31,14 +31,26 @@
|
||||
.BreadCrumb-menuLink:hover {
|
||||
color: @bc-link-icon-focus;
|
||||
}
|
||||
.BreadCrumb-menuLink.BreadCrumb-menuLinkActive {
|
||||
color: @bc-link-icon-focus;
|
||||
}
|
||||
.BreadCrumb-menuLinkImage {
|
||||
font-size: 18px;
|
||||
color: @bc-link-icon;
|
||||
flex: initial;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
color: @default-link-hov;
|
||||
}
|
||||
}
|
||||
.BreadCrumb-menuLink.BreadCrumb-menuLinkActive {
|
||||
color: @bc-link-icon-focus;
|
||||
|
||||
.BreadCrumb-menuLinkImage {
|
||||
color: @bc-link-icon-focus;
|
||||
|
||||
&:hover {
|
||||
color: @default-link-hov
|
||||
}
|
||||
}
|
||||
}
|
||||
.BreadCrumb-list {
|
||||
padding: 0px 20px;
|
||||
list-style: none;
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
export default
|
||||
[ 'templateUrl', '$state', 'FeaturesService', 'ProcessErrors', 'Store', 'Empty', function(templateUrl, $state, FeaturesService, ProcessErrors, Store, Empty) {
|
||||
['templateUrl', '$state', 'FeaturesService', 'ProcessErrors','$rootScope', 'Store', 'Empty',
|
||||
function(templateUrl, $state, FeaturesService, ProcessErrors, $rootScope, Store, Empty) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: templateUrl('bread-crumb/bread-crumb'),
|
||||
@ -12,40 +13,40 @@ export default
|
||||
|
||||
scope.toggleActivityStream = function() {
|
||||
|
||||
// If the user is not already on the activity stream then they want to navigate to it
|
||||
if(!scope.activityStreamActive) {
|
||||
var stateGoParams = {};
|
||||
// If the user is not already on the activity stream then they want to navigate to it
|
||||
if(!scope.activityStreamActive) {
|
||||
var stateGoParams = {};
|
||||
|
||||
if(streamConfig && streamConfig.activityStream) {
|
||||
if(streamConfig.activityStreamTarget) {
|
||||
stateGoParams.target = streamConfig.activityStreamTarget;
|
||||
if(streamConfig && streamConfig.activityStream) {
|
||||
if(streamConfig.activityStreamTarget) {
|
||||
stateGoParams.target = streamConfig.activityStreamTarget;
|
||||
}
|
||||
if(streamConfig.activityStreamId) {
|
||||
stateGoParams.id = $state.params[streamConfig.activityStreamId];
|
||||
}
|
||||
}
|
||||
if(streamConfig.activityStreamId) {
|
||||
stateGoParams.id = $state.params[streamConfig.activityStreamId];
|
||||
}
|
||||
}
|
||||
|
||||
$state.go('activityStream', stateGoParams);
|
||||
}
|
||||
// The user is navigating away from the activity stream - take them back from whence they came
|
||||
else {
|
||||
// Pull the previous state out of local storage
|
||||
var previousState = Store('previous_state');
|
||||
|
||||
if(previousState && !Empty(previousState.name)) {
|
||||
$state.go(previousState.name, previousState.fromParams);
|
||||
$state.go('activityStream', stateGoParams);
|
||||
}
|
||||
// The user is navigating away from the activity stream - take them back from whence they came
|
||||
else {
|
||||
// If for some reason something went wrong (like local storage was wiped, etc) take the
|
||||
// user back to the dashboard
|
||||
$state.go('dashboard');
|
||||
// Pull the previous state out of local storage
|
||||
var previousState = Store('previous_state');
|
||||
|
||||
if(previousState && !Empty(previousState.name)) {
|
||||
$state.go(previousState.name, previousState.fromParams);
|
||||
}
|
||||
else {
|
||||
// If for some reason something went wrong (like local storage was wiped, etc) take the
|
||||
// user back to the dashboard
|
||||
$state.go('dashboard');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
scope.$on("$stateChangeSuccess", function updateActivityStreamButton(event, toState, toParams, fromState, fromParams) {
|
||||
scope.$on("$stateChangeStart", function updateActivityStreamButton(event, toState, toParams, fromState, fromParams) {
|
||||
|
||||
if(fromState && !Empty(fromState.name)) {
|
||||
// Go ahead and attach the from params to the state object so that it can all be stored together
|
||||
@ -65,26 +66,13 @@ export default
|
||||
// point. We use the get() function call here just in case the features aren't available.
|
||||
// The get() function will only fire off the server call if the features aren't already
|
||||
// attached to the $rootScope.
|
||||
|
||||
FeaturesService.get()
|
||||
.then(function() {
|
||||
var features = FeaturesService.get();
|
||||
if(features){
|
||||
scope.loadingLicense = false;
|
||||
scope.activityStreamActive = (toState.name === 'activityStream') ? true : false;
|
||||
scope.showActivityStreamButton = (FeaturesService.featureEnabled('activity_streams') || toState.name === 'activityStream') ? true : false;
|
||||
var licenseInfo = FeaturesService.getLicenseInfo();
|
||||
scope.licenseType = licenseInfo ? licenseInfo.license_type : null;
|
||||
if (!licenseInfo) {
|
||||
console.warn("License info not loaded correctly");
|
||||
}
|
||||
})
|
||||
.catch(function (response) {
|
||||
ProcessErrors(null, response.data, response.status, null, {
|
||||
hdr: 'Error!',
|
||||
msg: 'Failed to get feature info. GET returned status: ' +
|
||||
response.status
|
||||
});
|
||||
});
|
||||
|
||||
scope.activityStreamTooltip = (toState.name === 'activityStream') ? 'Hide Activity Stream' : 'View Activity Stream';
|
||||
scope.showActivityStreamButton = (FeaturesService.featureEnabled('activity_streams') || toState.name ==='activityStream') ? true : false;
|
||||
}
|
||||
}
|
||||
else {
|
||||
|
||||
@ -93,6 +81,16 @@ export default
|
||||
}
|
||||
});
|
||||
|
||||
// scope.$on('featuresLoaded', function(){
|
||||
$rootScope.featuresConfigured.promise.then(function(features){
|
||||
// var features = FeaturesService.get();
|
||||
if(features){
|
||||
scope.loadingLicense = false;
|
||||
scope.activityStreamActive = ($state.current.name === 'activityStream') ? true : false;
|
||||
scope.activityStreamTooltip = ($state.current.name === 'activityStream') ? 'Hide Activity Stream' : 'View Activity Stream';
|
||||
scope.showActivityStreamButton = (FeaturesService.featureEnabled('activity_streams') || $state.current.name ==='activityStream') ? true : false;
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
}];
|
||||
|
||||
@ -1,14 +1,15 @@
|
||||
<div id="bread_crumb" class="BreadCrumb" ng-class="{'is-loggedOut' : !$root.current_user.username}">
|
||||
<div ng-if="!licenseMissing" ncy-breadcrumb></div>
|
||||
<div ng-hide="licenseMissing" ncy-breadcrumb></div>
|
||||
<div class="BreadCrumb-menuLink"
|
||||
id="bread_crumb_activity_stream"
|
||||
aw-tool-tip="View Activity Stream"
|
||||
aw-tool-tip="{{activityStreamTooltip}}"
|
||||
data-tip-watch="activityStreamTooltip"
|
||||
data-placement="left"
|
||||
data-trigger="hover"
|
||||
data-container="body"
|
||||
ng-class="{'BreadCrumb-menuLinkActive' : activityStreamActive}"
|
||||
ng-if="showActivityStreamButton"
|
||||
ng-hide= "loadingLicense || licenseMissing || licenseType == 'basic'"
|
||||
ng-hide= "loadingLicense || licenseMissing"
|
||||
ng-click="toggleActivityStream()">
|
||||
<i class="BreadCrumb-menuLinkImage icon-activity-stream"
|
||||
alt="Activity Stream">
|
||||
|
||||
@ -103,6 +103,9 @@ export function CredentialsList($scope, $rootScope, $location, $log,
|
||||
Rest.destroy()
|
||||
.success(function () {
|
||||
$scope.search(list.iterator);
|
||||
if (new RegExp('/' + id + '$').test($location.$$url)) {
|
||||
$state.go('^');
|
||||
}
|
||||
})
|
||||
.error(function (data, status) {
|
||||
ProcessErrors($scope, data, status, null, { hdr: 'Error!',
|
||||
@ -175,32 +178,39 @@ export function CredentialsAdd($scope, $rootScope, $compile, $location, $log,
|
||||
|
||||
$scope.canShareCredential = false;
|
||||
|
||||
if ($rootScope.current_user.is_superuser) {
|
||||
$scope.canShareCredential = true;
|
||||
} else {
|
||||
Rest.setUrl(`/api/v1/users/${$rootScope.current_user.id}/admin_of_organizations`);
|
||||
Rest.get()
|
||||
.success(function(data) {
|
||||
$scope.canShareCredential = (data.count) ? true : false;
|
||||
}).error(function (data, status) {
|
||||
ProcessErrors($scope, data, status, null, { hdr: 'Error!', msg: 'Failed to find if users is admin of org' + status });
|
||||
$rootScope.$watch('current_user', function(){
|
||||
try {
|
||||
if ($rootScope.current_user.is_superuser) {
|
||||
$scope.canShareCredential = true;
|
||||
} else {
|
||||
Rest.setUrl(`/api/v1/users/${$rootScope.current_user.id}/admin_of_organizations`);
|
||||
Rest.get()
|
||||
.success(function(data) {
|
||||
$scope.canShareCredential = (data.count) ? true : false;
|
||||
}).error(function (data, status) {
|
||||
ProcessErrors($scope, data, status, null, { hdr: 'Error!', msg: 'Failed to find if users is admin of org' + status });
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
var orgUrl = ($rootScope.current_user.is_superuser) ?
|
||||
GetBasePath("organizations") :
|
||||
$rootScope.current_user.url + "admin_of_organizations?";
|
||||
|
||||
// Create LookUpInit for organizations
|
||||
LookUpInit({
|
||||
scope: $scope,
|
||||
url: orgUrl,
|
||||
form: form,
|
||||
list: OrganizationList,
|
||||
field: 'organization',
|
||||
input_type: 'radio',
|
||||
autopopulateLookup: false
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
var orgUrl = ($rootScope.current_user.is_superuser) ?
|
||||
GetBasePath("organizations") :
|
||||
$rootScope.current_user.url + "admin_of_organizations?";
|
||||
|
||||
// Create LookUpInit for organizations
|
||||
LookUpInit({
|
||||
scope: $scope,
|
||||
url: orgUrl,
|
||||
form: form,
|
||||
list: OrganizationList,
|
||||
field: 'organization',
|
||||
input_type: 'radio',
|
||||
autopopulateLookup: false
|
||||
}
|
||||
catch(err){
|
||||
// $rootScope.current_user isn't available because a call to the config endpoint hasn't finished resolving yet
|
||||
}
|
||||
});
|
||||
|
||||
if (!Empty($stateParams.user_id)) {
|
||||
|
||||
@ -20,8 +20,7 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $statePa
|
||||
var jobs_scope, scheduled_scope,
|
||||
choicesCount = 0,
|
||||
listCount = 0,
|
||||
api_complete = false,
|
||||
max_rows;
|
||||
api_complete = false;
|
||||
|
||||
$scope.jobsSelected = true;
|
||||
|
||||
@ -68,7 +67,6 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $statePa
|
||||
list: AllJobsList,
|
||||
id: 'active-jobs',
|
||||
url: GetBasePath('unified_jobs') + '?status__in=pending,waiting,running,completed,failed,successful,error,canceled',
|
||||
pageSize: max_rows,
|
||||
searchParams: search_params,
|
||||
spinner: false
|
||||
});
|
||||
@ -81,8 +79,7 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $statePa
|
||||
list: ScheduledJobsList,
|
||||
id: 'scheduled-jobs-tab',
|
||||
searchSize: 'col-lg-4 col-md-4 col-sm-4 col-xs-12',
|
||||
url: GetBasePath('schedules') + '?next_run__isnull=false',
|
||||
pageSize: max_rows
|
||||
url: GetBasePath('schedules') + '?next_run__isnull=false'
|
||||
});
|
||||
|
||||
$scope.refreshJobs = function() {
|
||||
@ -121,10 +118,6 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $statePa
|
||||
scheduled_scope.search('schedule');
|
||||
}
|
||||
});
|
||||
|
||||
$(window).resize(_.debounce(function() {
|
||||
resizeContainers();
|
||||
}, 500));
|
||||
});
|
||||
|
||||
if ($scope.removeChoicesReady) {
|
||||
@ -133,7 +126,6 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $statePa
|
||||
$scope.removeChoicesReady = $scope.$on('choicesReady', function() {
|
||||
choicesCount++;
|
||||
if (choicesCount === 2) {
|
||||
setHeight();
|
||||
$scope.$emit('buildJobsList');
|
||||
}
|
||||
});
|
||||
@ -155,46 +147,6 @@ export function JobsListController ($rootScope, $log, $scope, $compile, $statePa
|
||||
variable: 'type_choices',
|
||||
callback: 'choicesReady'
|
||||
});
|
||||
|
||||
// Set the height of each container and calc max number of rows containers can hold
|
||||
function setHeight() {
|
||||
var docw = $(window).width(),
|
||||
//doch = $(window).height(),
|
||||
available_height,
|
||||
search_row, page_row, height, header, row_height;
|
||||
$log.debug('docw: ' + docw);
|
||||
|
||||
// customize the container height and # of rows based on available viewport height
|
||||
available_height = $(window).height() - $('#main-menu-container .navbar').outerHeight() - 80;
|
||||
if (docw < 1350) {
|
||||
available_height = (available_height < 800) ? 800 : available_height;
|
||||
} else {
|
||||
available_height = (available_height < 550) ? 550 : available_height;
|
||||
}
|
||||
$log.debug('available_height: ' + available_height);
|
||||
$('.jobs-list-container').each(function() {
|
||||
$(this).height(Math.floor(available_height));
|
||||
});
|
||||
search_row = Math.max($('.search-row:eq(0)').outerHeight(), 50);
|
||||
page_row = Math.max($('.page-row:eq(0)').outerHeight(), 33);
|
||||
header = Math.max($('#active_jobs_table thead').height(), 24);
|
||||
height = Math.floor(available_height ) - header - page_row - search_row - 30;
|
||||
row_height = 44;
|
||||
|
||||
max_rows = Math.floor(height / row_height);
|
||||
max_rows = (max_rows < 5) ? 5 : max_rows;
|
||||
|
||||
$log.debug('max_rows: ' + max_rows);
|
||||
}
|
||||
|
||||
// Set container height and return the number of allowed rows
|
||||
function resizeContainers() {
|
||||
setHeight();
|
||||
jobs_scope[AllJobsList.iterator + '_page_size'] = max_rows;
|
||||
jobs_scope.changePageSize(AllJobsList.name, AllJobsList.iterator);
|
||||
scheduled_scope[ScheduledJobsList.iterator + '_page_size'] = max_rows;
|
||||
scheduled_scope.changePageSize(ScheduledJobsList.name, ScheduledJobsList.iterator);
|
||||
}
|
||||
}
|
||||
|
||||
JobsListController.$inject = ['$rootScope', '$log', '$scope', '$compile', '$stateParams',
|
||||
|
||||
@ -22,7 +22,7 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams,
|
||||
Wait('start');
|
||||
|
||||
var list = ProjectList,
|
||||
defaultUrl = GetBasePath('projects'),
|
||||
defaultUrl = GetBasePath('projects') + ($stateParams.status ? '?status=' + $stateParams.status : ''),
|
||||
view = GenerateList,
|
||||
base = $location.path().replace(/^\//, '').split('/')[0],
|
||||
mode = (base === 'projects') ? 'edit' : 'select',
|
||||
@ -247,6 +247,9 @@ export function ProjectsList ($scope, $rootScope, $location, $log, $stateParams,
|
||||
Rest.destroy()
|
||||
.success(function () {
|
||||
$scope.search(list.iterator);
|
||||
if (new RegExp('/' + id + '$').test($location.$$url)) {
|
||||
$state.go('^');
|
||||
}
|
||||
})
|
||||
.error(function (data, status) {
|
||||
ProcessErrors($scope, data, status, null, { hdr: 'Error!',
|
||||
@ -495,6 +498,35 @@ export function ProjectsAdd(Refresh, $scope, $rootScope, $compile, $location, $l
|
||||
$scope.scmRequired = ($scope.scm_type.value !== 'manual') ? true : false;
|
||||
$scope.scmBranchLabel = ($scope.scm_type.value === 'svn') ? 'Revision #' : 'SCM Branch';
|
||||
}
|
||||
|
||||
// Dynamically update popover values
|
||||
if($scope.scm_type.value) {
|
||||
switch ($scope.scm_type.value) {
|
||||
case 'git':
|
||||
$scope.urlPopover = '<p>Example URLs for GIT SCM include:</p><ul class=\"no-bullets\"><li>https://github.com/ansible/ansible.git</li>' +
|
||||
'<li>git@github.com:ansible/ansible.git</li><li>git://servername.example.com/ansible.git</li></ul>' +
|
||||
'<p><strong>Note:</strong> When using SSH protocol for GitHub or Bitbucket, enter an SSH key only, ' +
|
||||
'do not enter a username (other than git). Additionally, GitHub and Bitbucket do not support password authentication when using ' +
|
||||
'SSH. GIT read only protocol (git://) does not use username or password information.';
|
||||
break;
|
||||
case 'svn':
|
||||
$scope.urlPopover = '<p>Example URLs for Subversion SCM include:</p>' +
|
||||
'<ul class=\"no-bullets\"><li>https://github.com/ansible/ansible</li><li>svn://servername.example.com/path</li>' +
|
||||
'<li>svn+ssh://servername.example.com/path</li></ul>';
|
||||
break;
|
||||
case 'hg':
|
||||
$scope.urlPopover = '<p>Example URLs for Mercurial SCM include:</p>' +
|
||||
'<ul class=\"no-bullets\"><li>https://bitbucket.org/username/project</li><li>ssh://hg@bitbucket.org/username/project</li>' +
|
||||
'<li>ssh://server.example.com/path</li></ul>' +
|
||||
'<p><strong>Note:</strong> Mercurial does not support password authentication for SSH. ' +
|
||||
'Do not put the username and key in the URL. ' +
|
||||
'If using Bitbucket and SSH, do not supply your Bitbucket username.';
|
||||
break;
|
||||
default:
|
||||
$scope.urlPopover = '<p> URL popover text';
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
$scope.formCancel = function () {
|
||||
@ -696,6 +728,7 @@ export function ProjectsEdit($scope, $rootScope, $compile, $location, $log,
|
||||
}
|
||||
|
||||
$scope.project_obj = data;
|
||||
$scope.name = data.name;
|
||||
$scope.$emit('projectLoaded');
|
||||
})
|
||||
.error(function (data, status) {
|
||||
|
||||
@ -98,6 +98,9 @@ export function TeamsList($scope, $rootScope, $location, $log, $stateParams,
|
||||
Wait('stop');
|
||||
$('#prompt-modal').modal('hide');
|
||||
$scope.search(list.iterator);
|
||||
if (new RegExp('/' + id + '$').test($location.$$url)) {
|
||||
$state.go('^');
|
||||
}
|
||||
})
|
||||
.error(function (data, status) {
|
||||
Wait('stop');
|
||||
|
||||
@ -112,6 +112,9 @@ export function UsersList($scope, $rootScope, $location, $log, $stateParams,
|
||||
Rest.destroy()
|
||||
.success(function () {
|
||||
$scope.search(list.iterator);
|
||||
if (new RegExp('/' + id + '$').test($location.$$url)) {
|
||||
$state.go('^');
|
||||
}
|
||||
})
|
||||
.error(function (data, status) {
|
||||
ProcessErrors($scope, data, status, null, { hdr: 'Error!',
|
||||
@ -162,7 +165,7 @@ export function UsersAdd($scope, $rootScope, $compile, $location, $log,
|
||||
generator.reset();
|
||||
|
||||
$scope.user_type_options = user_type_options;
|
||||
$scope.user_type = user_type_options[0]
|
||||
$scope.user_type = user_type_options[0];
|
||||
$scope.$watch('user_type', user_type_sync($scope));
|
||||
|
||||
CreateSelect2({
|
||||
@ -271,7 +274,7 @@ export function UsersEdit($scope, $rootScope, $location,
|
||||
generator.reset();
|
||||
|
||||
$scope.user_type_options = user_type_options;
|
||||
$scope.user_type = user_type_options[0]
|
||||
$scope.user_type = user_type_options[0];
|
||||
$scope.$watch('user_type', user_type_sync($scope));
|
||||
|
||||
var setScopeFields = function(data){
|
||||
@ -343,6 +346,8 @@ export function UsersEdit($scope, $rootScope, $location,
|
||||
$scope.is_superuser = true;
|
||||
}
|
||||
|
||||
$scope.user_obj = data;
|
||||
|
||||
CreateSelect2({
|
||||
element: '#user_user_type',
|
||||
multiple: false
|
||||
|
||||
11
awx/ui/client/src/credentials/main.js
Normal file
@ -0,0 +1,11 @@
/*************************************************
 * Copyright (c) 2016 Ansible, Inc.
 *
 * All Rights Reserved
 *************************************************/

import ownerList from './ownerList.directive';

export default
    angular.module('credentials', [])
        .directive('ownerList', ownerList);
13
awx/ui/client/src/credentials/ownerList.directive.js
Normal file
@ -0,0 +1,13 @@
export default
    [ 'templateUrl',
        function(templateUrl) {
            return {
                restrict: 'E',
                scope: false,
                templateUrl: templateUrl('credentials/ownerList'),
                link: function(scope) {
                    scope.owners_list = scope.credential.summary_fields.owners && scope.credential.summary_fields.owners.length > 0 ? scope.credential.summary_fields.owners : [];
                }
            };
        }
    ];
5
awx/ui/client/src/credentials/ownerList.partial.html
Normal file
@ -0,0 +1,5 @@
<div ng-repeat="owner in owners_list">
    <a ng-if="owner.type === 'organization'" ui-sref="organizations.edit({ organization_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
    <a ng-if="owner.type === 'user'" ui-sref="users.edit({ user_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
    <a ng-if="owner.type === 'team'" ui-sref="teams.edit({ team_id: owner.id })">{{ owner.name }}{{$last ? '' : ', '}}</a>
</div>
@ -49,7 +49,7 @@ export default
|
||||
label: "Inventories",
|
||||
},
|
||||
{
|
||||
url: "/#/inventories/?inventory_sources_with_failures",
|
||||
url: "/#/inventories?status=sync-failed",
|
||||
number: scope.data.inventories.inventory_failed,
|
||||
label: "Inventory Sync Failures",
|
||||
isFailureCount: true
|
||||
@ -60,7 +60,7 @@ export default
|
||||
label: "Projects"
|
||||
},
|
||||
{
|
||||
url: "/#/projects/?status=failed",
|
||||
url: "/#/projects?status=failed",
|
||||
number: scope.data.projects.failed,
|
||||
label: "Project Sync Failures",
|
||||
isFailureCount: true
|
||||
|
||||
@ -118,6 +118,7 @@
|
||||
top: auto;
|
||||
box-shadow: none;
|
||||
text-transform: uppercase;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.DashboardGraphs-periodDropdown,
|
||||
|
||||
@ -50,7 +50,7 @@ export default function(){
|
||||
type: 'textarea',
|
||||
editRequired: false,
|
||||
rows: 6,
|
||||
class: 'modal-input-xlarge Form-textArea',
|
||||
class: 'modal-input-xlarge Form-textArea Form-formGroup--fullWidth',
|
||||
dataTitle: 'Host Variables',
|
||||
dataPlacement: 'right',
|
||||
dataContainer: 'body',
|
||||
@ -71,7 +71,7 @@ export default function(){
|
||||
save: {
|
||||
ngClick: 'formSave()', //$scope.function to call on click, optional
|
||||
ngDisabled: "host_form.$invalid"//true //Disable when $pristine or $invalid, optional and when can_edit = false, for permission reasons
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@ -21,7 +21,7 @@ export default function(){
|
||||
basePath: 'unified_jobs',
|
||||
label: '',
|
||||
iconOnly: true,
|
||||
searchable: true,
|
||||
searchable: false,
|
||||
searchType: 'select',
|
||||
nosort: true,
|
||||
searchOptions: [],
|
||||
|
||||
@ -22,9 +22,6 @@ var dashboardHostsList = {
|
||||
label: "HOSTS"
|
||||
},
|
||||
resolve: {
|
||||
features: ['FeaturesService', function(FeaturesService) {
|
||||
return FeaturesService.get();
|
||||
}],
|
||||
hosts: ['Rest', 'GetBasePath', '$stateParams', function(Rest, GetBasePath, $stateParams){
|
||||
var defaultUrl = GetBasePath('hosts') + '?page_size=10' + ($stateParams['active-failures'] ? '&has_active_failures=true' : '' );
|
||||
Rest.setUrl(defaultUrl);
|
||||
|
||||
@ -18,11 +18,12 @@
|
||||
ng-class-even="'List-tableRow--evenRow'"
|
||||
ng-repeat = "job in jobs">
|
||||
<td class="DashboardList-nameCell">
|
||||
<i class="fa
|
||||
DashboardList-status"
|
||||
ng-class="{'DashboardList-status--success icon-job-successful': isSuccessful(job.status),
|
||||
'DashboardList-status--failed icon-job-successful': !isSuccessful(job.status)}">
|
||||
</i>
|
||||
<a href="{{ job.detailsUrl }}" ng-if="isSuccessful(job.status)" aw-tool-tip="Job successful. Click for details." aw-tip-placement="right">
|
||||
<i class="fa DashboardList-status DashboardList-status--success icon-job-successful"></i>
|
||||
</a>
|
||||
<a href="{{ job.detailsUrl }}" ng-if="!isSuccessful(job.status)" aw-tool-tip="Job failed. Click for details." aw-tip-placement="right">
|
||||
<i class="fa DashboardList-status DashboardList-status--failed icon-job-successful"></i>
|
||||
</a>
|
||||
<a href="{{ job.detailsUrl }}" class="DashboardList-nameContainer">
|
||||
{{ job.name }}
|
||||
</a>
|
||||
|
||||
@ -49,8 +49,8 @@ export default
|
||||
sourceModel: 'organization',
|
||||
sourceField: 'name',
|
||||
ngClick: 'lookUpOrganization()',
|
||||
awPopOver: "<p>If no organization is given, the credential can only be used by the user that creates the credential. organization admins and system administrators can assign an organization so that roles can be assigned to users and teams in that organization.</p>",
|
||||
dataTitle: 'Required ',
|
||||
awPopOver: "<p>If no organization is given, the credential can only be used by the user that creates the credential. Organization admins and system administrators can assign an organization so that roles for the credential can be assigned to users and teams in that organization.</p>",
|
||||
dataTitle: 'Organization ',
|
||||
dataPlacement: 'bottom',
|
||||
dataContainer: "body"
|
||||
},
|
||||
@ -246,7 +246,7 @@ export default
|
||||
rows: 10,
|
||||
awPopOver: "SSH key description",
|
||||
awPopOverWatch: "key_description",
|
||||
dataTitle: 'Help',
|
||||
dataTitle: 'Private Key',
|
||||
dataPlacement: 'right',
|
||||
dataContainer: "body",
|
||||
subForm: "credentialSubForm"
|
||||
@ -310,10 +310,6 @@ export default
|
||||
client:{
|
||||
type: 'text',
|
||||
label: 'Client ID',
|
||||
awRequiredWhen: {
|
||||
reqExpression: "azure_rm_required",
|
||||
init: false
|
||||
},
|
||||
subForm: 'credentialSubForm',
|
||||
ngShow: "kind.value === 'azure_rm'"
|
||||
},
|
||||
@ -322,20 +318,12 @@ export default
|
||||
hasShowInputButton: true,
|
||||
autocomplete: false,
|
||||
label: 'Client Secret',
|
||||
awRequiredWhen: {
|
||||
reqExpression: "azure_rm_required",
|
||||
init: false
|
||||
},
|
||||
subForm: 'credentialSubForm',
|
||||
ngShow: "kind.value === 'azure_rm'"
|
||||
},
|
||||
tenant: {
|
||||
type: 'text',
|
||||
label: 'Tenent ID',
|
||||
awRequiredWhen: {
|
||||
reqExpression: "azure_rm_required",
|
||||
init: false
|
||||
},
|
||||
label: 'Tenant ID',
|
||||
subForm: 'credentialSubForm',
|
||||
ngShow: "kind.value === 'azure_rm'"
|
||||
},
|
||||
@ -448,13 +436,15 @@ export default
|
||||
label: 'Role',
|
||||
type: 'role',
|
||||
noSort: true,
|
||||
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4'
|
||||
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4',
|
||||
searchable: false
|
||||
},
|
||||
team_roles: {
|
||||
label: 'Team Roles',
|
||||
type: 'team_roles',
|
||||
noSort: true,
|
||||
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4'
|
||||
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4',
|
||||
searchable: false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -61,23 +61,15 @@ export default
label: 'Source',
type: 'select',
ngOptions: 'source.label for source in source_type_options track by source.value',
ngChange: 'sourceChange()',
ngChange: 'sourceChange(source)',
addRequired: false,
editRequired: false
},
source_path: {
label: 'Script Path',
ngShow: "source && source.value == 'file'",
type: 'text',
awRequiredWhen: {
reqExpression: "sourcePathRequired",
init: "false"
}
editRequired: false,
ngModel: 'source'
},
credential: {
label: 'Cloud Credential',
type: 'lookup',
ngShow: "source && source.value !== 'manual' && source.value !== 'custom'",
ngShow: "source && source.value !== '' && source.value !== 'custom'",
sourceModel: 'credential',
sourceField: 'name',
ngClick: 'lookUpCredential()',
@ -147,7 +139,6 @@ export default
},
inventory_script: {
label : "Custom Inventory Script",
labelClass: 'prepend-asterisk',
type: 'lookup',
ngShow: "source && source.value === 'custom'",
sourceModel: 'inventory_script',
@ -157,7 +148,8 @@ export default
editRequired: true,
ngRequired: "source && source.value === 'custom'",
},
extra_vars: {
custom_variables: {
id: 'custom_variables',
label: 'Environment Variables', //"{{vars_label}}" ,
ngShow: "source && source.value=='custom' ",
type: 'textarea',
@ -165,7 +157,7 @@ export default
addRequired: false,
editRequired: false,
rows: 6,
'default': '---',
'default': null,
parseTypeName: 'envParseType',
dataTitle: "Environment Variables",
dataPlacement: 'right',
@ -176,15 +168,16 @@ export default
"<blockquote>---<br />somevar: somevalue<br />password: magic<br /></blockquote>\n",
dataContainer: 'body'
},
source_vars: {
ec2_variables: {
id: 'ec2_variables',
label: 'Source Variables', //"{{vars_label}}" ,
ngShow: "source && (source.value == 'file' || source.value == 'ec2')",
ngShow: "source && source.value == 'ec2'",
type: 'textarea',
class: 'Form-textAreaLabel Form-formGroup--fullWidth',
addRequired: false,
editRequird: false,
rows: 6,
'default': '---',
'default': null,
parseTypeName: 'envParseType',
dataTitle: "Source Variables",
dataPlacement: 'right',
@ -200,17 +193,17 @@ export default
'<p>View YAML examples at <a href="http://docs.ansible.com/YAMLSyntax.html" target="_blank">docs.ansible.com</a></p>',
dataContainer: 'body'
},
inventory_variables: {
vmware_variables: {
id: 'vmware_variables',
label: 'Source Variables', //"{{vars_label}}" ,

ngShow: "source && (source.value == 'vmware' || " +
"source.value == 'openstack')",
ngShow: "source && source.value == 'vmware'",
type: 'textarea',
addRequired: false,
class: 'Form-textAreaLabel Form-formGroup--fullWidth',
editRequird: false,
rows: 6,
'default': '---',
'default': null,
parseTypeName: 'envParseType',
dataTitle: "Source Variables",
dataPlacement: 'right',
@ -226,16 +219,43 @@ export default
'<p>View YAML examples at <a href="http://docs.ansible.com/YAMLSyntax.html" target="_blank">docs.ansible.com</a></p>',
dataContainer: 'body'
},
openstack_variables: {
id: 'openstack_variables',
label: 'Source Variables', //"{{vars_label}}" ,

ngShow: "source && source.value == 'openstack'",
type: 'textarea',
addRequired: false,
class: 'Form-textAreaLabel Form-formGroup--fullWidth',
editRequird: false,
rows: 6,
'default': null,
parseTypeName: 'envParseType',
dataTitle: "Source Variables",
dataPlacement: 'right',
awPopOver: "<p>Override variables found in openstack.yml and used by the inventory update script. For an example variable configuration " +
"<a href=\"https://github.com/ansible/ansible/blob/devel/contrib/inventory/openstack.yml\" target=\"_blank\">" +
"view openstack.yml in the Ansible github repo.</a></p>" +
"<p>Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.</p>" +
"JSON:<br />\n" +
"<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +
"YAML:<br />\n" +
"<blockquote>---<br />somevar: somevalue<br />password: magic<br /></blockquote>\n" +
'<p>View JSON examples at <a href="http://www.json.org" target="_blank">www.json.org</a></p>' +
'<p>View YAML examples at <a href="http://docs.ansible.com/YAMLSyntax.html" target="_blank">docs.ansible.com</a></p>',
dataContainer: 'body'
},
checkbox_group: {
label: 'Update Options',
type: 'checkbox_group',
ngShow: "source && (source.value !== 'manual' && source.value !== null)",
ngShow: "source && (source.value !== '' && source.value !== null)",
class: 'Form-checkbox--stacked',

fields: [{
name: 'overwrite',
label: 'Overwrite',
type: 'checkbox',
ngShow: "source.value !== 'manual' && source.value !== null",
ngShow: "source.value !== '' && source.value !== null",
addRequired: false,
editRequired: false,
awPopOver: '<p>If checked, all child groups and hosts not found on the external source will be deleted from ' +
@ -249,7 +269,7 @@ export default
name: 'overwrite_vars',
label: 'Overwrite Variables',
type: 'checkbox',
ngShow: "source.value !== 'manual' && source.value !== null",
ngShow: "source.value !== '' && source.value !== null",
addRequired: false,
editRequired: false,
awPopOver: '<p>If checked, all variables for child groups and hosts will be removed and replaced by those ' +
@ -263,7 +283,7 @@ export default
name: 'update_on_launch',
label: 'Update on Launch',
type: 'checkbox',
ngShow: "source.value !== 'manual' && source.value !== null",
ngShow: "source.value !== '' && source.value !== null",
addRequired: false,
editRequired: false,
awPopOver: '<p>Each time a job runs using this inventory, refresh the inventory from the selected source before ' +
@ -280,7 +300,7 @@ export default
type: 'number',
integer: true,
min: 0,
ngShow: "source && source.value !== 'manual' && update_on_launch",
ngShow: "source && source.value !== '' && update_on_launch",
spinner: true,
"default": 0,
addRequired: false,
@ -295,12 +315,12 @@ export default
},

buttons: {
save: {
ngClick: 'formSave()'
},
cancel: {
ngClick: 'formCancel()'
},
save: {
ngClick: 'saveGroup()'
}
}
},

related: {

@ -60,7 +60,7 @@ export default
addRequired: false,
editRequird: false,
rows: 6,
"class": "modal-input-xlarge Form-textArea",
"class": "modal-input-xlarge Form-textArea Form-formGroup--fullWidth",
"default": "---",
awPopOver: "<p>Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.</p>" +
"JSON:<br />\n" +

@ -185,13 +185,15 @@ export default
label: 'Role',
type: 'role',
noSort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4'
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4',
noSearch: true
},
team_roles: {
label: 'Team Roles',
type: 'team_roles',
noSort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4'
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4',
noSearch: true
}
}
}

@ -81,6 +81,11 @@ export default
},
project: {
label: 'Project',
labelAction: {
label: 'RESET',
ngClick: 'resetProjectToDefault()',
'class': "{{!(job_type.value === 'scan' && project_name !== 'Default') ? 'hidden' : ''}}",
},
type: 'lookup',
sourceModel: 'project',
sourceField: 'name',
@ -99,6 +104,7 @@ export default
label: 'Playbook',
type:'select',
ngOptions: 'book for book in playbook_options track by book',
ngDisabled: "job_type.value === 'scan' && project_name === 'Default'",
id: 'playbook-select',
awRequiredWhen: {
reqExpression: "playbookrequired",
@ -110,12 +116,6 @@ export default
dataPlacement: 'right',
dataContainer: "body",
},
default_scan: {
type: 'custom',
column: 1,
ngShow: 'job_type.value === "scan" && project_name !== "Default"',
control: '<a href="" ng-click="toggleScanInfo()">Reset to default project and playbook</a>'
},
credential: {
label: 'Machine Credential',
type: 'lookup',
@ -224,41 +224,6 @@ export default
text: 'Prompt on launch'
}
},
labels: {
label: 'Labels',
type: 'select',
ngOptions: 'label.label for label in labelOptions track by label.value',
multiSelect: true,
addRequired: false,
editRequired: false,
dataTitle: 'Labels',
dataPlacement: 'right',
awPopOver: 'You can add labels to a job template to aid in filtering',
dataContainer: 'body'
},
variables: {
label: 'Extra Variables',
type: 'textarea',
class: 'Form-textAreaLabel Form-formGroup--fullWidth',
rows: 6,
addRequired: false,
editRequired: false,
"default": "---",
column: 2,
awPopOver: "<p>Pass extra command line variables to the playbook. This is the -e or --extra-vars command line parameter " +
"for ansible-playbook. Provide key/value pairs using either YAML or JSON.</p>" +
"JSON:<br />\n" +
"<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +
"YAML:<br />\n" +
"<blockquote>---<br />somevar: somevalue<br />password: magic<br /></blockquote>\n",
dataTitle: 'Extra Variables',
dataPlacement: 'right',
dataContainer: "body",
subCheckbox: {
variable: 'ask_variables_on_launch',
text: 'Prompt on launch'
}
},
become_enabled: {
label: 'Enable Privilege Escalation',
type: 'checkbox',
@ -310,16 +275,57 @@ export default
dataTitle: "Host Config Key",
dataContainer: "body"
},
survey: {
type: 'custom',
labels: {
label: 'Labels',
type: 'select',
class: 'Form-formGroup--fullWidth',
ngOptions: 'label.label for label in labelOptions track by label.value',
multiSelect: true,
addRequired: false,
editRequired: false,
dataTitle: 'Labels',
dataPlacement: 'right',
awPopOver: 'You can add labels to a job template to aid in filtering',
dataContainer: 'body'
},
variables: {
label: 'Extra Variables',
type: 'textarea',
class: 'Form-textAreaLabel Form-formGroup--fullWidth',
rows: 6,
addRequired: false,
editRequired: false,
"default": "---",
column: 2,
ngHide: "job_type.value === 'scan'" ,
control: '<button type="button" class="btn btn-sm Form-surveyButton" id="job_templates_create_survey_btn" ng-show="!survey_exists" ng-click="addSurvey()">ADD SURVEY</button>'+
'<button type="button" class="btn btn-sm Form-surveyButton" id="job_templates_edit_survey_btn" ng-show="survey_exists" ng-click="editSurvey()">EDIT SURVEY</button>'
awPopOver: "<p>Pass extra command line variables to the playbook. This is the -e or --extra-vars command line parameter " +
"for ansible-playbook. Provide key/value pairs using either YAML or JSON.</p>" +
"JSON:<br />\n" +
"<blockquote>{<br /> \"somevar\": \"somevalue\",<br /> \"password\": \"magic\"<br /> }</blockquote>\n" +
"YAML:<br />\n" +
"<blockquote>---<br />somevar: somevalue<br />password: magic<br /></blockquote>\n",
dataTitle: 'Extra Variables',
dataPlacement: 'right',
dataContainer: "body",
subCheckbox: {
variable: 'ask_variables_on_launch',
text: 'Prompt on launch'
}
}
},

buttons: { //for now always generates <button> tags
add_survey: {
ngClick: 'addSurvey()',
ngShow: 'job_type.value !== "scan" && !survey_exists',
awFeature: 'surveys',
awToolTip: 'Surveys allow users to be prompted at job launch with a series of questions related to the job',
dataPlacement: 'top'
},
edit_survey: {
ngClick: 'editSurvey()',
awFeature: 'surveys',
ngShow: 'job_type.value !== "scan" && survey_exists'
},
cancel: {
ngClick: 'formCancel()'
},
@ -364,13 +370,15 @@ export default
label: 'Role',
type: 'role',
noSort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4'
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4',
searchable: false
},
team_roles: {
label: 'Team Roles',
type: 'team_roles',
noSort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4'
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4',
searchable: false
}
}
},

@ -47,6 +47,7 @@ export default

related: {
permissions: {
basePath: 'organizations/:id/access_list/',
awToolTip: 'Please save before assigning permissions',
dataPlacement: 'top',
type: 'collection',
@ -76,13 +77,15 @@ export default
label: 'Role',
type: 'role',
noSort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4'
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4',
searchable: false
},
team_roles: {
label: 'Team Roles',
type: 'team_roles',
noSort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4'
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4',
searchable: false
}
}
},

@ -48,10 +48,6 @@ angular.module('ProjectFormDefinition', ['SchedulesListDefinition'])
reqExpression: "organizationrequired",
init: "true"
},
awPopOver: '<p>A project must have at least one organization. Pick one organization now to create the project, and then after ' +
'the project is created you can add additional organizations.</p><p>Only super users and organization administrators are allowed ' +
'to make changes to projects. Associating one or more organizations to a project determins which organizations admins have ' +
'access to modify the project.',
dataTitle: 'Organization',
dataContainer: 'body',
dataPlacement: 'right'
@ -64,11 +60,10 @@ angular.module('ProjectFormDefinition', ['SchedulesListDefinition'])
ngChange: 'scmChange()',
addRequired: true,
editRequired: true,
hasSubForm: true
hasSubForm: true,
},
missing_path_alert: {
type: 'alertblock',
"class": 'alert-info',
ngShow: "showMissingPlaybooksAlert && scm_type.value == 'manual'",
alertTxt: '<p class=\"text-justify\"><strong>WARNING:</strong> There are no available playbook directories in {{ base_dir }}. ' +
'Either that directory is empty, or all of the contents are already assigned to other projects. ' +
@ -79,7 +74,7 @@ angular.module('ProjectFormDefinition', ['SchedulesListDefinition'])
base_dir: {
label: 'Project Base Path',
type: 'text',
//"class": 'col-lg-6',
class: 'Form-textUneditable',
showonly: true,
ngShow: "scm_type.value == 'manual' " ,
awPopOver: '<p>Base path used for locating playbooks. Directories found inside this path will be listed in the playbook directory drop-down. ' +
@ -115,30 +110,12 @@ angular.module('ProjectFormDefinition', ['SchedulesListDefinition'])
init: false
},
subForm: 'sourceSubForm',
helpCollapse: [{
hdr: 'GIT URLs',
content: '<p>Example URLs for GIT SCM include:</p><ul class=\"no-bullets\"><li>https://github.com/ansible/ansible.git</li>' +
'<li>git@github.com:ansible/ansible.git</li><li>git://servername.example.com/ansible.git</li></ul>' +
'<p><strong>Note:</strong> When using SSH protocol for GitHub or Bitbucket, enter an SSH key only, ' +
'do not enter a username (other than git). Additionally, GitHub and Bitbucket do not support password authentication when using ' +
'SSH. GIT read only protocol (git://) does not use username or password information.',
show: "scm_type.value == 'git'"
}, {
hdr: 'SVN URLs',
content: '<p>Example URLs for Subversion SCM include:</p>' +
'<ul class=\"no-bullets\"><li>https://github.com/ansible/ansible</li><li>svn://servername.example.com/path</li>' +
'<li>svn+ssh://servername.example.com/path</li></ul>',
show: "scm_type.value == 'svn'"
}, {
hdr: 'Mercurial URLs',
content: '<p>Example URLs for Mercurial SCM include:</p>' +
'<ul class=\"no-bullets\"><li>https://bitbucket.org/username/project</li><li>ssh://hg@bitbucket.org/username/project</li>' +
'<li>ssh://server.example.com/path</li></ul>' +
'<p><strong>Note:</strong> Mercurial does not support password authentication for SSH. ' +
'Do not put the username and key in the URL. ' +
'If using Bitbucket and SSH, do not supply your Bitbucket username.',
show: "scm_type.value == 'hg'"
}],
hideSubForm: "scm_type.value === 'manual'",
awPopOverWatch: "urlPopover",
awPopOver: "set in controllers/projects",
dataTitle: 'SCM URL',
dataContainer: 'body',
dataPlacement: 'right'
},
scm_branch: {
labelBind: "scmBranchLabel",
@ -174,7 +151,7 @@ angular.module('ProjectFormDefinition', ['SchedulesListDefinition'])
dataTitle: 'SCM Clean',
dataContainer: 'body',
dataPlacement: 'right',
labelClass: 'checkbox-options'
labelClass: 'checkbox-options stack-inline'
}, {
name: 'scm_delete_on_update',
label: 'Delete on Update',
@ -186,7 +163,7 @@ angular.module('ProjectFormDefinition', ['SchedulesListDefinition'])
dataTitle: 'SCM Delete',
dataContainer: 'body',
dataPlacement: 'right',
labelClass: 'checkbox-options'
labelClass: 'checkbox-options stack-inline'
}, {
name: 'scm_update_on_launch',
label: 'Update on Launch',
@ -197,7 +174,7 @@ angular.module('ProjectFormDefinition', ['SchedulesListDefinition'])
dataTitle: 'SCM Update',
dataContainer: 'body',
dataPlacement: 'right',
labelClass: 'checkbox-options'
labelClass: 'checkbox-options stack-inline'
}]
},
scm_update_cache_timeout: {
@ -262,18 +239,20 @@ angular.module('ProjectFormDefinition', ['SchedulesListDefinition'])
label: 'Role',
type: 'role',
noSort: true,
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4'
class: 'col-lg-4 col-md-4 col-sm-4 col-xs-4',
noSearch: true
},
team_roles: {
label: 'Team Roles',
type: 'team_roles',
noSort: true,
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4'
class: 'col-lg-5 col-md-5 col-sm-5 col-xs-4',
noSearch: true
}
}
},
notifications: {
include: "NotificationsList"
include: "NotificationsList",
}
},
Some files were not shown because too many files have changed in this diff.