Mirror of https://github.com/ansible/awx.git
Merge pull request #4022 from AlanCoding/workflow_copy_relaunch
Workflow copy and relaunch
Commit dd1d911775
@ -339,7 +339,7 @@ class BaseSerializer(serializers.ModelSerializer):
|
||||
if hasattr(self, 'show_capabilities'):
|
||||
view = self.context.get('view', None)
|
||||
parent_obj = None
|
||||
if view and hasattr(view, 'parent_model'):
|
||||
if view and hasattr(view, 'parent_model') and hasattr(view, 'get_parent_object'):
|
||||
parent_obj = view.get_parent_object()
|
||||
if view and view.request and view.request.user:
|
||||
user_capabilities = get_user_capabilities(
|
||||
@ -2203,7 +2203,7 @@ class SystemJobCancelSerializer(SystemJobSerializer):
|
||||
|
||||
|
||||
class WorkflowJobTemplateSerializer(LabelsListMixin, UnifiedJobTemplateSerializer):
|
||||
show_capabilities = ['start', 'edit', 'delete']
|
||||
show_capabilities = ['start', 'edit', 'copy', 'delete']
|
||||
|
||||
class Meta:
|
||||
model = WorkflowJobTemplate
|
||||
@ -2215,6 +2215,7 @@ class WorkflowJobTemplateSerializer(LabelsListMixin, UnifiedJobTemplateSerialize
|
||||
workflow_jobs = reverse('api:workflow_job_template_jobs_list', args=(obj.pk,)),
|
||||
schedules = reverse('api:workflow_job_template_schedules_list', args=(obj.pk,)),
|
||||
launch = reverse('api:workflow_job_template_launch', args=(obj.pk,)),
|
||||
copy = reverse('api:workflow_job_template_copy', args=(obj.pk,)),
|
||||
workflow_nodes = reverse('api:workflow_job_template_workflow_nodes_list', args=(obj.pk,)),
|
||||
labels = reverse('api:workflow_job_template_label_list', args=(obj.pk,)),
|
||||
activity_stream = reverse('api:workflow_job_template_activity_stream_list', args=(obj.pk,)),
|
||||
@ -2251,6 +2252,7 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer):
|
||||
res['workflow_nodes'] = reverse('api:workflow_job_workflow_nodes_list', args=(obj.pk,))
|
||||
res['labels'] = reverse('api:workflow_job_label_list', args=(obj.pk,))
|
||||
res['activity_stream'] = reverse('api:workflow_job_activity_stream_list', args=(obj.pk,))
|
||||
res['relaunch'] = reverse('api:workflow_job_relaunch', args=(obj.pk,))
|
||||
if obj.can_cancel or True:
|
||||
res['cancel'] = reverse('api:workflow_job_cancel', args=(obj.pk,))
|
||||
return res
|
||||
|
||||
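The serializer hunks above are what surface the new actions to API clients: workflow job templates gain a copy related link plus a 'copy' entry in show_capabilities, and workflow jobs gain a relaunch link. A rough sketch of the resulting related block for a workflow job follows (illustrative only; the /api/v1/ prefix and the id are assumptions, not taken from a real response):

# Illustrative shape only; the /api/v1/ prefix and the id are assumptions.
related = {
    'workflow_nodes': '/api/v1/workflow_jobs/7/workflow_nodes/',
    'labels': '/api/v1/workflow_jobs/7/labels/',
    'activity_stream': '/api/v1/workflow_jobs/7/activity_stream/',
    'relaunch': '/api/v1/workflow_jobs/7/relaunch/',  # new in this commit
    'cancel': '/api/v1/workflow_jobs/7/cancel/',
}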
@ -262,6 +262,7 @@ workflow_job_template_urls = patterns('awx.api.views',
|
||||
url(r'^(?P<pk>[0-9]+)/$', 'workflow_job_template_detail'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_jobs/$', 'workflow_job_template_jobs_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/launch/$', 'workflow_job_template_launch'),
|
||||
url(r'^(?P<pk>[0-9]+)/copy/$', 'workflow_job_template_copy'),
|
||||
url(r'^(?P<pk>[0-9]+)/schedules/$', 'workflow_job_template_schedules_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/survey_spec/$', 'workflow_job_template_survey_spec'),
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_template_workflow_nodes_list'),
|
||||
@ -271,7 +272,6 @@ workflow_job_template_urls = patterns('awx.api.views',
|
||||
url(r'^(?P<pk>[0-9]+)/notification_templates_success/$', 'workflow_job_template_notification_templates_success_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/access_list/$', 'workflow_job_template_access_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', 'workflow_job_template_label_list'),
|
||||
# url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_template_cancel'),
|
||||
)
|
||||
|
||||
workflow_job_urls = patterns('awx.api.views',
|
||||
@ -280,6 +280,7 @@ workflow_job_urls = patterns('awx.api.views',
|
||||
url(r'^(?P<pk>[0-9]+)/workflow_nodes/$', 'workflow_job_workflow_nodes_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/labels/$', 'workflow_job_label_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/cancel/$', 'workflow_job_cancel'),
|
||||
url(r'^(?P<pk>[0-9]+)/relaunch/$', 'workflow_job_relaunch'),
|
||||
url(r'^(?P<pk>[0-9]+)/notifications/$', 'workflow_job_notifications_list'),
|
||||
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'workflow_job_activity_stream_list'),
|
||||
)
|
||||
|
||||
@@ -2917,6 +2917,32 @@ class WorkflowJobTemplateDetail(RetrieveUpdateDestroyAPIView):
    new_in_310 = True


class WorkflowJobTemplateCopy(GenericAPIView):

    model = WorkflowJobTemplate
    parent_model = WorkflowJobTemplate
    serializer_class = EmptySerializer
    new_in_310 = True

    def get(self, request, *args, **kwargs):
        obj = self.get_object()
        data = {}
        copy_TF, messages = request.user.can_access_with_errors(self.model, 'copy', obj)
        data['can_copy'] = copy_TF
        data['warnings'] = messages
        return Response(data)

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        if not request.user.can_access(self.model, 'copy', obj):
            raise PermissionDenied()
        new_wfjt = obj.user_copy(request.user)
        data = OrderedDict()
        data.update(WorkflowJobTemplateSerializer(
            new_wfjt, context=self.get_serializer_context()).to_representation(new_wfjt))
        return Response(data, status=status.HTTP_201_CREATED)


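A hedged usage sketch of the new copy endpoint (not part of the diff): it assumes the workflow_job_template_urls pattern below is mounted at /api/v1/workflow_job_templates/ and that basic auth is enabled; adjust both for a real deployment.

import requests

copy_url = 'https://awx.example.com/api/v1/workflow_job_templates/42/copy/'
auth = ('admin', 'password')

check = requests.get(copy_url, auth=auth).json()        # {'can_copy': bool, 'warnings': {...}}
if check['can_copy']:
    new_wfjt = requests.post(copy_url, auth=auth).json()  # 201 with the copied WFJT's serialized data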
class WorkflowJobTemplateLabelList(JobTemplateLabelList):
|
||||
parent_model = WorkflowJobTemplate
|
||||
new_in_310 = True
|
||||
@@ -2952,7 +2978,7 @@ class WorkflowJobTemplateLaunch(RetrieveAPIView):

        prompted_fields, ignored_fields = obj._accept_or_ignore_job_kwargs(**request.data)

        new_job = obj.create_unified_job(**prompted_fields)
        new_job = obj.create_workflow_job(**prompted_fields)
        new_job.signal_start(**prompted_fields)

        data = OrderedDict()
@@ -2962,6 +2988,25 @@ class WorkflowJobTemplateLaunch(RetrieveAPIView):
        return Response(data, status=status.HTTP_201_CREATED)


class WorkflowJobRelaunch(GenericAPIView):

    model = WorkflowJob
    serializer_class = EmptySerializer
    is_job_start = True

    def get(self, request, *args, **kwargs):
        return Response({})

    def post(self, request, *args, **kwargs):
        obj = self.get_object()
        new_workflow_job = obj.create_relaunch_workflow_job()
        new_workflow_job.signal_start()

        data = WorkflowJobSerializer(new_workflow_job, context=self.get_serializer_context()).data
        headers = {'Location': new_workflow_job.get_absolute_url()}
        return Response(data, status=status.HTTP_201_CREATED, headers=headers)


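A hedged sketch (not part of the diff) of relaunching a finished workflow job through the new endpoint; the /api/v1/workflow_jobs/ prefix and the credentials are assumptions.

import requests

resp = requests.post('https://awx.example.com/api/v1/workflow_jobs/7/relaunch/',
                     auth=('admin', 'password'))
assert resp.status_code == 201
print(resp.headers['Location'])   # URL of the newly created workflow job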
# TODO:
|
||||
class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
|
||||
|
||||
@ -3325,8 +3370,7 @@ class JobRelaunch(RetrieveAPIView, GenericAPIView):
|
||||
if not serializer.is_valid():
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
obj.launch_type = 'relaunch'
|
||||
new_job = obj.copy()
|
||||
new_job = obj.copy_unified_job()
|
||||
result = new_job.signal_start(**request.data)
|
||||
if not result:
|
||||
data = dict(passwords_needed_to_start=new_job.passwords_needed_to_start)
|
||||
|
||||
@ -24,7 +24,7 @@ from awx.main.models.mixins import ResourceMixin
|
||||
from awx.main.task_engine import TaskEnhancer
|
||||
from awx.conf.license import LicenseForbids
|
||||
|
||||
__all__ = ['get_user_queryset', 'check_user_access',
|
||||
__all__ = ['get_user_queryset', 'check_user_access', 'check_user_access_with_errors',
|
||||
'user_accessible_objects',
|
||||
'user_admin_role', 'StateConflict',]
|
||||
|
||||
@@ -124,6 +124,20 @@ def check_user_access(user, model_class, action, *args, **kwargs):
    return False


def check_user_access_with_errors(user, model_class, action, *args, **kwargs):
    '''
    Return a True/False permission decision along with a summary of problems with the action.
    '''
    for access_class in access_registry.get(model_class, []):
        access_instance = access_class(user, save_messages=True)
        access_method = getattr(access_instance, 'can_%s' % action, None)
        result = access_method(*args, **kwargs)
        logger.debug('%s.%s %r returned %r', access_instance.__class__.__name__,
                     access_method.__name__, args, result)
        return (result, access_instance.messages)
    return (False, '')


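Once this helper is attached to User as can_access_with_errors (see the add_to_class hunk further down), callers get both the permission decision and the saved warnings. A minimal sketch of how the copy view consumes it, assuming a user and a workflow job template already exist:

from awx.main.models.workflow import WorkflowJobTemplate

def explain_copy_permission(user, wfjt):
    # Returns (bool, dict): may `user` copy this WFJT, and if not, per-node warnings
    # keyed by node id, e.g. {3: {'inventory': 'Prompted inventory ... cannot be copied.'}}
    can_copy, messages = user.can_access_with_errors(WorkflowJobTemplate, 'copy', wfjt)
    return can_copy, messages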
def get_user_capabilities(user, instance, **kwargs):
|
||||
'''
|
||||
Returns a dictionary of capabilities the user has on the particular
|
||||
@ -160,8 +174,11 @@ class BaseAccess(object):
|
||||
|
||||
model = None
|
||||
|
||||
def __init__(self, user):
|
||||
def __init__(self, user, save_messages=False):
|
||||
self.user = user
|
||||
self.save_messages = save_messages
|
||||
if save_messages:
|
||||
self.messages = {}
|
||||
|
||||
def get_queryset(self):
|
||||
if self.user.is_superuser or self.user.is_system_auditor:
|
||||
@ -329,8 +346,6 @@ class BaseAccess(object):
|
||||
# Aliases for going from UI language to API language
|
||||
if display_method == 'edit':
|
||||
method = 'change'
|
||||
elif display_method == 'copy':
|
||||
method = 'add'
|
||||
elif display_method == 'adhoc':
|
||||
method = 'run_ad_hoc_commands'
|
||||
else:
|
||||
@ -347,17 +362,12 @@ class BaseAccess(object):
|
||||
user_capabilities['copy'] = user_capabilities['edit']
|
||||
continue
|
||||
|
||||
# Preprocessing before the access method is called
|
||||
data = {}
|
||||
if method == 'add':
|
||||
if isinstance(obj, JobTemplate):
|
||||
data['reference_obj'] = obj
|
||||
|
||||
# Compute permission
|
||||
data = {}
|
||||
access_method = getattr(self, "can_%s" % method)
|
||||
if method in ['change']: # 3 args
|
||||
user_capabilities[display_method] = access_method(obj, data)
|
||||
elif method in ['delete', 'run_ad_hoc_commands']:
|
||||
elif method in ['delete', 'run_ad_hoc_commands', 'copy']:
|
||||
user_capabilities[display_method] = access_method(obj)
|
||||
elif method in ['start']:
|
||||
user_capabilities[display_method] = access_method(obj, validate_license=False)
|
||||
@ -1095,6 +1105,9 @@ class JobTemplateAccess(BaseAccess):
|
||||
else:
|
||||
return False
|
||||
|
||||
def can_copy(self, obj):
|
||||
return self.can_add({'reference_obj': obj})
|
||||
|
||||
def can_start(self, obj, validate_license=True):
|
||||
# Check license.
|
||||
if validate_license:
|
||||
@ -1487,7 +1500,7 @@ class WorkflowJobNodeAccess(BaseAccess):
|
||||
return False
|
||||
|
||||
|
||||
# TODO: revisit for survey logic, notification attachments?
|
||||
# TODO: notification attachments?
|
||||
class WorkflowJobTemplateAccess(BaseAccess):
|
||||
'''
|
||||
I can only see/manage Workflow Job Templates if I'm a super user
|
||||
@ -1519,37 +1532,33 @@ class WorkflowJobTemplateAccess(BaseAccess):
|
||||
if not data: # So the browseable API will work
|
||||
return Organization.accessible_objects(self.user, 'admin_role').exists()
|
||||
|
||||
# if reference_obj is provided, determine if it can be copied
|
||||
reference_obj = data.pop('reference_obj', None)
|
||||
if reference_obj:
|
||||
for node in reference_obj.workflow_job_template_nodes.all():
|
||||
if node.inventory and self.user not in node.inventory.use_role:
|
||||
return False
|
||||
if node.credential and self.user not in node.credential.use_role:
|
||||
return False
|
||||
if node.unified_job_template:
|
||||
if isinstance(node.unified_job_template, SystemJobTemplate):
|
||||
if not self.user.is_superuser:
|
||||
return False
|
||||
elif isinstance(node.unified_job_template, JobTemplate):
|
||||
if self.user not in node.unified_job_template.execute_role:
|
||||
return False
|
||||
elif isinstance(node.unified_job_template, Project):
|
||||
if self.user not in node.unified_job_template.update_role:
|
||||
return False
|
||||
elif isinstance(node.unified_job_template, InventorySource):
|
||||
if not self.user.can_access(InventorySource, 'start', node.unified_job_template):
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
return True
|
||||
|
||||
# will check this if surveys are added to WFJT
|
||||
if 'survey_enabled' in data and data['survey_enabled']:
|
||||
self.check_license(feature='surveys')
|
||||
|
||||
return self.check_related('organization', Organization, data, mandatory=True)
|
||||
|
||||
def can_copy(self, obj):
|
||||
if self.save_messages:
|
||||
wfjt_errors = {}
|
||||
qs = obj.workflow_job_template_nodes
|
||||
qs.select_related('unified_job_template', 'inventory', 'credential')
|
||||
for node in qs.all():
|
||||
node_errors = {}
|
||||
if node.inventory and self.user not in node.inventory.use_role:
|
||||
node_errors['inventory'] = 'Prompted inventory %s cannot be copied.' % node.inventory.name
|
||||
if node.credential and self.user not in node.credential.use_role:
|
||||
node_errors['credential'] = 'Prompted credential %s cannot be copied.' % node.credential.name
|
||||
ujt = node.unified_job_template
|
||||
if ujt and not self.user.can_access(UnifiedJobTemplate, 'start', ujt, validate_license=False):
|
||||
node_errors['unified_job_template'] = (
|
||||
'Prompted %s %s cannot be copied.' % (ujt._meta.verbose_name_raw, ujt.name))
|
||||
if node_errors:
|
||||
wfjt_errors[node.id] = node_errors
|
||||
self.messages.update(wfjt_errors)
|
||||
|
||||
return self.check_related('organization', Organization, {}, obj=obj, mandatory=True)
|
||||
|
||||
def can_start(self, obj, validate_license=True):
|
||||
if validate_license:
|
||||
# check basic license, node count
|
||||
@ -1620,9 +1629,16 @@ class WorkflowJobAccess(BaseAccess):
|
||||
return self.user.is_superuser
|
||||
return self.user in obj.workflow_job_template.admin_role
|
||||
|
||||
# TODO: add support for relaunching workflow jobs
|
||||
def can_start(self, obj, validate_license=True):
|
||||
return False
|
||||
if validate_license:
|
||||
self.check_license()
|
||||
if obj.survey_enabled:
|
||||
self.check_license(feature='surveys')
|
||||
|
||||
if self.user.is_superuser:
|
||||
return True
|
||||
|
||||
return (obj.workflow_job_template and self.user in obj.workflow_job_template.execute_role)
|
||||
|
||||
def can_cancel(self, obj):
|
||||
if not obj.can_cancel:
|
||||
@ -1818,6 +1834,11 @@ class UnifiedJobTemplateAccess(BaseAccess):
|
||||
|
||||
return qs.all()
|
||||
|
||||
def can_start(self, obj, validate_license=True):
|
||||
access_class = access_registry.get(obj.__class__, [])[0]
|
||||
access_instance = access_class(self.user)
|
||||
return access_instance.can_start(obj, validate_license=validate_license)
|
||||
|
||||
|
||||
class UnifiedJobAccess(BaseAccess):
|
||||
'''
|
||||
|
||||
@ -48,6 +48,7 @@ from awx.main.access import * # noqa
|
||||
|
||||
User.add_to_class('get_queryset', get_user_queryset)
|
||||
User.add_to_class('can_access', check_user_access)
|
||||
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
|
||||
User.add_to_class('accessible_objects', user_accessible_objects)
|
||||
User.add_to_class('admin_role', user_admin_role)
|
||||
|
||||
|
||||
@ -455,6 +455,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
|
||||
def _global_timeout_setting(self):
|
||||
return 'DEFAULT_JOB_TIMEOUT'
|
||||
|
||||
@classmethod
|
||||
def _get_unified_job_template_class(cls):
|
||||
return JobTemplate
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:job_detail', args=(self.pk,))
|
||||
|
||||
@ -629,14 +633,6 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
|
||||
content = super(Job, self)._result_stdout_raw(*args, **kwargs)
|
||||
return self._survey_search_and_replace(content)
|
||||
|
||||
def copy(self):
|
||||
presets = {}
|
||||
for kw in JobTemplate._get_unified_job_field_names():
|
||||
presets[kw] = getattr(self, kw)
|
||||
if not self.job_template:
|
||||
self.job_template = JobTemplate(name='temporary')
|
||||
return self.job_template.create_unified_job(**presets)
|
||||
|
||||
# Job Credential required
|
||||
@property
|
||||
def can_start(self):
|
||||
|
||||
@ -102,9 +102,6 @@ class SurveyJobTemplateMixin(models.Model):
|
||||
Combine extra_vars with variable precedence order:
|
||||
JT extra_vars -> JT survey defaults -> runtime extra_vars
|
||||
'''
|
||||
if 'launch_type' in kwargs and kwargs['launch_type'] == 'relaunch':
|
||||
return kwargs
|
||||
|
||||
# Job Template extra_vars
|
||||
extra_vars = self.extra_vars_dict
|
||||
|
||||
|
||||
@ -10,6 +10,7 @@ import os
|
||||
import os.path
|
||||
from collections import OrderedDict
|
||||
from StringIO import StringIO
|
||||
from datetime import datetime
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
@ -29,7 +30,10 @@ from djcelery.models import TaskMeta
|
||||
# AWX
|
||||
from awx.main.models.base import * # noqa
|
||||
from awx.main.models.schedules import Schedule
|
||||
from awx.main.utils import decrypt_field, _inventory_updates
|
||||
from awx.main.utils import (
|
||||
decrypt_field, _inventory_updates,
|
||||
copy_model_by_class, copy_m2m_relationships
|
||||
)
|
||||
from awx.main.redact import UriCleaner, REPLACE_STR
|
||||
from awx.main.consumers import emit_channel_notification
|
||||
from awx.main.fields import JSONField
|
||||
@ -303,46 +307,13 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
|
||||
Create a new unified job based on this unified job template.
|
||||
'''
|
||||
unified_job_class = self._get_unified_job_class()
|
||||
fields = self._get_unified_job_field_names()
|
||||
unified_job = copy_model_by_class(self, unified_job_class, fields, kwargs)
|
||||
|
||||
# Set the unified job template back-link on the job
|
||||
parent_field_name = unified_job_class._get_parent_field_name()
|
||||
kwargs.pop('%s_id' % parent_field_name, None)
|
||||
create_kwargs = {}
|
||||
m2m_fields = {}
|
||||
if self.pk:
|
||||
create_kwargs[parent_field_name] = self
|
||||
for field_name in self._get_unified_job_field_names():
|
||||
# Foreign keys can be specified as field_name or field_name_id.
|
||||
id_field_name = '%s_id' % field_name
|
||||
if hasattr(self, id_field_name):
|
||||
if field_name in kwargs:
|
||||
value = kwargs[field_name]
|
||||
elif id_field_name in kwargs:
|
||||
value = kwargs[id_field_name]
|
||||
else:
|
||||
value = getattr(self, id_field_name)
|
||||
if hasattr(value, 'id'):
|
||||
value = value.id
|
||||
create_kwargs[id_field_name] = value
|
||||
elif field_name in kwargs:
|
||||
if field_name == 'extra_vars' and isinstance(kwargs[field_name], dict):
|
||||
create_kwargs[field_name] = json.dumps(kwargs['extra_vars'])
|
||||
# We can't get a hold of django.db.models.fields.related.ManyRelatedManager to compare
|
||||
# so this is the next best thing.
|
||||
elif kwargs[field_name].__class__.__name__ is 'ManyRelatedManager':
|
||||
m2m_fields[field_name] = kwargs[field_name]
|
||||
else:
|
||||
create_kwargs[field_name] = kwargs[field_name]
|
||||
elif hasattr(self, field_name):
|
||||
field_obj = self._meta.get_field_by_name(field_name)[0]
|
||||
# Many to Many can be specified as field_name
|
||||
if isinstance(field_obj, models.ManyToManyField):
|
||||
m2m_fields[field_name] = getattr(self, field_name)
|
||||
else:
|
||||
create_kwargs[field_name] = getattr(self, field_name)
|
||||
if hasattr(self, '_update_unified_job_kwargs'):
|
||||
new_kwargs = self._update_unified_job_kwargs(**create_kwargs)
|
||||
else:
|
||||
new_kwargs = create_kwargs
|
||||
unified_job = unified_job_class(**new_kwargs)
|
||||
setattr(unified_job, parent_field_name, self)
|
||||
|
||||
# For JobTemplate-based jobs with surveys, add passwords to list for perma-redaction
|
||||
if hasattr(self, 'survey_spec') and getattr(self, 'survey_enabled', False):
|
||||
password_list = self.survey_password_variables()
|
||||
@ -350,12 +321,32 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio
|
||||
for password in password_list:
|
||||
hide_password_dict[password] = REPLACE_STR
|
||||
unified_job.survey_passwords = hide_password_dict
|
||||
|
||||
unified_job.save()
|
||||
for field_name, src_field_value in m2m_fields.iteritems():
|
||||
dest_field = getattr(unified_job, field_name)
|
||||
dest_field.add(*list(src_field_value.all().values_list('id', flat=True)))
|
||||
# Labels copied here
|
||||
copy_m2m_relationships(self, unified_job, fields, kwargs=kwargs)
|
||||
return unified_job
|
||||
|
||||
@classmethod
|
||||
def _get_unified_jt_copy_names(cls):
|
||||
return cls._get_unified_job_field_names()
|
||||
|
||||
def copy_unified_jt(self):
|
||||
'''
|
||||
Returns saved object, including related fields.
|
||||
Create a copy of this unified job template.
|
||||
'''
|
||||
unified_jt_class = self.__class__
|
||||
fields = self._get_unified_jt_copy_names()
|
||||
unified_jt = copy_model_by_class(self, unified_jt_class, fields, {})
|
||||
|
||||
time_now = datetime.now()
|
||||
unified_jt.name = unified_jt.name + ' @ ' + time_now.strftime('%H:%M:%S %p')
|
||||
|
||||
unified_jt.save()
|
||||
copy_m2m_relationships(self, unified_jt, fields)
|
||||
return unified_jt
|
||||
|
||||
|
||||
class UnifiedJobTypeStringMixin(object):
|
||||
@classmethod
|
||||
@ -552,6 +543,13 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
def _get_parent_field_name(cls):
|
||||
return 'unified_job_template' # Override in subclasses.
|
||||
|
||||
@classmethod
|
||||
def _get_unified_job_template_class(cls):
|
||||
'''
|
||||
Return subclass of UnifiedJobTemplate that applies to this unified job.
|
||||
'''
|
||||
raise NotImplementedError # Implement in subclass.
|
||||
|
||||
def _global_timeout_setting(self):
|
||||
"Override in child classes, None value indicates this is not configurable"
|
||||
return None
|
||||
@ -666,6 +664,24 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique
|
||||
pass
|
||||
super(UnifiedJob, self).delete()
|
||||
|
||||
def copy_unified_job(self):
|
||||
'''
|
||||
Returns saved object, including related fields.
|
||||
Create a copy of this unified job for the purpose of relaunch
|
||||
'''
|
||||
unified_job_class = self.__class__
|
||||
unified_jt_class = self._get_unified_job_template_class()
|
||||
parent_field_name = unified_job_class._get_parent_field_name()
|
||||
|
||||
fields = unified_jt_class._get_unified_job_field_names() + [parent_field_name]
|
||||
unified_job = copy_model_by_class(self, unified_job_class, fields, {})
|
||||
unified_job.job_type = 'relaunch'
|
||||
unified_job.save()
|
||||
|
||||
# Labels copied here
|
||||
copy_m2m_relationships(self, unified_job, fields)
|
||||
return unified_job
|
||||
|
||||
def result_stdout_raw_handle(self, attempt=0):
|
||||
"""Return a file-like object containing the standard out of the
|
||||
job's result.
|
||||
|
||||
@ -159,6 +159,18 @@ class WorkflowNodeBase(CreatedModifiedModel):
|
||||
return ['workflow_job', 'unified_job_template',
|
||||
'inventory', 'credential', 'char_prompts']
|
||||
|
||||
def create_workflow_job_node(self, **kwargs):
|
||||
'''
|
||||
Create a new workflow job node based on this workflow node.
|
||||
'''
|
||||
create_kwargs = {}
|
||||
for field_name in self._get_workflow_job_field_names():
|
||||
if field_name in kwargs:
|
||||
create_kwargs[field_name] = kwargs[field_name]
|
||||
elif hasattr(self, field_name):
|
||||
create_kwargs[field_name] = getattr(self, field_name)
|
||||
return WorkflowJobNode.objects.create(**create_kwargs)
|
||||
|
||||
|
||||
class WorkflowJobTemplateNode(WorkflowNodeBase):
|
||||
workflow_job_template = models.ForeignKey(
|
||||
@ -173,17 +185,24 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:workflow_job_template_node_detail', args=(self.pk,))
|
||||
|
||||
def create_workflow_job_node(self, **kwargs):
|
||||
def create_wfjt_node_copy(self, user, workflow_job_template=None):
|
||||
'''
|
||||
Create a new workflow job node based on this workflow node.
|
||||
Copy this node to a new WFJT, leaving out related fields the user
|
||||
is not allowed to access
|
||||
'''
|
||||
create_kwargs = {}
|
||||
for field_name in self._get_workflow_job_field_names():
|
||||
if field_name in kwargs:
|
||||
create_kwargs[field_name] = kwargs[field_name]
|
||||
elif hasattr(self, field_name):
|
||||
create_kwargs[field_name] = getattr(self, field_name)
|
||||
return WorkflowJobNode.objects.create(**create_kwargs)
|
||||
if hasattr(self, field_name):
|
||||
item = getattr(self, field_name)
|
||||
if field_name in ['inventory', 'credential']:
|
||||
if not user.can_access(item.__class__, 'use', item):
|
||||
continue
|
||||
if field_name in ['unified_job_template']:
|
||||
if not user.can_access(item.__class__, 'start', item, validate_license=False):
|
||||
continue
|
||||
create_kwargs[field_name] = item
|
||||
create_kwargs['workflow_job_template'] = workflow_job_template
|
||||
return self.__class__.objects.create(**create_kwargs)
|
||||
|
||||
|
||||
class WorkflowJobNode(WorkflowNodeBase):
|
||||
@ -248,14 +267,14 @@ class WorkflowJobNode(WorkflowNodeBase):
|
||||
if password_dict:
|
||||
data['survey_passwords'] = password_dict
|
||||
# process extra_vars
|
||||
# TODO: still lack consensus about variable precedence
|
||||
extra_vars = {}
|
||||
if self.workflow_job and self.workflow_job.extra_vars:
|
||||
extra_vars.update(self.workflow_job.extra_vars_dict)
|
||||
if aa_dict:
|
||||
functional_aa_dict = copy(aa_dict)
|
||||
functional_aa_dict.pop('_ansible_no_log', None)
|
||||
extra_vars.update(functional_aa_dict)
|
||||
# Workflow Job extra_vars higher precedence than ancestor artifacts
|
||||
if self.workflow_job and self.workflow_job.extra_vars:
|
||||
extra_vars.update(self.workflow_job.extra_vars_dict)
|
||||
if extra_vars:
|
||||
data['extra_vars'] = extra_vars
|
||||
# ensure that unified jobs created by WorkflowJobs are marked
|
||||
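The reordering above is what gives workflow-level extra_vars precedence over ancestor artifacts: dict.update applies later updates on top of earlier ones, so moving the workflow_job update after the artifacts update lets it win on key collisions. A minimal illustration of that ordering (the variable values are made up):

artifacts = {'region': 'from-artifacts', 'retries': 3}
wfj_vars = {'region': 'from-workflow-job'}

extra_vars = {}
extra_vars.update(artifacts)   # ancestor artifacts applied first
extra_vars.update(wfj_vars)    # workflow job extra_vars applied last, so they win
assert extra_vars == {'region': 'from-workflow-job', 'retries': 3}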
@ -274,6 +293,40 @@ class WorkflowJobOptions(BaseModel):
|
||||
|
||||
extra_vars_dict = VarsDictProperty('extra_vars', True)
|
||||
|
||||
@property
|
||||
def workflow_nodes(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _create_workflow_nodes(self, old_node_list, user=None):
|
||||
node_links = {}
|
||||
for old_node in old_node_list:
|
||||
if user:
|
||||
new_node = old_node.create_wfjt_node_copy(user, workflow_job_template=self)
|
||||
else:
|
||||
new_node = old_node.create_workflow_job_node(workflow_job=self)
|
||||
node_links[old_node.pk] = new_node
|
||||
return node_links
|
||||
|
||||
def _inherit_node_relationships(self, old_node_list, node_links):
|
||||
for old_node in old_node_list:
|
||||
new_node = node_links[old_node.pk]
|
||||
for relationship in ['always_nodes', 'success_nodes', 'failure_nodes']:
|
||||
old_manager = getattr(old_node, relationship)
|
||||
for old_child_node in old_manager.all():
|
||||
new_child_node = node_links[old_child_node.pk]
|
||||
new_manager = getattr(new_node, relationship)
|
||||
new_manager.add(new_child_node)
|
||||
|
||||
def copy_nodes_from_original(self, original=None, user=None):
|
||||
old_node_list = original.workflow_nodes.prefetch_related('always_nodes', 'success_nodes', 'failure_nodes').all()
|
||||
node_links = self._create_workflow_nodes(old_node_list, user=user)
|
||||
self._inherit_node_relationships(old_node_list, node_links)
|
||||
|
||||
def create_relaunch_workflow_job(self):
|
||||
new_workflow_job = self.copy_unified_job()
|
||||
new_workflow_job.copy_nodes_from_original(original=self)
|
||||
return new_workflow_job
|
||||
|
||||
|
||||
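copy_nodes_from_original performs the graph copy in two passes: first it creates a copy of every node and records a map from the old node's pk to its new copy, then it walks the always/success/failure edges of the originals and re-creates each edge between the corresponding copies. A standalone sketch of that pattern with plain objects (Node is a stand-in, not an AWX model):

class Node(object):
    def __init__(self, pk):
        self.pk = pk
        self.success_nodes = []

old = {i: Node(i) for i in range(3)}
old[0].success_nodes.append(old[1])
old[1].success_nodes.append(old[2])

# Pass 1: copy nodes, keyed by the original pk.
node_links = {pk: Node(pk + 100) for pk, node in old.items()}
# Pass 2: re-create relationships between the copies.
for pk, old_node in old.items():
    for child in old_node.success_nodes:
        node_links[pk].success_nodes.append(node_links[child.pk])

assert node_links[0].success_nodes[0] is node_links[1]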
class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTemplateMixin, ResourceMixin):
|
||||
class Meta:
|
||||
@ -298,13 +351,23 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
||||
'organization.auditor_role', 'execute_role', 'admin_role'
|
||||
])
|
||||
|
||||
@property
|
||||
def workflow_nodes(self):
|
||||
return self.workflow_job_template_nodes
|
||||
|
||||
@classmethod
|
||||
def _get_unified_job_class(cls):
|
||||
return WorkflowJob
|
||||
|
||||
@classmethod
|
||||
def _get_unified_job_field_names(cls):
|
||||
return ['name', 'description', 'extra_vars', 'labels', 'survey_passwords', 'schedule', 'launch_type']
|
||||
return ['name', 'description', 'extra_vars', 'labels', 'survey_passwords',
|
||||
'schedule', 'launch_type']
|
||||
|
||||
@classmethod
|
||||
def _get_unified_jt_copy_names(cls):
|
||||
return (super(WorkflowJobTemplate, cls)._get_unified_jt_copy_names() +
|
||||
['survey_spec', 'survey_enabled', 'organization'])
|
||||
|
||||
def get_absolute_url(self):
|
||||
return reverse('api:workflow_job_template_detail', args=(self.pk,))
|
||||
@ -326,21 +389,10 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
||||
return dict(error=list(error_notification_templates),
|
||||
success=list(success_notification_templates),
|
||||
any=list(any_notification_templates))
|
||||
# TODO: Surveys
|
||||
|
||||
#def create_job(self, **kwargs):
|
||||
# '''
|
||||
# Create a new job based on this template.
|
||||
# '''
|
||||
# return self.create_unified_job(**kwargs)
|
||||
|
||||
# TODO: Delete create_unified_job here and explicitly call create_workflow_job() .. figure out where the call is
|
||||
def create_unified_job(self, **kwargs):
|
||||
|
||||
#def create_workflow_job(self, **kwargs):
|
||||
#workflow_job = self.create_unified_job(**kwargs)
|
||||
workflow_job = super(WorkflowJobTemplate, self).create_unified_job(**kwargs)
|
||||
workflow_job.inherit_job_template_workflow_nodes()
|
||||
def create_workflow_job(self, **kwargs):
|
||||
workflow_job = self.create_unified_job(**kwargs)
|
||||
workflow_job.copy_nodes_from_original(original=self)
|
||||
return workflow_job
|
||||
|
||||
def _accept_or_ignore_job_kwargs(self, extra_vars=None, **kwargs):
|
||||
@ -377,51 +429,18 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl
|
||||
warning_data[node.pk] = node_prompts_warnings
|
||||
return warning_data
|
||||
|
||||
def user_copy(self, user):
|
||||
new_wfjt = self.copy_unified_jt()
|
||||
new_wfjt.copy_nodes_from_original(original=self, user=user)
|
||||
return new_wfjt
|
||||
|
||||
|
||||
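Putting the pieces together, user_copy is the ORM entry point behind the copy view above: copy_unified_jt clones the template's own fields (the name gets a timestamp suffix) and copy_nodes_from_original rebuilds the node graph while silently dropping prompted inventories, credentials, and unified job templates the copying user cannot use or start. A hedged sketch of calling it directly; the import path comes from the tests in this diff, and the user and template are assumed to exist:

from awx.main.models.workflow import WorkflowJobTemplate
from django.contrib.auth.models import User

wfjt = WorkflowJobTemplate.objects.get(name='my workflow')
user = User.objects.get(username='org-admin')

new_wfjt = wfjt.user_copy(user)
# Same graph shape, but nodes keep only the prompts this user is allowed to use.
print(new_wfjt.name, new_wfjt.workflow_job_template_nodes.count())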
# Stub in place because of old migrations, can remove if migrations are squashed
|
||||
class WorkflowJobInheritNodesMixin(object):
|
||||
def _inherit_relationship(self, old_node, new_node, node_ids_map, node_type):
|
||||
old_related_nodes = self._get_all_by_type(old_node, node_type)
|
||||
new_node_type_mgr = getattr(new_node, node_type)
|
||||
|
||||
for old_related_node in old_related_nodes:
|
||||
new_related_node = self._get_workflow_job_node_by_id(node_ids_map[old_related_node.id])
|
||||
new_node_type_mgr.add(new_related_node)
|
||||
|
||||
'''
|
||||
Create a WorkflowJobNode for each WorkflowJobTemplateNode
|
||||
'''
|
||||
def _create_workflow_job_nodes(self, old_nodes):
|
||||
return [old_node.create_workflow_job_node(workflow_job=self) for old_node in old_nodes]
|
||||
|
||||
def _map_workflow_job_nodes(self, old_nodes, new_nodes):
|
||||
node_ids_map = {}
|
||||
|
||||
for i, old_node in enumerate(old_nodes):
|
||||
node_ids_map[old_node.id] = new_nodes[i].id
|
||||
|
||||
return node_ids_map
|
||||
|
||||
def _get_workflow_job_template_nodes(self):
|
||||
return self.workflow_job_template.workflow_job_template_nodes.all()
|
||||
|
||||
def _get_workflow_job_node_by_id(self, id):
|
||||
return WorkflowJobNode.objects.get(id=id)
|
||||
|
||||
def _get_all_by_type(self, node, node_type):
|
||||
return getattr(node, node_type).all()
|
||||
|
||||
def inherit_job_template_workflow_nodes(self):
|
||||
old_nodes = self._get_workflow_job_template_nodes()
|
||||
new_nodes = self._create_workflow_job_nodes(old_nodes)
|
||||
node_ids_map = self._map_workflow_job_nodes(old_nodes, new_nodes)
|
||||
|
||||
for index, old_node in enumerate(old_nodes):
|
||||
new_node = new_nodes[index]
|
||||
for node_type in ['success_nodes', 'failure_nodes', 'always_nodes']:
|
||||
self._inherit_relationship(old_node, new_node, node_ids_map, node_type)
|
||||
pass
|
||||
|
||||
|
||||
class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin, WorkflowJobInheritNodesMixin):
|
||||
class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificationMixin):
|
||||
class Meta:
|
||||
app_label = 'main'
|
||||
ordering = ('id',)
|
||||
@ -435,10 +454,18 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
|
||||
on_delete=models.SET_NULL,
|
||||
)
|
||||
|
||||
@property
|
||||
def workflow_nodes(self):
|
||||
return self.workflow_job_nodes
|
||||
|
||||
@classmethod
|
||||
def _get_parent_field_name(cls):
|
||||
return 'workflow_job_template'
|
||||
|
||||
@classmethod
|
||||
def _get_unified_job_template_class(cls):
|
||||
return WorkflowJobTemplate
|
||||
|
||||
def _has_failed(self):
|
||||
return False
|
||||
|
||||
|
||||
@ -178,7 +178,7 @@ def mk_workflow_job_template(name, extra_vars='', spec=None, organization=None,
|
||||
wfjt = WorkflowJobTemplate(name=name, extra_vars=extra_vars, organization=organization)
|
||||
|
||||
wfjt.survey_spec = spec
|
||||
if wfjt.survey_spec is not None:
|
||||
if wfjt.survey_spec:
|
||||
wfjt.survey_enabled = True
|
||||
|
||||
if persisted:
|
||||
|
||||
@ -261,7 +261,7 @@ def test_job_relaunch_copy_vars(job_with_links, machine_credential, inventory,
|
||||
job_with_links.limit = "my_server"
|
||||
with mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate._get_unified_job_field_names',
|
||||
return_value=['inventory', 'credential', 'limit']):
|
||||
second_job = job_with_links.copy()
|
||||
second_job = job_with_links.copy_unified_job()
|
||||
|
||||
# Check that job data matches the original variables
|
||||
assert second_job.credential == job_with_links.credential
|
||||
|
||||
@ -53,7 +53,7 @@ class TestWorkflowJob:
|
||||
return wfj
|
||||
|
||||
def test_inherit_job_template_workflow_nodes(self, mocker, workflow_job):
|
||||
workflow_job.inherit_job_template_workflow_nodes()
|
||||
workflow_job.copy_nodes_from_original(original=workflow_job.workflow_job_template)
|
||||
|
||||
nodes = WorkflowJob.objects.get(id=workflow_job.id).workflow_job_nodes.all().order_by('created')
|
||||
assert nodes[0].success_nodes.filter(id=nodes[1].id).exists()
|
||||
@ -99,8 +99,10 @@ class TestWorkflowJob:
|
||||
@pytest.mark.django_db
|
||||
class TestWorkflowJobTemplate:
|
||||
@pytest.fixture
|
||||
def wfjt(self, workflow_job_template_factory):
|
||||
wfjt = workflow_job_template_factory('test').workflow_job_template
|
||||
def wfjt(self, workflow_job_template_factory, organization):
|
||||
wfjt = workflow_job_template_factory(
|
||||
'test', organization=organization).workflow_job_template
|
||||
wfjt.organization = organization
|
||||
nodes = [WorkflowJobTemplateNode.objects.create(workflow_job_template=wfjt) for i in range(0, 3)]
|
||||
nodes[0].success_nodes.add(nodes[1])
|
||||
nodes[1].failure_nodes.add(nodes[2])
|
||||
@ -134,6 +136,25 @@ class TestWorkflowJobTemplate:
|
||||
assert (test_view.is_valid_relation(nodes[2], node_assoc_1) ==
|
||||
{'Error': 'Cannot associate failure_nodes when always_nodes have been associated.'})
|
||||
|
||||
def test_wfjt_copy(self, wfjt, job_template, inventory, admin_user):
|
||||
old_nodes = wfjt.workflow_job_template_nodes.all()
|
||||
node1 = old_nodes[1]
|
||||
node1.unified_job_template = job_template
|
||||
node1.save()
|
||||
node2 = old_nodes[2]
|
||||
node2.inventory = inventory
|
||||
node2.save()
|
||||
new_wfjt = wfjt.user_copy(admin_user)
|
||||
for fd in ['description', 'survey_spec', 'survey_enabled', 'extra_vars']:
|
||||
assert getattr(wfjt, fd) == getattr(new_wfjt, fd)
|
||||
assert new_wfjt.organization == wfjt.organization
|
||||
assert len(new_wfjt.workflow_job_template_nodes.all()) == 3
|
||||
nodes = new_wfjt.workflow_job_template_nodes.all()
|
||||
assert nodes[0].success_nodes.all()[0] == nodes[1]
|
||||
assert nodes[1].failure_nodes.all()[0] == nodes[2]
|
||||
assert nodes[1].unified_job_template == job_template
|
||||
assert nodes[2].inventory == inventory
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestWorkflowJobFailure:
|
||||
|
||||
@ -6,7 +6,7 @@ import pytest
|
||||
@pytest.mark.django_db
|
||||
def test_orphan_unified_job_creation(instance, inventory):
|
||||
job = Job.objects.create(job_template=None, inventory=inventory, name='hi world')
|
||||
job2 = job.copy()
|
||||
job2 = job.copy_unified_job()
|
||||
assert job2.job_template is None
|
||||
assert job2.inventory == inventory
|
||||
assert job2.name == 'hi world'
|
||||
|
||||
@ -47,13 +47,6 @@ class TestWorkflowJobTemplateAccess:
|
||||
assert org_admin in wfjt.execute_role
|
||||
assert org_admin in wfjt.read_role
|
||||
|
||||
def test_jt_blocks_copy(self, wfjt_with_nodes, org_admin):
|
||||
"""I want to copy a workflow JT in my organization, but someone
|
||||
included a job template that I don't have access to, so I can
|
||||
not copy the WFJT as-is"""
|
||||
access = WorkflowJobTemplateAccess(org_admin)
|
||||
assert not access.can_add({'reference_obj': wfjt_with_nodes})
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestWorkflowJobTemplateNodeAccess:
|
||||
@ -77,3 +70,23 @@ class TestWorkflowJobAccess:
|
||||
workflow_job.save()
|
||||
access = WorkflowJobAccess(rando)
|
||||
assert access.can_cancel(workflow_job)
|
||||
|
||||
def test_workflow_copy_warnings_inv(self, wfjt, rando, inventory):
|
||||
'''
|
||||
The user `rando` does not have access to the prompted inventory in a
|
||||
node inside the workflow - test surfacing this information
|
||||
'''
|
||||
wfjt.workflow_job_template_nodes.create(inventory=inventory)
|
||||
access = WorkflowJobTemplateAccess(rando, save_messages=True)
|
||||
assert not access.can_copy(wfjt)
|
||||
warnings = access.messages
|
||||
assert 1 in warnings
|
||||
assert 'inventory' in warnings[1]
|
||||
|
||||
def test_workflow_copy_warnings_jt(self, wfjt, rando, job_template):
|
||||
wfjt.workflow_job_template_nodes.create(unified_job_template=job_template)
|
||||
access = WorkflowJobTemplateAccess(rando, save_messages=True)
|
||||
assert not access.can_copy(wfjt)
|
||||
warnings = access.messages
|
||||
assert 1 in warnings
|
||||
assert 'unified_job_template' in warnings[1]
|
||||
|
||||
@ -3,7 +3,7 @@ import pytest
|
||||
from awx.main.models.jobs import JobTemplate
|
||||
from awx.main.models import Inventory, Credential, Project
|
||||
from awx.main.models.workflow import (
|
||||
WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowJobInheritNodesMixin,
|
||||
WorkflowJobTemplate, WorkflowJobTemplateNode, WorkflowJobOptions,
|
||||
WorkflowJob, WorkflowJobNode
|
||||
)
|
||||
import mock
|
||||
@ -22,8 +22,8 @@ class TestWorkflowJobInheritNodesMixin():
|
||||
def test__create_workflow_job_nodes(self, mocker, job_template_nodes):
|
||||
workflow_job_node_create = mocker.patch('awx.main.models.WorkflowJobTemplateNode.create_workflow_job_node')
|
||||
|
||||
mixin = WorkflowJobInheritNodesMixin()
|
||||
mixin._create_workflow_job_nodes(job_template_nodes)
|
||||
mixin = WorkflowJobOptions()
|
||||
mixin._create_workflow_nodes(job_template_nodes)
|
||||
|
||||
for job_template_node in job_template_nodes:
|
||||
workflow_job_node_create.assert_any_call(workflow_job=mixin)
|
||||
@ -37,22 +37,30 @@ class TestWorkflowJobInheritNodesMixin():
|
||||
def job_nodes(self):
|
||||
return [WorkflowJobNode(id=i) for i in range(100, 120)]
|
||||
|
||||
def test__map_workflow_job_nodes(self, job_template_nodes, job_nodes):
|
||||
mixin = WorkflowJobInheritNodesMixin()
|
||||
def test__map_workflow_job_nodes(self, job_template_nodes, job_nodes, mocker):
|
||||
mixin = WorkflowJob()
|
||||
wj_node = WorkflowJobNode()
|
||||
mocker.patch('awx.main.models.workflow.WorkflowJobTemplateNode.create_workflow_job_node',
|
||||
return_value=wj_node)
|
||||
|
||||
node_ids_map = mixin._map_workflow_job_nodes(job_template_nodes, job_nodes)
|
||||
node_ids_map = mixin._create_workflow_nodes(job_template_nodes, user=None)
|
||||
assert len(node_ids_map) == len(job_template_nodes)
|
||||
|
||||
for i, job_template_node in enumerate(job_template_nodes):
|
||||
assert node_ids_map[job_template_node.id] == job_nodes[i].id
|
||||
assert node_ids_map[job_template_node.id] == wj_node
|
||||
|
||||
class TestInheritRelationship():
|
||||
@pytest.fixture
|
||||
def job_template_nodes(self, mocker):
|
||||
nodes = [mocker.MagicMock(id=i) for i in range(0, 10)]
|
||||
nodes = [mocker.MagicMock(id=i, pk=i) for i in range(0, 10)]
|
||||
|
||||
for i in range(0, 9):
|
||||
nodes[i].success_nodes = [mocker.MagicMock(id=i + 1)]
|
||||
nodes[i].success_nodes = mocker.MagicMock(
|
||||
all=mocker.MagicMock(return_value=[mocker.MagicMock(id=i + 1, pk=i + 1)]))
|
||||
nodes[i].always_nodes = mocker.MagicMock(all=mocker.MagicMock(return_value=[]))
|
||||
nodes[i].failure_nodes = mocker.MagicMock(all=mocker.MagicMock(return_value=[]))
|
||||
new_wj_node = mocker.MagicMock(success_nodes=mocker.MagicMock())
|
||||
nodes[i].create_workflow_job_node = mocker.MagicMock(return_value=new_wj_node)
|
||||
|
||||
return nodes
|
||||
|
||||
@ -70,18 +78,13 @@ class TestWorkflowJobInheritNodesMixin():
|
||||
|
||||
|
||||
def test__inherit_relationship(self, mocker, job_template_nodes, job_nodes, job_nodes_dict):
|
||||
mixin = WorkflowJobInheritNodesMixin()
|
||||
wj = WorkflowJob()
|
||||
|
||||
mixin._get_workflow_job_node_by_id = lambda x: job_nodes_dict[x]
|
||||
mixin._get_all_by_type = lambda x,node_type: x.success_nodes
|
||||
node_ids_map = wj._create_workflow_nodes(job_template_nodes)
|
||||
wj._inherit_node_relationships(job_template_nodes, node_ids_map)
|
||||
|
||||
node_ids_map = mixin._map_workflow_job_nodes(job_template_nodes, job_nodes)
|
||||
|
||||
for i, job_template_node in enumerate(job_template_nodes):
|
||||
mixin._inherit_relationship(job_template_node, job_nodes[i], node_ids_map, 'success_nodes')
|
||||
|
||||
for i in range(0, 9):
|
||||
job_nodes[i].success_nodes.add.assert_any_call(job_nodes[i + 1])
|
||||
for i in range(0, 8):
|
||||
node_ids_map[i].success_nodes.add.assert_any_call(node_ids_map[i + 1])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
||||
@ -258,8 +258,8 @@ def test_user_capabilities_method():
|
||||
def can_change(self, obj, data):
|
||||
return 'bar'
|
||||
|
||||
def can_add(self, data):
|
||||
return 'foobar'
|
||||
def can_copy(self, obj):
|
||||
return 'foo'
|
||||
|
||||
user = User(username='auser')
|
||||
foo_access = FooAccess(user)
|
||||
@ -267,7 +267,7 @@ def test_user_capabilities_method():
|
||||
foo_capabilities = foo_access.get_user_capabilities(foo, ['edit', 'copy'])
|
||||
assert foo_capabilities == {
|
||||
'edit': 'bar',
|
||||
'copy': 'foobar'
|
||||
'copy': 'foo'
|
||||
}
|
||||
|
||||
|
||||
|
||||
@ -23,6 +23,7 @@ from decorator import decorator
|
||||
|
||||
# Django
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.db.models import ManyToManyField
|
||||
|
||||
# Django REST Framework
|
||||
from rest_framework.exceptions import ParseError, PermissionDenied
|
||||
@ -38,7 +39,8 @@ logger = logging.getLogger('awx.main.utils')
|
||||
|
||||
__all__ = ['get_object_or_400', 'get_object_or_403', 'camelcase_to_underscore', 'memoize',
|
||||
'get_ansible_version', 'get_ssh_version', 'get_awx_version', 'update_scm_url',
|
||||
'get_type_for_model', 'get_model_for_type', 'cache_list_capabilities', 'to_python_boolean',
|
||||
'get_type_for_model', 'get_model_for_type', 'copy_model_by_class',
|
||||
'copy_m2m_relationships' ,'cache_list_capabilities', 'to_python_boolean',
|
||||
'ignore_inventory_computed_fields', 'ignore_inventory_group_removal',
|
||||
'_inventory_updates', 'get_pk_from_dict', 'getattrd', 'NoDefaultProvided',
|
||||
'get_current_apps', 'set_current_apps', 'OutputEventFilter']
|
||||
@ -410,6 +412,66 @@ def model_to_dict(obj, serializer_mapping=None):
|
||||
return attr_d
|
||||
|
||||
|
||||
def copy_model_by_class(obj1, Class2, fields, kwargs):
|
||||
'''
|
||||
Creates a new unsaved object of type Class2 using the fields from obj1
|
||||
values in kwargs can override obj1
|
||||
'''
|
||||
create_kwargs = {}
|
||||
for field_name in fields:
|
||||
# Foreign keys can be specified as field_name or field_name_id.
|
||||
id_field_name = '%s_id' % field_name
|
||||
if hasattr(obj1, id_field_name):
|
||||
if field_name in kwargs:
|
||||
value = kwargs[field_name]
|
||||
elif id_field_name in kwargs:
|
||||
value = kwargs[id_field_name]
|
||||
else:
|
||||
value = getattr(obj1, id_field_name)
|
||||
if hasattr(value, 'id'):
|
||||
value = value.id
|
||||
create_kwargs[id_field_name] = value
|
||||
elif field_name in kwargs:
|
||||
if field_name == 'extra_vars' and isinstance(kwargs[field_name], dict):
|
||||
create_kwargs[field_name] = json.dumps(kwargs['extra_vars'])
|
||||
# We can't get a hold of django.db.models.fields.related.ManyRelatedManager to compare
|
||||
# so this is the next best thing.
|
||||
elif kwargs[field_name].__class__.__name__ != 'ManyRelatedManager':
|
||||
create_kwargs[field_name] = kwargs[field_name]
|
||||
elif hasattr(obj1, field_name):
|
||||
field_obj = obj1._meta.get_field_by_name(field_name)[0]
|
||||
if not isinstance(field_obj, ManyToManyField):
|
||||
create_kwargs[field_name] = getattr(obj1, field_name)
|
||||
|
||||
# Apply class-specific extra processing for origination of unified jobs
|
||||
if hasattr(obj1, '_update_unified_job_kwargs') and obj1.__class__ != Class2:
|
||||
new_kwargs = obj1._update_unified_job_kwargs(**create_kwargs)
|
||||
else:
|
||||
new_kwargs = create_kwargs
|
||||
|
||||
return Class2(**new_kwargs)
|
||||
|
||||
|
||||
def copy_m2m_relationships(obj1, obj2, fields, kwargs=None):
|
||||
'''
|
||||
In-place operation.
|
||||
Given two saved objects, copies related objects from obj1
|
||||
to obj2 to field of same name, if field occurs in `fields`
|
||||
'''
|
||||
for field_name in fields:
|
||||
if hasattr(obj1, field_name):
|
||||
field_obj = obj1._meta.get_field_by_name(field_name)[0]
|
||||
if isinstance(field_obj, ManyToManyField):
|
||||
# Many to Many can be specified as field_name
|
||||
src_field_value = getattr(obj1, field_name)
|
||||
if kwargs and field_name in kwargs:
|
||||
override_field_val = kwargs[field_name]
|
||||
if override_field_val.__class__.__name__ == 'ManyRelatedManager':
|
||||
src_field_value = override_field_val
|
||||
dest_field = getattr(obj2, field_name)
|
||||
dest_field.add(*list(src_field_value.all().values_list('id', flat=True)))
|
||||
|
||||
|
||||
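copy_model_by_class and copy_m2m_relationships are the generic halves of every copy and relaunch path added in this commit: the first builds an unsaved instance of the target class from obj1's fields (with kwargs overriding them), the second, once both objects are saved, copies the many-to-many relations named in fields. A hedged sketch of using them together, mirroring copy_unified_jt above (the field list and object names are illustrative):

from awx.main.utils import copy_model_by_class, copy_m2m_relationships
from awx.main.models.workflow import WorkflowJobTemplate

fields = ['name', 'description', 'extra_vars', 'labels']
original = WorkflowJobTemplate.objects.get(name='my workflow')

clone = copy_model_by_class(original, WorkflowJobTemplate, fields, {'name': 'my workflow (copy)'})
clone.save()                                         # must be saved before m2m rows can be attached
copy_m2m_relationships(original, clone, fields)      # copies 'labels'; non-m2m fields are ignored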
def get_type_for_model(model):
|
||||
'''
|
||||
Return type name for a given model class.
|
||||
|
||||