Adding EE/IG/labels/forks/timeout/job_slice_count to schedules

Modifying schedules to work with related fields

Updating awx.awx.workflow_job_template_node
John Westcott IV 2022-08-24 14:31:05 -04:00 committed by Alan Rominger
parent 2e217ed466
commit 809df74050
No known key found for this signature in database
GPG Key ID: C2D7EAAA12B63559
15 changed files with 686 additions and 50 deletions

View File

@ -3640,6 +3640,10 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
skip_tags = serializers.CharField(allow_blank=True, allow_null=True, required=False, default=None)
diff_mode = serializers.BooleanField(required=False, allow_null=True, default=None)
verbosity = serializers.ChoiceField(allow_null=True, required=False, default=None, choices=VERBOSITY_CHOICES)
execution_environment = serializers.PrimaryKeyRelatedField(queryset=ExecutionEnvironment.objects.all(), required=False, allow_null=True, default=None)
forks = serializers.IntegerField(required=False, allow_null=True, default=None)
job_slice_count = serializers.IntegerField(required=False, allow_null=True, default=None)
timeout = serializers.IntegerField(required=False, allow_null=True, default=None)
exclude_errors = ()
class Meta:
@ -3655,6 +3659,10 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
'skip_tags',
'diff_mode',
'verbosity',
'execution_environment',
'forks',
'job_slice_count',
'timeout',
)
def get_related(self, obj):
@ -3662,6 +3670,10 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
if obj.inventory_id:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory_id})
res['credentials'] = self.reverse('api:{}_credentials_list'.format(get_type_for_model(self.Meta.model)), kwargs={'pk': obj.pk})
res['labels'] = self.reverse('api:{}_labels_list'.format(get_type_for_model(self.Meta.model)), kwargs={'pk': obj.pk})
res['instance_groups'] = self.reverse('api:{}_instance_groups_list'.format(get_type_for_model(self.Meta.model)), kwargs={'pk': obj.pk})
if obj.execution_environment_id:
res['execution_environment'] = self.reverse('api:execution_environment_detail', kwargs={'pk': obj.execution_environment_id})
return res
def _build_mock_obj(self, attrs):
@ -3671,7 +3683,11 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
setattr(mock_obj, field.name, getattr(self.instance, field.name))
field_names = set(field.name for field in self.Meta.model._meta.fields)
for field_name, value in list(attrs.items()):
setattr(mock_obj, field_name, value)
if field_name == 'execution_environment':
if value:
setattr(mock_obj, field_name, value)
else:
setattr(mock_obj, field_name, value)
if field_name not in field_names:
attrs.pop(field_name)
return mock_obj
@ -4135,12 +4151,12 @@ class JobLaunchSerializer(BaseSerializer):
skip_tags = serializers.CharField(required=False, write_only=True, allow_blank=True)
limit = serializers.CharField(required=False, write_only=True, allow_blank=True)
verbosity = serializers.ChoiceField(required=False, choices=VERBOSITY_CHOICES, write_only=True)
execution_environment = serializers.PrimaryKeyRelatedField(queryset=ExecutionEnvironment.objects.all(), required=False, write_only=True)
labels = serializers.PrimaryKeyRelatedField(many=True, queryset=Label.objects.all(), required=False, write_only=True)
execution_environment = serializers.PrimaryKeyRelatedField(queryset=ExecutionEnvironment.objects.all(), required=False)
labels = serializers.PrimaryKeyRelatedField(many=True, queryset=Label.objects.all(), required=False)
forks = serializers.IntegerField(required=False, write_only=True, default=1)
job_slice_count = serializers.IntegerField(required=False, write_only=True, default=0)
timeout = serializers.IntegerField(required=False, write_only=True, default=0)
instance_groups = serializers.PrimaryKeyRelatedField(many=True, queryset=InstanceGroup.objects.all(), required=False, write_only=True)
instance_groups = serializers.PrimaryKeyRelatedField(many=True, queryset=InstanceGroup.objects.all(), required=False)
class Meta:
model = JobTemplate
@ -4778,7 +4794,7 @@ class SchedulePreviewSerializer(BaseSerializer):
return value
class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSerializer):
class ScheduleSerializer(LabelsListMixin, LaunchConfigurationBaseSerializer, SchedulePreviewSerializer):
show_capabilities = ['edit', 'delete']
timezone = serializers.SerializerMethodField(
@ -4822,6 +4838,8 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
if isinstance(obj.unified_job_template, SystemJobTemplate):
summary_fields['unified_job_template']['job_type'] = obj.unified_job_template.job_type
# We are not showing instance groups on summary fields because JTs don't either
if 'inventory' in summary_fields:
return summary_fields
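
For orientation, a minimal usage sketch (not part of the diff) of the new schedule prompt fields exposed by LaunchConfigurationBaseSerializer; the host, credentials, and object IDs below are hypothetical.

import requests

AWX = "https://awx.example.com"   # hypothetical AWX host
AUTH = ("admin", "password")      # hypothetical credentials

# execution_environment, forks, job_slice_count and timeout are now plain serializer
# fields on the schedule, so they can be PATCHed directly (the PK and values are made up).
requests.patch(
    f"{AWX}/api/v2/schedules/42/",
    json={"execution_environment": 3, "forks": 10, "job_slice_count": 2, "timeout": 600},
    auth=AUTH,
)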

View File

@ -3,7 +3,7 @@
from django.urls import re_path
from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList
from awx.api.views import ScheduleList, ScheduleDetail, ScheduleUnifiedJobsList, ScheduleCredentialsList, ScheduleLabelsList, ScheduleInstanceGroupList
urls = [
@ -11,6 +11,8 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/$', ScheduleDetail.as_view(), name='schedule_detail'),
re_path(r'^(?P<pk>[0-9]+)/jobs/$', ScheduleUnifiedJobsList.as_view(), name='schedule_unified_jobs_list'),
re_path(r'^(?P<pk>[0-9]+)/credentials/$', ScheduleCredentialsList.as_view(), name='schedule_credentials_list'),
re_path(r'^(?P<pk>[0-9]+)/labels/$', ScheduleLabelsList.as_view(), name='schedule_labels_list'),
re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', ScheduleInstanceGroupList.as_view(), name='schedule_instance_groups_list'),
]
__all__ = ['urls']

View File

@ -10,6 +10,8 @@ from awx.api.views import (
WorkflowJobNodeFailureNodesList,
WorkflowJobNodeAlwaysNodesList,
WorkflowJobNodeCredentialsList,
WorkflowJobNodeLabelsList,
WorkflowJobNodeInstanceGroupsList,
)
@ -20,6 +22,8 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobNodeFailureNodesList.as_view(), name='workflow_job_node_failure_nodes_list'),
re_path(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobNodeAlwaysNodesList.as_view(), name='workflow_job_node_always_nodes_list'),
re_path(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobNodeCredentialsList.as_view(), name='workflow_job_node_credentials_list'),
re_path(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobNodeLabelsList.as_view(), name='workflow_job_node_labels_list'),
re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', WorkflowJobNodeInstanceGroupsList.as_view(), name='workflow_job_node_instance_group_list'),
]
__all__ = ['urls']

View File

@ -11,6 +11,8 @@ from awx.api.views import (
WorkflowJobTemplateNodeAlwaysNodesList,
WorkflowJobTemplateNodeCredentialsList,
WorkflowJobTemplateNodeCreateApproval,
WorkflowJobTemplateNodeLabelsList,
WorkflowJobTemplateNodeInstanceGroupsList,
)
@ -21,6 +23,8 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/failure_nodes/$', WorkflowJobTemplateNodeFailureNodesList.as_view(), name='workflow_job_template_node_failure_nodes_list'),
re_path(r'^(?P<pk>[0-9]+)/always_nodes/$', WorkflowJobTemplateNodeAlwaysNodesList.as_view(), name='workflow_job_template_node_always_nodes_list'),
re_path(r'^(?P<pk>[0-9]+)/credentials/$', WorkflowJobTemplateNodeCredentialsList.as_view(), name='workflow_job_template_node_credentials_list'),
re_path(r'^(?P<pk>[0-9]+)/labels/$', WorkflowJobTemplateNodeLabelsList.as_view(), name='workflow_job_template_node_labels_list'),
re_path(r'^(?P<pk>[0-9]+)/instance_groups/$', WorkflowJobTemplateNodeInstanceGroupsList.as_view(), name='workflow_job_template_node_instance_groups_list'),
re_path(r'^(?P<pk>[0-9]+)/create_approval_template/$', WorkflowJobTemplateNodeCreateApproval.as_view(), name='workflow_job_template_node_create_approval'),
]

View File

@ -618,6 +618,38 @@ class ScheduleCredentialsList(LaunchConfigCredentialsBase):
parent_model = models.Schedule
class ScheduleLabelsList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView):
model = models.Label
serializer_class = serializers.LabelSerializer
parent_model = models.Schedule
relationship = 'labels'
def post(self, request, *args, **kwargs):
# If a label with this name already exists in the database, attach the existing
# label instead of returning an 'already exists' error
if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
existing = models.Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
if existing.exists():
existing = existing[0]
request.data['id'] = existing.id
del request.data['name']
del request.data['organization']
if models.Label.objects.filter(schedule_labels=self.kwargs['pk']).count() > 100:
return Response(
dict(msg=_('Maximum number of labels for {} reached.'.format(self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST
)
return super(ScheduleLabelsList, self).post(request, *args, **kwargs)
class ScheduleInstanceGroupList(SubListAttachDetachAPIView):
model = models.InstanceGroup
serializer_class = serializers.InstanceGroupSerializer
parent_model = models.Schedule
relationship = 'instance_groups'
class ScheduleUnifiedJobsList(SubListAPIView):
model = models.UnifiedJob
@ -2967,6 +2999,38 @@ class WorkflowJobNodeCredentialsList(SubListAPIView):
relationship = 'credentials'
class WorkflowJobNodeLabelsList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView):
model = models.Label
serializer_class = serializers.LabelSerializer
parent_model = models.WorkflowJobNode
relationship = 'labels'
def post(self, request, *args, **kwargs):
# If a label with this name already exists in the database, attach the existing
# label instead of returning an 'already exists' error
if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
existing = models.Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
if existing.exists():
existing = existing[0]
request.data['id'] = existing.id
del request.data['name']
del request.data['organization']
if models.Label.objects.filter(workflowjobnode_labels=self.kwargs['pk']).count() > 100:
return Response(
dict(msg=_('Maximum number of labels for {} reached.'.format(self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST
)
return super(WorkflowJobNodeLabelsList, self).post(request, *args, **kwargs)
class WorkflowJobNodeInstanceGroupsList(SubListAttachDetachAPIView):
model = models.InstanceGroup
serializer_class = serializers.InstanceGroupSerializer
parent_model = models.WorkflowJobNode
relationship = 'instance_groups'
class WorkflowJobTemplateNodeList(ListCreateAPIView):
model = models.WorkflowJobTemplateNode
@ -2985,6 +3049,38 @@ class WorkflowJobTemplateNodeCredentialsList(LaunchConfigCredentialsBase):
parent_model = models.WorkflowJobTemplateNode
class WorkflowJobTemplateNodeLabelsList(DeleteLastUnattachLabelMixin, SubListCreateAttachDetachAPIView):
model = models.Label
serializer_class = serializers.LabelSerializer
parent_model = models.WorkflowJobTemplateNode
relationship = 'labels'
def post(self, request, *args, **kwargs):
# If a label with this name already exists in the database, attach the existing
# label instead of returning an 'already exists' error
if 'id' not in request.data and 'name' in request.data and 'organization' in request.data:
existing = models.Label.objects.filter(name=request.data['name'], organization_id=request.data['organization'])
if existing.exists():
existing = existing[0]
request.data['id'] = existing.id
del request.data['name']
del request.data['organization']
if models.Label.objects.filter(workflowjobtemplatenode_labels=self.kwargs['pk']).count() > 100:
return Response(
dict(msg=_('Maximum number of labels for {} reached.'.format(self.parent_model._meta.verbose_name_raw))), status=status.HTTP_400_BAD_REQUEST
)
return super(WorkflowJobTemplateNodeLabelsList, self).post(request, *args, **kwargs)
class WorkflowJobTemplateNodeInstanceGroupsList(SubListAttachDetachAPIView):
model = models.InstanceGroup
serializer_class = serializers.InstanceGroupSerializer
parent_model = models.WorkflowJobTemplateNode
relationship = 'instance_groups'
class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListCreateAttachDetachAPIView):
model = models.WorkflowJobTemplateNode
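
A hedged usage sketch of the new label and instance group sublist endpoints added above; the host, auth, and IDs are hypothetical. Posting a name/organization pair attaches an existing label of that name rather than failing with an "already exists" error.

import requests

AWX = "https://awx.example.com"   # hypothetical AWX host
AUTH = ("admin", "password")      # hypothetical credentials

# Attach (or create and attach) a label on schedule 42; the same pattern applies to
# /workflow_job_template_nodes/<pk>/labels/ and /workflow_job_nodes/<pk>/labels/.
requests.post(
    f"{AWX}/api/v2/schedules/42/labels/",
    json={"name": "nightly", "organization": 1},
    auth=AUTH,
)

# Attach instance group 5 as a prompt; posting {"id": 5, "disassociate": true} would detach it.
requests.post(f"{AWX}/api/v2/schedules/42/instance_groups/", json={"id": 5}, auth=AUTH)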

View File

@ -1924,18 +1924,80 @@ class JobLaunchConfigAccess(BaseAccess):
def can_attach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
if isinstance(sub_obj, Credential) and relationship == 'credentials':
return self.user in sub_obj.use_role
else:
raise NotImplementedError('Only credentials can be attached to launch configurations.')
if not self.user in sub_obj.use_role:
logger.debug(
"User {} not allowed access to credential {} for {} {} ({})".format(self.user.username, sub_obj.name, obj.__class__, obj.name, obj.id)
)
return False
return True
if isinstance(sub_obj, Label) and relationship == 'labels':
if not self.user.can_access(Label, 'read', sub_obj):
logger.debug("User {} not allowed access to label {} for {} {} ({})".format(self.user.username, sub_obj.name, obj.__class__, obj.name, obj.id))
return False
return True
if isinstance(sub_obj, InstanceGroup) and relationship == 'instance_groups':
if not sub_obj in self.user.get_queryset(InstanceGroup):
logger.debug(
"User {} not allowed access to instance_group {} for {} {} ({})".format(self.user.username, sub_obj.name, obj.__class__, obj.name, obj.id)
)
return False
return True
raise NotImplementedError('Only credentials, labels and instance groups can be attached to launch configurations.')
def can_unattach(self, obj, sub_obj, relationship, data, skip_sub_obj_read_check=False):
if isinstance(sub_obj, Credential) and relationship == 'credentials':
if skip_sub_obj_read_check:
if not skip_sub_obj_read_check:
logger.debug(
"Skipping check if user {} can access credential {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return True
else:
return self.user in sub_obj.read_role
else:
raise NotImplementedError('Only credentials can be attached to launch configurations.')
if not self.user in sub_obj.read_role:
logger.debug(
"User {} can not read credential {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return False
return True
if isinstance(sub_obj, Label) and relationship == 'labels':
if skip_sub_obj_read_check:
logger.debug(
"Skipping check if user {} can access label {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return True
if self.user.can_access(Label, 'read', sub_obj):
return True
logger.debug(
"User {} can not read label {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return False
if isinstance(sub_obj, InstanceGroup) and relationship == 'instance_groups':
if skip_sub_obj_read_check:
logger.debug(
"Skipping check if user {} can access instance_group {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return True
if sub_obj in self.user.get_queryset(InstanceGroup):
return True
logger.debug(
"User {} can not read instance_group {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return False
raise NotImplementedError('Only credentials, labels and instance groups can be attached to launch configurations.')
class WorkflowJobTemplateNodeAccess(BaseAccess):
@ -2014,6 +2076,24 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
return JobLaunchConfigAccess(self.user).can_attach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
elif relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
return self.check_same_WFJT(obj, sub_obj)
elif relationship == 'labels':
if self.user.can_access(Label, 'read', sub_obj):
return True
logger.debug(
"User {} can not read label {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return False
elif relationship == 'instance_groups':
if sub_obj in self.user.get_queryset(InstanceGroup):
return True
logger.debug(
"User {} can not read instance_group {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return False
else:
raise NotImplementedError('Relationship {} not understood for WFJT nodes.'.format(relationship))
@ -2026,6 +2106,24 @@ class WorkflowJobTemplateNodeAccess(BaseAccess):
return JobLaunchConfigAccess(self.user).can_unattach(obj, sub_obj, relationship, data, skip_sub_obj_read_check=skip_sub_obj_read_check)
elif relationship in ('success_nodes', 'failure_nodes', 'always_nodes'):
return self.check_same_WFJT(obj, sub_obj)
elif relationship == 'labels':
if self.user.can_access(Label, 'read', sub_obj):
return True
logger.debug(
"User {} can not read label {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return False
elif relationship == 'instance_groups':
if sub_obj in self.user.get_queryset(InstanceGroup):
return True
logger.debug(
"User {} can not read instance_group {} ({}) for removal from {} {} ({})".format(
self.user.username, sub_obj.name, sub_obj.id, obj.__class__, obj.name, obj.id
)
)
return False
else:
raise NotImplementedError('Relationship {} not understood for WFJT nodes.'.format(relationship))
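
A sketch (not part of this change) of how the widened attach rules might be exercised in a pytest-style unit test; the fixture names (rando, label, schedule) and the expectation that the unprivileged user cannot read the label are assumptions.

import pytest
from awx.main.access import JobLaunchConfigAccess

@pytest.mark.django_db
def test_launch_config_attach_rules(rando, label, schedule):
    access = JobLaunchConfigAccess(rando)
    # Labels can now be attached to launch configurations, but only if the user can read them;
    # for a user with no access to the label this should come back False rather than raising.
    assert access.can_attach(schedule, label, 'labels', {}) is False
    # Anything other than credentials, labels or instance groups is still rejected outright.
    with pytest.raises(NotImplementedError):
        access.can_attach(schedule, schedule, 'not_a_real_relationship', {})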

View File

@ -1,4 +1,4 @@
# Generated by Django 3.2.13 on 2022-08-16 11:40
# Generated by Django 3.2.13 on 2022-08-31 19:15
import awx.main.fields
import awx.main.utils.polymorphic
@ -21,7 +21,7 @@ class Migration(migrations.Migration):
default=None,
null=True,
on_delete=awx.main.utils.polymorphic.SET_NULL,
related_name='execution_environment',
related_name='joblaunchconfig_as_prompt',
to='main.executionenvironment',
),
),
@ -60,21 +60,84 @@ class Migration(migrations.Migration):
name='ask_timeout_on_launch',
field=awx.main.fields.AskForField(blank=True, default=False),
),
migrations.AddField(
model_name='schedule',
name='execution_environment',
field=models.ForeignKey(
blank=True,
default=None,
null=True,
on_delete=awx.main.utils.polymorphic.SET_NULL,
related_name='schedule_as_prompt',
to='main.executionenvironment',
),
),
migrations.AddField(
model_name='schedule',
name='labels',
field=models.ManyToManyField(related_name='schedule_labels', to='main.Label'),
),
migrations.AddField(
model_name='workflowjobnode',
name='execution_environment',
field=models.ForeignKey(
blank=True,
default=None,
null=True,
on_delete=awx.main.utils.polymorphic.SET_NULL,
related_name='workflowjobnode_as_prompt',
to='main.executionenvironment',
),
),
migrations.AddField(
model_name='workflowjobnode',
name='labels',
field=models.ManyToManyField(related_name='workflowjobnode_labels', to='main.Label'),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
name='execution_environment',
field=models.ForeignKey(
blank=True,
default=None,
null=True,
on_delete=awx.main.utils.polymorphic.SET_NULL,
related_name='workflowjobtemplatenode_as_prompt',
to='main.executionenvironment',
),
),
migrations.AddField(
model_name='workflowjobtemplatenode',
name='labels',
field=models.ManyToManyField(related_name='workflowjobtemplatenode_labels', to='main.Label'),
),
migrations.CreateModel(
name='WorkflowJobTemplateNodeBaseInstanceGroupMembership',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('position', models.PositiveIntegerField(db_index=True, default=None, null=True)),
('instancegroup', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.instancegroup')),
('schedule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.workflowjobtemplatenode')),
],
),
migrations.CreateModel(
name='WorkflowJobNodeBaseInstanceGroupMembership',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('position', models.PositiveIntegerField(db_index=True, default=None, null=True)),
('instancegroup', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.instancegroup')),
('schedule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.workflowjobnode')),
],
),
migrations.CreateModel(
name='ScheduleInstanceGroupMembership',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('position', models.PositiveIntegerField(db_index=True, default=None, null=True)),
('instancegroup', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.instancegroup')),
('schedule', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.schedule')),
],
),
migrations.CreateModel(
name='JobLaunchConfigInstanceGroupMembership',
fields=[
@ -107,20 +170,33 @@ class Migration(migrations.Migration):
blank=True, editable=False, related_name='joblaunchconfigs', through='main.JobLaunchConfigInstanceGroupMembership', to='main.InstanceGroup'
),
),
# added WFJT prompts
migrations.AddField(
model_name='workflowjobtemplate',
name='ask_labels_on_launch',
field=awx.main.fields.AskForField(blank=True, default=False),
model_name='schedule',
name='instance_groups',
field=awx.main.fields.OrderedManyToManyField(
blank=True, editable=False, related_name='schedule_instance_groups', through='main.ScheduleInstanceGroupMembership', to='main.InstanceGroup'
),
),
migrations.AddField(
model_name='workflowjobtemplate',
name='ask_skip_tags_on_launch',
field=awx.main.fields.AskForField(blank=True, default=False),
model_name='workflowjobnode',
name='instance_groups',
field=awx.main.fields.OrderedManyToManyField(
blank=True,
editable=False,
related_name='workflow_job_node_instance_groups',
through='main.WorkflowJobNodeBaseInstanceGroupMembership',
to='main.InstanceGroup',
),
),
migrations.AddField(
model_name='workflowjobtemplate',
name='ask_tags_on_launch',
field=awx.main.fields.AskForField(blank=True, default=False),
model_name='workflowjobtemplatenode',
name='instance_groups',
field=awx.main.fields.OrderedManyToManyField(
blank=True,
editable=False,
related_name='workflow_job_template_node_instance_groups',
through='main.WorkflowJobTemplateNodeBaseInstanceGroupMembership',
to='main.InstanceGroup',
),
),
]

View File

@ -456,3 +456,36 @@ class JobLaunchConfigInstanceGroupMembership(models.Model):
default=None,
db_index=True,
)
class ScheduleInstanceGroupMembership(models.Model):
schedule = models.ForeignKey('Schedule', on_delete=models.CASCADE)
instancegroup = models.ForeignKey('InstanceGroup', on_delete=models.CASCADE)
position = models.PositiveIntegerField(
null=True,
default=None,
db_index=True,
)
class WorkflowJobTemplateNodeBaseInstanceGroupMembership(models.Model):
schedule = models.ForeignKey('WorkflowJobTemplateNode', on_delete=models.CASCADE)
instancegroup = models.ForeignKey('InstanceGroup', on_delete=models.CASCADE)
position = models.PositiveIntegerField(
null=True,
default=None,
db_index=True,
)
class WorkflowJobNodeBaseInstanceGroupMembership(models.Model):
schedule = models.ForeignKey('WorkflowJobNode', on_delete=models.CASCADE)
instancegroup = models.ForeignKey('InstanceGroup', on_delete=models.CASCADE)
position = models.PositiveIntegerField(
null=True,
default=None,
db_index=True,
)
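
A hedged read-side sketch of what these membership tables enable: instance groups attached to a schedule as a prompt come back through the ordered relation. The schedule name is made up, and the ordering assumption is that OrderedManyToManyField returns rows by the 'position' column of ScheduleInstanceGroupMembership, as its name suggests.

from awx.main.models import Schedule

sched = Schedule.objects.get(name="Nightly deploy")   # hypothetical schedule
for ig in sched.instance_groups.all():                # expected to come back in saved order
    print(ig.name)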

View File

@ -1007,6 +1007,10 @@ class LaunchTimeConfig(LaunchTimeConfigBase):
# Labels needed for non-unified job / unified JT models
labels = models.ManyToManyField('Label', related_name='%(class)s_labels')
execution_environment = models.ForeignKey(
'ExecutionEnvironment', null=True, blank=True, default=None, on_delete=polymorphic.SET_NULL, related_name='%(class)s_as_prompt'
)
@property
def extra_vars(self):
return self.extra_data
@ -1054,10 +1058,6 @@ class JobLaunchConfig(LaunchTimeConfig):
'InstanceGroup', related_name='%(class)ss', blank=True, editable=False, through='JobLaunchConfigInstanceGroupMembership'
)
execution_environment = models.ForeignKey(
'ExecutionEnvironment', null=True, blank=True, default=None, on_delete=polymorphic.SET_NULL, related_name='execution_environment'
)
def has_user_prompts(self, template):
"""
Returns True if any fields exist in the launch config that are
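
A small sketch (not in the diff) of what moving execution_environment onto LaunchTimeConfig with related_name='%(class)s_as_prompt' provides: each concrete subclass gets its own reverse accessor on ExecutionEnvironment instead of every model clashing on a single name.

from awx.main.models import ExecutionEnvironment

ee = ExecutionEnvironment.objects.first()
if ee is not None:
    # Reverse accessors named after the migration's related_names above.
    schedules_prompting_ee = ee.schedule_as_prompt.all()
    wfjt_nodes_prompting_ee = ee.workflowjobtemplatenode_as_prompt.all()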

View File

@ -18,6 +18,7 @@ from django.utils.translation import gettext_lazy as _
# AWX
from awx.api.versioning import reverse
from awx.main.fields import OrderedManyToManyField
from awx.main.models.base import PrimordialModel
from awx.main.models.jobs import LaunchTimeConfig
from awx.main.utils import ignore_inventory_computed_fields
@ -83,6 +84,13 @@ class Schedule(PrimordialModel, LaunchTimeConfig):
)
rrule = models.TextField(help_text=_("A value representing the schedule's iCal recurrence rule."))
next_run = models.DateTimeField(null=True, default=None, editable=False, help_text=_("The next time that the scheduled action will run."))
instance_groups = OrderedManyToManyField(
'InstanceGroup',
related_name='schedule_instance_groups',
blank=True,
editable=False,
through='ScheduleInstanceGroupMembership',
)
@classmethod
def get_zoneinfo(cls):

View File

@ -29,7 +29,7 @@ from awx.main.models import prevent_search, accepts_json, UnifiedJobTemplate, Un
from awx.main.models.notifications import NotificationTemplate, JobNotificationMixin
from awx.main.models.base import CreatedModifiedModel, VarsDictProperty
from awx.main.models.rbac import ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
from awx.main.fields import ImplicitRoleField, JSONBlob
from awx.main.fields import ImplicitRoleField, AskForField, JSONBlob, OrderedManyToManyField
from awx.main.models.mixins import (
ResourceMixin,
SurveyJobTemplateMixin,
@ -167,6 +167,13 @@ class WorkflowJobTemplateNode(WorkflowNodeBase):
blank=False,
help_text=_('An identifier for this node that is unique within its workflow. ' 'It is copied to workflow job nodes corresponding to this node.'),
)
instance_groups = OrderedManyToManyField(
'InstanceGroup',
related_name='workflow_job_template_node_instance_groups',
blank=True,
editable=False,
through='WorkflowJobTemplateNodeBaseInstanceGroupMembership',
)
class Meta:
app_label = 'main'
@ -250,6 +257,9 @@ class WorkflowJobNode(WorkflowNodeBase):
blank=True, # blank denotes pre-migration job nodes
help_text=_('An identifier corresponding to the workflow job template node that this node was created from.'),
)
instance_groups = OrderedManyToManyField(
'InstanceGroup', related_name='workflow_job_node_instance_groups', blank=True, editable=False, through='WorkflowJobNodeBaseInstanceGroupMembership'
)
class Meta:
app_label = 'main'

View File

@ -42,17 +42,37 @@ options:
- Optional description of this schedule.
required: False
type: str
execution_environment:
description:
- Execution Environment applied as a prompt, assuming job template prompts for execution environment
type: str
extra_data:
description:
- Specify C(extra_vars) for the template.
required: False
type: dict
default: {}
forks:
description:
- Forks applied as a prompt, assuming job template prompts for forks
type: int
instance_groups:
description:
- List of Instance Groups applied as a prompt, assuming job template prompts for instance groups
type: list
elements: str
inventory:
description:
- Inventory applied as a prompt, assuming job template prompts for inventory
required: False
type: str
job_slice_count:
description:
- Job Slice Count applied as a prompt, assuming job template prompts for job slice count
type: int
labels:
description:
- List of labels applied as a prompt, assuming job template prompts for labels
type: list
elements: str
credentials:
description:
- List of credentials applied as a prompt, assuming job template prompts for credentials
@ -63,6 +83,10 @@ options:
- Branch to use in job run. Project default used if blank. Only allowed if project allow_override field is set to true.
required: False
type: str
timeout:
description:
- Timeout applied as a prompt, assuming job template prompts for timeout
type: int
job_type:
description:
- The job type to use for the job template.
@ -176,8 +200,14 @@ def main():
name=dict(required=True),
new_name=dict(),
description=dict(),
execution_environment=dict(type='str'),
extra_data=dict(type='dict'),
forks=dict(type='int'),
instance_groups=dict(type='list', elements='str'),
inventory=dict(),
job_slice_count=dict(type='int'),
labels=dict(type='list', elements='str'),
timeout=dict(type='int'),
credentials=dict(type='list', elements='str'),
scm_branch=dict(),
job_type=dict(choices=['run', 'check']),
@ -200,8 +230,14 @@ def main():
name = module.params.get('name')
new_name = module.params.get("new_name")
description = module.params.get('description')
execution_environment = module.params.get('execution_environment')
extra_data = module.params.get('extra_data')
forks = module.params.get('forks')
instance_groups = module.params.get('instance_groups')
inventory = module.params.get('inventory')
job_slice_count = module.params.get('job_slice_count')
labels = module.params.get('labels')
timeout = module.params.get('timeout')
credentials = module.params.get('credentials')
scm_branch = module.params.get('scm_branch')
job_type = module.params.get('job_type')
@ -238,6 +274,28 @@ def main():
for item in credentials:
association_fields['credentials'].append(module.resolve_name_to_id('credentials', item))
# We need to clear out the name from the search fields so we can use name_or_id in the following searches
if 'name' in search_fields:
del search_fields['name']
if labels is not None:
association_fields['labels'] = []
for item in labels:
label_id = module.get_one('labels', name_or_id=item, **{'data': search_fields})
if label_id is None:
module.fail_json(msg='Could not find label entry with name {0}'.format(item))
else:
association_fields['labels'].append(label_id['id'])
if instance_groups is not None:
association_fields['instance_groups'] = []
for item in instance_groups:
instance_group_id = module.get_one('instance_groups', name_or_id=item, **{'data': search_fields})
if instance_group_id is None:
module.fail_json(msg='Could not find instance_group entry with name {0}'.format(item))
else:
association_fields['instance_groups'].append(instance_group_id['id'])
# Create the data that gets sent for create and update
new_fields = {}
if rrule is not None:
@ -267,6 +325,22 @@ def main():
new_fields['unified_job_template'] = unified_job_template_id
if enabled is not None:
new_fields['enabled'] = enabled
if forks is not None:
new_fields['forks'] = forks
if job_slice_count is not None:
new_fields['job_slice_count'] = job_slice_count
if timeout is not None:
new_fields['timeout'] = timeout
if execution_environment is not None:
if execution_environment == '':
new_fields['execution_environment'] = ''
else:
ee = module.get_one('execution_environments', name_or_id=execution_environment, **{'data': search_fields})
if ee is None:
module.fail_json(msg='could not find execution_environment entry with name {0}'.format(execution_environment))
else:
new_fields['execution_environment'] = ee['id']
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
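
The empty-string convention above ('' clears the execution environment prompt, anything else is resolved by name to an id) could be captured in a small helper; this is only an illustrative sketch using the module helpers already called above, not part of the change.

def resolve_execution_environment(module, name_or_empty, search_fields):
    # '' clears the execution_environment prompt on the schedule.
    if name_or_empty == '':
        return ''
    ee = module.get_one('execution_environments', name_or_id=name_or_empty, **{'data': search_fields})
    if ee is None:
        module.fail_json(msg='Could not find execution_environment entry with name {0}'.format(name_or_empty))
    return ee['id']

# Hypothetical usage: new_fields['execution_environment'] = resolve_execution_environment(module, execution_environment, search_fields)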

View File

@ -152,6 +152,30 @@ options:
- Uniqueness is not handled rigorously.
type: list
elements: str
execution_environment:
description:
- Execution Environment applied as a prompt, assuming job template prompts for execution environment
type: str
forks:
description:
- Forks applied as a prompt, assuming job template prompts for forks
type: int
instance_groups:
description:
- List of Instance Groups applied as a prompt, assuming job template prompts for instance groups
type: list
elements: str
job_slice_count:
description:
- Job Slice Count applied as a prompt, assuming job template prompts for job slice count
type: int
labels:
description:
- List of labels applied as a prompt, assuming job template prompts for labels
type: list
elements: str
timeout:
description:
- Timeout applied as a prompt, assuming job template prompts for timeout
type: int
state:
description:
- Desired state of the resource.
@ -255,6 +279,12 @@ def main():
always_nodes=dict(type='list', elements='str'),
failure_nodes=dict(type='list', elements='str'),
credentials=dict(type='list', elements='str'),
execution_environment=dict(type='str'),
forks=dict(type='int'),
instance_groups=dict(type='list', elements='str'),
job_slice_count=dict(type='int'),
labels=dict(type='list', elements='str'),
timeout=dict(type='int'),
state=dict(choices=['present', 'absent'], default='present'),
)
mutually_exclusive = [("unified_job_template", "approval_node")]
@ -327,32 +357,44 @@ def main():
'diff_mode',
'verbosity',
'all_parents_must_converge',
'forks',
'job_slice_count',
'timeout',
):
field_val = module.params.get(field_name)
if field_val:
new_fields[field_name] = field_val
association_fields = {}
for association in ('always_nodes', 'success_nodes', 'failure_nodes', 'credentials'):
for association in ('always_nodes', 'success_nodes', 'failure_nodes', 'credentials', 'instance_groups', 'labels'):
name_list = module.params.get(association)
if name_list is None:
continue
id_list = []
for sub_name in name_list:
if association == 'credentials':
endpoint = 'credentials'
lookup_data = {'name': sub_name}
if association in ['credentials', 'instance_groups', 'labels']:
sub_obj = module.get_one(association, name_or_id=sub_name)
else:
endpoint = 'workflow_job_template_nodes'
lookup_data = {'identifier': sub_name}
if workflow_job_template_id:
lookup_data['workflow_job_template'] = workflow_job_template_id
sub_obj = module.get_one(endpoint, **{'data': lookup_data})
sub_obj = module.get_one(endpoint, **{'data': lookup_data})
if sub_obj is None:
module.fail_json(msg='Could not find {0} entry with name {1}'.format(association, sub_name))
id_list.append(sub_obj['id'])
if id_list:
association_fields[association] = id_list
association_fields[association] = id_list
execution_environment = module.params.get('execution_environment')
if execution_environment is not None:
if execution_environment == '':
new_fields['execution_environment'] = ''
else:
ee = module.get_one('execution_environments', name_or_id=execution_environment)
if ee is None:
module.fail_json(msg='could not find execution_environment entry with name {0}'.format(execution_environment))
else:
new_fields['execution_environment'] = ee['id']
# In the case of a new object, the utils need to know it is a node
new_fields['type'] = 'workflow_job_template_node'

View File

@ -7,11 +7,17 @@
set_fact:
org_name: "AWX-Collection-tests-organization-org-{{ test_id }}"
sched1: "AWX-Collection-tests-schedule-sched1-{{ test_id }}"
sched2: "AWX-Collection-tests-schedule-sched2-{{ test_id }}"
cred1: "AWX-Collection-tests-schedule-cred1-{{ test_id }}"
proj1: "AWX-Collection-tests-schedule-proj1-{{ test_id }}"
proj2: "AWX-Collection-tests-schedule-proj2-{{ test_id }}"
jt1: "AWX-Collection-tests-schedule-jt1-{{ test_id }}"
jt2: "AWX-Collection-tests-schedule-jt1-{{ test_id }}"
ee1: "AWX-Collection-tests-schedule-ee1-{{ test_id }}"
label1: "AWX-Collection-tests-schedule-l1-{{ test_id }}"
label2: "AWX-Collection-tests-schedule-l2-{{ test_id }}"
ig1: "AWX-Collection-tests-schedule-ig1-{{ test_id }}"
ig2: "AWX-Collection-tests-schedule-ig2-{{ test_id }}"
- block:
- name: Try to create without an rrule
@ -124,6 +130,12 @@
ask_limit_on_launch: true
ask_diff_mode_on_launch: true
ask_verbosity_on_launch: true
ask_execution_environment_on_launch: true
ask_forks_on_launch: true
ask_instance_groups_on_launch: true
ask_job_slice_count_on_launch: true
ask_labels_on_launch: true
ask_timeout_on_launch: true
job_type: run
state: present
register: result
@ -132,14 +144,33 @@
that:
- "result is changed"
- name: Create labels
label:
name: "{{ item }}"
organization: "{{ org_name }}"
loop:
- "{{ label1 }}"
- "{{ label2 }}"
- name: Create an execution environment
execution_environment:
name: "{{ ee1 }}"
image: "junk"
- name: Create instance groups
instance_group:
name: "{{ item }}"
loop:
- "{{ ig1 }}"
- "{{ ig2 }}"
- name: Create with options that the JT does support
schedule:
name: "{{ sched1 }}"
name: "{{ sched2 }}"
state: present
unified_job_template: "{{ jt1 }}"
rrule: "DTSTART:20191219T130551Z RRULE:FREQ=WEEKLY;INTERVAL=1;COUNT=1"
description: "This hopefully will not work"
description: "This hopefully will work"
extra_data:
some: var
inventory: Demo Inventory
@ -153,6 +184,33 @@
diff_mode: true
verbosity: 4
enabled: true
execution_environment: "{{ ee1 }}"
forks: 10
instance_groups:
- "{{ ig1 }}"
- "{{ ig2 }}"
job_slice_count: 10
labels:
- "{{ label1 }}"
- "{{ label2 }}"
timeout: 10
register: result
ignore_errors: true
- assert:
that:
- "result is changed"
- name: Reset some options
schedule:
name: "{{ sched2 }}"
state: present
execution_environment: ""
forks: 1
instance_groups: []
job_slice_count: 1
labels: []
timeout: 60
register: result
ignore_errors: true
@ -163,7 +221,7 @@
- name: Disable a schedule
schedule:
name: "{{ sched1 }}"
unified_job_template: "{{ jt1 }}"
unified_job_template: "Demo Job Template"
state: present
enabled: "false"
register: result
@ -213,42 +271,48 @@
- result is changed
always:
- name: Delete the schedule
- name: Delete the schedules
schedule:
name: "{{ sched1 }}"
name: "{{ item }}"
state: absent
loop:
- "{{ sched1 }}"
- "{{ sched2 }}"
ignore_errors: True
- name: Delete the jt
- name: Delete the jt1
job_template:
name: "{{ jt1 }}"
project: "{{ proj1 }}"
playbook: hello_world.yml
state: absent
ignore_errors: True
- name: Delete the jt
- name: Delete the jt2
job_template:
name: "{{ jt2 }}"
project: "{{ proj2 }}"
playbook: hello_world.yml
state: absent
ignore_errors: True
- name: Delete the Project
- name: Delete the Project2
project:
name: "{{ proj2 }}"
organization: "{{ org_name }}"
state: absent
scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples.git
register: result
ignore_errors: True
- name: Delete the Project
- name: Delete the Project1
project:
name: "{{ proj1 }}"
organization: Default
state: absent
scm_type: git
scm_url: https://github.com/ansible/ansible-tower-samples.git
register: result
ignore_errors: True
- name: Delete Credential1
credential:
@ -256,9 +320,28 @@
organization: Default
credential_type: Red Hat Ansible Automation Platform
state: absent
ignore_errors: True
# Labels can not be deleted
- name: Delete an execution environment
execution_environment:
name: "{{ ee1 }}"
image: "junk"
state: absent
ignore_errors: True
- name: Delete instance groups
instance_group:
name: "{{ item }}"
state: absent
loop:
- "{{ ig1 }}"
- "{{ ig2 }}"
ignore_errors: True
- name: "Remove the organization"
organization:
name: "{{ org_name }}"
state: absent
register: result
ignore_errors: True

View File

@ -20,6 +20,11 @@
project_inv: "AWX-Collection-tests-inventory_source-inv-project-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
project_inv_source: "AWX-Collection-tests-inventory_source-inv-source-project-{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
github_webhook_credential_name: "AWX-Collection-tests-credential-webhook-{{ test_id }}_github"
ee1: "AWX-Collection-tests-workflow_job_template-ee1-{{ test_id }}"
label1: "AWX-Collection-tests-workflow_job_template-l1-{{ test_id }}"
label2: "AWX-Collection-tests-workflow_job_template-l2-{{ test_id }}"
ig1: "AWX-Collection-tests-workflow_job_template-ig1-{{ test_id }}"
ig2: "AWX-Collection-tests-workflow_job_template-ig2-{{ test_id }}"
- block:
- name: "Create a new organization"
@ -181,6 +186,12 @@
playbook: hello_world.yml
job_type: run
state: present
ask_execution_environment_on_launch: true
ask_forks_on_launch: true
ask_instance_groups_on_launch: true
ask_timeout_on_launch: true
ask_job_slice_count_on_launch: true
ask_labels_on_launch: true
register: jt2_name_result
- assert:
@ -198,6 +209,12 @@
state: present
survey_enabled: true
survey_spec: '{"spec": [{"index": 0, "question_name": "my question?", "default": "mydef", "variable": "myvar", "type": "text", "required": false}], "description": "test", "name": "test"}'
ask_execution_environment_on_launch: true
ask_forks_on_launch: true
ask_instance_groups_on_launch: true
ask_timeout_on_launch: true
ask_job_slice_count_on_launch: true
ask_labels_on_launch: true
register: result
- assert:
@ -255,6 +272,26 @@
that:
- "result is changed"
- name: Create labels
label:
name: "{{ item }}"
organization: "{{ org_name }}"
loop:
- "{{ label1 }}"
- "{{ label2 }}"
- name: Create an execution environment
execution_environment:
name: "{{ ee1 }}"
image: "junk"
- name: Create instance groups
instance_group:
name: "{{ item }}"
loop:
- "{{ ig1 }}"
- "{{ ig2 }}"
# Node actions do what the schema command used to do
- name: Create leaf node
workflow_job_template_node:
@ -262,6 +299,39 @@
unified_job_template: "{{ jt2_name }}"
lookup_organization: "{{ org_name }}"
workflow: "{{ wfjt_name }}"
execution_environment: "{{ ee1 }}"
forks: 12
instance_groups:
- "{{ ig1 }}"
- "{{ ig2 }}"
job_slice_count: 2
labels:
- "{{ label1 }}"
- "{{ label2 }}"
timeout: 23
register: results
- assert:
that:
- "results is changed"
- name: Update prompts on leaf node
workflow_job_template_node:
identifier: leaf
unified_job_template: "{{ jt2_name }}"
lookup_organization: "{{ org_name }}"
workflow: "{{ wfjt_name }}"
execution_environment: ""
forks: 1
instance_groups: []
job_slice_count: 1
labels: []
timeout: 10
register: results
- assert:
that:
- "results is changed"
- name: Create root node
workflow_job_template_node:
@ -815,6 +885,24 @@
state: absent
ignore_errors: True
# Labels can not be deleted
- name: Delete an execution environment
execution_environment:
name: "{{ ee1 }}"
image: "junk"
state: absent
ignore_errors: True
- name: Delete instance groups
instance_group:
name: "{{ item }}"
state: absent
loop:
- "{{ ig1 }}"
- "{{ ig2 }}"
ignore_errors: True
- name: "Remove the organization"
organization:
name: "{{ org_name }}"