From 0b1776098b15e86c4d09010d76ee5c8d256370aa Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 14 Aug 2018 12:12:07 -0400 Subject: [PATCH 01/29] Implement model/view/launch paradigm for shard/split job templates --- awx/api/serializers.py | 2 +- awx/api/views/__init__.py | 7 +++-- awx/main/migrations/0048_v340_split_jobs.py | 20 ++++++++++++++ .../migrations/0049_v340_add_job_template.py | 26 +++++++++++++++++++ awx/main/models/jobs.py | 11 ++++++++ awx/main/models/unified_jobs.py | 14 +++++++--- awx/main/models/workflow.py | 8 ++++++ 7 files changed, 82 insertions(+), 6 deletions(-) create mode 100644 awx/main/migrations/0048_v340_split_jobs.py create mode 100644 awx/main/migrations/0049_v340_add_job_template.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 0d9867f49c..8073b89735 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3011,7 +3011,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode', - 'allow_simultaneous', 'custom_virtualenv') + 'allow_simultaneous', 'custom_virtualenv', 'job_shard_count') def get_related(self, obj): res = super(JobTemplateSerializer, self).get_related(obj) diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index e1af329953..de8756ce40 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -2903,7 +2903,7 @@ class JobTemplateLaunch(RetrieveAPIView): raise PermissionDenied() passwords = serializer.validated_data.pop('credential_passwords', {}) - new_job = obj.create_unified_job(**serializer.validated_data) + new_job = obj.create_job(**serializer.validated_data) result = new_job.signal_start(**passwords) if not result: @@ -2914,7 +2914,10 @@ class JobTemplateLaunch(RetrieveAPIView): data = OrderedDict() data['job'] = new_job.id data['ignored_fields'] = self.sanitize_for_response(ignored_fields) - data.update(JobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job)) + if isinstance(new_job, WorkflowJob): + data.update(WorkflowJobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job)) + else: + data.update(JobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job)) headers = {'Location': new_job.get_absolute_url(request)} return Response(data, status=status.HTTP_201_CREATED, headers=headers) diff --git a/awx/main/migrations/0048_v340_split_jobs.py b/awx/main/migrations/0048_v340_split_jobs.py new file mode 100644 index 0000000000..de1242760a --- /dev/null +++ b/awx/main/migrations/0048_v340_split_jobs.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.11 on 2018-08-14 13:43 +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0047_v330_activitystream_instance'), + ] + + operations = [ + migrations.AddField( + model_name='jobtemplate', + name='job_shard_count', + field=models.IntegerField(blank=True, + default=0, + help_text='The number of jobs to split into at runtime. 
Will cause the Job Template to launch a workflow.'), + ), + ] diff --git a/awx/main/migrations/0049_v340_add_job_template.py b/awx/main/migrations/0049_v340_add_job_template.py new file mode 100644 index 0000000000..3174ca9532 --- /dev/null +++ b/awx/main/migrations/0049_v340_add_job_template.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.11 on 2018-08-14 16:04 +from __future__ import unicode_literals + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0048_v340_split_jobs'), + ] + + operations = [ + migrations.AddField( + model_name='workflowjob', + name='job_template', + field=models.ForeignKey(blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name='sharded_jobs', to='main.JobTemplate'), + ), + ] diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index c5bad0c6c4..79bd353832 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -277,6 +277,12 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour default=False, allows_field='credentials' ) + job_shard_count = models.IntegerField( + blank=True, + default=0, + help_text=_("The number of jobs to split into at runtime. Will cause the Job Template to launch a workflow."), + ) + admin_role = ImplicitRoleField( parent_role=['project.organization.job_template_admin_role', 'inventory.organization.job_template_admin_role'] ) @@ -318,6 +324,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour ''' Create a new job based on this template. ''' + if self.job_shard_count > 1: + # A sharded Job Template will generate a WorkflowJob rather than a Job + from awx.main.models.workflow import WorkflowJobTemplate + kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class() + kwargs['_unified_job_field_names'] = WorkflowJobTemplate._get_unified_job_field_names() return self.create_unified_job(**kwargs) def get_absolute_url(self, request=None): diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index da3f43ad80..54b6dbad3e 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -328,6 +328,8 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio ''' Create a new unified job based on this unified job template. 
''' + from awx.main.models import JobTemplate, WorkflowJob + new_job_passwords = kwargs.pop('survey_passwords', {}) eager_fields = kwargs.pop('_eager_fields', None) @@ -336,8 +338,10 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio password_list = self.survey_password_variables() encrypt_dict(kwargs.get('extra_vars', {}), password_list) - unified_job_class = self._get_unified_job_class() - fields = self._get_unified_job_field_names() + unified_job_class = kwargs.pop("_unified_job_class", self._get_unified_job_class()) + fields = kwargs.pop("_unified_job_field_names", self._get_unified_job_field_names()) + print("UJC: {}".format(unified_job_class)) + print("fields: {}".format(fields)) unallowed_fields = set(kwargs.keys()) - set(fields) if unallowed_fields: logger.warn('Fields {} are not allowed as overrides.'.format(unallowed_fields)) @@ -350,7 +354,11 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio setattr(unified_job, fd, val) # Set the unified job template back-link on the job - parent_field_name = unified_job_class._get_parent_field_name() + # TODO: fix this hack properly before merge matburt + if isinstance(self, JobTemplate) and isinstance(unified_job, WorkflowJob): + parent_field_name = "job_template" + else: + parent_field_name = unified_job_class._get_parent_field_name() setattr(unified_job, parent_field_name, self) # For JobTemplate-based jobs with surveys, add passwords to list for perma-redaction diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 357dd9eeb0..b97f555d57 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -433,6 +433,14 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio default=None, on_delete=models.SET_NULL, ) + job_template = models.ForeignKey( + 'JobTemplate', + related_name='sharded_jobs', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) @property def workflow_nodes(self): From 8a18984be1d08fd59a5f7dfdf514238d9cfac1f8 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Wed, 22 Aug 2018 09:29:59 -0400 Subject: [PATCH 02/29] Spawn concrete workflow jobs from a job template launch --- awx/main/models/jobs.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 79bd353832..4e3a22a774 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -326,10 +326,21 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour ''' if self.job_shard_count > 1: # A sharded Job Template will generate a WorkflowJob rather than a Job - from awx.main.models.workflow import WorkflowJobTemplate + from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class() kwargs['_unified_job_field_names'] = WorkflowJobTemplate._get_unified_job_field_names() - return self.create_unified_job(**kwargs) + job = self.create_unified_job(**kwargs) + if self.job_shard_count > 1: + for idx in xrange(self.job_shard_count): + create_kwargs = dict(workflow_job=job, + unified_job_template=self, + #survey_passwords=self.survey_passwords, + inventory=self.inventory) + #char_prompts=self.char_prompts) + wfjn = WorkflowJobNode.objects.create(**create_kwargs) + for cred in self.credentials.all(): + wfjn.credentials.add(cred) + return job def get_absolute_url(self, request=None): return reverse('api:job_template_detail', 
kwargs={'pk': self.pk}, request=request) From 44ffcf86de69585f4709fbb7c14e1c0edf2a95e9 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Fri, 24 Aug 2018 14:21:18 -0400 Subject: [PATCH 03/29] Properly take prompted inventory into account This also will rename shard jobs to add an index to the job name --- awx/main/models/jobs.py | 10 ++++++++-- awx/main/scheduler/task_manager.py | 3 +++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 4e3a22a774..4821223a3f 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -331,11 +331,17 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour kwargs['_unified_job_field_names'] = WorkflowJobTemplate._get_unified_job_field_names() job = self.create_unified_job(**kwargs) if self.job_shard_count > 1: - for idx in xrange(self.job_shard_count): + if 'inventory' in kwargs: + actual_inventory = kwargs['inventory'] + else: + actual_inventory = self.inventory + for idx in xrange(min(self.job_shard_count, + actual_inventory.hosts.count())): create_kwargs = dict(workflow_job=job, unified_job_template=self, #survey_passwords=self.survey_passwords, - inventory=self.inventory) + inventory=actual_inventory, + ancestor_artifacts=dict(job_shard=idx)) #char_prompts=self.char_prompts) wfjn = WorkflowJobNode.objects.create(**create_kwargs) for cred in self.credentials.all(): diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index 08cb6cd247..a5bfccb967 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -117,6 +117,9 @@ class TaskManager(): continue kv = spawn_node.get_job_kwargs() job = spawn_node.unified_job_template.create_unified_job(**kv) + if 'job_shard' in spawn_node.ancestor_artifacts: + job.name = "{} - {}".format(job.name, spawn_node.ancestor_artifacts['job_shard'] + 1) + job.save() spawn_node.job = job spawn_node.save() logger.info('Spawned %s in %s for node %s', job.log_format, workflow_job.log_format, spawn_node.pk) From dab678c5cc3f451da558068c6ff8d0bf7fb6b6dd Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 27 Aug 2018 11:08:06 -0400 Subject: [PATCH 04/29] Implement splitting logic in inventory & job task code --- awx/api/serializers.py | 1 + .../templates/api/inventory_script_view.md | 3 + awx/api/urls/job_template.py | 2 + awx/api/views/__init__.py | 13 ++- awx/main/models/inventory.py | 81 ++++++++++++------- awx/main/scheduler/task_manager.py | 2 +- awx/main/tasks.py | 11 ++- .../tests/functional/models/test_inventory.py | 8 ++ 8 files changed, 89 insertions(+), 32 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 8073b89735..e880648467 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3028,6 +3028,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}), object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}), instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}), + sharded_jobs = self.reverse('api:job_template_sharded_jobs_list', kwargs={'pk': obj.pk}), )) if self.version > 1: res['copy'] = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}) diff --git a/awx/api/templates/api/inventory_script_view.md b/awx/api/templates/api/inventory_script_view.md index 07656c1eff..19cfff28ce 100644 --- 
a/awx/api/templates/api/inventory_script_view.md
+++ b/awx/api/templates/api/inventory_script_view.md
@@ -26,6 +26,9 @@ string of `?all=1` to return all hosts, including disabled ones.
 Specify a query string of `?towervars=1` to add variables to the hostvars of
 each host that specifies its enabled state and database ID.

+Specify a query string of `?subset=shard2of5` to produce an inventory that
+has a restricted number of hosts according to the rules of job splitting.
+
 To apply multiple query strings, join them with the `&` character, like
 `?hostvars=1&all=1`.

 ## Host Response
diff --git a/awx/api/urls/job_template.py b/awx/api/urls/job_template.py
index b11dbf4fea..9b830d64a7 100644
--- a/awx/api/urls/job_template.py
+++ b/awx/api/urls/job_template.py
@@ -8,6 +8,7 @@ from awx.api.views import (
     JobTemplateDetail,
     JobTemplateLaunch,
     JobTemplateJobsList,
+    JobTemplateShardedJobsList,
     JobTemplateCallback,
     JobTemplateSchedulesList,
     JobTemplateSurveySpec,
@@ -28,6 +29,7 @@ urls = [
     url(r'^(?P<pk>[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'),
     url(r'^(?P<pk>[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'),
     url(r'^(?P<pk>[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'),
+    url(r'^(?P<pk>[0-9]+)/sharded_jobs/$', JobTemplateShardedJobsList.as_view(), name='job_template_sharded_jobs_list'),
     url(r'^(?P<pk>[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'),
     url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
     url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py
index de8756ce40..8bd9f25dc2 100644
--- a/awx/api/views/__init__.py
+++ b/awx/api/views/__init__.py
@@ -2452,6 +2452,7 @@ class InventoryScriptView(RetrieveAPIView):
         hostvars = bool(request.query_params.get('hostvars', ''))
         towervars = bool(request.query_params.get('towervars', ''))
         show_all = bool(request.query_params.get('all', ''))
+        subset = request.query_params.get('subset', '')
         if hostname:
             hosts_q = dict(name=hostname)
             if not show_all:
@@ -2461,7 +2462,8 @@ class InventoryScriptView(RetrieveAPIView):
         return Response(obj.get_script_data(
             hostvars=hostvars,
             towervars=towervars,
-            show_all=show_all
+            show_all=show_all,
+            subset=subset
         ))


@@ -3396,6 +3398,15 @@ class JobTemplateJobsList(SubListCreateAPIView):
         return methods


+class JobTemplateShardedJobsList(SubListCreateAPIView):
+
+    model = WorkflowJob
+    serializer_class = WorkflowJobListSerializer
+    parent_model = JobTemplate
+    relationship = 'sharded_jobs'
+    parent_key = 'job_template'
+
+
 class JobTemplateInstanceGroupsList(SubListAttachDetachAPIView):

     model = InstanceGroup
diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py
index 175fa40236..26a5be6c3a 100644
--- a/awx/main/models/inventory.py
+++ b/awx/main/models/inventory.py
@@ -19,6 +19,9 @@ from django.core.exceptions import ValidationError
 from django.utils.timezone import now
 from django.db.models import Q

+# REST Framework
+from rest_framework.exceptions import ParseError
+
 # AWX
 from awx.api.versioning import reverse
 from awx.main.constants import CLOUD_PROVIDERS
@@ -217,67 +220,87 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
                 group_children.add(from_group_id)
         return group_children_map

-    def get_script_data(self, hostvars=False, towervars=False, show_all=False):
-        if show_all:
-            hosts_q = dict()
-        else:
-            hosts_q = dict(enabled=True)
+    @staticmethod
+    def parse_shard_params(shard_str):
+        m = re.match(r"shard(?P<offset>\d+)of(?P<step>\d+)", shard_str)
+        if not m:
+            raise ParseError(_('Could not parse subset as shard specification.'))
+        offset = int(m.group('offset'))
+        step = int(m.group('step'))
+        if offset > step:
+            raise ParseError(_('Shard offset must not be greater than the total number of shards.'))
+        return (offset, step)
+
+    def get_script_data(self, hostvars=False, towervars=False, show_all=False, subset=None):
+        hosts_kw = dict()
+        if not show_all:
+            hosts_kw['enabled'] = True
+        fetch_fields = ['name', 'id', 'variables']
+        if towervars:
+            fetch_fields.append('enabled')
+        hosts = self.hosts.filter(**hosts_kw).order_by('name').only(*fetch_fields)
+        if subset:
+            if not isinstance(subset, six.string_types):
+                raise ParseError(_('Inventory subset argument must be a string.'))
+            if subset.startswith('shard'):
+                offset, step = Inventory.parse_shard_params(subset)
+                hosts = hosts[offset::step]
+            else:
+                raise ParseError(_('Subset does not use any supported syntax.'))
+
         data = dict()
+        all_group = data.setdefault('all', dict())
         if self.variables_dict:
-            all_group = data.setdefault('all', dict())
             all_group['vars'] = self.variables_dict
+
         if self.kind == 'smart':
-            if len(self.hosts.all()) == 0:
-                return {}
-            else:
-                all_group = data.setdefault('all', dict())
-                smart_hosts_qs = self.hosts.filter(**hosts_q).all()
-                smart_hosts = list(smart_hosts_qs.values_list('name', flat=True))
-                all_group['hosts'] = smart_hosts
+            all_group['hosts'] = [host.name for host in hosts]
         else:
-            # Add hosts without a group to the all group.
-            groupless_hosts_qs = self.hosts.filter(groups__isnull=True, **hosts_q)
-            groupless_hosts = list(groupless_hosts_qs.values_list('name', flat=True))
-            if groupless_hosts:
-                all_group = data.setdefault('all', dict())
-                all_group['hosts'] = groupless_hosts
+            # Keep track of hosts that are members of a group
+            grouped_hosts = set([])

             # Build in-memory mapping of groups and their hosts.
-            group_hosts_kw = dict(group__inventory_id=self.id, host__inventory_id=self.id)
-            if 'enabled' in hosts_q:
-                group_hosts_kw['host__enabled'] = hosts_q['enabled']
-            group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
-            group_hosts_qs = group_hosts_qs.values_list('group_id', 'host_id', 'host__name')
+            group_hosts_qs = Group.hosts.through.objects.filter(
+                group__inventory_id=self.id,
+                host__inventory_id=self.id
+            ).values_list('group_id', 'host_id', 'host__name')
             group_hosts_map = {}
             for group_id, host_id, host_name in group_hosts_qs:
                 group_hostnames = group_hosts_map.setdefault(group_id, [])
                 group_hostnames.append(host_name)
+                grouped_hosts.add(host_name)

             # Build in-memory mapping of groups and their children.
             group_parents_qs = Group.parents.through.objects.filter(
                 from_group__inventory_id=self.id,
                 to_group__inventory_id=self.id,
-            )
-            group_parents_qs = group_parents_qs.values_list('from_group_id', 'from_group__name',
-                                                            'to_group_id')
+            ).values_list('from_group_id', 'from_group__name', 'to_group_id')
             group_children_map = {}
             for from_group_id, from_group_name, to_group_id in group_parents_qs:
                 group_children = group_children_map.setdefault(to_group_id, [])
                 group_children.append(from_group_name)

             # Now use in-memory maps to build up group info.
- for group in self.groups.all(): + for group in self.groups.only('name', 'id', 'variables'): group_info = dict() group_info['hosts'] = group_hosts_map.get(group.id, []) group_info['children'] = group_children_map.get(group.id, []) group_info['vars'] = group.variables_dict data[group.name] = group_info + # Add ungrouped hosts to all group + all_group['hosts'] = [host.name for host in hosts if host.name not in grouped_hosts] + + # Remove any empty groups + for group_name in list(data.keys()): + if not data.get(group_name, {}).get('hosts', []): + data.pop(group_name) + if hostvars: data.setdefault('_meta', dict()) data['_meta'].setdefault('hostvars', dict()) - for host in self.hosts.filter(**hosts_q): + for host in hosts: data['_meta']['hostvars'][host.name] = host.variables_dict if towervars: tower_dict = dict(remote_tower_enabled=str(host.enabled).lower(), diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index a5bfccb967..4f21e56903 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -118,7 +118,7 @@ class TaskManager(): kv = spawn_node.get_job_kwargs() job = spawn_node.unified_job_template.create_unified_job(**kv) if 'job_shard' in spawn_node.ancestor_artifacts: - job.name = "{} - {}".format(job.name, spawn_node.ancestor_artifacts['job_shard'] + 1) + job.name = six.text_type("{} - {}").format(job.name, spawn_node.ancestor_artifacts['job_shard'] + 1) job.save() spawn_node.job = job spawn_node.save() diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 0fdbef8036..db573b0b68 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -825,7 +825,16 @@ class BaseTask(object): return False def build_inventory(self, instance, **kwargs): - json_data = json.dumps(instance.inventory.get_script_data(hostvars=True)) + workflow_job = instance.get_workflow_job() + if workflow_job and workflow_job.job_template_id: + shard_address = 'shard{0}of{1}'.format( + instance.unified_job_node.ancestor_artifacts['job_shard'], + workflow_job.workflow_job_nodes.count() + ) + script_data = instance.inventory.get_script_data(hostvars=True, subset=shard_address) + else: + script_data = instance.inventory.get_script_data(hostvars=True) + json_data = json.dumps(script_data) handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None)) f = os.fdopen(handle, 'w') f.write('#! 
/usr/bin/env python\n# -*- coding: utf-8 -*-\nprint %r\n' % json_data) diff --git a/awx/main/tests/functional/models/test_inventory.py b/awx/main/tests/functional/models/test_inventory.py index 57365b914b..34eb1d7b13 100644 --- a/awx/main/tests/functional/models/test_inventory.py +++ b/awx/main/tests/functional/models/test_inventory.py @@ -38,6 +38,14 @@ class TestInventoryScript: 'remote_tower_id': host.id } + def test_shard_subset(self, inventory): + for i in range(3): + inventory.hosts.create(name='host{}'.format(i)) + for i in range(3): + assert inventory.get_script_data(subset='shard{}of3'.format(i)) == { + 'all': {'hosts': ['host{}'.format(i)]} + } + @pytest.mark.django_db class TestActiveCount: From f9bdb1da154de3649ac0d802574c82ce913e6487 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 28 Aug 2018 15:31:59 -0400 Subject: [PATCH 05/29] Job splitting access logic and more feature development *allow sharding with prompts and schedules *modify create_unified_job contract to pass class & parent_field name *make parent field name instance method & set sharded UJT field *access methods made compatible with job sharding *move shard job special logic from task manager to workflows *save sharded job prompts to workflow job exclusively *allow using sharded jobs in workflows --- awx/api/serializers.py | 2 +- awx/api/views/__init__.py | 2 +- awx/main/access.py | 12 ++-- awx/main/models/ad_hoc_commands.py | 3 +- awx/main/models/inventory.py | 3 +- awx/main/models/jobs.py | 31 +++++---- awx/main/models/projects.py | 3 +- awx/main/models/unified_jobs.py | 67 +++++++++---------- awx/main/models/workflow.py | 38 ++++++++--- awx/main/scheduler/task_manager.py | 3 - awx/main/tests/functional/conftest.py | 34 ++++++++++ awx/main/tests/functional/models/test_job.py | 21 +++++- .../tests/functional/test_rbac_job_start.py | 30 +++++++++ 13 files changed, 174 insertions(+), 75 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index e880648467..2c904e2a7f 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3590,7 +3590,7 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer): class Meta: model = WorkflowJob - fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous', + fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous', 'job_template', '-execution_node', '-event_processing_finished', '-controller_node',) def get_related(self, obj): diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 8bd9f25dc2..cd96f3fcbc 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -2905,7 +2905,7 @@ class JobTemplateLaunch(RetrieveAPIView): raise PermissionDenied() passwords = serializer.validated_data.pop('credential_passwords', {}) - new_job = obj.create_job(**serializer.validated_data) + new_job = obj.create_unified_job(**serializer.validated_data) result = new_job.signal_start(**passwords) if not result: diff --git a/awx/main/access.py b/awx/main/access.py index 768545ce05..5c1ea23a3a 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -1789,7 +1789,7 @@ class WorkflowJobNodeAccess(BaseAccess): def filtered_queryset(self): return self.model.objects.filter( - workflow_job__workflow_job_template__in=WorkflowJobTemplate.accessible_objects( + workflow_job__unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs( self.user, 'read_role')) @check_superuser @@ -1915,7 +1915,7 @@ class WorkflowJobAccess(BaseAccess): def filtered_queryset(self): return WorkflowJob.objects.filter( - 
workflow_job_template__in=WorkflowJobTemplate.accessible_objects( + unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs( self.user, 'read_role')) def can_add(self, data): @@ -1947,9 +1947,11 @@ class WorkflowJobAccess(BaseAccess): if self.user.is_superuser: return True - wfjt = obj.workflow_job_template + template = obj.workflow_job_template + if not template and obj.job_template_id: + template = obj.job_template # only superusers can relaunch orphans - if not wfjt: + if not template: return False # If job was launched by another user, it could have survey passwords @@ -1967,7 +1969,7 @@ class WorkflowJobAccess(BaseAccess): return False # execute permission to WFJT is mandatory for any relaunch - return (self.user in wfjt.execute_role) + return (self.user in template.execute_role) def can_recreate(self, obj): node_qs = obj.workflow_job_nodes.all().prefetch_related('inventory', 'credentials', 'unified_job_template') diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index 3549bb2a41..af9c519812 100644 --- a/awx/main/models/ad_hoc_commands.py +++ b/awx/main/models/ad_hoc_commands.py @@ -136,8 +136,7 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): else: return [] - @classmethod - def _get_parent_field_name(cls): + def _get_parent_field_name(self): return '' @classmethod diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 26a5be6c3a..03220ac967 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -1647,8 +1647,7 @@ class InventoryUpdate(UnifiedJob, InventorySourceOptions, JobNotificationMixin, null=True ) - @classmethod - def _get_parent_field_name(cls): + def _get_parent_field_name(self): return 'inventory_source' @classmethod diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 4821223a3f..1671496191 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -320,32 +320,32 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour def resources_needed_to_start(self): return [fd for fd in ['project', 'inventory'] if not getattr(self, '{}_id'.format(fd))] - def create_job(self, **kwargs): + def create_unified_job(self, **kwargs): ''' Create a new job based on this template. 
''' - if self.job_shard_count > 1: + split_event = bool( + self.job_shard_count > 1 and + not kwargs.pop('_prevent_sharding', False) + ) + if split_event: # A sharded Job Template will generate a WorkflowJob rather than a Job from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class() - kwargs['_unified_job_field_names'] = WorkflowJobTemplate._get_unified_job_field_names() - job = self.create_unified_job(**kwargs) - if self.job_shard_count > 1: - if 'inventory' in kwargs: - actual_inventory = kwargs['inventory'] - else: - actual_inventory = self.inventory + kwargs['_parent_field_name'] = "job_template" + job = super(JobTemplate, self).create_unified_job(**kwargs) + if split_event: + try: + wj_config = job.launch_config + except JobLaunchConfig.DoesNotExist: + wj_config = JobLaunchConfig() + actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory for idx in xrange(min(self.job_shard_count, actual_inventory.hosts.count())): create_kwargs = dict(workflow_job=job, unified_job_template=self, - #survey_passwords=self.survey_passwords, - inventory=actual_inventory, ancestor_artifacts=dict(job_shard=idx)) - #char_prompts=self.char_prompts) wfjn = WorkflowJobNode.objects.create(**create_kwargs) - for cred in self.credentials.all(): - wfjn.credentials.add(cred) return job def get_absolute_url(self, request=None): @@ -531,8 +531,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana ) - @classmethod - def _get_parent_field_name(cls): + def _get_parent_field_name(self): return 'job_template' @classmethod diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 7f296376fa..3c283e2fd2 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -496,8 +496,7 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage default='check', ) - @classmethod - def _get_parent_field_name(cls): + def _get_parent_field_name(self): return 'project' @classmethod diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 54b6dbad3e..5dd1241e0e 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -309,13 +309,6 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio ''' raise NotImplementedError # Implement in subclass. - @classmethod - def _get_unified_job_field_names(cls): - ''' - Return field names that should be copied from template to new job. - ''' - raise NotImplementedError # Implement in subclass. 
- @property def notification_templates(self): ''' @@ -338,27 +331,32 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio password_list = self.survey_password_variables() encrypt_dict(kwargs.get('extra_vars', {}), password_list) - unified_job_class = kwargs.pop("_unified_job_class", self._get_unified_job_class()) - fields = kwargs.pop("_unified_job_field_names", self._get_unified_job_field_names()) - print("UJC: {}".format(unified_job_class)) - print("fields: {}".format(fields)) + unified_job_class = self._get_unified_job_class() + fields = self._get_unified_job_field_names() + parent_field_name = None + if "_unified_job_class" in kwargs: + # Special case where spawned job is different type than usual + # Only used for sharded jobs + unified_job_class = kwargs.pop("_unified_job_class") + fields = unified_job_class._get_unified_job_field_names() & fields + parent_field_name = kwargs.pop('_parent_field_name') + unallowed_fields = set(kwargs.keys()) - set(fields) + validated_kwargs = kwargs.copy() if unallowed_fields: logger.warn('Fields {} are not allowed as overrides.'.format(unallowed_fields)) - map(kwargs.pop, unallowed_fields) + map(validated_kwargs.pop, unallowed_fields) - unified_job = copy_model_by_class(self, unified_job_class, fields, kwargs) + unified_job = copy_model_by_class(self, unified_job_class, fields, validated_kwargs) if eager_fields: for fd, val in eager_fields.items(): setattr(unified_job, fd, val) - # Set the unified job template back-link on the job - # TODO: fix this hack properly before merge matburt - if isinstance(self, JobTemplate) and isinstance(unified_job, WorkflowJob): - parent_field_name = "job_template" - else: - parent_field_name = unified_job_class._get_parent_field_name() + # NOTE: sharded workflow jobs _get_parent_field_name method + # is not correct until this is set + if not parent_field_name: + parent_field_name = unified_job._get_parent_field_name() setattr(unified_job, parent_field_name, self) # For JobTemplate-based jobs with surveys, add passwords to list for perma-redaction @@ -372,24 +370,25 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio unified_job.save() # Labels and credentials copied here - if kwargs.get('credentials'): + if validated_kwargs.get('credentials'): Credential = UnifiedJob._meta.get_field('credentials').related_model cred_dict = Credential.unique_dict(self.credentials.all()) - prompted_dict = Credential.unique_dict(kwargs['credentials']) + prompted_dict = Credential.unique_dict(validated_kwargs['credentials']) # combine prompted credentials with JT cred_dict.update(prompted_dict) - kwargs['credentials'] = [cred for cred in cred_dict.values()] + validated_kwargs['credentials'] = [cred for cred in cred_dict.values()] + kwargs['credentials'] = validated_kwargs['credentials'] from awx.main.signals import disable_activity_stream with disable_activity_stream(): - copy_m2m_relationships(self, unified_job, fields, kwargs=kwargs) + copy_m2m_relationships(self, unified_job, fields, kwargs=validated_kwargs) - if 'extra_vars' in kwargs: - unified_job.handle_extra_data(kwargs['extra_vars']) + if 'extra_vars' in validated_kwargs: + unified_job.handle_extra_data(validated_kwargs['extra_vars']) if not getattr(self, '_deprecated_credential_launch', False): # Create record of provided prompts for relaunch and rescheduling - unified_job.create_config_from_prompts(kwargs) + unified_job.create_config_from_prompts(kwargs, parent=self) return unified_job @@ -702,8 +701,7 @@ class 
UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique def supports_isolation(cls): return False - @classmethod - def _get_parent_field_name(cls): + def _get_parent_field_name(self): return 'unified_job_template' # Override in subclasses. @classmethod @@ -874,16 +872,16 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique except JobLaunchConfig.DoesNotExist: return None - def create_config_from_prompts(self, kwargs): + def create_config_from_prompts(self, kwargs, parent=None): ''' Create a launch configuration entry for this job, given prompts returns None if it can not be created ''' - if self.unified_job_template is None: - return None JobLaunchConfig = self._meta.get_field('launch_config').related_model config = JobLaunchConfig(job=self) - valid_fields = self.unified_job_template.get_ask_mapping().keys() + if parent is None: + parent = getattr(self, self._get_parent_field_name()) + valid_fields = parent.get_ask_mapping().keys() # Special cases allowed for workflows if hasattr(self, 'extra_vars'): valid_fields.extend(['survey_passwords', 'extra_vars']) @@ -900,8 +898,9 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique setattr(config, key, value) config.save() - job_creds = (set(kwargs.get('credentials', [])) - - set(self.unified_job_template.credentials.all())) + job_creds = set(kwargs.get('credentials', [])) + if 'credentials' in [field.name for field in parent._meta.get_fields()]: + job_creds = job_creds - set(parent.credentials.all()) if job_creds: config.credentials.add(*job_creds) return config diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index b97f555d57..8994c1aa1a 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -4,11 +4,13 @@ # Python #import urlparse import logging +import six # Django from django.db import models from django.conf import settings from django.utils.translation import ugettext_lazy as _ +from django.core.exceptions import ObjectDoesNotExist #from django import settings as tower_settings # AWX @@ -206,6 +208,15 @@ class WorkflowJobNode(WorkflowNodeBase): workflow_pk=self.pk, error_text=errors)) data.update(accepted_fields) # missing fields are handled in the scheduler + try: + # config saved on the workflow job itself + wj_config = self.workflow_job.launch_config + except ObjectDoesNotExist: + wj_config = None + if wj_config: + accepted_fields, ignored_fields, errors = ujt_obj._accept_or_ignore_job_kwargs(**wj_config.prompts_dict()) + accepted_fields.pop('extra_vars', None) # merge handled with other extra_vars later + data.update(accepted_fields) # build ancestor artifacts, save them to node model for later aa_dict = {} for parent_node in self.get_parent_nodes(): @@ -240,6 +251,15 @@ class WorkflowJobNode(WorkflowNodeBase): data['extra_vars'] = extra_vars # ensure that unified jobs created by WorkflowJobs are marked data['_eager_fields'] = {'launch_type': 'workflow'} + # Extra processing in the case that this is a sharded job + if 'job_shard' in self.ancestor_artifacts: + shard_str = six.text_type(self.ancestor_artifacts['job_shard'] + 1) + data['_eager_fields']['name'] = six.text_type("{} - {}").format( + self.unified_job_template.name[:512 - len(shard_str) - len(' - ')], + shard_str + ) + data['_eager_fields']['allow_simultaneous'] = True + data['_prevent_sharding'] = True return data @@ -261,6 +281,12 @@ class WorkflowJobOptions(BaseModel): def workflow_nodes(self): raise NotImplementedError() + @classmethod + def 
_get_unified_job_field_names(cls): + return set(f.name for f in WorkflowJobOptions._meta.fields) | set( + ['name', 'description', 'schedule', 'survey_passwords', 'labels'] + ) + def _create_workflow_nodes(self, old_node_list, user=None): node_links = {} for old_node in old_node_list: @@ -331,12 +357,6 @@ class WorkflowJobTemplate(UnifiedJobTemplate, WorkflowJobOptions, SurveyJobTempl def _get_unified_job_class(cls): return WorkflowJob - @classmethod - def _get_unified_job_field_names(cls): - return set(f.name for f in WorkflowJobOptions._meta.fields) | set( - ['name', 'description', 'schedule', 'survey_passwords', 'labels'] - ) - @classmethod def _get_unified_jt_copy_names(cls): base_list = super(WorkflowJobTemplate, cls)._get_unified_jt_copy_names() @@ -446,8 +466,10 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio def workflow_nodes(self): return self.workflow_job_nodes - @classmethod - def _get_parent_field_name(cls): + def _get_parent_field_name(self): + if self.job_template_id: + # This is a workflow job which is a container for sharded jobs + return 'job_template' return 'workflow_job_template' @classmethod diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index 4f21e56903..08cb6cd247 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -117,9 +117,6 @@ class TaskManager(): continue kv = spawn_node.get_job_kwargs() job = spawn_node.unified_job_template.create_unified_job(**kv) - if 'job_shard' in spawn_node.ancestor_artifacts: - job.name = six.text_type("{} - {}").format(job.name, spawn_node.ancestor_artifacts['job_shard'] + 1) - job.save() spawn_node.job = job spawn_node.save() logger.info('Spawned %s in %s for node %s', job.log_format, workflow_job.log_format, spawn_node.pk) diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 459471b6e8..57ceec41be 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -776,3 +776,37 @@ def sqlite_copy_expert(request): def disable_database_settings(mocker): m = mocker.patch('awx.conf.settings.SettingsWrapper.all_supported_settings', new_callable=PropertyMock) m.return_value = [] + + +@pytest.fixture +def shard_jt_factory(inventory): + def r(N, jt_kwargs=None): + for i in range(N): + inventory.hosts.create(name='foo{}'.format(i)) + if not jt_kwargs: + jt_kwargs = {} + return JobTemplate.objects.create( + name='shard-jt-from-factory', + job_shard_count=N, + inventory=inventory, + **jt_kwargs + ) + return r + + +@pytest.fixture +def shard_job_factory(shard_jt_factory): + def r(N, jt_kwargs=None, prompts=None, spawn=False): + shard_jt = shard_jt_factory(N, jt_kwargs=jt_kwargs) + if not prompts: + prompts = {} + shard_job = shard_jt.create_unified_job(**prompts) + if spawn: + for node in shard_job.workflow_nodes.all(): + # does what the task manager does for spawning workflow jobs + kv = node.get_job_kwargs() + job = node.unified_job_template.create_unified_job(**kv) + node.job = job + node.save() + return shard_job + return r diff --git a/awx/main/tests/functional/models/test_job.py b/awx/main/tests/functional/models/test_job.py index 013f73ca39..9926e42b5d 100644 --- a/awx/main/tests/functional/models/test_job.py +++ b/awx/main/tests/functional/models/test_job.py @@ -1,7 +1,7 @@ import pytest import six -from awx.main.models import JobTemplate, Job, JobHostSummary +from awx.main.models import JobTemplate, Job, JobHostSummary, WorkflowJob from crum import 
impersonate @@ -81,3 +81,22 @@ def test_job_host_summary_representation(host): jhs = JobHostSummary.objects.get(pk=jhs.id) host.delete() assert 'N/A changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == six.text_type(jhs) + +@pytest.mark.django_db +class TestShardingModels: + + def test_shard_workflow_spawn(self, shard_jt_factory): + shard_jt = shard_jt_factory(3) + job = shard_jt.create_unified_job() + assert isinstance(job, WorkflowJob) + assert job.job_template == shard_jt + assert job.unified_job_template == shard_jt + assert job.workflow_nodes.count() == 3 + + def test_shards_with_JT_and_prompts(self, shard_job_factory): + job = shard_job_factory(3, jt_kwargs={'ask_limit_on_launch': True}, prompts={'limit': 'foobar'}, spawn=True) + assert job.launch_config.prompts_dict() == {'limit': 'foobar'} + for node in job.workflow_nodes.all(): + assert node.limit == None # data not saved in node prompts + job = node.job + assert job.limit == 'foobar' diff --git a/awx/main/tests/functional/test_rbac_job_start.py b/awx/main/tests/functional/test_rbac_job_start.py index 60c35e0803..4b44a5a284 100644 --- a/awx/main/tests/functional/test_rbac_job_start.py +++ b/awx/main/tests/functional/test_rbac_job_start.py @@ -3,6 +3,11 @@ import pytest from awx.main.models.inventory import Inventory from awx.main.models.credential import Credential from awx.main.models.jobs import JobTemplate, Job +from awx.main.access import ( + UnifiedJobAccess, + WorkflowJobAccess, WorkflowJobNodeAccess, + JobAccess +) @pytest.mark.django_db @@ -43,6 +48,31 @@ def test_inventory_use_access(inventory, user): assert common_user.can_access(Inventory, 'use', inventory) +@pytest.mark.django_db +def test_sharded_job(shard_job_factory, rando): + workflow_job = shard_job_factory(2, jt_kwargs={'created_by': rando}, spawn=True) + workflow_job.job_template.execute_role.members.add(rando) + + # Abilities of user with execute_role for shard workflow job container + assert WorkflowJobAccess(rando).can_start(workflow_job) # relaunch allowed + for access_cls in (UnifiedJobAccess, WorkflowJobAccess): + access = access_cls(rando) + assert access.can_read(workflow_job) + assert workflow_job in access.get_queryset() + + # Abilities of user with execute_role for all the shards of the job + for node in workflow_job.workflow_nodes.all(): + access = WorkflowJobNodeAccess(rando) + assert access.can_read(node) + assert node in access.get_queryset() + job = node.job + assert JobAccess(rando).can_start(job) # relaunch allowed + for access_cls in (UnifiedJobAccess, JobAccess): + access = access_cls(rando) + assert access.can_read(job) + assert job in access.get_queryset() + + @pytest.mark.django_db class TestJobRelaunchAccess: @pytest.fixture From 7ff04dafd38f70a41d1606e5d01000573b281d58 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 10 Sep 2018 13:54:04 -0400 Subject: [PATCH 06/29] Fix IntegrityError deleting job splitting JT misc: *show sharded jobs in recent_jobs *test updates --- awx/api/serializers.py | 7 ++----- .../0050_v340_unified_jt_set_null.py | 21 +++++++++++++++++++ awx/main/models/jobs.py | 9 +++++--- awx/main/models/unified_jobs.py | 8 +++---- awx/main/tests/functional/api/test_job.py | 16 ++++++++++++++ awx/main/tests/functional/models/test_job.py | 3 ++- .../functional/models/test_unified_job.py | 4 +--- .../test_job_template_serializers.py | 13 ++++++++---- 8 files changed, 61 insertions(+), 20 deletions(-) create mode 100644 awx/main/migrations/0050_v340_unified_jt_set_null.py diff --git a/awx/api/serializers.py 
b/awx/api/serializers.py index 2c904e2a7f..5ee7d1b3b7 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2976,12 +2976,9 @@ class JobTemplateMixin(object): ''' def _recent_jobs(self, obj): - if hasattr(obj, 'workflow_jobs'): - job_mgr = obj.workflow_jobs - else: - job_mgr = obj.jobs + job_mgr = obj.unifiedjob_unified_jobs.non_polymorphic().only('id', 'status', 'finished') return [{'id': x.id, 'status': x.status, 'finished': x.finished} - for x in job_mgr.all().order_by('-created')[:10]] + for x in job_mgr.order_by('-created')[:10]] def get_summary_fields(self, obj): d = super(JobTemplateMixin, self).get_summary_fields(obj) diff --git a/awx/main/migrations/0050_v340_unified_jt_set_null.py b/awx/main/migrations/0050_v340_unified_jt_set_null.py new file mode 100644 index 0000000000..7ad65087f4 --- /dev/null +++ b/awx/main/migrations/0050_v340_unified_jt_set_null.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.11 on 2018-09-10 17:41 +from __future__ import unicode_literals + +import awx.main.utils.polymorphic +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0049_v340_add_job_template'), + ] + + operations = [ + migrations.AlterField( + model_name='unifiedjob', + name='unified_job_template', + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjob_unified_jobs', to='main.UnifiedJobTemplate'), + ), + ] diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 1671496191..bb1b229a43 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -320,10 +320,13 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour def resources_needed_to_start(self): return [fd for fd in ['project', 'inventory'] if not getattr(self, '{}_id'.format(fd))] - def create_unified_job(self, **kwargs): + def create_job(self, **kwargs): ''' Create a new job based on this template. ''' + return self.create_unified_job(**kwargs) + + def create_unified_job(self, **kwargs): split_event = bool( self.job_shard_count > 1 and not kwargs.pop('_prevent_sharding', False) @@ -345,7 +348,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour create_kwargs = dict(workflow_job=job, unified_job_template=self, ancestor_artifacts=dict(job_shard=idx)) - wfjn = WorkflowJobNode.objects.create(**create_kwargs) + WorkflowJobNode.objects.create(**create_kwargs) return job def get_absolute_url(self, request=None): @@ -480,7 +483,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour RelatedJobsMixin ''' def _get_related_jobs(self): - return Job.objects.filter(job_template=self) + return UnifiedJob.objects.filter(unified_job_template=self) class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskManagerJobMixin): diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 5dd1241e0e..cbda5109e2 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -321,8 +321,6 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio ''' Create a new unified job based on this unified job template. 
''' - from awx.main.models import JobTemplate, WorkflowJob - new_job_passwords = kwargs.pop('survey_passwords', {}) eager_fields = kwargs.pop('_eager_fields', None) @@ -553,7 +551,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique default=None, editable=False, related_name='%(class)s_unified_jobs', - on_delete=models.SET_NULL, + on_delete=polymorphic.SET_NULL, ) launch_type = models.CharField( max_length=20, @@ -834,7 +832,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique ''' unified_job_class = self.__class__ unified_jt_class = self._get_unified_job_template_class() - parent_field_name = unified_job_class._get_parent_field_name() + parent_field_name = self._get_parent_field_name() fields = unified_jt_class._get_unified_job_field_names() | set([parent_field_name]) create_data = {} @@ -881,6 +879,8 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique config = JobLaunchConfig(job=self) if parent is None: parent = getattr(self, self._get_parent_field_name()) + if parent is None: + return valid_fields = parent.get_ask_mapping().keys() # Special cases allowed for workflows if hasattr(self, 'extra_vars'): diff --git a/awx/main/tests/functional/api/test_job.py b/awx/main/tests/functional/api/test_job.py index 8cd26e71ba..1685c62304 100644 --- a/awx/main/tests/functional/api/test_job.py +++ b/awx/main/tests/functional/api/test_job.py @@ -122,6 +122,22 @@ def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credenti assert r.data.get('limit') == hosts +@pytest.mark.django_db +def test_shard_jt_recent_jobs(shard_job_factory, admin_user, get): + workflow_job = shard_job_factory(3, spawn=True) + shard_jt = workflow_job.job_template + r = get( + url=shard_jt.get_absolute_url(), + user=admin_user, + expect=200 + ) + job_ids = [entry['id'] for entry in r.data['summary_fields']['recent_jobs']] + assert workflow_job.pk in job_ids + for node in workflow_job.workflow_nodes.all(): + job = node.job + assert job.pk in job_ids + + @pytest.mark.django_db def test_block_unprocessed_events(delete, admin_user, mocker): time_of_finish = parse("Thu Feb 28 09:10:20 2013 -0500") diff --git a/awx/main/tests/functional/models/test_job.py b/awx/main/tests/functional/models/test_job.py index 9926e42b5d..c2614b17f1 100644 --- a/awx/main/tests/functional/models/test_job.py +++ b/awx/main/tests/functional/models/test_job.py @@ -82,6 +82,7 @@ def test_job_host_summary_representation(host): host.delete() assert 'N/A changed=1 dark=2 failures=3 ok=4 processed=5 skipped=6' == six.text_type(jhs) + @pytest.mark.django_db class TestShardingModels: @@ -97,6 +98,6 @@ class TestShardingModels: job = shard_job_factory(3, jt_kwargs={'ask_limit_on_launch': True}, prompts={'limit': 'foobar'}, spawn=True) assert job.launch_config.prompts_dict() == {'limit': 'foobar'} for node in job.workflow_nodes.all(): - assert node.limit == None # data not saved in node prompts + assert node.limit is None # data not saved in node prompts job = node.job assert job.limit == 'foobar' diff --git a/awx/main/tests/functional/models/test_unified_job.py b/awx/main/tests/functional/models/test_unified_job.py index ff3af2439b..f587e4c448 100644 --- a/awx/main/tests/functional/models/test_unified_job.py +++ b/awx/main/tests/functional/models/test_unified_job.py @@ -58,9 +58,7 @@ class TestCreateUnifiedJob: job_with_links.save() job_with_links.credentials.add(machine_credential) job_with_links.credentials.add(net_credential) - with 
mocker.patch('awx.main.models.unified_jobs.UnifiedJobTemplate._get_unified_job_field_names', - return_value=['inventory', 'credential', 'limit']): - second_job = job_with_links.copy_unified_job() + second_job = job_with_links.copy_unified_job() # Check that job data matches the original variables assert second_job.credential == job_with_links.credential diff --git a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py index a6f41debb9..8719b9b1b9 100644 --- a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py @@ -71,14 +71,19 @@ class TestJobTemplateSerializerGetRelated(): class TestJobTemplateSerializerGetSummaryFields(): def test__recent_jobs(self, mocker, job_template, jobs): - job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs}) - job_template.jobs.all.return_value = job_template.jobs.all + job_template.unifiedjob_unified_jobs = mocker.MagicMock(**{ + 'non_polymorphic.return_value': mocker.MagicMock(**{ + 'only.return_value': mocker.MagicMock(**{ + 'order_by.return_value': jobs + }) + }) + }) serializer = JobTemplateSerializer() recent_jobs = serializer._recent_jobs(job_template) - job_template.jobs.all.assert_called_once_with() - job_template.jobs.all.order_by.assert_called_once_with('-created') + job_template.unifiedjob_unified_jobs.non_polymorphic.assert_called_once_with() + job_template.unifiedjob_unified_jobs.non_polymorphic().only().order_by.assert_called_once_with('-created') assert len(recent_jobs) == 10 for x in jobs[:10]: assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]] From 20226f89849511d8ff81fb71acc9e701c02a7188 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Thu, 13 Sep 2018 11:53:38 -0400 Subject: [PATCH 07/29] Polish split jobs API info & add fields to UI *clarify help text and squash migrations *adds new internal_limit field to Job model for faster reference *if field is non-blank, populate shard params in summary_fields *add summary information to UI job/wfj details, JT selector --- awx/api/serializers.py | 7 ++++ awx/api/views/__init__.py | 6 ++- awx/main/migrations/0048_v330_split_jobs.py | 37 +++++++++++++++++++ awx/main/migrations/0048_v340_split_jobs.py | 20 ---------- .../migrations/0049_v340_add_job_template.py | 26 ------------- .../0050_v340_unified_jt_set_null.py | 21 ----------- awx/main/models/jobs.py | 23 +++++++++--- awx/main/models/workflow.py | 6 ++- awx/main/scheduler/task_manager.py | 6 +-- awx/main/tasks.py | 12 ++---- .../features/output/details.component.js | 21 +++++++++++ .../features/output/details.partial.html | 6 +++ .../client/features/output/output.strings.js | 2 + .../job_templates/job-template.form.js | 14 +++++++ .../workflow-results.controller.js | 7 ++++ .../workflow-results.partial.html | 16 ++++++++ 16 files changed, 142 insertions(+), 88 deletions(-) create mode 100644 awx/main/migrations/0048_v330_split_jobs.py delete mode 100644 awx/main/migrations/0048_v340_split_jobs.py delete mode 100644 awx/main/migrations/0049_v340_add_job_template.py delete mode 100644 awx/main/migrations/0050_v340_unified_jt_set_null.py diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 5ee7d1b3b7..eb6264c5e2 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3199,6 +3199,13 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): def get_summary_fields(self, 
obj):
         summary_fields = super(JobSerializer, self).get_summary_fields(obj)
+        if obj.internal_limit:
+            summary_fields['internal_limit'] = {}
+            if obj.internal_limit.startswith('shard'):
+                offset, step = Inventory.parse_shard_params(obj.internal_limit)
+                summary_fields['internal_limit']['shard'] = {'offset': offset, 'step': step}
+            else:
+                summary_fields['internal_limit']['unknown'] = obj.internal_limit
         all_creds = []
         # Organize credential data into multitude of deprecated fields
         # TODO: remove most of this as v1 is removed
diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py
index cd96f3fcbc..e99532cbca 100644
--- a/awx/api/views/__init__.py
+++ b/awx/api/views/__init__.py
@@ -2914,11 +2914,13 @@ class JobTemplateLaunch(RetrieveAPIView):
             return Response(data, status=status.HTTP_400_BAD_REQUEST)
         else:
             data = OrderedDict()
-            data['job'] = new_job.id
-            data['ignored_fields'] = self.sanitize_for_response(ignored_fields)
             if isinstance(new_job, WorkflowJob):
+                data['workflow_job'] = new_job.id
+                data['ignored_fields'] = self.sanitize_for_response(ignored_fields)
                 data.update(WorkflowJobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job))
             else:
+                data['job'] = new_job.id
+                data['ignored_fields'] = self.sanitize_for_response(ignored_fields)
                 data.update(JobSerializer(new_job, context=self.get_serializer_context()).to_representation(new_job))
             headers = {'Location': new_job.get_absolute_url(request)}
             return Response(data, status=status.HTTP_201_CREATED, headers=headers)
diff --git a/awx/main/migrations/0048_v330_split_jobs.py b/awx/main/migrations/0048_v330_split_jobs.py
new file mode 100644
index 0000000000..17c6ab65ca
--- /dev/null
+++ b/awx/main/migrations/0048_v330_split_jobs.py
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.11 on 2018-09-13 15:55
+from __future__ import unicode_literals
+
+import awx.main.utils.polymorphic
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('main', '0047_v330_activitystream_instance'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='jobtemplate',
+            name='job_shard_count',
+            field=models.IntegerField(blank=True, default=0, help_text='The number of jobs to split into at runtime. 
Will cause the Job Template to launch a workflow if value is non-zero.'), + ), + migrations.AddField( + model_name='workflowjob', + name='job_template', + field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='sharded_jobs', to='main.JobTemplate'), + ), + migrations.AlterField( + model_name='unifiedjob', + name='unified_job_template', + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjob_unified_jobs', to='main.UnifiedJobTemplate'), + ), + migrations.AddField( + model_name='job', + name='internal_limit', + field=models.CharField(default=b'', editable=False, max_length=1024), + ), + ] diff --git a/awx/main/migrations/0048_v340_split_jobs.py b/awx/main/migrations/0048_v340_split_jobs.py deleted file mode 100644 index de1242760a..0000000000 --- a/awx/main/migrations/0048_v340_split_jobs.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.11 on 2018-08-14 13:43 -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0047_v330_activitystream_instance'), - ] - - operations = [ - migrations.AddField( - model_name='jobtemplate', - name='job_shard_count', - field=models.IntegerField(blank=True, - default=0, - help_text='The number of jobs to split into at runtime. Will cause the Job Template to launch a workflow.'), - ), - ] diff --git a/awx/main/migrations/0049_v340_add_job_template.py b/awx/main/migrations/0049_v340_add_job_template.py deleted file mode 100644 index 3174ca9532..0000000000 --- a/awx/main/migrations/0049_v340_add_job_template.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.11 on 2018-08-14 16:04 -from __future__ import unicode_literals - -from django.conf import settings -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0048_v340_split_jobs'), - ] - - operations = [ - migrations.AddField( - model_name='workflowjob', - name='job_template', - field=models.ForeignKey(blank=True, - default=None, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name='sharded_jobs', to='main.JobTemplate'), - ), - ] diff --git a/awx/main/migrations/0050_v340_unified_jt_set_null.py b/awx/main/migrations/0050_v340_unified_jt_set_null.py deleted file mode 100644 index 7ad65087f4..0000000000 --- a/awx/main/migrations/0050_v340_unified_jt_set_null.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.11 on 2018-09-10 17:41 -from __future__ import unicode_literals - -import awx.main.utils.polymorphic -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0049_v340_add_job_template'), - ] - - operations = [ - migrations.AlterField( - model_name='unifiedjob', - name='unified_job_template', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjob_unified_jobs', to='main.UnifiedJobTemplate'), - ), - ] diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index bb1b229a43..d698ad3726 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -280,7 +280,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour job_shard_count = models.IntegerField( blank=True, default=0, - 
help_text=_("The number of jobs to split into at runtime. Will cause the Job Template to launch a workflow."), + help_text=_("The number of jobs to split into at runtime. " + "Will cause the Job Template to launch a workflow if value is non-zero."), ) admin_role = ImplicitRoleField( @@ -301,7 +302,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour @classmethod def _get_unified_job_field_names(cls): return set(f.name for f in JobOptions._meta.fields) | set( - ['name', 'description', 'schedule', 'survey_passwords', 'labels', 'credentials'] + ['name', 'description', 'schedule', 'survey_passwords', 'labels', 'credentials', 'internal_limit'] ) @property @@ -327,10 +328,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour return self.create_unified_job(**kwargs) def create_unified_job(self, **kwargs): - split_event = bool( - self.job_shard_count > 1 and - not kwargs.pop('_prevent_sharding', False) - ) + prevent_sharding = kwargs.pop('_prevent_sharding', False) + split_event = bool(self.job_shard_count > 1 and (not prevent_sharding)) if split_event: # A sharded Job Template will generate a WorkflowJob rather than a Job from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode @@ -532,6 +531,11 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana on_delete=models.SET_NULL, help_text=_('The SCM Refresh task used to make sure the playbooks were available for the job run'), ) + internal_limit = models.CharField( + max_length=1024, + default='', + editable=False, + ) def _get_parent_field_name(self): @@ -575,6 +579,13 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana def event_class(self): return JobEvent + def copy_unified_job(self, **new_prompts): + new_prompts['_prevent_sharding'] = True + if self.internal_limit: + new_prompts.setdefault('_eager_fields', {}) + new_prompts['_eager_fields']['internal_limit'] = self.internal_limit # oddball, not from JT or prompts + return super(Job, self).copy_unified_job(**new_prompts) + @property def ask_diff_mode_on_launch(self): if self.job_template is not None: diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 8994c1aa1a..f15e66b226 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -259,6 +259,10 @@ class WorkflowJobNode(WorkflowNodeBase): shard_str ) data['_eager_fields']['allow_simultaneous'] = True + data['_eager_fields']['internal_limit'] = 'shard{0}of{1}'.format( + self.ancestor_artifacts['job_shard'], + self.workflow_job.workflow_job_nodes.count() + ) data['_prevent_sharding'] = True return data @@ -314,7 +318,7 @@ class WorkflowJobOptions(BaseModel): def create_relaunch_workflow_job(self): new_workflow_job = self.copy_unified_job() - if self.workflow_job_template is None: + if self.unified_job_template_id is None: new_workflow_job.copy_nodes_from_original(original=self) return new_workflow_job diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index 08cb6cd247..540b94181e 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -419,7 +419,7 @@ class TaskManager(): logger.debug(six.text_type("Dependent {} couldn't be scheduled on graph, waiting for next cycle").format(task.log_format)) def process_pending_tasks(self, pending_tasks): - running_workflow_templates = set([wf.workflow_job_template_id for wf in self.get_running_workflow_jobs()]) + running_workflow_templates = 
set([wf.unified_job_template_id for wf in self.get_running_workflow_jobs()]) for task in pending_tasks: self.process_dependencies(task, self.generate_dependencies(task)) if self.is_job_blocked(task): @@ -429,12 +429,12 @@ class TaskManager(): found_acceptable_queue = False idle_instance_that_fits = None if isinstance(task, WorkflowJob): - if task.workflow_job_template_id in running_workflow_templates: + if task.unified_job_template_id in running_workflow_templates: if not task.allow_simultaneous: logger.debug(six.text_type("{} is blocked from running, workflow already running").format(task.log_format)) continue else: - running_workflow_templates.add(task.workflow_job_template_id) + running_workflow_templates.add(task.unified_job_template_id) self.start_task(task, None, task.get_jobs_fail_chain(), None) continue for rampart_group in preferred_instance_groups: diff --git a/awx/main/tasks.py b/awx/main/tasks.py index db573b0b68..2c543d88d3 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -825,15 +825,9 @@ class BaseTask(object): return False def build_inventory(self, instance, **kwargs): - workflow_job = instance.get_workflow_job() - if workflow_job and workflow_job.job_template_id: - shard_address = 'shard{0}of{1}'.format( - instance.unified_job_node.ancestor_artifacts['job_shard'], - workflow_job.workflow_job_nodes.count() - ) - script_data = instance.inventory.get_script_data(hostvars=True, subset=shard_address) - else: - script_data = instance.inventory.get_script_data(hostvars=True) + script_data = instance.inventory.get_script_data( + hostvars=True, subset=getattr(instance, 'internal_limit', '') + ) json_data = json.dumps(script_data) handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None)) f = os.fdopen(handle, 'w') diff --git a/awx/ui/client/features/output/details.component.js b/awx/ui/client/features/output/details.component.js index b1cc4734cd..bc39abeeaf 100644 --- a/awx/ui/client/features/output/details.component.js +++ b/awx/ui/client/features/output/details.component.js @@ -126,6 +126,26 @@ function getSourceWorkflowJobDetails () { return { link, tooltip }; } +function getShardDetails () { + const internalLimitDetails = resource.model.get('summary_fields.internal_limit'); + + if (!internalLimitDetails) { + return null; + } + + const shardDetails = resource.model.get('summary_fields.internal_limit.shard'); + + if (!shardDetails) { + return null; + } + + const label = strings.get('labels.SHARD_DETAILS'); + const offset = `${shardDetails.offset} of ${shardDetails.step} shards`; + const tooltip = strings.get('tooltips.SHARD_DETAILS'); + + return { label, offset, tooltip }; +} + function getJobTemplateDetails () { const jobTemplate = resource.model.get('summary_fields.job_template'); @@ -671,6 +691,7 @@ function JobDetailsController ( vm.jobType = getJobTypeDetails(); vm.jobTemplate = getJobTemplateDetails(); vm.sourceWorkflowJob = getSourceWorkflowJobDetails(); + vm.shardDetails = getShardDetails(); vm.inventory = getInventoryDetails(); vm.project = getProjectDetails(); vm.projectUpdate = getProjectUpdateDetails(); diff --git a/awx/ui/client/features/output/details.partial.html b/awx/ui/client/features/output/details.partial.html index f681059f90..2f3670f4ae 100644 --- a/awx/ui/client/features/output/details.partial.html +++ b/awx/ui/client/features/output/details.partial.html @@ -151,6 +151,12 @@
{{ vm.jobType.value }}
+ +
+ +
{{ vm.shardDetails.offset }}
+
+
diff --git a/awx/ui/client/features/output/output.strings.js b/awx/ui/client/features/output/output.strings.js index 538b533cb0..4fa6fe4335 100644 --- a/awx/ui/client/features/output/output.strings.js +++ b/awx/ui/client/features/output/output.strings.js @@ -23,6 +23,7 @@ function OutputStrings (BaseString) { EXTRA_VARS: t.s('Read-only view of extra variables added to the job template'), INVENTORY: t.s('View the Inventory'), JOB_TEMPLATE: t.s('View the Job Template'), + SHARD_DETAILS: t.s('Job is one of several shards from a JT that splits on inventory'), PROJECT: t.s('View the Project'), PROJECT_UPDATE: t.s('View Project checkout results'), SCHEDULE: t.s('View the Schedule'), @@ -55,6 +56,7 @@ function OutputStrings (BaseString) { JOB_EXPLANATION: t.s('Explanation'), JOB_TAGS: t.s('Job Tags'), JOB_TEMPLATE: t.s('Job Template'), + SHARD_DETAILS: t.s('Shard Details'), JOB_TYPE: t.s('Job Type'), LABELS: t.s('Labels'), LAUNCHED_BY: t.s('Launched By'), diff --git a/awx/ui/client/src/templates/job_templates/job-template.form.js b/awx/ui/client/src/templates/job_templates/job-template.form.js index 54bbe2c70d..3773d2b865 100644 --- a/awx/ui/client/src/templates/job_templates/job-template.form.js +++ b/awx/ui/client/src/templates/job_templates/job-template.form.js @@ -271,6 +271,20 @@ function(NotificationsList, i18n) { }, ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' }, + job_shard_count: { + label: i18n._('Number of job shards to use'), + type: 'number', + integer: true, + min: 0, + spinner: true, + // 'class': "input-small", + // toggleSource: 'diff_mode', + dataTitle: i18n._('Job Shard Count'), + dataPlacement: 'right', + dataContainer: 'body', + awPopOver: "

" + i18n._("If non-zero, split into multiple jobs that run on mutually exclusive slices of the inventory.") + "

", + ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' + }, checkbox_group: { label: i18n._('Options'), type: 'checkbox_group', diff --git a/awx/ui/client/src/workflow-results/workflow-results.controller.js b/awx/ui/client/src/workflow-results/workflow-results.controller.js index 31ad3191f8..f66e2a09e9 100644 --- a/awx/ui/client/src/workflow-results/workflow-results.controller.js +++ b/awx/ui/client/src/workflow-results/workflow-results.controller.js @@ -39,6 +39,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', DELETE: i18n._('Delete'), EDIT_USER: i18n._('Edit the user'), EDIT_WORKFLOW: i18n._('Edit the workflow job template'), + EDIT_SHARD_TEMPLATE: i18n._('Edit the shard job template'), EDIT_SCHEDULE: i18n._('Edit the schedule'), TOGGLE_STDOUT_FULLSCREEN: i18n._('Expand Output'), STATUS: '' // re-assigned elsewhere @@ -49,6 +50,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', STARTED: i18n._('Started'), FINISHED: i18n._('Finished'), LABELS: i18n._('Labels'), + SHARD_TEMPLATE: i18n._('Shard Template'), STATUS: i18n._('Status') }, details: { @@ -109,6 +111,11 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', $scope.workflow_job_template_link = `/#/templates/workflow_job_template/${$scope.workflow.summary_fields.workflow_job_template.id}`; } + if(workflowData.summary_fields && workflowData.summary_fields.job_template && + workflowData.summary_fields.job_template.id){ + $scope.shard_job_template_link = `/#/templates/job_template/${$scope.workflow.summary_fields.job_template.id}`; + } + // turn related api browser routes into front end routes getLinks(); diff --git a/awx/ui/client/src/workflow-results/workflow-results.partial.html b/awx/ui/client/src/workflow-results/workflow-results.partial.html index 97ef30a914..f3f8943dfc 100644 --- a/awx/ui/client/src/workflow-results/workflow-results.partial.html +++ b/awx/ui/client/src/workflow-results/workflow-results.partial.html @@ -144,6 +144,22 @@
+ +
+ + +
+ Date: Mon, 17 Sep 2018 10:54:11 -0400 Subject: [PATCH 08/29] Adding architecture doc for job sharding --- docs/job_sharding.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 docs/job_sharding.md diff --git a/docs/job_sharding.md b/docs/job_sharding.md new file mode 100644 index 0000000000..c16fa030f9 --- /dev/null +++ b/docs/job_sharding.md @@ -0,0 +1,13 @@ +# Job Sharding Overview + +Ansible, by default, runs jobs from a single control instance. At best a single Ansible job can be split up on a single system via forks but this doesn't fully take advantage of AWX's ability to distribute work to multiple nodes in a cluster. + +Job Sharding solves this by adding a Job Template field `job_shard_count`. This field specifies the number of **Jobs** to split the Ansible run into. When this number is greater than 1 ``AWX`` will generate a **Workflow** from a **JobTemplate** instead of a **Job**. The **Inventory** will be split evenly amongst the sharded jobs. The workflow job is then started and proceeds as though it were a normal workflow. The API will return either a **Job** resource (if `job_shard_count` < 2) or a **WorkflowJob** resource otherwise. Likewise, the UI will redirect to the appropriate screen to display the status of the run. + +## Implications for Job execution + +When jobs are split they can run on any Tower node and some may not run at the same time. Because of this, anything that relies on setting/sharing state (using modules such as ``set_fact``) will not work as expected. It's reasonable to expect that not all jobs will actually run at the same time (if there is not enough capacity in the system for example) + +## Simultaneous Execution Behavior + +By default Job Templates aren't normally configured to execute simultaneously (``allow_simultaneous`` must be checked). Sharding overrides this behavior and implies ``allow_simultaneous`` even if that setting is unchecked. 
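As a reading aid for the doc above, here is a minimal, self-contained sketch (illustrative only, not the actual AWX implementation) of the launch decision it describes: a `job_shard_count` greater than 1 turns a Job Template launch into a workflow job with one node per shard, capped at the number of hosts in the inventory. The function and return values below are hypothetical stand-ins for `JobTemplate.create_unified_job`.

    def create_unified_job(job_shard_count, host_count):
        # Simplified stand-in for the decision added in this patch set.
        if job_shard_count > 1:
            # One workflow node per shard; never create more nodes than hosts.
            nodes = [{'job_shard': idx} for idx in range(min(job_shard_count, host_count))]
            return {'type': 'workflow_job', 'nodes': nodes}
        return {'type': 'job'}

    assert create_unified_job(job_shard_count=0, host_count=10)['type'] == 'job'
    assert len(create_unified_job(job_shard_count=3, host_count=10)['nodes']) == 3
    assert len(create_unified_job(job_shard_count=5, host_count=2)['nodes']) == 2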
\ No newline at end of file From c8d76dbe78c0f258cd91556793097281fc7800fc Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Thu, 27 Sep 2018 11:48:59 -0400 Subject: [PATCH 09/29] update migration after rebase --- .../{0048_v330_split_jobs.py => 0050_v340_split_jobs.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename awx/main/migrations/{0048_v330_split_jobs.py => 0050_v340_split_jobs.py} (95%) diff --git a/awx/main/migrations/0048_v330_split_jobs.py b/awx/main/migrations/0050_v340_split_jobs.py similarity index 95% rename from awx/main/migrations/0048_v330_split_jobs.py rename to awx/main/migrations/0050_v340_split_jobs.py index 17c6ab65ca..f946d22896 100644 --- a/awx/main/migrations/0048_v330_split_jobs.py +++ b/awx/main/migrations/0050_v340_split_jobs.py @@ -10,7 +10,7 @@ import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ - ('main', '0047_v330_activitystream_instance'), + ('main', '0049_v330_validate_instance_capacity_adjustment'), ] operations = [ From ef4a2cbebb9e5cc4fbbcdaff85fb37d58f22be23 Mon Sep 17 00:00:00 2001 From: kialam Date: Wed, 3 Oct 2018 12:01:48 -0400 Subject: [PATCH 10/29] Add Job Splitting feature to UI --- .../features/jobs/jobsList.controller.js | 16 +++++++++++ .../client/features/jobs/jobsList.view.html | 3 ++- .../features/output/details.component.js | 14 +++++----- .../features/output/details.partial.html | 8 +++--- .../client/features/output/output.strings.js | 4 +-- awx/ui/client/lib/components/list/_index.less | 2 +- .../lib/components/list/row-item.directive.js | 1 + .../lib/components/list/row-item.partial.html | 3 +++ .../job_templates/job-template.form.js | 27 +++++++++---------- 9 files changed, 49 insertions(+), 29 deletions(-) diff --git a/awx/ui/client/features/jobs/jobsList.controller.js b/awx/ui/client/features/jobs/jobsList.controller.js index 4833fdda9a..219dfdd052 100644 --- a/awx/ui/client/features/jobs/jobsList.controller.js +++ b/awx/ui/client/features/jobs/jobsList.controller.js @@ -76,6 +76,22 @@ function ListJobsController ( return { icon, link, value }; }); + vm.getSplitJobDetails = (details) => { + const internalLimitDetails = Object.assign({}, details); + + if (!internalLimitDetails) { + return null; + } + + const splitJobDetails = internalLimitDetails.shard; + + if (!splitJobDetails) { + return null; + } + + return `Split Job ${splitJobDetails.offset + 1}/${splitJobDetails.step}`; + }; + vm.getSref = ({ type, id }) => { let sref; diff --git a/awx/ui/client/features/jobs/jobsList.view.html b/awx/ui/client/features/jobs/jobsList.view.html index 580a35d8a7..8124b6ccf1 100644 --- a/awx/ui/client/features/jobs/jobsList.view.html +++ b/awx/ui/client/features/jobs/jobsList.view.html @@ -23,7 +23,8 @@ status-tip="{{ vm.strings.get('list.STATUS_TOOLTIP', job.status) }}" header-value="{{ job.id }} - {{ job.name }}" header-state="{{ vm.getSref(job) }}" - header-tag="{{ vm.jobTypes[job.type] }}"> + header-tag="{{ vm.jobTypes[job.type] }}" + secondary-tag="{{ vm.getSplitJobDetails(job.summary_fields.internal_limit) }}">
{{ vm.jobType.value }}
- -
- -
{{ vm.shardDetails.offset }}
+ +
+ +
{{ vm.splitJobDetails.offset }}
diff --git a/awx/ui/client/features/output/output.strings.js b/awx/ui/client/features/output/output.strings.js index 4fa6fe4335..9e2da6d983 100644 --- a/awx/ui/client/features/output/output.strings.js +++ b/awx/ui/client/features/output/output.strings.js @@ -23,7 +23,7 @@ function OutputStrings (BaseString) { EXTRA_VARS: t.s('Read-only view of extra variables added to the job template'), INVENTORY: t.s('View the Inventory'), JOB_TEMPLATE: t.s('View the Job Template'), - SHARD_DETAILS: t.s('Job is one of several shards from a JT that splits on inventory'), + SPLIT_JOB_DETAILS: t.s('Job is one of several from a JT that splits on inventory'), PROJECT: t.s('View the Project'), PROJECT_UPDATE: t.s('View Project checkout results'), SCHEDULE: t.s('View the Schedule'), @@ -56,7 +56,7 @@ function OutputStrings (BaseString) { JOB_EXPLANATION: t.s('Explanation'), JOB_TAGS: t.s('Job Tags'), JOB_TEMPLATE: t.s('Job Template'), - SHARD_DETAILS: t.s('Shard Details'), + SPLIT_JOB: t.s('Split Job'), JOB_TYPE: t.s('Job Type'), LABELS: t.s('Labels'), LAUNCHED_BY: t.s('Launched By'), diff --git a/awx/ui/client/lib/components/list/_index.less b/awx/ui/client/lib/components/list/_index.less index 1f8d534034..c5b64d4eed 100644 --- a/awx/ui/client/lib/components/list/_index.less +++ b/awx/ui/client/lib/components/list/_index.less @@ -197,7 +197,7 @@ color: @at-color-list-row-item-tag-primary; } -.at-RowItem-tag--header { +.at-RowItem-tag--header, .at-RowItem-tag--secondary { line-height: inherit; } diff --git a/awx/ui/client/lib/components/list/row-item.directive.js b/awx/ui/client/lib/components/list/row-item.directive.js index 731aa837ec..b28b661581 100644 --- a/awx/ui/client/lib/components/list/row-item.directive.js +++ b/awx/ui/client/lib/components/list/row-item.directive.js @@ -13,6 +13,7 @@ function atRowItem () { headerLink: '@', headerState: '@', headerTag: '@', + secondaryTag: '@', status: '@', statusTip: '@', statusClick: '&?', diff --git a/awx/ui/client/lib/components/list/row-item.partial.html b/awx/ui/client/lib/components/list/row-item.partial.html index eebeab39a9..17f1e3b5d9 100644 --- a/awx/ui/client/lib/components/list/row-item.partial.html +++ b/awx/ui/client/lib/components/list/row-item.partial.html @@ -29,6 +29,9 @@
{{ headerTag }}
+
+ {{ secondaryTag }} +
diff --git a/awx/ui/client/src/templates/job_templates/job-template.form.js b/awx/ui/client/src/templates/job_templates/job-template.form.js index 3773d2b865..8d5b68075f 100644 --- a/awx/ui/client/src/templates/job_templates/job-template.form.js +++ b/awx/ui/client/src/templates/job_templates/job-template.form.js @@ -257,6 +257,19 @@ function(NotificationsList, i18n) { dataPlacement: 'right', control: '', }, + job_shard_count: { + label: i18n._('Job Splitting'), + type: 'number', + integer: true, + min: 1, + default: 1, + spinner: true, + dataTitle: i18n._('Split Job Count'), + dataPlacement: 'right', + dataContainer: 'body', + awPopOver: "

" + i18n._("The number of jobs to split into at runtime. Will cause the Job Template to launch a workflow if value is non-zero.") + "

", + ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' + }, diff_mode: { label: i18n._('Show Changes'), type: 'toggleSwitch', @@ -271,20 +284,6 @@ function(NotificationsList, i18n) { }, ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' }, - job_shard_count: { - label: i18n._('Number of job shards to use'), - type: 'number', - integer: true, - min: 0, - spinner: true, - // 'class': "input-small", - // toggleSource: 'diff_mode', - dataTitle: i18n._('Job Shard Count'), - dataPlacement: 'right', - dataContainer: 'body', - awPopOver: "

" + i18n._("If non-zero, split into multiple jobs that run on mutually exclusive slices of the inventory.") + "

", - ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' - }, checkbox_group: { label: i18n._('Options'), type: 'checkbox_group', From 04eec613874aa7582265233e5f017e35aa6c1c93 Mon Sep 17 00:00:00 2001 From: kialam Date: Tue, 2 Oct 2018 15:22:16 -0400 Subject: [PATCH 11/29] Redirect to WF details page when a Split Job is launched --- .../launchTemplateButton.component.js | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js b/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js index 637555d57f..7a24be1711 100644 --- a/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js +++ b/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js @@ -41,7 +41,13 @@ function atLaunchTemplateCtrl ( selectedJobTemplate .postLaunch({ id: vm.template.id }) .then(({ data }) => { - $state.go('output', { id: data.job, type: 'playbook' }, { reload: true }); + /* Split Jobs: Redirect to WF Details page if returned + job type is a WF job */ + if (data.type === 'workflow_job' && data.workflow_job !== null) { + $state.go('workflowResults', { id: data.workflow_job }, { reload: true }); + } else { + $state.go('output', { id: data.job, type: 'playbook' }, { reload: true }); + } }); } else { const promptData = { From 47711bc0079a21d9b815fd25266a51baac14004f Mon Sep 17 00:00:00 2001 From: kialam Date: Wed, 3 Oct 2018 12:05:39 -0400 Subject: [PATCH 12/29] add package-lock.json to gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 621101995d..f1463667a5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ + # Tags .tags .tags1 @@ -52,6 +53,7 @@ __pycache__ **/node_modules/** /tmp **/npm-debug.log* +**/package-lock.json # UI build flag files awx/ui/.deps_built From dccd7f2e9d253fede020f91abe6d6fc761de4324 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Wed, 3 Oct 2018 20:56:16 -0400 Subject: [PATCH 13/29] do not split JT callback jobs --- awx/api/views/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index e99532cbca..47a55161b8 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -3369,6 +3369,7 @@ class JobTemplateCallback(GenericAPIView): if extra_vars is not None and job_template.ask_variables_on_launch: extra_vars_redacted, removed = extract_ansible_vars(extra_vars) kv['extra_vars'] = extra_vars_redacted + kv['_prevent_sharding'] = True # will only run against 1 host, so no point with transaction.atomic(): job = job_template.create_job(**kv) From 475a701f78fd70619aee7ad22994026b1e397572 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Thu, 4 Oct 2018 11:50:08 -0400 Subject: [PATCH 14/29] Allow use of credential password prompting with split JTs also *update test to work with new JT callback call pattern *fix spelling in template --- awx/api/templates/api/inventory_script_view.md | 2 +- awx/main/models/unified_jobs.py | 5 ++++- awx/main/scheduler/task_manager.py | 16 ++++++++++++---- .../functional/api/test_job_runtime_params.py | 14 +++++++------- 4 files changed, 24 insertions(+), 13 deletions(-) diff --git a/awx/api/templates/api/inventory_script_view.md b/awx/api/templates/api/inventory_script_view.md index 19cfff28ce..c1315bfc94 100644 --- a/awx/api/templates/api/inventory_script_view.md +++ b/awx/api/templates/api/inventory_script_view.md @@ 
-26,7 +26,7 @@ string of `?all=1` to return all hosts, including disabled ones. Specify a query string of `?towervars=1` to add variables to the hostvars of each host that specifies its enabled state and database ID. -Specify a query string of `?subset=shard2of5` to product an inventory that +Specify a query string of `?subset=shard2of5` to produce an inventory that has a restricted number of hosts according to the rules of job splitting. To apply multiple query strings, join them with the `&` character, like `?hostvars=1&all=1`. diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index cbda5109e2..b416bd8616 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -342,7 +342,10 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio unallowed_fields = set(kwargs.keys()) - set(fields) validated_kwargs = kwargs.copy() if unallowed_fields: - logger.warn('Fields {} are not allowed as overrides.'.format(unallowed_fields)) + if parent_field_name is None: + logger.warn(six.text_type('Fields {} are not allowed as overrides to spawn {} from {}.').format( + six.text_type(', ').join(unallowed_fields), unified_job, self + )) map(validated_kwargs.pop, unallowed_fields) unified_job = copy_model_by_class(self, unified_job_class, fields, validated_kwargs) diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index 540b94181e..863352857c 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -121,7 +121,11 @@ class TaskManager(): spawn_node.save() logger.info('Spawned %s in %s for node %s', job.log_format, workflow_job.log_format, spawn_node.pk) if job._resources_sufficient_for_launch(): - can_start = job.signal_start() + if workflow_job.start_args: + start_args = json.loads(decrypt_field(workflow_job, 'start_args')) + else: + start_args = {} + can_start = job.signal_start(**start_args) if not can_start: job.job_explanation = _("Job spawned from workflow could not start because it " "was not in the right state or required manual credentials") @@ -147,7 +151,8 @@ class TaskManager(): if cancel_finished: logger.info('Marking %s as canceled, all spawned jobs have concluded.', workflow_job.log_format) workflow_job.status = 'canceled' - workflow_job.save() + workflow_job.start_args = '' # blank field to remove encrypted passwords + workflow_job.save(update_fields=['status', 'start_args']) connection.on_commit(lambda: workflow_job.websocket_emit_status(workflow_job.status)) else: is_done, has_failed = dag.is_workflow_done() @@ -155,8 +160,11 @@ class TaskManager(): continue logger.info('Marking %s as %s.', workflow_job.log_format, 'failed' if has_failed else 'successful') result.append(workflow_job.id) - workflow_job.status = 'failed' if has_failed else 'successful' - workflow_job.save() + new_status = 'failed' if has_failed else 'successful' + logger.debug(six.text_type("Transitioning {} to {} status.").format(workflow_job.log_format, new_status)) + workflow_job.status = new_status + workflow_job.start_args = '' # blank field to remove encrypted passwords + workflow_job.save(update_fields=['status', 'start_args']) connection.on_commit(lambda: workflow_job.websocket_emit_status(workflow_job.status)) return result diff --git a/awx/main/tests/functional/api/test_job_runtime_params.py b/awx/main/tests/functional/api/test_job_runtime_params.py index cf9c5c8286..4bd46e5cdf 100644 --- a/awx/main/tests/functional/api/test_job_runtime_params.py +++ 
b/awx/main/tests/functional/api/test_job_runtime_params.py @@ -6,7 +6,7 @@ import json from awx.api.serializers import JobLaunchSerializer from awx.main.models.credential import Credential from awx.main.models.inventory import Inventory, Host -from awx.main.models.jobs import Job, JobTemplate +from awx.main.models.jobs import Job, JobTemplate, UnifiedJobTemplate from awx.api.versioning import reverse @@ -553,15 +553,15 @@ def test_callback_accept_prompted_extra_var(mocker, survey_spec_factory, job_tem with mocker.patch('awx.main.access.BaseAccess.check_license'): mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4}) - with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job): + with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job): with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}): with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]): post( reverse('api:job_template_callback', kwargs={'pk': job_template.pk}), dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"), admin_user, expect=201, format='json') - assert JobTemplate.create_unified_job.called - assert JobTemplate.create_unified_job.call_args == ({ + assert UnifiedJobTemplate.create_unified_job.called + assert UnifiedJobTemplate.create_unified_job.call_args == ({ 'extra_vars': {'survey_var': 4, 'job_launch_var': 3}, '_eager_fields': {'launch_type': 'callback'}, 'limit': 'single-host'}, @@ -579,15 +579,15 @@ def test_callback_ignore_unprompted_extra_var(mocker, survey_spec_factory, job_t with mocker.patch('awx.main.access.BaseAccess.check_license'): mock_job = mocker.MagicMock(spec=Job, id=968, extra_vars={"job_launch_var": 3, "survey_var": 4}) - with mocker.patch.object(JobTemplate, 'create_unified_job', return_value=mock_job): + with mocker.patch.object(UnifiedJobTemplate, 'create_unified_job', return_value=mock_job): with mocker.patch('awx.api.serializers.JobSerializer.to_representation', return_value={}): with mocker.patch('awx.api.views.JobTemplateCallback.find_matching_hosts', return_value=[host]): post( reverse('api:job_template_callback', kwargs={'pk':job_template.pk}), dict(extra_vars={"job_launch_var": 3, "survey_var": 4}, host_config_key="foo"), admin_user, expect=201, format='json') - assert JobTemplate.create_unified_job.called - assert JobTemplate.create_unified_job.call_args == ({ + assert UnifiedJobTemplate.create_unified_job.called + assert UnifiedJobTemplate.create_unified_job.call_args == ({ '_eager_fields': {'launch_type': 'callback'}, 'limit': 'single-host'}, ) From 46d6dce73840dc68d6173d15660c1636fe496aa2 Mon Sep 17 00:00:00 2001 From: Matthew Jones Date: Tue, 9 Oct 2018 10:30:07 -0400 Subject: [PATCH 15/29] Mass rename of shard -> split --- awx/api/serializers.py | 10 +++++----- .../templates/api/inventory_script_view.md | 2 +- awx/api/urls/job_template.py | 4 ++-- awx/api/views/__init__.py | 6 +++--- awx/main/migrations/0050_v340_split_jobs.py | 4 ++-- awx/main/models/inventory.py | 12 +++++------ awx/main/models/jobs.py | 14 ++++++------- awx/main/models/unified_jobs.py | 4 ++-- awx/main/models/workflow.py | 20 +++++++++---------- awx/main/tests/functional/api/test_job.py | 8 ++++---- awx/main/tests/functional/conftest.py | 16 +++++++-------- .../tests/functional/models/test_inventory.py | 4 ++-- awx/main/tests/functional/models/test_job.py | 16 +++++++-------- .../tests/functional/test_rbac_job_start.py 
| 8 ++++---- .../features/jobs/jobsList.controller.js | 2 +- .../features/output/details.component.js | 2 +- .../job_templates/job-template.form.js | 2 +- .../workflow-results.controller.js | 6 +++--- .../workflow-results.partial.html | 6 +++--- docs/job_sharding.md | 13 ------------ docs/job_splitting.md | 13 ++++++++++++ 21 files changed, 86 insertions(+), 86 deletions(-) delete mode 100644 docs/job_sharding.md create mode 100644 docs/job_splitting.md diff --git a/awx/api/serializers.py b/awx/api/serializers.py index eb6264c5e2..32d75f3697 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3008,7 +3008,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode', - 'allow_simultaneous', 'custom_virtualenv', 'job_shard_count') + 'allow_simultaneous', 'custom_virtualenv', 'job_split_count') def get_related(self, obj): res = super(JobTemplateSerializer, self).get_related(obj) @@ -3025,7 +3025,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}), object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}), instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}), - sharded_jobs = self.reverse('api:job_template_sharded_jobs_list', kwargs={'pk': obj.pk}), + split_jobs = self.reverse('api:job_template_split_jobs_list', kwargs={'pk': obj.pk}), )) if self.version > 1: res['copy'] = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}) @@ -3201,9 +3201,9 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): summary_fields = super(JobSerializer, self).get_summary_fields(obj) if obj.internal_limit: summary_fields['internal_limit'] = {} - if obj.internal_limit.startswith('shard'): - offset, step = Inventory.parse_shard_params(obj.internal_limit) - summary_fields['internal_limit']['shard'] = {'offset': offset, 'step': step} + if obj.internal_limit.startswith('split'): + offset, step = Inventory.parse_split_params(obj.internal_limit) + summary_fields['internal_limit']['split'] = {'offset': offset, 'step': step} else: summary_fields['internal_limit']['unknown'] = self.internal_limit all_creds = [] diff --git a/awx/api/templates/api/inventory_script_view.md b/awx/api/templates/api/inventory_script_view.md index c1315bfc94..dbbe5eb0c0 100644 --- a/awx/api/templates/api/inventory_script_view.md +++ b/awx/api/templates/api/inventory_script_view.md @@ -26,7 +26,7 @@ string of `?all=1` to return all hosts, including disabled ones. Specify a query string of `?towervars=1` to add variables to the hostvars of each host that specifies its enabled state and database ID. -Specify a query string of `?subset=shard2of5` to produce an inventory that +Specify a query string of `?subset=split2of5` to produce an inventory that has a restricted number of hosts according to the rules of job splitting. To apply multiple query strings, join them with the `&` character, like `?hostvars=1&all=1`. 
diff --git a/awx/api/urls/job_template.py b/awx/api/urls/job_template.py index 9b830d64a7..4d6ca79137 100644 --- a/awx/api/urls/job_template.py +++ b/awx/api/urls/job_template.py @@ -8,7 +8,7 @@ from awx.api.views import ( JobTemplateDetail, JobTemplateLaunch, JobTemplateJobsList, - JobTemplateShardedJobsList, + JobTemplateSplitJobsList, JobTemplateCallback, JobTemplateSchedulesList, JobTemplateSurveySpec, @@ -29,7 +29,7 @@ urls = [ url(r'^(?P[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'), url(r'^(?P[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'), url(r'^(?P[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'), - url(r'^(?P[0-9]+)/sharded_jobs/$', JobTemplateShardedJobsList.as_view(), name='job_template_sharded_jobs_list'), + url(r'^(?P[0-9]+)/split_jobs/$', JobTemplateSplitJobsList.as_view(), name='job_template_split_jobs_list'), url(r'^(?P[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'), url(r'^(?P[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), url(r'^(?P[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 47a55161b8..0a7cc8146d 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -3369,7 +3369,7 @@ class JobTemplateCallback(GenericAPIView): if extra_vars is not None and job_template.ask_variables_on_launch: extra_vars_redacted, removed = extract_ansible_vars(extra_vars) kv['extra_vars'] = extra_vars_redacted - kv['_prevent_sharding'] = True # will only run against 1 host, so no point + kv['_prevent_splitting'] = True # will only run against 1 host, so no point with transaction.atomic(): job = job_template.create_job(**kv) @@ -3401,12 +3401,12 @@ class JobTemplateJobsList(SubListCreateAPIView): return methods -class JobTemplateShardedJobsList(SubListCreateAPIView): +class JobTemplateSplitJobsList(SubListCreateAPIView): model = WorkflowJob serializer_class = WorkflowJobListSerializer parent_model = JobTemplate - relationship = 'sharded_jobs' + relationship = 'split_jobs' parent_key = 'job_template' diff --git a/awx/main/migrations/0050_v340_split_jobs.py b/awx/main/migrations/0050_v340_split_jobs.py index f946d22896..d7b80ee50c 100644 --- a/awx/main/migrations/0050_v340_split_jobs.py +++ b/awx/main/migrations/0050_v340_split_jobs.py @@ -16,13 +16,13 @@ class Migration(migrations.Migration): operations = [ migrations.AddField( model_name='jobtemplate', - name='job_shard_count', + name='job_split_count', field=models.IntegerField(blank=True, default=0, help_text='The number of jobs to split into at runtime. 
Will cause the Job Template to launch a workflow if value is non-zero.'), ), migrations.AddField( model_name='workflowjob', name='job_template', - field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='sharded_jobs', to='main.JobTemplate'), + field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='split_jobs', to='main.JobTemplate'), ), migrations.AlterField( model_name='unifiedjob', diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 03220ac967..5c8cdebfa8 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -221,14 +221,14 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): return group_children_map @staticmethod - def parse_shard_params(shard_str): - m = re.match(r"shard(?P\d+)of(?P\d+)", shard_str) + def parse_split_params(split_str): + m = re.match(r"split(?P\d+)of(?P\d+)", split_str) if not m: - raise ParseError(_('Could not parse subset as shard specification.')) + raise ParseError(_('Could not parse subset as split specification.')) offset = int(m.group('offset')) step = int(m.group('step')) if offset > step: - raise ParseError(_('Shard offset must be greater than total number of shards.')) + raise ParseError(_('Split offset must be greater than total number of splits.')) return (offset, step) def get_script_data(self, hostvars=False, towervars=False, show_all=False, subset=None): @@ -242,8 +242,8 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): if subset: if not isinstance(subset, six.string_types): raise ParseError(_('Inventory subset argument must be a string.')) - if subset.startswith('shard'): - offset, step = Inventory.parse_shard_params(subset) + if subset.startswith('split'): + offset, step = Inventory.parse_split_params(subset) hosts = hosts[offset::step] else: raise ParseError(_('Subset does not use any supported syntax.')) diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index d698ad3726..dd2197c573 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -277,7 +277,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour default=False, allows_field='credentials' ) - job_shard_count = models.IntegerField( + job_split_count = models.IntegerField( blank=True, default=0, help_text=_("The number of jobs to split into at runtime. 
" @@ -328,10 +328,10 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour return self.create_unified_job(**kwargs) def create_unified_job(self, **kwargs): - prevent_sharding = kwargs.pop('_prevent_sharding', False) - split_event = bool(self.job_shard_count > 1 and (not prevent_sharding)) + prevent_splitting = kwargs.pop('_prevent_splitting', False) + split_event = bool(self.job_split_count > 1 and (not prevent_splitting)) if split_event: - # A sharded Job Template will generate a WorkflowJob rather than a Job + # A Split Job Template will generate a WorkflowJob rather than a Job from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class() kwargs['_parent_field_name'] = "job_template" @@ -342,11 +342,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour except JobLaunchConfig.DoesNotExist: wj_config = JobLaunchConfig() actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory - for idx in xrange(min(self.job_shard_count, + for idx in xrange(min(self.job_split_count, actual_inventory.hosts.count())): create_kwargs = dict(workflow_job=job, unified_job_template=self, - ancestor_artifacts=dict(job_shard=idx)) + ancestor_artifacts=dict(job_split=idx)) WorkflowJobNode.objects.create(**create_kwargs) return job @@ -580,7 +580,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana return JobEvent def copy_unified_job(self, **new_prompts): - new_prompts['_prevent_sharding'] = True + new_prompts['_prevent_splitting'] = True if self.internal_limit: new_prompts.setdefault('_eager_fields', {}) new_prompts['_eager_fields']['internal_limit'] = self.internal_limit # oddball, not from JT or prompts diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index b416bd8616..07077d09cd 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -334,7 +334,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio parent_field_name = None if "_unified_job_class" in kwargs: # Special case where spawned job is different type than usual - # Only used for sharded jobs + # Only used for split jobs unified_job_class = kwargs.pop("_unified_job_class") fields = unified_job_class._get_unified_job_field_names() & fields parent_field_name = kwargs.pop('_parent_field_name') @@ -354,7 +354,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio for fd, val in eager_fields.items(): setattr(unified_job, fd, val) - # NOTE: sharded workflow jobs _get_parent_field_name method + # NOTE: split workflow jobs _get_parent_field_name method # is not correct until this is set if not parent_field_name: parent_field_name = unified_job._get_parent_field_name() diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index f15e66b226..7aa2753c6d 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -251,19 +251,19 @@ class WorkflowJobNode(WorkflowNodeBase): data['extra_vars'] = extra_vars # ensure that unified jobs created by WorkflowJobs are marked data['_eager_fields'] = {'launch_type': 'workflow'} - # Extra processing in the case that this is a sharded job - if 'job_shard' in self.ancestor_artifacts: - shard_str = six.text_type(self.ancestor_artifacts['job_shard'] + 1) + # Extra processing in the case that this is a split job + if 'job_split' in self.ancestor_artifacts: + split_str = 
six.text_type(self.ancestor_artifacts['job_split'] + 1) data['_eager_fields']['name'] = six.text_type("{} - {}").format( - self.unified_job_template.name[:512 - len(shard_str) - len(' - ')], - shard_str + self.unified_job_template.name[:512 - len(split_str) - len(' - ')], + split_str ) data['_eager_fields']['allow_simultaneous'] = True - data['_eager_fields']['internal_limit'] = 'shard{0}of{1}'.format( - self.ancestor_artifacts['job_shard'], + data['_eager_fields']['internal_limit'] = 'split{0}of{1}'.format( + self.ancestor_artifacts['job_split'], self.workflow_job.workflow_job_nodes.count() ) - data['_prevent_sharding'] = True + data['_prevent_splitting'] = True return data @@ -459,7 +459,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio ) job_template = models.ForeignKey( 'JobTemplate', - related_name='sharded_jobs', + related_name='split_jobs', blank=True, null=True, default=None, @@ -472,7 +472,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio def _get_parent_field_name(self): if self.job_template_id: - # This is a workflow job which is a container for sharded jobs + # This is a workflow job which is a container for split jobs return 'job_template' return 'workflow_job_template' diff --git a/awx/main/tests/functional/api/test_job.py b/awx/main/tests/functional/api/test_job.py index 1685c62304..40b17bc368 100644 --- a/awx/main/tests/functional/api/test_job.py +++ b/awx/main/tests/functional/api/test_job.py @@ -123,11 +123,11 @@ def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credenti @pytest.mark.django_db -def test_shard_jt_recent_jobs(shard_job_factory, admin_user, get): - workflow_job = shard_job_factory(3, spawn=True) - shard_jt = workflow_job.job_template +def test_split_jt_recent_jobs(split_job_factory, admin_user, get): + workflow_job = split_job_factory(3, spawn=True) + split_jt = workflow_job.job_template r = get( - url=shard_jt.get_absolute_url(), + url=split_jt.get_absolute_url(), user=admin_user, expect=200 ) diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 57ceec41be..6e0aa11f6c 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -779,15 +779,15 @@ def disable_database_settings(mocker): @pytest.fixture -def shard_jt_factory(inventory): +def split_jt_factory(inventory): def r(N, jt_kwargs=None): for i in range(N): inventory.hosts.create(name='foo{}'.format(i)) if not jt_kwargs: jt_kwargs = {} return JobTemplate.objects.create( - name='shard-jt-from-factory', - job_shard_count=N, + name='split-jt-from-factory', + job_split_count=N, inventory=inventory, **jt_kwargs ) @@ -795,18 +795,18 @@ def shard_jt_factory(inventory): @pytest.fixture -def shard_job_factory(shard_jt_factory): +def split_job_factory(split_jt_factory): def r(N, jt_kwargs=None, prompts=None, spawn=False): - shard_jt = shard_jt_factory(N, jt_kwargs=jt_kwargs) + split_jt = split_jt_factory(N, jt_kwargs=jt_kwargs) if not prompts: prompts = {} - shard_job = shard_jt.create_unified_job(**prompts) + split_job = split_jt.create_unified_job(**prompts) if spawn: - for node in shard_job.workflow_nodes.all(): + for node in split_job.workflow_nodes.all(): # does what the task manager does for spawning workflow jobs kv = node.get_job_kwargs() job = node.unified_job_template.create_unified_job(**kv) node.job = job node.save() - return shard_job + return split_job return r diff --git a/awx/main/tests/functional/models/test_inventory.py 
b/awx/main/tests/functional/models/test_inventory.py index 34eb1d7b13..7b1574a56d 100644 --- a/awx/main/tests/functional/models/test_inventory.py +++ b/awx/main/tests/functional/models/test_inventory.py @@ -38,11 +38,11 @@ class TestInventoryScript: 'remote_tower_id': host.id } - def test_shard_subset(self, inventory): + def test_split_subset(self, inventory): for i in range(3): inventory.hosts.create(name='host{}'.format(i)) for i in range(3): - assert inventory.get_script_data(subset='shard{}of3'.format(i)) == { + assert inventory.get_script_data(subset='split{}of3'.format(i)) == { 'all': {'hosts': ['host{}'.format(i)]} } diff --git a/awx/main/tests/functional/models/test_job.py b/awx/main/tests/functional/models/test_job.py index c2614b17f1..16098d6fd1 100644 --- a/awx/main/tests/functional/models/test_job.py +++ b/awx/main/tests/functional/models/test_job.py @@ -84,18 +84,18 @@ def test_job_host_summary_representation(host): @pytest.mark.django_db -class TestShardingModels: +class TestSplittingModels: - def test_shard_workflow_spawn(self, shard_jt_factory): - shard_jt = shard_jt_factory(3) - job = shard_jt.create_unified_job() + def test_split_workflow_spawn(self, split_jt_factory): + split_jt = split_jt_factory(3) + job = split_jt.create_unified_job() assert isinstance(job, WorkflowJob) - assert job.job_template == shard_jt - assert job.unified_job_template == shard_jt + assert job.job_template == split_jt + assert job.unified_job_template == split_jt assert job.workflow_nodes.count() == 3 - def test_shards_with_JT_and_prompts(self, shard_job_factory): - job = shard_job_factory(3, jt_kwargs={'ask_limit_on_launch': True}, prompts={'limit': 'foobar'}, spawn=True) + def test_splits_with_JT_and_prompts(self, split_job_factory): + job = split_job_factory(3, jt_kwargs={'ask_limit_on_launch': True}, prompts={'limit': 'foobar'}, spawn=True) assert job.launch_config.prompts_dict() == {'limit': 'foobar'} for node in job.workflow_nodes.all(): assert node.limit is None # data not saved in node prompts diff --git a/awx/main/tests/functional/test_rbac_job_start.py b/awx/main/tests/functional/test_rbac_job_start.py index 4b44a5a284..f20611da14 100644 --- a/awx/main/tests/functional/test_rbac_job_start.py +++ b/awx/main/tests/functional/test_rbac_job_start.py @@ -49,18 +49,18 @@ def test_inventory_use_access(inventory, user): @pytest.mark.django_db -def test_sharded_job(shard_job_factory, rando): - workflow_job = shard_job_factory(2, jt_kwargs={'created_by': rando}, spawn=True) +def test_split_job(split_job_factory, rando): + workflow_job = split_job_factory(2, jt_kwargs={'created_by': rando}, spawn=True) workflow_job.job_template.execute_role.members.add(rando) - # Abilities of user with execute_role for shard workflow job container + # Abilities of user with execute_role for split workflow job container assert WorkflowJobAccess(rando).can_start(workflow_job) # relaunch allowed for access_cls in (UnifiedJobAccess, WorkflowJobAccess): access = access_cls(rando) assert access.can_read(workflow_job) assert workflow_job in access.get_queryset() - # Abilities of user with execute_role for all the shards of the job + # Abilities of user with execute_role for all the split of the job for node in workflow_job.workflow_nodes.all(): access = WorkflowJobNodeAccess(rando) assert access.can_read(node) diff --git a/awx/ui/client/features/jobs/jobsList.controller.js b/awx/ui/client/features/jobs/jobsList.controller.js index 219dfdd052..ac4bdfa1a9 100644 --- a/awx/ui/client/features/jobs/jobsList.controller.js +++ 
b/awx/ui/client/features/jobs/jobsList.controller.js @@ -83,7 +83,7 @@ function ListJobsController ( return null; } - const splitJobDetails = internalLimitDetails.shard; + const splitJobDetails = internalLimitDetails.split; if (!splitJobDetails) { return null; diff --git a/awx/ui/client/features/output/details.component.js b/awx/ui/client/features/output/details.component.js index d63fe640f8..03115e5710 100644 --- a/awx/ui/client/features/output/details.component.js +++ b/awx/ui/client/features/output/details.component.js @@ -133,7 +133,7 @@ function getSplitJobDetails () { return null; } - const splitJobDetails = resource.model.get('summary_fields.internal_limit.shard'); + const splitJobDetails = resource.model.get('summary_fields.internal_limit.split'); if (!splitJobDetails) { return null; diff --git a/awx/ui/client/src/templates/job_templates/job-template.form.js b/awx/ui/client/src/templates/job_templates/job-template.form.js index 8d5b68075f..b807bd169d 100644 --- a/awx/ui/client/src/templates/job_templates/job-template.form.js +++ b/awx/ui/client/src/templates/job_templates/job-template.form.js @@ -257,7 +257,7 @@ function(NotificationsList, i18n) { dataPlacement: 'right', control: '', }, - job_shard_count: { + job_split_count: { label: i18n._('Job Splitting'), type: 'number', integer: true, diff --git a/awx/ui/client/src/workflow-results/workflow-results.controller.js b/awx/ui/client/src/workflow-results/workflow-results.controller.js index f66e2a09e9..b6556767cc 100644 --- a/awx/ui/client/src/workflow-results/workflow-results.controller.js +++ b/awx/ui/client/src/workflow-results/workflow-results.controller.js @@ -39,7 +39,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', DELETE: i18n._('Delete'), EDIT_USER: i18n._('Edit the user'), EDIT_WORKFLOW: i18n._('Edit the workflow job template'), - EDIT_SHARD_TEMPLATE: i18n._('Edit the shard job template'), + EDIT_SPLIT_TEMPLATE: i18n._('Edit the split job template'), EDIT_SCHEDULE: i18n._('Edit the schedule'), TOGGLE_STDOUT_FULLSCREEN: i18n._('Expand Output'), STATUS: '' // re-assigned elsewhere @@ -50,7 +50,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', STARTED: i18n._('Started'), FINISHED: i18n._('Finished'), LABELS: i18n._('Labels'), - SHARD_TEMPLATE: i18n._('Shard Template'), + SPLIT_TEMPLATE: i18n._('Split Template'), STATUS: i18n._('Status') }, details: { @@ -113,7 +113,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', if(workflowData.summary_fields && workflowData.summary_fields.job_template && workflowData.summary_fields.job_template.id){ - $scope.shard_job_template_link = `/#/templates/job_template/${$scope.workflow.summary_fields.job_template.id}`; + $scope.split_job_template_link = `/#/templates/job_template/${$scope.workflow.summary_fields.job_template.id}`; } // turn related api browser routes into front end routes diff --git a/awx/ui/client/src/workflow-results/workflow-results.partial.html b/awx/ui/client/src/workflow-results/workflow-results.partial.html index f3f8943dfc..c88aa6a00d 100644 --- a/awx/ui/client/src/workflow-results/workflow-results.partial.html +++ b/awx/ui/client/src/workflow-results/workflow-results.partial.html @@ -149,11 +149,11 @@ ng-show="workflow.summary_fields.job_template.name">
- {{ workflow.summary_fields.job_template.name }} diff --git a/docs/job_sharding.md b/docs/job_sharding.md deleted file mode 100644 index c16fa030f9..0000000000 --- a/docs/job_sharding.md +++ /dev/null @@ -1,13 +0,0 @@ -# Job Sharding Overview - -Ansible, by default, runs jobs from a single control instance. At best a single Ansible job can be split up on a single system via forks but this doesn't fully take advantage of AWX's ability to distribute work to multiple nodes in a cluster. - -Job Sharding solves this by adding a Job Template field `job_shard_count`. This field specifies the number of **Jobs** to split the Ansible run into. When this number is greater than 1 ``AWX`` will generate a **Workflow** from a **JobTemplate** instead of a **Job**. The **Inventory** will be split evenly amongst the sharded jobs. The workflow job is then started and proceeds as though it were a normal workflow. The API will return either a **Job** resource (if `job_shard_count` < 2) or a **WorkflowJob** resource otherwise. Likewise, the UI will redirect to the appropriate screen to display the status of the run. - -## Implications for Job execution - -When jobs are split they can run on any Tower node and some may not run at the same time. Because of this, anything that relies on setting/sharing state (using modules such as ``set_fact``) will not work as expected. It's reasonable to expect that not all jobs will actually run at the same time (if there is not enough capacity in the system for example) - -## Simultaneous Execution Behavior - -By default Job Templates aren't normally configured to execute simultaneously (``allow_simultaneous`` must be checked). Sharding overrides this behavior and implies ``allow_simultaneous`` even if that setting is unchecked. \ No newline at end of file diff --git a/docs/job_splitting.md b/docs/job_splitting.md new file mode 100644 index 0000000000..a8eb83b058 --- /dev/null +++ b/docs/job_splitting.md @@ -0,0 +1,13 @@ +# Job Splitting Overview + +Ansible, by default, runs jobs from a single control instance. At best a single Ansible job can be split up on a single system via forks but this doesn't fully take advantage of AWX's ability to distribute work to multiple nodes in a cluster. + +Job Splitting solves this by adding a Job Template field `job_split_count`. This field specifies the number of **Jobs** to split the Ansible run into. When this number is greater than 1 ``AWX`` will generate a **Workflow** from a **JobTemplate** instead of a **Job**. The **Inventory** will be split evenly amongst the split jobs. The workflow job is then started and proceeds as though it were a normal workflow. The API will return either a **Job** resource (if `job_split_count` < 2) or a **WorkflowJob** resource otherwise. Likewise, the UI will redirect to the appropriate screen to display the status of the run. + +## Implications for Job execution + +When jobs are split they can run on any Tower node and some may not run at the same time. Because of this, anything that relies on setting/split state (using modules such as ``set_fact``) will not work as expected. It's reasonable to expect that not all jobs will actually run at the same time (if there is not enough capacity in the system for example) + +## Simultaneous Execution Behavior + +By default Job Templates aren't normally configured to execute simultaneously (``allow_simultaneous`` must be checked). Splitting overrides this behavior and implies ``allow_simultaneous`` even if that setting is unchecked. 
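To make the inventory slicing rule concrete, here is a minimal, runnable sketch of the subset handling that `Inventory.parse_split_params` and `get_script_data` apply at this point in the series. It is illustrative only: `split_hosts` is a hypothetical helper, a plain `ValueError` stands in for the DRF `ParseError`, and the hosts are a plain list of names. A subset such as `split1of3` selects `hosts[offset::step]`, so every host lands in exactly one mutually exclusive slice.

    import re

    def parse_split_params(split_str):
        # e.g. "split1of3" -> offset=1, step=3 (offset is 0-based in this series)
        m = re.match(r"split(?P<offset>\d+)of(?P<step>\d+)", split_str)
        if not m:
            raise ValueError('Could not parse subset as split specification.')
        offset, step = int(m.group('offset')), int(m.group('step'))
        if offset > step:
            raise ValueError('Split offset may not exceed the total number of splits.')
        return offset, step

    def split_hosts(hosts, subset):
        # Same slicing rule used by Inventory.get_script_data: hosts[offset::step]
        offset, step = parse_split_params(subset)
        return hosts[offset::step]

    hosts = ['host0', 'host1', 'host2', 'host3', 'host4']
    assert split_hosts(hosts, 'split0of3') == ['host0', 'host3']
    assert split_hosts(hosts, 'split1of3') == ['host1', 'host4']
    assert split_hosts(hosts, 'split2of3') == ['host2']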
From bbd3edba4784d253d075842839b8e623988d70e1 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Mon, 15 Oct 2018 16:10:55 -0400 Subject: [PATCH 16/29] rename to slicing and schema tweaks --- awx/api/serializers.py | 13 ++--- .../templates/api/inventory_script_view.md | 4 +- awx/api/urls/job_template.py | 4 +- awx/api/views/__init__.py | 19 ++++++-- awx/main/migrations/0050_v330_job_slicing.py | 47 +++++++++++++++++++ awx/main/migrations/0050_v340_split_jobs.py | 37 --------------- awx/main/models/inventory.py | 29 +++++------- awx/main/models/jobs.py | 45 +++++++++++------- awx/main/models/workflow.py | 25 +++++----- awx/main/tasks.py | 3 +- awx/main/tests/functional/api/test_job.py | 8 ++-- awx/main/tests/functional/conftest.py | 16 +++---- .../tests/functional/models/test_inventory.py | 4 +- awx/main/tests/functional/models/test_job.py | 16 +++---- .../tests/functional/test_rbac_job_start.py | 8 ++-- .../features/jobs/jobsList.controller.js | 14 ++---- .../client/features/jobs/jobsList.view.html | 2 +- .../features/output/details.component.js | 20 ++++---- .../features/output/details.partial.html | 8 ++-- .../client/features/output/output.strings.js | 4 +- .../launchTemplateButton.component.js | 2 +- .../job_templates/job-template.form.js | 8 ++-- .../workflow-results.controller.js | 4 +- .../workflow-results.partial.html | 8 ++-- docs/job_slicing.md | 13 +++++ docs/job_splitting.md | 13 ----- 26 files changed, 193 insertions(+), 181 deletions(-) create mode 100644 awx/main/migrations/0050_v330_job_slicing.py delete mode 100644 awx/main/migrations/0050_v340_split_jobs.py create mode 100644 docs/job_slicing.md delete mode 100644 docs/job_splitting.md diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 32d75f3697..33f9e79db9 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3008,7 +3008,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode', - 'allow_simultaneous', 'custom_virtualenv', 'job_split_count') + 'allow_simultaneous', 'custom_virtualenv', 'job_slice_count') def get_related(self, obj): res = super(JobTemplateSerializer, self).get_related(obj) @@ -3025,7 +3025,7 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}), object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}), instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}), - split_jobs = self.reverse('api:job_template_split_jobs_list', kwargs={'pk': obj.pk}), + slice_workflow_jobs = self.reverse('api:job_template_slice_workflow_jobs_list', kwargs={'pk': obj.pk}), )) if self.version > 1: res['copy'] = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk}) @@ -3121,7 +3121,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch', 'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch', 'ask_credential_on_launch', 'allow_simultaneous', 'artifacts', 'scm_revision', - 'instance_group', 'diff_mode') + 'instance_group', 'diff_mode', 
'job_slice_number', 'job_slice_count') def get_related(self, obj): res = super(JobSerializer, self).get_related(obj) @@ -3199,13 +3199,6 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): def get_summary_fields(self, obj): summary_fields = super(JobSerializer, self).get_summary_fields(obj) - if obj.internal_limit: - summary_fields['internal_limit'] = {} - if obj.internal_limit.startswith('split'): - offset, step = Inventory.parse_split_params(obj.internal_limit) - summary_fields['internal_limit']['split'] = {'offset': offset, 'step': step} - else: - summary_fields['internal_limit']['unknown'] = self.internal_limit all_creds = [] # Organize credential data into multitude of deprecated fields # TODO: remove most of this as v1 is removed diff --git a/awx/api/templates/api/inventory_script_view.md b/awx/api/templates/api/inventory_script_view.md index dbbe5eb0c0..28126dcbbb 100644 --- a/awx/api/templates/api/inventory_script_view.md +++ b/awx/api/templates/api/inventory_script_view.md @@ -26,8 +26,8 @@ string of `?all=1` to return all hosts, including disabled ones. Specify a query string of `?towervars=1` to add variables to the hostvars of each host that specifies its enabled state and database ID. -Specify a query string of `?subset=split2of5` to produce an inventory that -has a restricted number of hosts according to the rules of job splitting. +Specify a query string of `?subset=slice2of5` to produce an inventory that +has a restricted number of hosts according to the rules of job slicing. To apply multiple query strings, join them with the `&` character, like `?hostvars=1&all=1`. diff --git a/awx/api/urls/job_template.py b/awx/api/urls/job_template.py index 4d6ca79137..0b43575ba4 100644 --- a/awx/api/urls/job_template.py +++ b/awx/api/urls/job_template.py @@ -8,7 +8,7 @@ from awx.api.views import ( JobTemplateDetail, JobTemplateLaunch, JobTemplateJobsList, - JobTemplateSplitJobsList, + JobTemplateSliceWorkflowJobsList, JobTemplateCallback, JobTemplateSchedulesList, JobTemplateSurveySpec, @@ -29,7 +29,7 @@ urls = [ url(r'^(?P[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'), url(r'^(?P[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'), url(r'^(?P[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'), - url(r'^(?P[0-9]+)/split_jobs/$', JobTemplateSplitJobsList.as_view(), name='job_template_split_jobs_list'), + url(r'^(?P[0-9]+)/slice_workflow_jobs/$', JobTemplateSliceWorkflowJobsList.as_view(), name='job_template_slice_workflow_jobs_list'), url(r'^(?P[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'), url(r'^(?P[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'), url(r'^(?P[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'), diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 0a7cc8146d..71250e6a0b 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -2453,6 +2453,15 @@ class InventoryScriptView(RetrieveAPIView): towervars = bool(request.query_params.get('towervars', '')) show_all = bool(request.query_params.get('all', '')) subset = request.query_params.get('subset', '') + if subset: + if not isinstance(subset, six.string_types): + raise ParseError(_('Inventory subset argument must be a string.')) + if subset.startswith('slice'): + slice_number, slice_count = Inventory.parse_slice_params(subset) + else: + raise ParseError(_('Subset does not use any 
supported syntax.')) + else: + slice_number, slice_count = 1, 1 if hostname: hosts_q = dict(name=hostname) if not show_all: @@ -2463,7 +2472,7 @@ class InventoryScriptView(RetrieveAPIView): hostvars=hostvars, towervars=towervars, show_all=show_all, - subset=subset + slice_number=slice_number, slice_count=slice_count )) @@ -3369,7 +3378,7 @@ class JobTemplateCallback(GenericAPIView): if extra_vars is not None and job_template.ask_variables_on_launch: extra_vars_redacted, removed = extract_ansible_vars(extra_vars) kv['extra_vars'] = extra_vars_redacted - kv['_prevent_splitting'] = True # will only run against 1 host, so no point + kv['_prevent_slicing'] = True # will only run against 1 host, so no point with transaction.atomic(): job = job_template.create_job(**kv) @@ -3401,12 +3410,12 @@ class JobTemplateJobsList(SubListCreateAPIView): return methods -class JobTemplateSplitJobsList(SubListCreateAPIView): +class JobTemplateSliceWorkflowJobsList(SubListCreateAPIView): model = WorkflowJob serializer_class = WorkflowJobListSerializer parent_model = JobTemplate - relationship = 'split_jobs' + relationship = 'slice_workflow_jobs' parent_key = 'job_template' @@ -3702,6 +3711,8 @@ class WorkflowJobRelaunch(WorkflowsEnforcementMixin, GenericAPIView): def post(self, request, *args, **kwargs): obj = self.get_object() + if obj.is_sliced_job and not obj.job_template_id: + raise ParseError(_('Cannot relaunch slice workflow job orphaned from job template.')) new_workflow_job = obj.create_relaunch_workflow_job() new_workflow_job.signal_start() diff --git a/awx/main/migrations/0050_v330_job_slicing.py b/awx/main/migrations/0050_v330_job_slicing.py new file mode 100644 index 0000000000..c786f455db --- /dev/null +++ b/awx/main/migrations/0050_v330_job_slicing.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.11 on 2018-10-15 16:21 +from __future__ import unicode_literals + +import awx.main.utils.polymorphic +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0049_v330_validate_instance_capacity_adjustment'), + ] + + operations = [ + migrations.AddField( + model_name='job', + name='job_slice_count', + field=models.PositiveIntegerField(blank=True, default=1, help_text='If ran as part of sliced jobs, the total number of slices. If 1, job is not part of a sliced job.'), + ), + migrations.AddField( + model_name='job', + name='job_slice_number', + field=models.PositiveIntegerField(blank=True, default=0, help_text='If part of a sliced job, the ID of the inventory slice operated on. If not part of sliced job, parameter is not used.'), + ), + migrations.AddField( + model_name='jobtemplate', + name='job_slice_count', + field=models.PositiveIntegerField(blank=True, default=1, help_text='The number of jobs to slice into at runtime. 
Will cause the Job Template to launch a workflow if value is greater than 1.'), + ), + migrations.AddField( + model_name='workflowjob', + name='is_sliced_job', + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name='workflowjob', + name='job_template', + field=models.ForeignKey(blank=True, default=None, help_text='If automatically created for a sliced job run, the job template the workflow job was created from.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='slice_workflow_jobs', to='main.JobTemplate'), + ), + migrations.AlterField( + model_name='unifiedjob', + name='unified_job_template', + field=models.ForeignKey(default=None, editable=False, null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjob_unified_jobs', to='main.UnifiedJobTemplate'), + ), + ] diff --git a/awx/main/migrations/0050_v340_split_jobs.py b/awx/main/migrations/0050_v340_split_jobs.py deleted file mode 100644 index d7b80ee50c..0000000000 --- a/awx/main/migrations/0050_v340_split_jobs.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.11 on 2018-09-13 15:55 -from __future__ import unicode_literals - -import awx.main.utils.polymorphic -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('main', '0049_v330_validate_instance_capacity_adjustment'), - ] - - operations = [ - migrations.AddField( - model_name='jobtemplate', - name='job_split_count', - field=models.IntegerField(blank=True, default=0, help_text='The number of jobs to split into at runtime. Will cause the Job Template to launch a workflow if value is non-zero.'), - ), - migrations.AddField( - model_name='workflowjob', - name='job_template', - field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='split_jobs', to='main.JobTemplate'), - ), - migrations.AlterField( - model_name='unifiedjob', - name='unified_job_template', - field=models.ForeignKey(default=None, editable=False, null=True, on_delete=awx.main.utils.polymorphic.SET_NULL, related_name='unifiedjob_unified_jobs', to='main.UnifiedJobTemplate'), - ), - migrations.AddField( - model_name='job', - name='internal_limit', - field=models.CharField(default=b'', editable=False, max_length=1024), - ), - ] diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 5c8cdebfa8..805e4eb1f4 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -221,17 +221,19 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): return group_children_map @staticmethod - def parse_split_params(split_str): - m = re.match(r"split(?P\d+)of(?P\d+)", split_str) + def parse_slice_params(slice_str): + m = re.match(r"slice(?P\d+)of(?P\d+)", slice_str) if not m: - raise ParseError(_('Could not parse subset as split specification.')) - offset = int(m.group('offset')) + raise ParseError(_('Could not parse subset as slice specification.')) + number = int(m.group('number')) step = int(m.group('step')) - if offset > step: - raise ParseError(_('Split offset must be greater than total number of splits.')) - return (offset, step) + if number > step: + raise ParseError(_('Slice number must be less than total number of slices.')) + elif number < 1: + raise ParseError(_('Slice number must be 1 or higher.')) + return (number, step) - def get_script_data(self, hostvars=False, towervars=False, show_all=False, subset=None): 
+ def get_script_data(self, hostvars=False, towervars=False, show_all=False, slice_number=1, slice_count=1): hosts_kw = dict() if not show_all: hosts_kw['enabled'] = True @@ -239,14 +241,9 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): if towervars: fetch_fields.append('enabled') hosts = self.hosts.filter(**hosts_kw).order_by('name').only(*fetch_fields) - if subset: - if not isinstance(subset, six.string_types): - raise ParseError(_('Inventory subset argument must be a string.')) - if subset.startswith('split'): - offset, step = Inventory.parse_split_params(subset) - hosts = hosts[offset::step] - else: - raise ParseError(_('Subset does not use any supported syntax.')) + if slice_count > 1: + offset = slice_number - 1 + hosts = hosts[offset::slice_count] data = dict() all_group = data.setdefault('all', dict()) diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index dd2197c573..78b5ead7b2 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -277,11 +277,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour default=False, allows_field='credentials' ) - job_split_count = models.IntegerField( + job_slice_count = models.PositiveIntegerField( blank=True, - default=0, - help_text=_("The number of jobs to split into at runtime. " - "Will cause the Job Template to launch a workflow if value is non-zero."), + default=1, + help_text=_("The number of jobs to slice into at runtime. " + "Will cause the Job Template to launch a workflow if value is greater than 1."), ) admin_role = ImplicitRoleField( @@ -302,7 +302,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour @classmethod def _get_unified_job_field_names(cls): return set(f.name for f in JobOptions._meta.fields) | set( - ['name', 'description', 'schedule', 'survey_passwords', 'labels', 'credentials', 'internal_limit'] + ['name', 'description', 'schedule', 'survey_passwords', 'labels', 'credentials', + 'job_slice_number', 'job_slice_count'] ) @property @@ -328,13 +329,15 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour return self.create_unified_job(**kwargs) def create_unified_job(self, **kwargs): - prevent_splitting = kwargs.pop('_prevent_splitting', False) - split_event = bool(self.job_split_count > 1 and (not prevent_splitting)) + prevent_splitting = kwargs.pop('_prevent_slicing', False) + split_event = bool(self.job_slice_count > 1 and (not prevent_splitting)) if split_event: # A Split Job Template will generate a WorkflowJob rather than a Job from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class() kwargs['_parent_field_name'] = "job_template" + kwargs.setdefault('_eager_fields', {}) + kwargs['_eager_fields']['is_sliced_job'] = True job = super(JobTemplate, self).create_unified_job(**kwargs) if split_event: try: @@ -342,11 +345,11 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour except JobLaunchConfig.DoesNotExist: wj_config = JobLaunchConfig() actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory - for idx in xrange(min(self.job_split_count, + for idx in xrange(min(self.job_slice_count, actual_inventory.hosts.count())): create_kwargs = dict(workflow_job=job, unified_job_template=self, - ancestor_artifacts=dict(job_split=idx)) + ancestor_artifacts=dict(job_split=idx + 1)) WorkflowJobNode.objects.create(**create_kwargs) return job 
@@ -531,10 +534,17 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana on_delete=models.SET_NULL, help_text=_('The SCM Refresh task used to make sure the playbooks were available for the job run'), ) - internal_limit = models.CharField( - max_length=1024, - default='', - editable=False, + job_slice_number = models.PositiveIntegerField( + blank=True, + default=0, + help_text=_("If part of a sliced job, the ID of the inventory slice operated on. " + "If not part of sliced job, parameter is not used."), + ) + job_slice_count = models.PositiveIntegerField( + blank=True, + default=1, + help_text=_("If ran as part of sliced jobs, the total number of slices. " + "If 1, job is not part of a sliced job."), ) @@ -580,10 +590,11 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana return JobEvent def copy_unified_job(self, **new_prompts): - new_prompts['_prevent_splitting'] = True - if self.internal_limit: - new_prompts.setdefault('_eager_fields', {}) - new_prompts['_eager_fields']['internal_limit'] = self.internal_limit # oddball, not from JT or prompts + # Needed for job slice relaunch consistency, do no re-spawn workflow job + # target same slice as original job + new_prompts['_prevent_slicing'] = True + new_prompts.setdefault('_eager_fields', {}) + new_prompts['_eager_fields']['job_slice_number'] = self.job_slice_number return super(Job, self).copy_unified_job(**new_prompts) @property diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index 7aa2753c6d..e5e816d64e 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -219,11 +219,13 @@ class WorkflowJobNode(WorkflowNodeBase): data.update(accepted_fields) # build ancestor artifacts, save them to node model for later aa_dict = {} + is_root_node = True for parent_node in self.get_parent_nodes(): + is_root_node = False aa_dict.update(parent_node.ancestor_artifacts) if parent_node.job and hasattr(parent_node.job, 'artifacts'): aa_dict.update(parent_node.job.artifacts) - if aa_dict: + if aa_dict and not is_root_node: self.ancestor_artifacts = aa_dict self.save(update_fields=['ancestor_artifacts']) # process password list @@ -252,18 +254,12 @@ class WorkflowJobNode(WorkflowNodeBase): # ensure that unified jobs created by WorkflowJobs are marked data['_eager_fields'] = {'launch_type': 'workflow'} # Extra processing in the case that this is a split job - if 'job_split' in self.ancestor_artifacts: + if 'job_split' in self.ancestor_artifacts and is_root_node: split_str = six.text_type(self.ancestor_artifacts['job_split'] + 1) - data['_eager_fields']['name'] = six.text_type("{} - {}").format( - self.unified_job_template.name[:512 - len(split_str) - len(' - ')], - split_str - ) data['_eager_fields']['allow_simultaneous'] = True - data['_eager_fields']['internal_limit'] = 'split{0}of{1}'.format( - self.ancestor_artifacts['job_split'], - self.workflow_job.workflow_job_nodes.count() - ) - data['_prevent_splitting'] = True + data['_eager_fields']['job_slice_number'] = self.ancestor_artifacts['job_split'] + data['_eager_fields']['job_slice_count'] = self.workflow_job.workflow_job_nodes.count() + data['_prevent_slicing'] = True return data @@ -459,11 +455,16 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio ) job_template = models.ForeignKey( 'JobTemplate', - related_name='split_jobs', + related_name='slice_workflow_jobs', blank=True, null=True, default=None, on_delete=models.SET_NULL, + help_text=_("If automatically created 
for a sliced job run, the job template " + "the workflow job was created from."), + ) + is_sliced_job = models.BooleanField( + default=False ) @property diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 2c543d88d3..f45bcd176b 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -826,7 +826,8 @@ class BaseTask(object): def build_inventory(self, instance, **kwargs): script_data = instance.inventory.get_script_data( - hostvars=True, subset=getattr(instance, 'internal_limit', '') + hostvars=True, + slice_number=instance.job_slice_number, slice_count=instance.job_slice_count ) json_data = json.dumps(script_data) handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None)) diff --git a/awx/main/tests/functional/api/test_job.py b/awx/main/tests/functional/api/test_job.py index 40b17bc368..0e735eccb0 100644 --- a/awx/main/tests/functional/api/test_job.py +++ b/awx/main/tests/functional/api/test_job.py @@ -123,11 +123,11 @@ def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credenti @pytest.mark.django_db -def test_split_jt_recent_jobs(split_job_factory, admin_user, get): - workflow_job = split_job_factory(3, spawn=True) - split_jt = workflow_job.job_template +def test_slice_jt_recent_jobs(slice_job_factory, admin_user, get): + workflow_job = slice_job_factory(3, spawn=True) + slice_jt = workflow_job.job_template r = get( - url=split_jt.get_absolute_url(), + url=slice_jt.get_absolute_url(), user=admin_user, expect=200 ) diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 6e0aa11f6c..1b90225adb 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -779,15 +779,15 @@ def disable_database_settings(mocker): @pytest.fixture -def split_jt_factory(inventory): +def slice_jt_factory(inventory): def r(N, jt_kwargs=None): for i in range(N): inventory.hosts.create(name='foo{}'.format(i)) if not jt_kwargs: jt_kwargs = {} return JobTemplate.objects.create( - name='split-jt-from-factory', - job_split_count=N, + name='slice-jt-from-factory', + job_slice_count=N, inventory=inventory, **jt_kwargs ) @@ -795,18 +795,18 @@ def split_jt_factory(inventory): @pytest.fixture -def split_job_factory(split_jt_factory): +def slice_job_factory(slice_jt_factory): def r(N, jt_kwargs=None, prompts=None, spawn=False): - split_jt = split_jt_factory(N, jt_kwargs=jt_kwargs) + slice_jt = slice_jt_factory(N, jt_kwargs=jt_kwargs) if not prompts: prompts = {} - split_job = split_jt.create_unified_job(**prompts) + slice_job = slice_jt.create_unified_job(**prompts) if spawn: - for node in split_job.workflow_nodes.all(): + for node in slice_job.workflow_nodes.all(): # does what the task manager does for spawning workflow jobs kv = node.get_job_kwargs() job = node.unified_job_template.create_unified_job(**kv) node.job = job node.save() - return split_job + return slice_job return r diff --git a/awx/main/tests/functional/models/test_inventory.py b/awx/main/tests/functional/models/test_inventory.py index 7b1574a56d..e11a4f926c 100644 --- a/awx/main/tests/functional/models/test_inventory.py +++ b/awx/main/tests/functional/models/test_inventory.py @@ -38,11 +38,11 @@ class TestInventoryScript: 'remote_tower_id': host.id } - def test_split_subset(self, inventory): + def test_slice_subset(self, inventory): for i in range(3): inventory.hosts.create(name='host{}'.format(i)) for i in range(3): - assert inventory.get_script_data(subset='split{}of3'.format(i)) == { + assert inventory.get_script_data(slice_number=i + 1, 
slice_count=3) == { 'all': {'hosts': ['host{}'.format(i)]} } diff --git a/awx/main/tests/functional/models/test_job.py b/awx/main/tests/functional/models/test_job.py index 16098d6fd1..e64acafd2b 100644 --- a/awx/main/tests/functional/models/test_job.py +++ b/awx/main/tests/functional/models/test_job.py @@ -84,18 +84,18 @@ def test_job_host_summary_representation(host): @pytest.mark.django_db -class TestSplittingModels: +class TestSlicingModels: - def test_split_workflow_spawn(self, split_jt_factory): - split_jt = split_jt_factory(3) - job = split_jt.create_unified_job() + def test_slice_workflow_spawn(self, slice_jt_factory): + slice_jt = slice_jt_factory(3) + job = slice_jt.create_unified_job() assert isinstance(job, WorkflowJob) - assert job.job_template == split_jt - assert job.unified_job_template == split_jt + assert job.job_template == slice_jt + assert job.unified_job_template == slice_jt assert job.workflow_nodes.count() == 3 - def test_splits_with_JT_and_prompts(self, split_job_factory): - job = split_job_factory(3, jt_kwargs={'ask_limit_on_launch': True}, prompts={'limit': 'foobar'}, spawn=True) + def test_slices_with_JT_and_prompts(self, slice_job_factory): + job = slice_job_factory(3, jt_kwargs={'ask_limit_on_launch': True}, prompts={'limit': 'foobar'}, spawn=True) assert job.launch_config.prompts_dict() == {'limit': 'foobar'} for node in job.workflow_nodes.all(): assert node.limit is None # data not saved in node prompts diff --git a/awx/main/tests/functional/test_rbac_job_start.py b/awx/main/tests/functional/test_rbac_job_start.py index f20611da14..6748b3df5d 100644 --- a/awx/main/tests/functional/test_rbac_job_start.py +++ b/awx/main/tests/functional/test_rbac_job_start.py @@ -49,18 +49,18 @@ def test_inventory_use_access(inventory, user): @pytest.mark.django_db -def test_split_job(split_job_factory, rando): - workflow_job = split_job_factory(2, jt_kwargs={'created_by': rando}, spawn=True) +def test_slice_job(slice_job_factory, rando): + workflow_job = slice_job_factory(2, jt_kwargs={'created_by': rando}, spawn=True) workflow_job.job_template.execute_role.members.add(rando) - # Abilities of user with execute_role for split workflow job container + # Abilities of user with execute_role for slice workflow job container assert WorkflowJobAccess(rando).can_start(workflow_job) # relaunch allowed for access_cls in (UnifiedJobAccess, WorkflowJobAccess): access = access_cls(rando) assert access.can_read(workflow_job) assert workflow_job in access.get_queryset() - # Abilities of user with execute_role for all the split of the job + # Abilities of user with execute_role for all the slice of the job for node in workflow_job.workflow_nodes.all(): access = WorkflowJobNodeAccess(rando) assert access.can_read(node) diff --git a/awx/ui/client/features/jobs/jobsList.controller.js b/awx/ui/client/features/jobs/jobsList.controller.js index ac4bdfa1a9..cd1b9e1b96 100644 --- a/awx/ui/client/features/jobs/jobsList.controller.js +++ b/awx/ui/client/features/jobs/jobsList.controller.js @@ -76,20 +76,12 @@ function ListJobsController ( return { icon, link, value }; }); - vm.getSplitJobDetails = (details) => { - const internalLimitDetails = Object.assign({}, details); - - if (!internalLimitDetails) { + vm.getSliceJobDetails = (job) => { + if (job.job_slice_count === 1) { return null; } - const splitJobDetails = internalLimitDetails.split; - - if (!splitJobDetails) { - return null; - } - - return `Split Job ${splitJobDetails.offset + 1}/${splitJobDetails.step}`; + return `Slice Job 
${job.job_slice_number}/${job.job_slice_count}`; }; vm.getSref = ({ type, id }) => { diff --git a/awx/ui/client/features/jobs/jobsList.view.html b/awx/ui/client/features/jobs/jobsList.view.html index 8124b6ccf1..783a734024 100644 --- a/awx/ui/client/features/jobs/jobsList.view.html +++ b/awx/ui/client/features/jobs/jobsList.view.html @@ -24,7 +24,7 @@ header-value="{{ job.id }} - {{ job.name }}" header-state="{{ vm.getSref(job) }}" header-tag="{{ vm.jobTypes[job.type] }}" - secondary-tag="{{ vm.getSplitJobDetails(job.summary_fields.internal_limit) }}"> + secondary-tag="{{ vm.getSliceJobDetails(job) }}">
{{ vm.jobType.value }}
-{{ vm.splitJobDetails.offset }}
+{{ vm.sliceJobDetails.offset }}
diff --git a/awx/ui/client/features/output/output.strings.js b/awx/ui/client/features/output/output.strings.js index 9e2da6d983..53430984b8 100644 --- a/awx/ui/client/features/output/output.strings.js +++ b/awx/ui/client/features/output/output.strings.js @@ -23,7 +23,7 @@ function OutputStrings (BaseString) { EXTRA_VARS: t.s('Read-only view of extra variables added to the job template'), INVENTORY: t.s('View the Inventory'), JOB_TEMPLATE: t.s('View the Job Template'), - SPLIT_JOB_DETAILS: t.s('Job is one of several from a JT that splits on inventory'), + SLICE_JOB_DETAILS: t.s('Job is one of several from a JT that slices on inventory'), PROJECT: t.s('View the Project'), PROJECT_UPDATE: t.s('View Project checkout results'), SCHEDULE: t.s('View the Schedule'), @@ -56,7 +56,7 @@ function OutputStrings (BaseString) { JOB_EXPLANATION: t.s('Explanation'), JOB_TAGS: t.s('Job Tags'), JOB_TEMPLATE: t.s('Job Template'), - SPLIT_JOB: t.s('Split Job'), + SLICE_JOB: t.s('Slice Job'), JOB_TYPE: t.s('Job Type'), LABELS: t.s('Labels'), LAUNCHED_BY: t.s('Launched By'), diff --git a/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js b/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js index 7a24be1711..5ff065c0c3 100644 --- a/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js +++ b/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js @@ -41,7 +41,7 @@ function atLaunchTemplateCtrl ( selectedJobTemplate .postLaunch({ id: vm.template.id }) .then(({ data }) => { - /* Split Jobs: Redirect to WF Details page if returned + /* Slice Jobs: Redirect to WF Details page if returned job type is a WF job */ if (data.type === 'workflow_job' && data.workflow_job !== null) { $state.go('workflowResults', { id: data.workflow_job }, { reload: true }); diff --git a/awx/ui/client/src/templates/job_templates/job-template.form.js b/awx/ui/client/src/templates/job_templates/job-template.form.js index b807bd169d..da3454ef60 100644 --- a/awx/ui/client/src/templates/job_templates/job-template.form.js +++ b/awx/ui/client/src/templates/job_templates/job-template.form.js @@ -257,17 +257,17 @@ function(NotificationsList, i18n) { dataPlacement: 'right', control: '', }, - job_split_count: { - label: i18n._('Job Splitting'), + job_slice_count: { + label: i18n._('Job Slicing'), type: 'number', integer: true, min: 1, default: 1, spinner: true, - dataTitle: i18n._('Split Job Count'), + dataTitle: i18n._('Slice Job Count'), dataPlacement: 'right', dataContainer: 'body', - awPopOver: "
" + i18n._("The number of jobs to split into at runtime. Will cause the Job Template to launch a workflow if value is non-zero.") + "
", + awPopOver: "
" + i18n._("The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1.") + "
", ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' }, diff_mode: { diff --git a/awx/ui/client/src/workflow-results/workflow-results.controller.js b/awx/ui/client/src/workflow-results/workflow-results.controller.js index b6556767cc..0477f4ca70 100644 --- a/awx/ui/client/src/workflow-results/workflow-results.controller.js +++ b/awx/ui/client/src/workflow-results/workflow-results.controller.js @@ -39,7 +39,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', DELETE: i18n._('Delete'), EDIT_USER: i18n._('Edit the user'), EDIT_WORKFLOW: i18n._('Edit the workflow job template'), - EDIT_SPLIT_TEMPLATE: i18n._('Edit the split job template'), + EDIT_SLICE_TEMPLATE: i18n._('Edit the slice job template'), EDIT_SCHEDULE: i18n._('Edit the schedule'), TOGGLE_STDOUT_FULLSCREEN: i18n._('Expand Output'), STATUS: '' // re-assigned elsewhere @@ -50,7 +50,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', STARTED: i18n._('Started'), FINISHED: i18n._('Finished'), LABELS: i18n._('Labels'), - SPLIT_TEMPLATE: i18n._('Split Template'), + SLICE_TEMPLATE: i18n._('Slice Job Template'), STATUS: i18n._('Status') }, details: { diff --git a/awx/ui/client/src/workflow-results/workflow-results.partial.html b/awx/ui/client/src/workflow-results/workflow-results.partial.html index c88aa6a00d..474fac1d6e 100644 --- a/awx/ui/client/src/workflow-results/workflow-results.partial.html +++ b/awx/ui/client/src/workflow-results/workflow-results.partial.html @@ -144,16 +144,16 @@
- {{ workflow.summary_fields.job_template.name }} diff --git a/docs/job_slicing.md b/docs/job_slicing.md new file mode 100644 index 0000000000..d9234a978e --- /dev/null +++ b/docs/job_slicing.md @@ -0,0 +1,13 @@ +# Job Slicing Overview + +Ansible, by default, runs jobs from a single control instance. At best a single Ansible job can be sliced up on a single system via forks but this doesn't fully take advantage of AWX's ability to distribute work to multiple nodes in a cluster. + +Job Slicing solves this by adding a Job Template field `job_slice_count`. This field specifies the number of **Jobs** to slice the Ansible run into. When this number is greater than 1 ``AWX`` will generate a **Workflow** from a **JobTemplate** instead of a **Job**. The **Inventory** will be distributed evenly amongst the slice jobs. The workflow job is then started and proceeds as though it were a normal workflow. The API will return either a **Job** resource (if `job_slice_count` < 2) or a **WorkflowJob** resource otherwise. Likewise, the UI will redirect to the appropriate screen to display the status of the run. + +## Implications for Job execution + +When jobs are sliced they can run on any Tower node and some may not run at the same time. Because of this, anything that relies on setting/sliced state (using modules such as ``set_fact``) will not work as expected. It's reasonable to expect that not all jobs will actually run at the same time (if there is not enough capacity in the system for example) + +## Simultaneous Execution Behavior + +By default Job Templates aren't normally configured to execute simultaneously (``allow_simultaneous`` must be checked). Slicing overrides this behavior and implies ``allow_simultaneous`` even if that setting is unchecked. diff --git a/docs/job_splitting.md b/docs/job_splitting.md deleted file mode 100644 index a8eb83b058..0000000000 --- a/docs/job_splitting.md +++ /dev/null @@ -1,13 +0,0 @@ -# Job Splitting Overview - -Ansible, by default, runs jobs from a single control instance. At best a single Ansible job can be split up on a single system via forks but this doesn't fully take advantage of AWX's ability to distribute work to multiple nodes in a cluster. - -Job Splitting solves this by adding a Job Template field `job_split_count`. This field specifies the number of **Jobs** to split the Ansible run into. When this number is greater than 1 ``AWX`` will generate a **Workflow** from a **JobTemplate** instead of a **Job**. The **Inventory** will be split evenly amongst the split jobs. The workflow job is then started and proceeds as though it were a normal workflow. The API will return either a **Job** resource (if `job_split_count` < 2) or a **WorkflowJob** resource otherwise. Likewise, the UI will redirect to the appropriate screen to display the status of the run. - -## Implications for Job execution - -When jobs are split they can run on any Tower node and some may not run at the same time. Because of this, anything that relies on setting/split state (using modules such as ``set_fact``) will not work as expected. It's reasonable to expect that not all jobs will actually run at the same time (if there is not enough capacity in the system for example) - -## Simultaneous Execution Behavior - -By default Job Templates aren't normally configured to execute simultaneously (``allow_simultaneous`` must be checked). Splitting overrides this behavior and implies ``allow_simultaneous`` even if that setting is unchecked. 
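For reference, the even inventory distribution that docs/job_slicing.md describes comes from the striding applied in Inventory.get_script_data above (hosts[slice_number - 1::slice_count]). A standalone sketch of that behavior, with made-up host names:

# Standalone illustration of the slice striding -- not AWX code; host names are made up.
# Each slice takes every Nth sorted host, starting at its 1-based slice number.
hosts = ['host0', 'host1', 'host2', 'host3', 'host4']
slice_count = 3

for slice_number in range(1, slice_count + 1):
    subset = hosts[slice_number - 1::slice_count]
    print('slice{}of{}: {}'.format(slice_number, slice_count, subset))

# slice1of3: ['host0', 'host3']
# slice2of3: ['host1', 'host4']
# slice3of3: ['host2']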
From 6dc58af8e1291eff2b85db5e528a6e3a98aa6c56 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 16 Oct 2018 10:46:20 -0400 Subject: [PATCH 17/29] slicing rename test cleanup and bugfix --- awx/api/serializers.py | 5 ++++- awx/main/models/jobs.py | 12 ++++++------ awx/main/models/unified_jobs.py | 4 ++-- awx/main/models/workflow.py | 10 ++++------ awx/main/tasks.py | 9 +++++---- awx/main/tests/unit/test_tasks.py | 2 ++ awx/ui/client/features/jobs/jobsList.controller.js | 4 ++++ awx/ui/client/features/output/details.component.js | 4 ++++ .../workflow-results/workflow-results.controller.js | 2 +- 9 files changed, 32 insertions(+), 20 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index 33f9e79db9..fa685cb0ab 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -3587,7 +3587,8 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer): class Meta: model = WorkflowJob - fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous', 'job_template', + fields = ('*', 'workflow_job_template', 'extra_vars', 'allow_simultaneous', + 'job_template', 'is_sliced_job', '-execution_node', '-event_processing_finished', '-controller_node',) def get_related(self, obj): @@ -3596,6 +3597,8 @@ class WorkflowJobSerializer(LabelsListMixin, UnifiedJobSerializer): res['workflow_job_template'] = self.reverse('api:workflow_job_template_detail', kwargs={'pk': obj.workflow_job_template.pk}) res['notifications'] = self.reverse('api:workflow_job_notifications_list', kwargs={'pk': obj.pk}) + if obj.job_template_id: + res['job_template'] = self.reverse('api:job_template_detail', kwargs={'pk': obj.job_template_id}) res['workflow_nodes'] = self.reverse('api:workflow_job_workflow_nodes_list', kwargs={'pk': obj.pk}) res['labels'] = self.reverse('api:workflow_job_label_list', kwargs={'pk': obj.pk}) res['activity_stream'] = self.reverse('api:workflow_job_activity_stream_list', kwargs={'pk': obj.pk}) diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 78b5ead7b2..bb8f6f0dbc 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -329,17 +329,17 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour return self.create_unified_job(**kwargs) def create_unified_job(self, **kwargs): - prevent_splitting = kwargs.pop('_prevent_slicing', False) - split_event = bool(self.job_slice_count > 1 and (not prevent_splitting)) - if split_event: - # A Split Job Template will generate a WorkflowJob rather than a Job + prevent_slicing = kwargs.pop('_prevent_slicing', False) + slice_event = bool(self.job_slice_count > 1 and (not prevent_slicing)) + if slice_event: + # A Slice Job Template will generate a WorkflowJob rather than a Job from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class() kwargs['_parent_field_name'] = "job_template" kwargs.setdefault('_eager_fields', {}) kwargs['_eager_fields']['is_sliced_job'] = True job = super(JobTemplate, self).create_unified_job(**kwargs) - if split_event: + if slice_event: try: wj_config = job.launch_config except JobLaunchConfig.DoesNotExist: @@ -349,7 +349,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour actual_inventory.hosts.count())): create_kwargs = dict(workflow_job=job, unified_job_template=self, - ancestor_artifacts=dict(job_split=idx + 1)) + ancestor_artifacts=dict(job_slice=idx + 1)) WorkflowJobNode.objects.create(**create_kwargs) return job 
diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 07077d09cd..0c0d82d31e 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -334,7 +334,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio parent_field_name = None if "_unified_job_class" in kwargs: # Special case where spawned job is different type than usual - # Only used for split jobs + # Only used for slice jobs unified_job_class = kwargs.pop("_unified_job_class") fields = unified_job_class._get_unified_job_field_names() & fields parent_field_name = kwargs.pop('_parent_field_name') @@ -354,7 +354,7 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio for fd, val in eager_fields.items(): setattr(unified_job, fd, val) - # NOTE: split workflow jobs _get_parent_field_name method + # NOTE: slice workflow jobs _get_parent_field_name method # is not correct until this is set if not parent_field_name: parent_field_name = unified_job._get_parent_field_name() diff --git a/awx/main/models/workflow.py b/awx/main/models/workflow.py index e5e816d64e..d02e3f6057 100644 --- a/awx/main/models/workflow.py +++ b/awx/main/models/workflow.py @@ -4,7 +4,6 @@ # Python #import urlparse import logging -import six # Django from django.db import models @@ -253,11 +252,10 @@ class WorkflowJobNode(WorkflowNodeBase): data['extra_vars'] = extra_vars # ensure that unified jobs created by WorkflowJobs are marked data['_eager_fields'] = {'launch_type': 'workflow'} - # Extra processing in the case that this is a split job - if 'job_split' in self.ancestor_artifacts and is_root_node: - split_str = six.text_type(self.ancestor_artifacts['job_split'] + 1) + # Extra processing in the case that this is a slice job + if 'job_slice' in self.ancestor_artifacts and is_root_node: data['_eager_fields']['allow_simultaneous'] = True - data['_eager_fields']['job_slice_number'] = self.ancestor_artifacts['job_split'] + data['_eager_fields']['job_slice_number'] = self.ancestor_artifacts['job_slice'] data['_eager_fields']['job_slice_count'] = self.workflow_job.workflow_job_nodes.count() data['_prevent_slicing'] = True return data @@ -473,7 +471,7 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio def _get_parent_field_name(self): if self.job_template_id: - # This is a workflow job which is a container for split jobs + # This is a workflow job which is a container for slice jobs return 'job_template' return 'workflow_job_template' diff --git a/awx/main/tasks.py b/awx/main/tasks.py index f45bcd176b..7106b68a25 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -825,10 +825,11 @@ class BaseTask(object): return False def build_inventory(self, instance, **kwargs): - script_data = instance.inventory.get_script_data( - hostvars=True, - slice_number=instance.job_slice_number, slice_count=instance.job_slice_count - ) + script_params = dict(hostvars=True) + if hasattr(instance, 'job_slice_number'): + script_params['slice_number'] = instance.job_slice_number + script_params['slice_count'] = instance.job_slice_count + script_data = instance.inventory.get_script_data(**script_params) json_data = json.dumps(script_data) handle, path = tempfile.mkstemp(dir=kwargs.get('private_data_dir', None)) f = os.fdopen(handle, 'w') diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 050b9c8b07..578b9f7eda 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -246,6 +246,8 
@@ class TestJobExecution(object): # If `Job.update_model` is called, we're not actually persisting # to the database; just update the status, which is usually # the update we care about for testing purposes + if kwargs.get('result_traceback'): + raise Exception('Task encountered error:\n{}'.format(kwargs['result_traceback'])) if 'status' in kwargs: self.instance.status = kwargs['status'] if 'job_env' in kwargs: diff --git a/awx/ui/client/features/jobs/jobsList.controller.js b/awx/ui/client/features/jobs/jobsList.controller.js index cd1b9e1b96..cd109b3a2c 100644 --- a/awx/ui/client/features/jobs/jobsList.controller.js +++ b/awx/ui/client/features/jobs/jobsList.controller.js @@ -77,6 +77,10 @@ function ListJobsController ( }); vm.getSliceJobDetails = (job) => { + if (!job.job_slice_count) { + return null; + } + if (job.job_slice_count === 1) { return null; } diff --git a/awx/ui/client/features/output/details.component.js b/awx/ui/client/features/output/details.component.js index 9a0ab34443..d3525db370 100644 --- a/awx/ui/client/features/output/details.component.js +++ b/awx/ui/client/features/output/details.component.js @@ -129,6 +129,10 @@ function getSourceWorkflowJobDetails () { function getSliceJobDetails () { const count = resource.model.get('job_slice_count'); + if (!count) { + return null; + } + if (count === 1) { return null; } diff --git a/awx/ui/client/src/workflow-results/workflow-results.controller.js b/awx/ui/client/src/workflow-results/workflow-results.controller.js index 0477f4ca70..fe9d42ebcb 100644 --- a/awx/ui/client/src/workflow-results/workflow-results.controller.js +++ b/awx/ui/client/src/workflow-results/workflow-results.controller.js @@ -113,7 +113,7 @@ export default ['workflowData', 'workflowResultsService', 'workflowDataOptions', if(workflowData.summary_fields && workflowData.summary_fields.job_template && workflowData.summary_fields.job_template.id){ - $scope.split_job_template_link = `/#/templates/job_template/${$scope.workflow.summary_fields.job_template.id}`; + $scope.slice_job_template_link = `/#/templates/job_template/${$scope.workflow.summary_fields.job_template.id}`; } // turn related api browser routes into front end routes From 571e34bf791da8935ce71c7765f2faa64f47e044 Mon Sep 17 00:00:00 2001 From: kialam Date: Mon, 8 Oct 2018 14:56:30 -0400 Subject: [PATCH 18/29] Begin adding unit tests for split jobs - Test split job tag method within Jobs List Controller. 
--- awx/ui/client/features/jobs/index.js | 2 + .../client/features/jobs/routes/jobs.route.js | 3 +- awx/ui/test/unit/components/index.js | 1 + .../test/unit/components/split-jobs.unit.js | 95 +++++++++++++++++++ 4 files changed, 99 insertions(+), 2 deletions(-) create mode 100644 awx/ui/test/unit/components/split-jobs.unit.js diff --git a/awx/ui/client/features/jobs/index.js b/awx/ui/client/features/jobs/index.js index 8bb692f0c8..99d91515d5 100644 --- a/awx/ui/client/features/jobs/index.js +++ b/awx/ui/client/features/jobs/index.js @@ -1,12 +1,14 @@ import JobsStrings from './jobs.strings'; import jobsRoute from './routes/jobs.route'; import { jobsSchedulesRoute, jobsSchedulesEditRoute } from '../../src/scheduler/schedules.route'; +import jobsListController from './jobsList.controller'; const MODULE_NAME = 'at.features.jobs'; angular .module(MODULE_NAME, []) .service('JobsStrings', JobsStrings) + .controller('jobsListController', jobsListController) .run(['$stateExtender', ($stateExtender) => { $stateExtender.addState(jobsRoute); $stateExtender.addState(jobsSchedulesRoute); diff --git a/awx/ui/client/features/jobs/routes/jobs.route.js b/awx/ui/client/features/jobs/routes/jobs.route.js index 52e6456bd7..427d7d165d 100644 --- a/awx/ui/client/features/jobs/routes/jobs.route.js +++ b/awx/ui/client/features/jobs/routes/jobs.route.js @@ -1,5 +1,4 @@ import { N_ } from '../../../src/i18n'; -import jobsListController from '../jobsList.controller'; import indexController from '../index.controller'; const indexTemplate = require('~features/jobs/index.view.html'); @@ -69,7 +68,7 @@ export default { }, 'jobsList@jobs': { templateUrl: jobsListTemplate, - controller: jobsListController, + controller: 'jobsListController', controllerAs: 'vm' } } diff --git a/awx/ui/test/unit/components/index.js b/awx/ui/test/unit/components/index.js index 8d75e3cf71..384436db40 100644 --- a/awx/ui/test/unit/components/index.js +++ b/awx/ui/test/unit/components/index.js @@ -6,4 +6,5 @@ import './file.unit'; import './layout.unit'; import './side-nav.unit'; import './side-nav-item.unit'; +import './split-jobs.unit'; diff --git a/awx/ui/test/unit/components/split-jobs.unit.js b/awx/ui/test/unit/components/split-jobs.unit.js new file mode 100644 index 0000000000..246471b6db --- /dev/null +++ b/awx/ui/test/unit/components/split-jobs.unit.js @@ -0,0 +1,95 @@ +describe('View: Split Jobs List', () => { + let JobList, + scope, + state, + Dataset, + resolvedModels, + JobsStrings, + QuerySet, + Prompt, + filter, + ProcessErrors, + Wait, + Rest, + SearchBasePath; + + beforeEach(angular.mock.module('at.features.jobs', ($provide) => { + Dataset = { + data: { + results: {} + } + } + state = { + params: { + job_search: {} + }, + go: jasmine.createSpy('go'), + includes: jasmine.createSpy('includes') + } + resolvedModels = [ + { + options: () => { + return ["foo", "bar"]; + } + } + ] + + ProcessErrors = jasmine.createSpy('ProcessErrors'); + Wait = jasmine.createSpy('Wait'); + Prompt = jasmine.createSpy('Prompt'); + + $provide.value('state', state); + $provide.value('Dataset', Dataset); + $provide.value('resolvedModels', resolvedModels); + $provide.value('ProcessErrors', ProcessErrors); + $provide.value('Wait', Wait); + $provide.value('Prompt', Prompt); + $provide.value('Rest', angular.noop); + $provide.value('SearchBasePath', ''); + $provide.value('JobsStrings', angular.noop); + $provide.value('QuerySet', angular.noop); + + $provide.provider('$stateProvider', { '$get': function() { return function() {}; } }); + 
$provide.value('$stateExtender', { addState: jasmine.createSpy('addState'), }); + })); + + beforeEach(angular.mock.inject(function($controller, $rootScope, _state_, _Dataset_, _resolvedModels_, _JobsStrings_, _QuerySet_, _Prompt_, _$filter_, _ProcessErrors_, _Wait_, _Rest_, _SearchBasePath_){ + scope = $rootScope.$new(); + state = _state_; + Dataset = _Dataset_; + resolvedModels = _resolvedModels_; + JobsStrings = _JobsStrings_; + QuerySet = _QuerySet_; + Prompt = _Prompt_; + filter = _$filter_; + ProcessErrors = _ProcessErrors_; + Wait = _Wait_; + Rest = _Rest_; + SearchBasePath = _SearchBasePath_; + + JobList = $controller('jobsListController', { + $scope: scope, + $state: state, + Dataset: Dataset, + resolvedModels: resolvedModels, + JobsStrings: JobsStrings, + ProcessErrors: ProcessErrors, + QuerySet: QuerySet, + Wait: Wait, + Prompt: Prompt, + $filter: filter, + Wait: Wait, + Rest: Rest, + SearchBasePath: SearchBasePath, + }); + })); + + describe('JobList Controller', () => { + it('is created successfully', () => { + expect(JobList).toBeDefined(); + }); + it('has method "getSplitJobDetails"', () => { + expect(JobList.getSplitJobDetails).toBeDefined(); + }); + }); +}); \ No newline at end of file From 65a0e5ed45f6beceefaccbad27ca2694e3bf2caf Mon Sep 17 00:00:00 2001 From: kialam Date: Thu, 11 Oct 2018 11:07:51 -0400 Subject: [PATCH 19/29] Fix failing tests. --- .../test/unit/components/split-jobs.unit.js | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/awx/ui/test/unit/components/split-jobs.unit.js b/awx/ui/test/unit/components/split-jobs.unit.js index 246471b6db..ffd0bdfaab 100644 --- a/awx/ui/test/unit/components/split-jobs.unit.js +++ b/awx/ui/test/unit/components/split-jobs.unit.js @@ -91,5 +91,34 @@ describe('View: Split Jobs List', () => { it('has method "getSplitJobDetails"', () => { expect(JobList.getSplitJobDetails).toBeDefined(); }); + it('returns a string', () => { + let data = { + shard: { + offset: 1, + step: 2 + } + } + const result = JobList.getSplitJobDetails(data); + expect(result).toEqual('Split Job 2/2'); + }); + it('returns null when there is no data', () => { + let data = undefined; + const result = JobList.getSplitJobDetails(data); + expect(result).toBeNull(); + }); + it('returns null when there is no "shard" attribute', () => { + let data = { + foo: {} + }; + const result = JobList.getSplitJobDetails(data); + expect(result).toBeNull(); + }); + it('returns null when "shard" is an empty object', () => { + let data = { + shard: {} + }; + const result = JobList.getSplitJobDetails(data); + expect(result).toBeNull(); + }); }); }); \ No newline at end of file From 21aeda0f4553b9fef1897e6bc8514999660a80a8 Mon Sep 17 00:00:00 2001 From: kialam Date: Thu, 11 Oct 2018 11:10:13 -0400 Subject: [PATCH 20/29] Add unit tests for Job Details - Test `getSplitJobDetails` method. - Fix failing tests. - Rename unit tests. 
--- .../features/output/details.component.js | 5 +- awx/ui/test/unit/components/index.js | 3 +- .../components/job-details-split-jobs.unit.js | 206 ++++++++++++++++++ ...s.unit.js => jobs-list-split-jobs.unit.js} | 0 4 files changed, 212 insertions(+), 2 deletions(-) create mode 100644 awx/ui/test/unit/components/job-details-split-jobs.unit.js rename awx/ui/test/unit/components/{split-jobs.unit.js => jobs-list-split-jobs.unit.js} (100%) diff --git a/awx/ui/client/features/output/details.component.js b/awx/ui/client/features/output/details.component.js index d3525db370..93f405d2a0 100644 --- a/awx/ui/client/features/output/details.component.js +++ b/awx/ui/client/features/output/details.component.js @@ -143,7 +143,10 @@ function getSliceJobDetails () { const offset = `${number}/${count}`; const tooltip = strings.get('tooltips.SLICE_JOB_DETAILS'); - return { label, offset, tooltip }; + if (label && offset && tooltip) { + return { label, offset, tooltip }; + } + return null; } function getJobTemplateDetails () { diff --git a/awx/ui/test/unit/components/index.js b/awx/ui/test/unit/components/index.js index 384436db40..a95460d83a 100644 --- a/awx/ui/test/unit/components/index.js +++ b/awx/ui/test/unit/components/index.js @@ -6,5 +6,6 @@ import './file.unit'; import './layout.unit'; import './side-nav.unit'; import './side-nav-item.unit'; -import './split-jobs.unit'; +import './jobs-list-split-jobs.unit'; +import './job-details-split-jobs.unit'; diff --git a/awx/ui/test/unit/components/job-details-split-jobs.unit.js b/awx/ui/test/unit/components/job-details-split-jobs.unit.js new file mode 100644 index 0000000000..8948ededc6 --- /dev/null +++ b/awx/ui/test/unit/components/job-details-split-jobs.unit.js @@ -0,0 +1,206 @@ +'use strict'; +import moment from 'moment'; + +describe('View: Job Details', () => { + let JobDetails, + scope, + state, + OutputStrings, + Prompt, + filter, + ProcessErrors, + Wait, + httpBackend, + ParseVariableString, + subscribe, + OutputStatusService; + + var mockData = { + summary_fields: { + internal_limit: { + shard: { + offset: 1, + step: 2, + } + } + }, + labels: { + SPLIT_JOB: 'foo' + }, + tooltips: { + SPLIT_JOB_DETAILS: 'bar' + } + }; + let resource = { + id: '147', + type: 'playbook', + model: { + get: (obj) => { + return obj.split('.').reduce((i,o) => i && i[o] || null, mockData); + }, + has: jasmine.createSpy('has'), + options: jasmine.createSpy('options'), + }, + events: {}, + ws: {} + }; + + beforeEach(angular.mock.module('at.features.output', ($provide) => { + state = { + params: { + job_search: {} + }, + go: jasmine.createSpy('go'), + includes: jasmine.createSpy('includes') + } + + OutputStrings = { + get: (obj) => { + return obj.split('.').reduce((i,o) => i && i[o] || null, mockData); + }, + } + + OutputStatusService = { + subscribe: jasmine.createSpy('subscribe') + }; + + ProcessErrors = jasmine.createSpy('ProcessErrors'); + Wait = jasmine.createSpy('Wait'); + Prompt = jasmine.createSpy('Prompt'); + + $provide.value('state', state); + $provide.value('ProcessErrors', ProcessErrors); + $provide.value('Wait', Wait); + $provide.value('Prompt', Prompt); + $provide.value('OutputStrings', OutputStrings); + $provide.value('ParseVariableString', angular.noop); + $provide.value('OutputStatusService', OutputStatusService); + + $provide.provider('$stateProvider', { '$get': function() { return function() {}; } }); + $provide.value('$stateExtender', { addState: jasmine.createSpy('addState'), }); + $provide.value('$stateRegistry', { register: jasmine.createSpy('regster'), 
}); + $provide.value('sanitizeFilter', angular.noop); + $provide.value('subscribe', subscribe); + $provide.value('moment', moment); + $provide.value('longDateFilter', angular.noop); + })); + + beforeEach(angular.mock.inject(function($injector, $componentController, $rootScope, $httpBackend, _state_, _OutputStrings_, _ParseVariableString_, _Prompt_, _ProcessErrors_, _Wait_, _OutputStatusService_){ + scope = $rootScope.$new(); + state = _state_; + OutputStrings = _OutputStrings_; + Prompt = _Prompt_; + filter = $injector.get("$filter"); + ProcessErrors = _ProcessErrors_; + Wait = _Wait_; + ParseVariableString = _ParseVariableString_; + httpBackend = $httpBackend; + OutputStatusService = _OutputStatusService_; + + JobDetails = $componentController('atJobDetails', { + $scope: scope, + $state: state, + OutputStrings: OutputStrings, + ProcessErrors: ProcessErrors, + Wait: Wait, + Prompt: Prompt, + $filter: filter, + Wait: Wait, + ParseVariableString: ParseVariableString, + httpBackend: httpBackend, + OutputStatusService: OutputStatusService, + }, {resource: resource}); + JobDetails.$onInit(); + })); + + describe('JobDetails Component', () => { + it('is created successfully', () => { + expect(JobDetails).toBeDefined(); + }); + it('has method "getSplitJobDetails"', () => { + expect(JobDetails.splitJobDetails).toBeDefined(); + }); + describe('splitJobDetails method', () => { + it('returned values are strings', () => { + const result = JobDetails.splitJobDetails; + const { label, offset, tooltip } = result; + expect(offset).toEqual('2/2'); + expect(label).toEqual('foo'); + expect(tooltip).toEqual('bar'); + }); + it('returns null if label, offset, or tooltip is undefined', () => { + mockData = { + summary_fields: { + internal_limit: { + shard: { + offset: 1, + step: 2, + } + } + }, + labels: { + SPLIT_JOB: null + }, + tooltips: { + SPLIT_JOB_DETAILS: null + } + }; + JobDetails.$onInit(); + const result = JobDetails.splitJobDetails; + expect(result).toBeNull(); + }); + it('returns null if summary_fields.internal_limit is undefined or null', () => { + mockData = { + summary_fields: { + internal_limit: undefined + }, + labels: { + SPLIT_JOB: 'foo' + }, + tooltips: { + SPLIT_JOB_DETAILS: 'bar' + } + }; + JobDetails.$onInit(); + const result = JobDetails.splitJobDetails; + expect(result).toBeNull(); + }); + it('returns null if summary_fields.internal_limit.shard is undefined or null', () => { + mockData = { + summary_fields: { + internal_limit: { + shard: undefined + } + }, + labels: { + SPLIT_JOB: 'foo' + }, + tooltips: { + SPLIT_JOB_DETAILS: 'bar' + } + }; + JobDetails.$onInit(); + const result = JobDetails.splitJobDetails; + expect(result).toBeNull(); + }); + it('returns null if summary_fields.internal_limit.shard is an empty object', () => { + mockData = { + summary_fields: { + internal_limit: { + shard: {} + } + }, + labels: { + SPLIT_JOB: 'foo' + }, + tooltips: { + SPLIT_JOB_DETAILS: 'bar' + } + }; + JobDetails.$onInit(); + const result = JobDetails.splitJobDetails; + expect(result).toBeNull(); + }); + }); + }); +}); \ No newline at end of file diff --git a/awx/ui/test/unit/components/split-jobs.unit.js b/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js similarity index 100% rename from awx/ui/test/unit/components/split-jobs.unit.js rename to awx/ui/test/unit/components/jobs-list-split-jobs.unit.js From f72fca5fcf988588cc0e67ab78e80f864ef754dc Mon Sep 17 00:00:00 2001 From: kialam Date: Wed, 17 Oct 2018 09:31:34 -0400 Subject: [PATCH 21/29] Fix unit tests after "slice" rename. 
- Update Jobs List unit tests with new schema and test cases. - Update Job Details unit tests with new schema and test cases. - Test both for expected behavior when handling a regular non-sliced job. --- .../features/jobs/jobsList.controller.js | 6 +- .../components/job-details-split-jobs.unit.js | 63 +++++++------------ .../components/jobs-list-split-jobs.unit.js | 39 ++++++------ 3 files changed, 48 insertions(+), 60 deletions(-) diff --git a/awx/ui/client/features/jobs/jobsList.controller.js b/awx/ui/client/features/jobs/jobsList.controller.js index cd109b3a2c..1a1943c6c4 100644 --- a/awx/ui/client/features/jobs/jobsList.controller.js +++ b/awx/ui/client/features/jobs/jobsList.controller.js @@ -85,7 +85,11 @@ function ListJobsController ( return null; } - return `Slice Job ${job.job_slice_number}/${job.job_slice_count}`; + if (job.job_slice_number && job.job_slice_count) { + return `Slice Job ${job.job_slice_number}/${job.job_slice_count}`; + } + + return null; }; vm.getSref = ({ type, id }) => { diff --git a/awx/ui/test/unit/components/job-details-split-jobs.unit.js b/awx/ui/test/unit/components/job-details-split-jobs.unit.js index 8948ededc6..eb1a895ef2 100644 --- a/awx/ui/test/unit/components/job-details-split-jobs.unit.js +++ b/awx/ui/test/unit/components/job-details-split-jobs.unit.js @@ -16,19 +16,13 @@ describe('View: Job Details', () => { OutputStatusService; var mockData = { - summary_fields: { - internal_limit: { - shard: { - offset: 1, - step: 2, - } - } - }, + job_slice_count: 2, + job_slice_number: 2, labels: { - SPLIT_JOB: 'foo' + SLICE_JOB: 'foo' }, tooltips: { - SPLIT_JOB_DETAILS: 'bar' + SLICE_JOB_DETAILS: 'bar' } }; let resource = { @@ -117,12 +111,12 @@ describe('View: Job Details', () => { it('is created successfully', () => { expect(JobDetails).toBeDefined(); }); - it('has method "getSplitJobDetails"', () => { - expect(JobDetails.splitJobDetails).toBeDefined(); + it('has method "sliceJobDetails"', () => { + expect(JobDetails.sliceJobDetails).toBeDefined(); }); describe('splitJobDetails method', () => { it('returned values are strings', () => { - const result = JobDetails.splitJobDetails; + const result = JobDetails.sliceJobDetails; const { label, offset, tooltip } = result; expect(offset).toEqual('2/2'); expect(label).toEqual('foo'); @@ -130,14 +124,8 @@ describe('View: Job Details', () => { }); it('returns null if label, offset, or tooltip is undefined', () => { mockData = { - summary_fields: { - internal_limit: { - shard: { - offset: 1, - step: 2, - } - } - }, + job_slice_count: 2, + job_slice_number: 2, labels: { SPLIT_JOB: null }, @@ -146,14 +134,13 @@ describe('View: Job Details', () => { } }; JobDetails.$onInit(); - const result = JobDetails.splitJobDetails; + const result = JobDetails.sliceJobDetails; expect(result).toBeNull(); }); - it('returns null if summary_fields.internal_limit is undefined or null', () => { + it('returns null if job_slice_count is undefined or null', () => { mockData = { - summary_fields: { - internal_limit: undefined - }, + job_slice_count: null, + job_slice_number: 2, labels: { SPLIT_JOB: 'foo' }, @@ -162,16 +149,13 @@ describe('View: Job Details', () => { } }; JobDetails.$onInit(); - const result = JobDetails.splitJobDetails; + const result = JobDetails.sliceJobDetails; expect(result).toBeNull(); }); - it('returns null if summary_fields.internal_limit.shard is undefined or null', () => { + it('returns null if job_slice_number is undefined or null', () => { mockData = { - summary_fields: { - internal_limit: { - shard: undefined - } - 
}, + job_slice_count: 2, + job_slice_number: null, labels: { SPLIT_JOB: 'foo' }, @@ -180,16 +164,13 @@ describe('View: Job Details', () => { } }; JobDetails.$onInit(); - const result = JobDetails.splitJobDetails; + const result = JobDetails.sliceJobDetails; expect(result).toBeNull(); }); - it('returns null if summary_fields.internal_limit.shard is an empty object', () => { + it('returns null if job is a non-sliced job', () => { mockData = { - summary_fields: { - internal_limit: { - shard: {} - } - }, + job_slice_count: 1, + job_slice_number: null, labels: { SPLIT_JOB: 'foo' }, @@ -198,7 +179,7 @@ describe('View: Job Details', () => { } }; JobDetails.$onInit(); - const result = JobDetails.splitJobDetails; + const result = JobDetails.sliceJobDetails; expect(result).toBeNull(); }); }); diff --git a/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js b/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js index ffd0bdfaab..9c9a9dc3bc 100644 --- a/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js +++ b/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js @@ -89,35 +89,38 @@ describe('View: Split Jobs List', () => { expect(JobList).toBeDefined(); }); it('has method "getSplitJobDetails"', () => { - expect(JobList.getSplitJobDetails).toBeDefined(); + expect(JobList.getSliceJobDetails).toBeDefined(); }); it('returns a string', () => { let data = { - shard: { - offset: 1, - step: 2 - } + job_slice_number: 1, + job_slice_count: 2 } - const result = JobList.getSplitJobDetails(data); - expect(result).toEqual('Split Job 2/2'); + const result = JobList.getSliceJobDetails(data); + expect(result).toEqual('Slice Job 1/2'); }); - it('returns null when there is no data', () => { - let data = undefined; - const result = JobList.getSplitJobDetails(data); + it('returns null when data is null', () => { + let data = { + job_slice_number: null, + job_slice_count: null + } + const result = JobList.getSliceJobDetails(data); expect(result).toBeNull(); }); - it('returns null when there is no "shard" attribute', () => { + it('returns null when data is undefined', () => { let data = { - foo: {} - }; - const result = JobList.getSplitJobDetails(data); + job_slice_number: undefined, + job_slice_count: undefined + } + const result = JobList.getSliceJobDetails(data); expect(result).toBeNull(); }); - it('returns null when "shard" is an empty object', () => { + it('returns null when job is not a sliced job', () => { let data = { - shard: {} - }; - const result = JobList.getSplitJobDetails(data); + job_slice_number: null, + job_slice_count: 1 + } + const result = JobList.getSliceJobDetails(data); expect(result).toBeNull(); }); }); From 37f90249401dcd14965c027f11ba03e5907acaf0 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 16 Oct 2018 16:47:11 -0400 Subject: [PATCH 22/29] fix slicing task_impact and script gen bugs --- awx/main/models/inventory.py | 3 ++ awx/main/models/jobs.py | 4 ++ .../tests/functional/models/test_inventory.py | 19 ++++++++ .../functional/models/test_unified_job.py | 47 +++++++++++++++++++ 4 files changed, 73 insertions(+) diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 805e4eb1f4..c7b9254ada 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -247,6 +247,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): data = dict() all_group = data.setdefault('all', dict()) + all_hostnames = set(host.name for host in hosts) if self.variables_dict: all_group['vars'] = self.variables_dict @@ -264,6 +265,8 @@ 
class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin): ).values_list('group_id', 'host_id', 'host__name') group_hosts_map = {} for group_id, host_id, host_name in group_hosts_qs: + if host_name not in all_hostnames: + continue # host might not be in current shard group_hostnames = group_hosts_map.setdefault(group_id, []) group_hostnames.append(host_name) grouped_hosts.add(host_name) diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index bb8f6f0dbc..a0ea174bbe 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -595,6 +595,7 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana new_prompts['_prevent_slicing'] = True new_prompts.setdefault('_eager_fields', {}) new_prompts['_eager_fields']['job_slice_number'] = self.job_slice_number + new_prompts['_eager_fields']['job_slice_count'] = self.job_slice_count return super(Job, self).copy_unified_job(**new_prompts) @property @@ -690,6 +691,9 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana count_hosts = 2 else: count_hosts = Host.objects.filter(inventory__jobs__pk=self.pk).count() + if self.job_slice_count > 1: + # Integer division intentional + count_hosts = (count_hosts + self.job_slice_count - self.job_slice_number) / self.job_slice_count return min(count_hosts, 5 if self.forks == 0 else self.forks) + 1 @property diff --git a/awx/main/tests/functional/models/test_inventory.py b/awx/main/tests/functional/models/test_inventory.py index e11a4f926c..97cf1cb0a0 100644 --- a/awx/main/tests/functional/models/test_inventory.py +++ b/awx/main/tests/functional/models/test_inventory.py @@ -46,6 +46,25 @@ class TestInventoryScript: 'all': {'hosts': ['host{}'.format(i)]} } + def test_slice_subset_with_groups(self, inventory): + hosts = [] + for i in range(3): + host = inventory.hosts.create(name='host{}'.format(i)) + hosts.append(host) + g1 = inventory.groups.create(name='contains_all_hosts') + for host in hosts: + g1.hosts.add(host) + g2 = inventory.groups.create(name='contains_two_hosts') + for host in hosts[:2]: + g2.hosts.add(host) + for i in range(3): + expected_data = { + 'contains_all_hosts': {'hosts': ['host{}'.format(i)], 'children': [], 'vars': {}}, + } + if i < 2: + expected_data['contains_two_hosts'] = {'hosts': ['host{}'.format(i)], 'children': [], 'vars': {}} + assert inventory.get_script_data(slice_number=i + 1, slice_count=3) == expected_data + @pytest.mark.django_db class TestActiveCount: diff --git a/awx/main/tests/functional/models/test_unified_job.py b/awx/main/tests/functional/models/test_unified_job.py index f587e4c448..74e163c66e 100644 --- a/awx/main/tests/functional/models/test_unified_job.py +++ b/awx/main/tests/functional/models/test_unified_job.py @@ -152,3 +152,50 @@ def test_event_processing_not_finished(): def test_event_model_undefined(): wj = WorkflowJob.objects.create(name='foobar', status='finished') assert wj.event_processing_finished + + +@pytest.mark.django_db +class TestTaskImpact: + @pytest.fixture + def job_host_limit(self, job_template, inventory): + def r(hosts, forks): + for i in range(hosts): + inventory.hosts.create(name='foo' + str(i)) + job = Job.objects.create( + name='fake-job', + launch_type='workflow', + job_template=job_template, + inventory=inventory, + forks=forks + ) + return job + return r + + def test_limit_task_impact(self, job_host_limit): + job = job_host_limit(5, 2) + assert job.task_impact == 2 + 1 # forks becomes constraint + + def test_host_task_impact(self, job_host_limit): + 
job = job_host_limit(3, 5) + assert job.task_impact == 3 + 1 # hosts becomes constraint + + def test_shard_task_impact(self, slice_job_factory): + # factory creates on host per slice + workflow_job = slice_job_factory(3, jt_kwargs={'forks': 50}, spawn=True) + # arrange the jobs by their number + jobs = [None for i in range(3)] + for node in workflow_job.workflow_nodes.all(): + jobs[node.job.job_slice_number - 1] = node.job + # Even distribution - all jobs run on 1 host + assert [ + len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts']) + for i in range(3) + ] == [1, 1, 1] + assert [job.task_impact for job in jobs] == [2, 2, 2] # plus one base task impact + # Uneven distribution - first job takes the extra host + jobs[0].inventory.hosts.create(name='remainder_foo') + assert [ + len(jobs[0].inventory.get_script_data(slice_number=i + 1, slice_count=3)['all']['hosts']) + for i in range(3) + ] == [2, 1, 1] + assert [job.task_impact for job in jobs] == [3, 2, 2] From affacb8ab5df9dffe3edff91f18448ec1961c796 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Wed, 17 Oct 2018 15:26:34 -0400 Subject: [PATCH 23/29] revert change of including slice wfj ids in recent_jobs list --- awx/api/serializers.py | 7 +++++-- awx/main/tests/functional/api/test_job.py | 2 +- .../serializers/test_job_template_serializers.py | 13 ++++--------- 3 files changed, 10 insertions(+), 12 deletions(-) diff --git a/awx/api/serializers.py b/awx/api/serializers.py index fa685cb0ab..b86c4c5e4a 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -2976,9 +2976,12 @@ class JobTemplateMixin(object): ''' def _recent_jobs(self, obj): - job_mgr = obj.unifiedjob_unified_jobs.non_polymorphic().only('id', 'status', 'finished') + if hasattr(obj, 'workflow_jobs'): + job_mgr = obj.workflow_jobs + else: + job_mgr = obj.jobs return [{'id': x.id, 'status': x.status, 'finished': x.finished} - for x in job_mgr.order_by('-created')[:10]] + for x in job_mgr.all().order_by('-created')[:10]] def get_summary_fields(self, obj): d = super(JobTemplateMixin, self).get_summary_fields(obj) diff --git a/awx/main/tests/functional/api/test_job.py b/awx/main/tests/functional/api/test_job.py index 0e735eccb0..d635a35e0f 100644 --- a/awx/main/tests/functional/api/test_job.py +++ b/awx/main/tests/functional/api/test_job.py @@ -132,7 +132,7 @@ def test_slice_jt_recent_jobs(slice_job_factory, admin_user, get): expect=200 ) job_ids = [entry['id'] for entry in r.data['summary_fields']['recent_jobs']] - assert workflow_job.pk in job_ids + assert workflow_job.pk not in job_ids for node in workflow_job.workflow_nodes.all(): job = node.job assert job.pk in job_ids diff --git a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py index 8719b9b1b9..a6f41debb9 100644 --- a/awx/main/tests/unit/api/serializers/test_job_template_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_job_template_serializers.py @@ -71,19 +71,14 @@ class TestJobTemplateSerializerGetRelated(): class TestJobTemplateSerializerGetSummaryFields(): def test__recent_jobs(self, mocker, job_template, jobs): - job_template.unifiedjob_unified_jobs = mocker.MagicMock(**{ - 'non_polymorphic.return_value': mocker.MagicMock(**{ - 'only.return_value': mocker.MagicMock(**{ - 'order_by.return_value': jobs - }) - }) - }) + job_template.jobs.all = mocker.MagicMock(**{'order_by.return_value': jobs}) + job_template.jobs.all.return_value = job_template.jobs.all serializer = 
JobTemplateSerializer() recent_jobs = serializer._recent_jobs(job_template) - job_template.unifiedjob_unified_jobs.non_polymorphic.assert_called_once_with() - job_template.unifiedjob_unified_jobs.non_polymorphic().only().order_by.assert_called_once_with('-created') + job_template.jobs.all.assert_called_once_with() + job_template.jobs.all.order_by.assert_called_once_with('-created') assert len(recent_jobs) == 10 for x in jobs[:10]: assert recent_jobs == [{'id': x.id, 'status': x.status, 'finished': x.finished} for x in jobs[:10]] From a59017ceef8addd19c0434fdab459892e95e65fd Mon Sep 17 00:00:00 2001 From: kialam Date: Wed, 17 Oct 2018 16:38:24 -0400 Subject: [PATCH 24/29] Fix eslint errors. --- .../components/job-details-split-jobs.unit.js | 76 +++++++++--------- .../components/jobs-list-split-jobs.unit.js | 80 +++++++++---------- 2 files changed, 77 insertions(+), 79 deletions(-) diff --git a/awx/ui/test/unit/components/job-details-split-jobs.unit.js b/awx/ui/test/unit/components/job-details-split-jobs.unit.js index eb1a895ef2..d6f7b0f23a 100644 --- a/awx/ui/test/unit/components/job-details-split-jobs.unit.js +++ b/awx/ui/test/unit/components/job-details-split-jobs.unit.js @@ -1,21 +1,20 @@ -'use strict'; import moment from 'moment'; describe('View: Job Details', () => { - let JobDetails, - scope, - state, - OutputStrings, - Prompt, - filter, - ProcessErrors, - Wait, - httpBackend, - ParseVariableString, - subscribe, - OutputStatusService; + let JobDetails; + let scope; + let state; + let OutputStrings; + let Prompt; + let filter; + let ProcessErrors; + let Wait; + let httpBackend; + let ParseVariableString; + let subscribe; + let OutputStatusService; - var mockData = { + let mockData = { job_slice_count: 2, job_slice_number: 2, labels: { @@ -25,13 +24,11 @@ describe('View: Job Details', () => { SLICE_JOB_DETAILS: 'bar' } }; - let resource = { + const resource = { id: '147', type: 'playbook', model: { - get: (obj) => { - return obj.split('.').reduce((i,o) => i && i[o] || null, mockData); - }, + get: (obj) => obj.split('.').reduce((i, o) => i && i[o] || null, mockData), has: jasmine.createSpy('has'), options: jasmine.createSpy('options'), }, @@ -46,13 +43,11 @@ describe('View: Job Details', () => { }, go: jasmine.createSpy('go'), includes: jasmine.createSpy('includes') - } + }; OutputStrings = { - get: (obj) => { - return obj.split('.').reduce((i,o) => i && i[o] || null, mockData); - }, - } + get: (obj) => obj.split('.').reduce((i, o) => i && i[o] || null, mockData), + }; OutputStatusService = { subscribe: jasmine.createSpy('subscribe') @@ -70,7 +65,7 @@ describe('View: Job Details', () => { $provide.value('ParseVariableString', angular.noop); $provide.value('OutputStatusService', OutputStatusService); - $provide.provider('$stateProvider', { '$get': function() { return function() {}; } }); + $provide.provider('$stateProvider', { $get: jasmine.createSpy('$get'), }); $provide.value('$stateExtender', { addState: jasmine.createSpy('addState'), }); $provide.value('$stateRegistry', { register: jasmine.createSpy('regster'), }); $provide.value('sanitizeFilter', angular.noop); @@ -79,12 +74,16 @@ describe('View: Job Details', () => { $provide.value('longDateFilter', angular.noop); })); - beforeEach(angular.mock.inject(function($injector, $componentController, $rootScope, $httpBackend, _state_, _OutputStrings_, _ParseVariableString_, _Prompt_, _ProcessErrors_, _Wait_, _OutputStatusService_){ + beforeEach(angular.mock.inject(( + $injector, $componentController, $rootScope, + $httpBackend, _state_, 
_OutputStrings_, _ParseVariableString_, _Prompt_, + _ProcessErrors_, _Wait_, _OutputStatusService_ + ) => { scope = $rootScope.$new(); state = _state_; OutputStrings = _OutputStrings_; Prompt = _Prompt_; - filter = $injector.get("$filter"); + filter = $injector.get('$filter'); ProcessErrors = _ProcessErrors_; Wait = _Wait_; ParseVariableString = _ParseVariableString_; @@ -94,16 +93,15 @@ describe('View: Job Details', () => { JobDetails = $componentController('atJobDetails', { $scope: scope, $state: state, - OutputStrings: OutputStrings, - ProcessErrors: ProcessErrors, - Wait: Wait, - Prompt: Prompt, + OutputStrings, + ProcessErrors, + Wait, + Prompt, $filter: filter, - Wait: Wait, - ParseVariableString: ParseVariableString, - httpBackend: httpBackend, - OutputStatusService: OutputStatusService, - }, {resource: resource}); + ParseVariableString, + httpBackend, + OutputStatusService, + }, { resource }); JobDetails.$onInit(); })); @@ -137,7 +135,7 @@ describe('View: Job Details', () => { const result = JobDetails.sliceJobDetails; expect(result).toBeNull(); }); - it('returns null if job_slice_count is undefined or null', () => { + it('returns null if job_slice_count is undefined or null', () => { mockData = { job_slice_count: null, job_slice_number: 2, @@ -152,7 +150,7 @@ describe('View: Job Details', () => { const result = JobDetails.sliceJobDetails; expect(result).toBeNull(); }); - it('returns null if job_slice_number is undefined or null', () => { + it('returns null if job_slice_number is undefined or null', () => { mockData = { job_slice_count: 2, job_slice_number: null, @@ -167,7 +165,7 @@ describe('View: Job Details', () => { const result = JobDetails.sliceJobDetails; expect(result).toBeNull(); }); - it('returns null if job is a non-sliced job', () => { + it('returns null if job is a non-sliced job', () => { mockData = { job_slice_count: 1, job_slice_number: null, @@ -184,4 +182,4 @@ describe('View: Job Details', () => { }); }); }); -}); \ No newline at end of file +}); diff --git a/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js b/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js index 9c9a9dc3bc..cd4438a0b5 100644 --- a/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js +++ b/awx/ui/test/unit/components/jobs-list-split-jobs.unit.js @@ -1,38 +1,36 @@ describe('View: Split Jobs List', () => { - let JobList, - scope, - state, - Dataset, - resolvedModels, - JobsStrings, - QuerySet, - Prompt, - filter, - ProcessErrors, - Wait, - Rest, - SearchBasePath; + let JobList; + let scope; + let state; + let Dataset; + let resolvedModels; + let JobsStrings; + let QuerySet; + let Prompt; + let filter; + let ProcessErrors; + let Wait; + let Rest; + let SearchBasePath; beforeEach(angular.mock.module('at.features.jobs', ($provide) => { Dataset = { data: { results: {} } - } + }; state = { params: { job_search: {} }, go: jasmine.createSpy('go'), includes: jasmine.createSpy('includes') - } + }; resolvedModels = [ { - options: () => { - return ["foo", "bar"]; - } + options: () => ['foo', 'bar'], } - ] + ]; ProcessErrors = jasmine.createSpy('ProcessErrors'); Wait = jasmine.createSpy('Wait'); @@ -49,11 +47,14 @@ describe('View: Split Jobs List', () => { $provide.value('JobsStrings', angular.noop); $provide.value('QuerySet', angular.noop); - $provide.provider('$stateProvider', { '$get': function() { return function() {}; } }); + $provide.provider('$stateProvider', { $get: jasmine.createSpy('$get'), }); $provide.value('$stateExtender', { addState: jasmine.createSpy('addState'), }); })); - 
beforeEach(angular.mock.inject(function($controller, $rootScope, _state_, _Dataset_, _resolvedModels_, _JobsStrings_, _QuerySet_, _Prompt_, _$filter_, _ProcessErrors_, _Wait_, _Rest_, _SearchBasePath_){ + beforeEach(angular.mock.inject(( + $controller, $rootScope, _state_, _Dataset_, _resolvedModels_, _JobsStrings_, + _QuerySet_, _Prompt_, _$filter_, _ProcessErrors_, _Wait_, _Rest_, _SearchBasePath_ + ) => { scope = $rootScope.$new(); state = _state_; Dataset = _Dataset_; @@ -70,17 +71,16 @@ describe('View: Split Jobs List', () => { JobList = $controller('jobsListController', { $scope: scope, $state: state, - Dataset: Dataset, - resolvedModels: resolvedModels, - JobsStrings: JobsStrings, - ProcessErrors: ProcessErrors, - QuerySet: QuerySet, - Wait: Wait, - Prompt: Prompt, + Dataset, + resolvedModels, + JobsStrings, + ProcessErrors, + QuerySet, + Wait, + Prompt, $filter: filter, - Wait: Wait, - Rest: Rest, - SearchBasePath: SearchBasePath, + Rest, + SearchBasePath, }); })); @@ -92,36 +92,36 @@ describe('View: Split Jobs List', () => { expect(JobList.getSliceJobDetails).toBeDefined(); }); it('returns a string', () => { - let data = { + const data = { job_slice_number: 1, job_slice_count: 2 - } + }; const result = JobList.getSliceJobDetails(data); expect(result).toEqual('Slice Job 1/2'); }); it('returns null when data is null', () => { - let data = { + const data = { job_slice_number: null, job_slice_count: null - } + }; const result = JobList.getSliceJobDetails(data); expect(result).toBeNull(); }); it('returns null when data is undefined', () => { - let data = { + const data = { job_slice_number: undefined, job_slice_count: undefined - } + }; const result = JobList.getSliceJobDetails(data); expect(result).toBeNull(); }); it('returns null when job is not a sliced job', () => { - let data = { + const data = { job_slice_number: null, job_slice_count: 1 - } + }; const result = JobList.getSliceJobDetails(data); expect(result).toBeNull(); }); }); -}); \ No newline at end of file +}); From a7028df82837d8d0e3ef8644c17b90da7a1bb085 Mon Sep 17 00:00:00 2001 From: kialam Date: Wed, 17 Oct 2018 16:40:01 -0400 Subject: [PATCH 25/29] Fix one failing unit test. 
--- .../client/features/output/details.component.js | 4 ++++ .../components/job-details-split-jobs.unit.js | 16 ++++++++-------- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/awx/ui/client/features/output/details.component.js b/awx/ui/client/features/output/details.component.js index 93f405d2a0..5d0ef52338 100644 --- a/awx/ui/client/features/output/details.component.js +++ b/awx/ui/client/features/output/details.component.js @@ -139,6 +139,10 @@ function getSliceJobDetails () { const number = resource.model.get('job_slice_number'); + if (!number) { + return null; + } + const label = strings.get('labels.SLICE_JOB'); const offset = `${number}/${count}`; const tooltip = strings.get('tooltips.SLICE_JOB_DETAILS'); diff --git a/awx/ui/test/unit/components/job-details-split-jobs.unit.js b/awx/ui/test/unit/components/job-details-split-jobs.unit.js index d6f7b0f23a..39099969aa 100644 --- a/awx/ui/test/unit/components/job-details-split-jobs.unit.js +++ b/awx/ui/test/unit/components/job-details-split-jobs.unit.js @@ -125,10 +125,10 @@ describe('View: Job Details', () => { job_slice_count: 2, job_slice_number: 2, labels: { - SPLIT_JOB: null + SLICE_JOB: null }, tooltips: { - SPLIT_JOB_DETAILS: null + SLICE_JOB_DETAILS: null } }; JobDetails.$onInit(); @@ -140,10 +140,10 @@ describe('View: Job Details', () => { job_slice_count: null, job_slice_number: 2, labels: { - SPLIT_JOB: 'foo' + SLICE_JOB: 'foo' }, tooltips: { - SPLIT_JOB_DETAILS: 'bar' + SLICE_JOB_DETAILS: 'bar' } }; JobDetails.$onInit(); @@ -155,10 +155,10 @@ describe('View: Job Details', () => { job_slice_count: 2, job_slice_number: null, labels: { - SPLIT_JOB: 'foo' + SLICE_JOB: 'foo' }, tooltips: { - SPLIT_JOB_DETAILS: 'bar' + SLICE_JOB_DETAILS: 'bar' } }; JobDetails.$onInit(); @@ -170,10 +170,10 @@ describe('View: Job Details', () => { job_slice_count: 1, job_slice_number: null, labels: { - SPLIT_JOB: 'foo' + SLICE_JOB: 'foo' }, tooltips: { - SPLIT_JOB_DETAILS: 'bar' + SLICE_JOB_DETAILS: 'bar' } }; JobDetails.$onInit(); From 236b332a8b3b33c9b66417ce661a22b44cda3d95 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Thu, 18 Oct 2018 08:40:44 -0400 Subject: [PATCH 26/29] bump migration number --- .../{0050_v330_job_slicing.py => 0051_v340_job_slicing.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename awx/main/migrations/{0050_v330_job_slicing.py => 0051_v340_job_slicing.py} (96%) diff --git a/awx/main/migrations/0050_v330_job_slicing.py b/awx/main/migrations/0051_v340_job_slicing.py similarity index 96% rename from awx/main/migrations/0050_v330_job_slicing.py rename to awx/main/migrations/0051_v340_job_slicing.py index c786f455db..0e5c8bd701 100644 --- a/awx/main/migrations/0050_v330_job_slicing.py +++ b/awx/main/migrations/0051_v340_job_slicing.py @@ -10,7 +10,7 @@ import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ - ('main', '0049_v330_validate_instance_capacity_adjustment'), + ('main', '0050_v340_drop_celery_tables'), ] operations = [ From f435e577b2a4cdeda94bc29abb6e5d5bfb6397d6 Mon Sep 17 00:00:00 2001 From: AlanCoding Date: Tue, 30 Oct 2018 10:09:46 -0400 Subject: [PATCH 27/29] Adjust slicing tooltip text --- awx/ui/client/src/templates/job_templates/job-template.form.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/ui/client/src/templates/job_templates/job-template.form.js b/awx/ui/client/src/templates/job_templates/job-template.form.js index da3454ef60..f91ce149a9 100644 --- a/awx/ui/client/src/templates/job_templates/job-template.form.js +++ 
b/awx/ui/client/src/templates/job_templates/job-template.form.js @@ -267,7 +267,7 @@ function(NotificationsList, i18n) { dataTitle: i18n._('Slice Job Count'), dataPlacement: 'right', dataContainer: 'body', - awPopOver: "
" + i18n._("The number of jobs to slice into at runtime. Will cause the Job Template to launch a workflow if value is greater than 1.") + "

", + awPopOver: "

" + i18n._("The number of job slices to create for this job. This should not be set for jobs where cross-host orchestration is done. For best behavior, this number should be high enough that each slice is smaller than an individual cluster node capacity.") + "

", ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' }, diff_mode: { From 2d286c5f6852889d4e0cc5c9326f7e164b913ad3 Mon Sep 17 00:00:00 2001 From: kialam Date: Thu, 18 Oct 2018 10:19:17 -0400 Subject: [PATCH 28/29] Redirect to WF Details page after prompt for slice JT. --- .../launchTemplateButton.component.js | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js b/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js index 5ff065c0c3..20cf1d8e94 100644 --- a/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js +++ b/awx/ui/client/lib/components/launchTemplateButton/launchTemplateButton.component.js @@ -148,7 +148,13 @@ function atLaunchTemplateCtrl ( id: vm.promptData.template, launchData: jobLaunchData }).then((launchRes) => { - $state.go('output', { id: launchRes.data.job, type: 'playbook' }, { reload: true }); + /* Slice Jobs: Redirect to WF Details page if returned + job type is a WF job */ + if (launchRes.data.type === 'workflow_job' && launchRes.data.workflow_job !== null) { + $state.go('workflowResults', { id: launchRes.data.workflow_job }, { reload: true }); + } else { + $state.go('output', { id: launchRes.data.job, type: 'playbook' }, { reload: true }); + } }).catch(createErrorHandler('launch job template', 'POST')); } else if (vm.promptData.templateType === 'workflow_job_template') { workflowTemplate.create().postLaunch({ From 62a36e3704746e2c8ef3b8b2546101455b3160f4 Mon Sep 17 00:00:00 2001 From: Jake McDermott Date: Tue, 30 Oct 2018 22:23:36 -0400 Subject: [PATCH 29/29] update job slice count help text --- awx/ui/client/src/templates/job_templates/job-template.form.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/awx/ui/client/src/templates/job_templates/job-template.form.js b/awx/ui/client/src/templates/job_templates/job-template.form.js index f91ce149a9..f716cb4abb 100644 --- a/awx/ui/client/src/templates/job_templates/job-template.form.js +++ b/awx/ui/client/src/templates/job_templates/job-template.form.js @@ -267,7 +267,7 @@ function(NotificationsList, i18n) { dataTitle: i18n._('Slice Job Count'), dataPlacement: 'right', dataContainer: 'body', - awPopOver: "
" + i18n._("The number of job slices to create for this job. This should not be set for jobs where cross-host orchestration is done. For best behavior, this number should be high enough that each slice is smaller than an individual cluster node capacity.") + "

", + awPopOver: "

" + i18n._("Divide the work done by this job template into the specified number of job slices, each running the same tasks against a portion of the inventory.") + "

", ngDisabled: '!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)' }, diff_mode: {