Merge pull request #6692 from AlanCoding/manual_locations

Tag jobs started via special cases with node & group
This commit is contained in:
Alan Rominger
2017-06-22 16:42:17 -04:00
committed by GitHub
5 changed files with 18 additions and 6 deletions

View File

@@ -371,6 +371,9 @@ class Command(NoArgsCommand):
 job_args=json.dumps(sys.argv),
 job_env=dict(os.environ.items()),
 job_cwd=os.getcwd(),
+_eager_fields=dict(
+    execution_node=settings.CLUSTER_HOST_ID,
+    instance_group=InstanceGroup.objects.get(name='tower'))
 )
 # FIXME: Wait or raise error if inventory is being updated by another

View File

@@ -47,7 +47,7 @@ class TaskManager():
 def get_tasks(self, status_list=('pending', 'waiting', 'running')):
     jobs = [j for j in Job.objects.filter(status__in=status_list).prefetch_related('instance_group')]
-    inventory_updates_qs = InventoryUpdate.objects.filter(status__in=status_list).prefetch_related('inventory_source', 'instance_group')
+    inventory_updates_qs = InventoryUpdate.objects.filter(status__in=status_list).exclude(source='file').prefetch_related('inventory_source', 'instance_group')
     inventory_updates = [i for i in inventory_updates_qs]
     project_updates = [p for p in ProjectUpdate.objects.filter(status__in=status_list).prefetch_related('instance_group')]
     system_jobs = [s for s in SystemJob.objects.filter(status__in=status_list).prefetch_related('instance_group')]

View File

@@ -1134,14 +1134,17 @@ class RunJob(BaseTask):
 if job.project and job.project.scm_type:
     job_request_id = '' if self.request.id is None else self.request.id
     pu_ig = job.instance_group
+    pu_en = job.execution_node
     if kwargs['isolated']:
         pu_ig = pu_ig.controller
+        pu_en = settings.CLUSTER_HOST_ID
     local_project_sync = job.project.create_project_update(
         launch_type="sync",
         _eager_fields=dict(
             job_type='run',
             status='running',
             instance_group = pu_ig,
+            execution_node=pu_en,
             celery_task_id=job_request_id))
     # save the associated job before calling run() so that a
     # cancel() call on the job can cancel the project update
@@ -1392,6 +1395,8 @@ class RunProjectUpdate(BaseTask):
 launch_type='scm',
 _eager_fields=dict(
     status='running',
+    instance_group=project_update.instance_group,
+    execution_node=project_update.execution_node,
     celery_task_id=str(project_request_id),
     source_project_update=project_update))
 try:
@@ -1856,6 +1861,7 @@ class RunInventoryUpdate(BaseTask):
 _eager_fields=dict(
     job_type='run',
     status='running',
+    execution_node=inventory_update.execution_node,
     instance_group = inventory_update.instance_group,
     celery_task_id=request_id))
 # associate the inventory update before calling run() so that a

View File

@@ -86,6 +86,7 @@ def mock_logging(self):
 @mock.patch.object(inventory_import.Command, 'set_logging_level', mock_logging)
 class TestInvalidOptionsFunctional:
+    @mock.patch.object(inventory_import.InstanceGroup.objects, 'get', new=mock.MagicMock(return_value=None))
     def test_invalid_options_invalid_source(self, inventory):
         # Give invalid file to the command
         cmd = inventory_import.Command()
@@ -113,8 +114,9 @@ class TestInvalidOptionsFunctional:
 @pytest.mark.django_db
 @pytest.mark.inventory_import
-@mock.patch.object(inventory_import.Command, 'check_license', mock.MagicMock())
-@mock.patch.object(inventory_import.Command, 'set_logging_level', mock_logging)
+@mock.patch.object(inventory_import.InstanceGroup.objects, 'get', new=mock.MagicMock(return_value=None))
+@mock.patch.object(inventory_import.Command, 'check_license', new=mock.MagicMock())
+@mock.patch.object(inventory_import.Command, 'set_logging_level', new=mock_logging)
 class TestINIImports:
     @mock.patch.object(inventory_import.AnsibleInventoryLoader, 'load', mock.MagicMock(return_value=TEST_MEM_OBJECTS))

View File

@@ -115,9 +115,9 @@ rabbitmq_enable_manager=false
 ### Security Isolated Rampart Groups
 In Tower versions 3.2+ customers may optionally define isolated groups
-inside security-restricted networking zones to run jobs from.
+inside security-restricted networking zones to run jobs and ad hoc commands from.
 Instances in these groups will _not_ have a full install of Tower, but will have a minimal
-set of utilities used to run jobs on them. These must be specified
+set of utilities used to run jobs. Isolated groups must be specified
 in the inventory file prefixed with `isolated_group_`. An example inventory
 file is shown below.
@@ -150,7 +150,8 @@ the `isolatedA` and `isolatedB` hosts).
 When a job is scheduled to run on an "isolated" instance:
 * The "controller" instance compiles metadata required to run the job and copies
-it to the "isolated" instance via `rsync`. This metadata includes:
+it to the "isolated" instance via `rsync` (any related project or inventory
+updates are run on the controller instance). This metadata includes:
     - the entire SCM checkout directory for the project
     - a static inventory file