Mirror of https://github.com/ansible/awx.git, synced 2026-05-16 13:57:39 -02:30.
Merge branch 'rampart_groups_setup_playbook' into devel
* rampart_groups_setup_playbook: Updating changelog for Instance Groups Fix an incorrect reference on instance group jobs list Purge remaining references to rampart groups Simplify can_access for instance groups on job templates Adding Instance Group permissions and tests Increase test coverage for task scheduler inventory updates Exit logic fixes for instance group tools View Fixes for instance groups new view to allow associations but no creations Updating acceptance documentation and system docs Updating unit tests for task manager refactoring Update views and serializers to support instance group (ramparts) Implementing models for instance groups, updating task manager Updating the setup playbook to support instance group installation Add nginx to server start and switch back to first tmux win Fix an issue where the local queue wouldn't use the rabbitmq name
This commit is contained in:
@@ -6,6 +6,8 @@ import pytest
|
||||
from awx.main.tests.factories import (
|
||||
create_organization,
|
||||
create_job_template,
|
||||
create_instance,
|
||||
create_instance_group,
|
||||
create_notification_template,
|
||||
create_survey_spec,
|
||||
create_workflow_job_template,
|
||||
@@ -32,6 +34,21 @@ def survey_spec_factory():
|
||||
return create_survey_spec
|
||||
|
||||
|
||||
@pytest.fixture
def instance_factory():
    """Expose the ``create_instance`` factory to tests as a fixture."""
    return create_instance
|
||||
|
||||
|
||||
@pytest.fixture
def instance_group_factory():
    """Expose the ``create_instance_group`` factory to tests as a fixture."""
    return create_instance_group
|
||||
|
||||
|
||||
@pytest.fixture
def default_instance_group(instance_factory, instance_group_factory):
    """The standard 'tower' instance group containing one instance, 'hostA'.

    Uses the injected factory fixtures instead of calling the module-level
    factories directly, so the fixture dependencies this fixture declares
    are the ones actually exercised (behavior is identical — the fixtures
    simply return those factory functions).
    """
    return instance_group_factory("tower", instances=[instance_factory("hostA")])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def job_template_with_survey_passwords_factory(job_template_factory):
|
||||
def rf(persisted):
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
from .tower import (
|
||||
create_instance,
|
||||
create_instance_group,
|
||||
create_organization,
|
||||
create_job_template,
|
||||
create_notification_template,
|
||||
@@ -11,6 +13,8 @@ from .exc import (
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'create_instance',
|
||||
'create_instance_group',
|
||||
'create_organization',
|
||||
'create_job_template',
|
||||
'create_notification_template',
|
||||
|
||||
@@ -7,6 +7,7 @@ from awx.main.models import (
|
||||
Project,
|
||||
Team,
|
||||
Instance,
|
||||
InstanceGroup,
|
||||
JobTemplate,
|
||||
Job,
|
||||
NotificationTemplate,
|
||||
@@ -27,11 +28,23 @@ from awx.main.models import (
|
||||
#
|
||||
|
||||
|
||||
def mk_instance(persisted=True, hostname='instance.example.org'):
    """Get or create the Instance row identified by this cluster's SYSTEM_UUID.

    :param persisted: must be True; Instance rows exist only in the database.
    :param hostname: hostname recorded on the instance row.
    :raises RuntimeError: if ``persisted`` is False.
    :returns: the ``Instance`` object itself (not the ``(obj, created)`` tuple).
    """
    # The scraped diff contained both the pre- and post-change lines of this
    # function; this is the resolved post-change version.
    if not persisted:
        raise RuntimeError('creating an Instance requires persisted=True')
    from django.conf import settings  # deferred so Django settings are configured first
    # get_or_create returns (obj, created); callers want the instance itself.
    return Instance.objects.get_or_create(uuid=settings.SYSTEM_UUID, hostname=hostname)[0]
|
||||
|
||||
|
||||
def mk_instance_group(name='tower', instance=None):
    """Get or create an InstanceGroup named *name*.

    :param instance: a single ``Instance`` or a list of them to add as group
        members; ``None`` adds no members.
    :returns: the ``InstanceGroup``.
    """
    # 'created' flag from get_or_create is not needed here.
    ig, _ = InstanceGroup.objects.get_or_create(name=name)
    if instance is not None:
        # Accept either one Instance or a list of them (was: type(...) == list).
        if isinstance(instance, list):
            for member in instance:
                member_add = ig.instances.add
                member_add(member)
        else:
            ig.instances.add(instance)
    # NOTE(review): the scraped diff loses indentation; save() is kept
    # unconditional, which is safe (idempotent) in either reading.
    ig.save()
    return ig
|
||||
|
||||
|
||||
def mk_organization(name, description=None, persisted=True):
|
||||
@@ -86,9 +99,11 @@ def mk_project(name, organization=None, description=None, persisted=True):
|
||||
|
||||
|
||||
def mk_credential(name, credential_type='ssh', persisted=True):
|
||||
type_ = CredentialType.defaults[credential_type]()
|
||||
if persisted:
|
||||
type_, status = CredentialType.objects.get_or_create(kind=credential_type)
|
||||
type_.save()
|
||||
else:
|
||||
type_ = CredentialType.defaults[credential_type]()
|
||||
cred = Credential(
|
||||
credential_type=type_,
|
||||
name=name
|
||||
|
||||
@@ -19,6 +19,8 @@ from .objects import (
|
||||
)
|
||||
|
||||
from .fixtures import (
|
||||
mk_instance,
|
||||
mk_instance_group,
|
||||
mk_organization,
|
||||
mk_team,
|
||||
mk_user,
|
||||
@@ -129,6 +131,14 @@ def generate_teams(organization, persisted, **kwargs):
|
||||
return teams
|
||||
|
||||
|
||||
def create_instance(name, instance_groups=None):
    """Create (or fetch) an Instance with hostname *name*.

    :param instance_groups: optional iterable of InstanceGroups. Previously
        this parameter was accepted but silently ignored; the new instance is
        now added to each supplied group. Callers passing None (the default)
        see identical behavior.
    """
    instance = mk_instance(hostname=name)
    if instance_groups:
        for group in instance_groups:
            group.instances.add(instance)
    return instance
|
||||
|
||||
|
||||
def create_instance_group(name, instances=None):
    """Factory entry point: an InstanceGroup called *name* holding *instances*."""
    group = mk_instance_group(name=name, instance=instances)
    return group
|
||||
|
||||
|
||||
def create_survey_spec(variables=None, default_type='integer', required=True):
|
||||
'''
|
||||
Returns a valid survey spec for a job template, based on the input
|
||||
@@ -234,6 +244,8 @@ def create_job_template(name, roles=None, persisted=True, **kwargs):
|
||||
|
||||
if 'survey' in kwargs:
|
||||
spec = create_survey_spec(kwargs['survey'])
|
||||
else:
|
||||
spec = None
|
||||
|
||||
jt = mk_job_template(name, project=proj,
|
||||
inventory=inv, credential=cred,
|
||||
@@ -248,8 +260,9 @@ def create_job_template(name, roles=None, persisted=True, **kwargs):
|
||||
else:
|
||||
# Fill in default survey answers
|
||||
job_extra_vars = {}
|
||||
for question in spec['spec']:
|
||||
job_extra_vars[question['variable']] = question['default']
|
||||
if spec is not None:
|
||||
for question in spec['spec']:
|
||||
job_extra_vars[question['variable']] = question['default']
|
||||
jobs[i] = mk_job(job_template=jt, project=proj, inventory=inv, credential=cred,
|
||||
extra_vars=job_extra_vars,
|
||||
job_type=job_type, persisted=persisted)
|
||||
|
||||
150
awx/main/tests/functional/task_management/test_rampart_groups.py
Normal file
150
awx/main/tests/functional/task_management/test_rampart_groups.py
Normal file
@@ -0,0 +1,150 @@
|
||||
import pytest
|
||||
import mock
|
||||
from datetime import timedelta
|
||||
from awx.main.scheduler import TaskManager
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_multi_group_basic_job_launch(instance_factory, default_instance_group, mocker,
                                      instance_group_factory, job_template_factory):
    """Two pending jobs bound to two different instance groups each start on
    their own group, even when each job's impact saturates a single group."""
    i1 = instance_factory("i1")
    i2 = instance_factory("i2")
    ig1 = instance_group_factory("ig1", instances=[i1])
    ig2 = instance_group_factory("ig2", instances=[i2])
    objects1 = job_template_factory('jt1', organization='org1', project='proj1',
                                    inventory='inv1', credential='cred1',
                                    jobs=["job_should_start"])
    objects1.job_template.instance_groups.add(ig1)
    j1 = objects1.jobs['job_should_start']
    j1.status = 'pending'
    j1.save()
    objects2 = job_template_factory('jt2', organization='org2', project='proj2',
                                    inventory='inv2', credential='cred2',
                                    jobs=["job_should_still_start"])
    objects2.job_template.instance_groups.add(ig2)
    j2 = objects2.jobs['job_should_still_start']
    j2.status = 'pending'
    j2.save()
    with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
        # Inflate each job's impact so a single group could not run both.
        mock_task_impact.return_value = 500
        # BUGFIX: mocker.patch() is not a context manager (pytest-mock patches
        # immediately and unpatches at test teardown); use mock.patch like the
        # rest of this module so the patch is actually scoped to this block.
        with mock.patch("awx.main.scheduler.TaskManager.start_task"):
            TaskManager().schedule()
            TaskManager.start_task.assert_has_calls([mock.call(j1, ig1), mock.call(j2, ig2)])
|
||||
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_multi_group_with_shared_dependency(instance_factory, default_instance_group, mocker,
                                            instance_group_factory, job_template_factory):
    """Jobs in different instance groups sharing a project dependency: the
    project update runs once on the default group; after it succeeds, each
    job starts on its own group."""
    i1 = instance_factory("i1")
    i2 = instance_factory("i2")
    ig1 = instance_group_factory("ig1", instances=[i1])
    ig2 = instance_group_factory("ig2", instances=[i2])
    objects1 = job_template_factory('jt1', organization='org1', project='proj1',
                                    inventory='inv1', credential='cred1',
                                    jobs=["job_should_start"])
    objects1.job_template.instance_groups.add(ig1)
    p = objects1.project
    # Force an SCM update dependency on every launch (cache timeout 0).
    p.scm_update_on_launch = True
    p.scm_update_cache_timeout = 0
    p.scm_type = "git"
    p.scm_url = "http://github.com/ansible/ansible.git"
    p.save()
    j1 = objects1.jobs['job_should_start']
    j1.status = 'pending'
    j1.save()
    objects2 = job_template_factory('jt2', organization=objects1.organization, project=p,
                                    inventory='inv2', credential='cred2',
                                    jobs=["job_should_still_start"])
    objects2.job_template.instance_groups.add(ig2)
    j2 = objects2.jobs['job_should_still_start']
    j2.status = 'pending'
    j2.save()
    # BUGFIX: mocker.patch() is not a context manager (pytest-mock patches
    # immediately and unpatches at teardown); use mock.patch like the second
    # block below so the patch is actually scoped to this block.
    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        TaskManager().schedule()
        pu = p.project_updates.first()
        # The shared project update is dispatched first, on the default group.
        TaskManager.start_task.assert_called_once_with(pu, default_instance_group, [pu])
        pu.finished = pu.created + timedelta(seconds=1)
        pu.status = "successful"
        pu.save()
    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        TaskManager().schedule()
        TaskManager.start_task.assert_has_calls([mock.call(j1, ig1), mock.call(j2, ig2)])
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_overcapacity_blocking_other_groups_unaffected(instance_factory, default_instance_group, mocker,
                                                       instance_group_factory, job_template_factory):
    # When one group (ig2) runs out of capacity, its remaining pending job is
    # blocked, while jobs in the other group (ig1, high-capacity) are unaffected.
    i1 = instance_factory("i1")
    i1.capacity = 1000
    i1.save()
    i2 = instance_factory("i2")
    ig1 = instance_group_factory("ig1", instances=[i1])
    ig2 = instance_group_factory("ig2", instances=[i2])
    # Two pending jobs in ig1 (fits both at impact 500 given capacity 1000).
    objects1 = job_template_factory('jt1', organization='org1', project='proj1',
                                    inventory='inv1', credential='cred1',
                                    jobs=["job_should_start"])
    objects1.job_template.instance_groups.add(ig1)
    j1 = objects1.jobs['job_should_start']
    j1.status = 'pending'
    j1.save()
    objects2 = job_template_factory('jt2', organization=objects1.organization, project='proj2',
                                    inventory='inv2', credential='cred2',
                                    jobs=["job_should_start", "job_should_also_start"])
    objects2.job_template.instance_groups.add(ig1)
    j1_1 = objects2.jobs['job_should_also_start']
    j1_1.status = 'pending'
    j1_1.save()
    # Two pending jobs in ig2 — only the first fits; the second must wait.
    objects3 = job_template_factory('jt3', organization='org2', project='proj3',
                                    inventory='inv3', credential='cred3',
                                    jobs=["job_should_still_start"])
    objects3.job_template.instance_groups.add(ig2)
    j2 = objects3.jobs['job_should_still_start']
    j2.status = 'pending'
    j2.save()
    objects4 = job_template_factory('jt4', organization=objects3.organization, project='proj4',
                                    inventory='inv4', credential='cred4',
                                    jobs=["job_should_not_start"])
    objects4.job_template.instance_groups.add(ig2)
    j2_1 = objects4.jobs['job_should_not_start']
    j2_1.status = 'pending'
    j2_1.save()
    tm = TaskManager()
    with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
        # Every job consumes 500 capacity; ig2 (capacity from one default
        # instance) can run only one of its two jobs.
        mock_task_impact.return_value = 500
        # wraps=tm.start_task lets the real scheduling proceed while recording calls.
        with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
            tm.schedule()
            mock_job.assert_has_calls([mock.call(j1, ig1), mock.call(j1_1, ig1),
                                       mock.call(j2, ig2)])
            # j2_1 stays pending — exactly three starts.
            assert mock_job.call_count == 3
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_failover_group_run(instance_factory, default_instance_group, mocker,
                            instance_group_factory, job_template_factory):
    # A job template attached to two instance groups: when the first group is
    # saturated by an earlier job, the second job "fails over" to the other group.
    i1 = instance_factory("i1")
    i2 = instance_factory("i2")
    ig1 = instance_group_factory("ig1", instances=[i1])
    ig2 = instance_group_factory("ig2", instances=[i2])
    objects1 = job_template_factory('jt1', organization='org1', project='proj1',
                                    inventory='inv1', credential='cred1',
                                    jobs=["job_should_start"])
    objects1.job_template.instance_groups.add(ig1)
    j1 = objects1.jobs['job_should_start']
    j1.status = 'pending'
    j1.save()
    objects2 = job_template_factory('jt2', organization=objects1.organization, project='proj2',
                                    inventory='inv2', credential='cred2',
                                    jobs=["job_should_start", "job_should_also_start"])
    # jt2 may run in either group, in preference order ig1 then ig2.
    objects2.job_template.instance_groups.add(ig1)
    objects2.job_template.instance_groups.add(ig2)
    j1_1 = objects2.jobs['job_should_also_start']
    j1_1.status = 'pending'
    j1_1.save()
    tm = TaskManager()
    with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
        # Impact 500 per job: j1 fills ig1, so j1_1 must land on ig2.
        mock_task_impact.return_value = 500
        # wraps=tm.start_task lets the real scheduling proceed while recording calls.
        with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
            tm.schedule()
            mock_job.assert_has_calls([mock.call(j1, ig1), mock.call(j1_1, ig2)])
            assert mock_job.call_count == 2
|
||||
200
awx/main/tests/functional/task_management/test_scheduler.py
Normal file
200
awx/main/tests/functional/task_management/test_scheduler.py
Normal file
@@ -0,0 +1,200 @@
|
||||
import pytest
|
||||
import mock
|
||||
from datetime import timedelta
|
||||
from awx.main.scheduler import TaskManager
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_single_job_scheduler_launch(default_instance_group, job_template_factory, mocker):
    """A single pending job is started on the default instance group."""
    objects = job_template_factory('jt', organization='org1', project='proj',
                                   inventory='inv', credential='cred',
                                   jobs=["job_should_start"])
    j = objects.jobs["job_should_start"]
    j.status = 'pending'
    j.save()
    # BUGFIX: mocker.patch() is not a context manager (pytest-mock patches
    # immediately and unpatches at test teardown); use mock.patch like the
    # other tests in this module so the patch is scoped to this block.
    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        TaskManager().schedule()
        assert TaskManager.start_task.called
        assert TaskManager.start_task.call_args == ((j, default_instance_group),)
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_single_jt_multi_job_launch_blocks_last(default_instance_group, job_template_factory, mocker):
    """Two pending jobs of one template run serially: the second starts only
    after the first finishes (no allow_simultaneous)."""
    objects = job_template_factory('jt', organization='org1', project='proj',
                                   inventory='inv', credential='cred',
                                   jobs=["job_should_start", "job_should_not_start"])
    j1 = objects.jobs["job_should_start"]
    j1.status = 'pending'
    j1.save()
    j2 = objects.jobs["job_should_not_start"]
    j2.status = 'pending'
    j2.save()
    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        TaskManager().schedule()
        TaskManager.start_task.assert_called_once_with(j1, default_instance_group)
        j1.status = "successful"
        j1.save()
    # BUGFIX: the second block used mocker.patch() as a context manager, which
    # pytest-mock does not support (it patches immediately and unpatches at
    # teardown); use mock.patch consistently with the first block.
    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
        TaskManager().schedule()
        TaskManager.start_task.assert_called_once_with(j2, default_instance_group)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_single_jt_multi_job_launch_allow_simul_allowed(default_instance_group, job_template_factory, mocker):
|
||||
objects = job_template_factory('jt', organization='org1', project='proj',
|
||||
inventory='inv', credential='cred',
|
||||
jobs=["job_should_start", "job_should_not_start"])
|
||||
jt = objects.job_template
|
||||
jt.save()
|
||||
|
||||
j1 = objects.jobs["job_should_start"]
|
||||
j1.allow_simultaneous = True
|
||||
j1.status = 'pending'
|
||||
j1.save()
|
||||
j2 = objects.jobs["job_should_not_start"]
|
||||
j2.allow_simultaneous = True
|
||||
j2.status = 'pending'
|
||||
j2.save()
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_has_calls([mock.call(j1, default_instance_group),
|
||||
mock.call(j2, default_instance_group)])
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_multi_jt_capacity_blocking(default_instance_group, job_template_factory, mocker):
|
||||
objects1 = job_template_factory('jt1', organization='org1', project='proj1',
|
||||
inventory='inv1', credential='cred1',
|
||||
jobs=["job_should_start"])
|
||||
objects2 = job_template_factory('jt2', organization='org2', project='proj2',
|
||||
inventory='inv2', credential='cred2',
|
||||
jobs=["job_should_not_start"])
|
||||
j1 = objects1.jobs["job_should_start"]
|
||||
j1.status = 'pending'
|
||||
j1.save()
|
||||
j2 = objects2.jobs["job_should_not_start"]
|
||||
j2.status = 'pending'
|
||||
j2.save()
|
||||
tm = TaskManager()
|
||||
with mock.patch('awx.main.models.Job.task_impact', new_callable=mock.PropertyMock) as mock_task_impact:
|
||||
mock_task_impact.return_value = 500
|
||||
with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
|
||||
tm.schedule()
|
||||
mock_job.assert_called_once_with(j1, default_instance_group)
|
||||
j1.status = "successful"
|
||||
j1.save()
|
||||
with mock.patch.object(TaskManager, "start_task", wraps=tm.start_task) as mock_job:
|
||||
tm.schedule()
|
||||
mock_job.assert_called_once_with(j2, default_instance_group)
|
||||
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_single_job_dependencies_project_launch(default_instance_group, job_template_factory, mocker):
|
||||
objects = job_template_factory('jt', organization='org1', project='proj',
|
||||
inventory='inv', credential='cred',
|
||||
jobs=["job_should_start"])
|
||||
j = objects.jobs["job_should_start"]
|
||||
j.status = 'pending'
|
||||
j.save()
|
||||
p = objects.project
|
||||
p.scm_update_on_launch = True
|
||||
p.scm_update_cache_timeout = 0
|
||||
p.scm_type = "git"
|
||||
p.scm_url = "http://github.com/ansible/ansible.git"
|
||||
p.save()
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
tm = TaskManager()
|
||||
with mock.patch.object(TaskManager, "create_project_update", wraps=tm.create_project_update) as mock_pu:
|
||||
tm.schedule()
|
||||
mock_pu.assert_called_once_with(j)
|
||||
pu = [x for x in p.project_updates.all()]
|
||||
assert len(pu) == 1
|
||||
TaskManager.start_task.assert_called_once_with(pu[0], default_instance_group, [pu[0]])
|
||||
pu[0].status = "successful"
|
||||
pu[0].save()
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_called_once_with(j, default_instance_group)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_single_job_dependencies_inventory_update_launch(default_instance_group, job_template_factory, mocker, inventory_source_factory):
|
||||
objects = job_template_factory('jt', organization='org1', project='proj',
|
||||
inventory='inv', credential='cred',
|
||||
jobs=["job_should_start"])
|
||||
j = objects.jobs["job_should_start"]
|
||||
j.status = 'pending'
|
||||
j.save()
|
||||
i = objects.inventory
|
||||
ii = inventory_source_factory("ec2")
|
||||
ii.source = "ec2"
|
||||
ii.update_on_launch = True
|
||||
ii.update_cache_timeout = 0
|
||||
ii.save()
|
||||
i.inventory_sources.add(ii)
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
tm = TaskManager()
|
||||
with mock.patch.object(TaskManager, "create_inventory_update", wraps=tm.create_inventory_update) as mock_iu:
|
||||
tm.schedule()
|
||||
mock_iu.assert_called_once_with(j, ii)
|
||||
iu = [x for x in ii.inventory_updates.all()]
|
||||
assert len(iu) == 1
|
||||
TaskManager.start_task.assert_called_once_with(iu[0], default_instance_group, [iu[0]])
|
||||
iu[0].status = "successful"
|
||||
iu[0].save()
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_called_once_with(j, default_instance_group)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_shared_dependencies_launch(default_instance_group, job_template_factory, mocker, inventory_source_factory):
|
||||
objects = job_template_factory('jt', organization='org1', project='proj',
|
||||
inventory='inv', credential='cred',
|
||||
jobs=["first_job", "second_job"])
|
||||
j1 = objects.jobs["first_job"]
|
||||
j1.status = 'pending'
|
||||
j1.save()
|
||||
j2 = objects.jobs["second_job"]
|
||||
j2.status = 'pending'
|
||||
j2.save()
|
||||
p = objects.project
|
||||
p.scm_update_on_launch = True
|
||||
p.scm_update_cache_timeout = 300
|
||||
p.scm_type = "git"
|
||||
p.scm_url = "http://github.com/ansible/ansible.git"
|
||||
p.save()
|
||||
|
||||
i = objects.inventory
|
||||
ii = inventory_source_factory("ec2")
|
||||
ii.source = "ec2"
|
||||
ii.update_on_launch = True
|
||||
ii.update_cache_timeout = 300
|
||||
ii.save()
|
||||
i.inventory_sources.add(ii)
|
||||
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
pu = p.project_updates.first()
|
||||
iu = ii.inventory_updates.first()
|
||||
TaskManager.start_task.assert_has_calls([mock.call(pu, default_instance_group, [pu, iu]),
|
||||
mock.call(iu, default_instance_group, [pu, iu])])
|
||||
pu.status = "successful"
|
||||
pu.finished = pu.created + timedelta(seconds=1)
|
||||
pu.save()
|
||||
iu.status = "successful"
|
||||
iu.finished = iu.created + timedelta(seconds=1)
|
||||
iu.save()
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_called_once_with(j1, default_instance_group)
|
||||
j1.status = "successful"
|
||||
j1.save()
|
||||
with mock.patch("awx.main.scheduler.TaskManager.start_task"):
|
||||
TaskManager().schedule()
|
||||
TaskManager.start_task.assert_called_once_with(j2, default_instance_group)
|
||||
pu = [x for x in p.project_updates.all()]
|
||||
iu = [x for x in ii.inventory_updates.all()]
|
||||
assert len(pu) == 1
|
||||
assert len(iu) == 1
|
||||
40
awx/main/tests/functional/test_instances.py
Normal file
40
awx/main/tests/functional/test_instances.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_default_tower_instance_group(default_instance_group, job_factory):
    """A job with no explicit instance groups prefers the default 'tower' group."""
    job = job_factory()
    assert default_instance_group in job.preferred_instance_groups
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_basic_instance_group_membership(instance_group_factory, default_instance_group, job_factory):
|
||||
j = job_factory()
|
||||
ig = instance_group_factory("basicA", [default_instance_group.instances.first()])
|
||||
j.job_template.instance_groups.add(ig)
|
||||
assert ig in j.preferred_instance_groups
|
||||
assert default_instance_group not in j.preferred_instance_groups
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_inherited_instance_group_membership(instance_group_factory, default_instance_group, job_factory, project, inventory):
|
||||
j = job_factory()
|
||||
j.project = project
|
||||
j.inventory = inventory
|
||||
ig_org = instance_group_factory("basicA", [default_instance_group.instances.first()])
|
||||
ig_inv = instance_group_factory("basicB", [default_instance_group.instances.first()])
|
||||
j.project.organization.instance_groups.add(ig_org)
|
||||
j.inventory.instance_groups.add(ig_inv)
|
||||
assert ig_org in j.preferred_instance_groups
|
||||
assert ig_inv in j.preferred_instance_groups
|
||||
assert default_instance_group not in j.preferred_instance_groups
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_instance_group_capacity(instance_factory, instance_group_factory):
    """Group capacity is the sum of member instance capacities (100 each by default)."""
    members = [instance_factory(hostname) for hostname in ("i1", "i2", "i3")]
    assert instance_group_factory("all", instances=members).capacity == 300
    assert instance_group_factory("single", instances=[members[0]]).capacity == 100
|
||||
79
awx/main/tests/functional/test_rbac_instance_groups.py
Normal file
79
awx/main/tests/functional/test_rbac_instance_groups.py
Normal file
@@ -0,0 +1,79 @@
|
||||
import pytest
|
||||
|
||||
from awx.main.access import (
|
||||
InstanceGroupAccess,
|
||||
OrganizationAccess,
|
||||
InventoryAccess,
|
||||
JobTemplateAccess,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_ig_normal_user_visibility(organization, default_instance_group, user):
|
||||
u = user('user', False)
|
||||
assert len(InstanceGroupAccess(u).get_queryset()) == 0
|
||||
organization.instance_groups.add(default_instance_group)
|
||||
organization.member_role.members.add(u)
|
||||
assert len(InstanceGroupAccess(u).get_queryset()) == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_ig_admin_user_visibility(organization, default_instance_group, admin, system_auditor, org_admin):
|
||||
assert len(InstanceGroupAccess(admin).get_queryset()) == 1
|
||||
assert len(InstanceGroupAccess(system_auditor).get_queryset()) == 1
|
||||
assert len(InstanceGroupAccess(org_admin).get_queryset()) == 0
|
||||
organization.instance_groups.add(default_instance_group)
|
||||
assert len(InstanceGroupAccess(org_admin).get_queryset()) == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_ig_normal_user_associability(organization, default_instance_group, user):
|
||||
u = user('user', False)
|
||||
access = OrganizationAccess(u)
|
||||
assert not access.can_attach(organization, default_instance_group, 'instance_groups', None)
|
||||
organization.instance_groups.add(default_instance_group)
|
||||
organization.member_role.members.add(u)
|
||||
assert not access.can_attach(organization, default_instance_group, 'instance_groups', None)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_ig_associability(organization, default_instance_group, admin, system_auditor, org_admin, org_member, job_template_factory):
|
||||
admin_access = OrganizationAccess(admin)
|
||||
auditor_access = OrganizationAccess(system_auditor)
|
||||
oadmin_access = OrganizationAccess(org_admin)
|
||||
omember_access = OrganizationAccess(org_member)
|
||||
assert admin_access.can_attach(organization, default_instance_group, 'instance_groups', None)
|
||||
assert not oadmin_access.can_attach(organization, default_instance_group, 'instance_groups', None)
|
||||
assert not auditor_access.can_attach(organization, default_instance_group, 'instance_groups', None)
|
||||
assert not omember_access.can_attach(organization, default_instance_group, 'instance_groups', None)
|
||||
|
||||
organization.instance_groups.add(default_instance_group)
|
||||
|
||||
assert admin_access.can_unattach(organization, default_instance_group, 'instance_groups', None)
|
||||
assert oadmin_access.can_unattach(organization, default_instance_group, 'instance_groups', None)
|
||||
assert not auditor_access.can_unattach(organization, default_instance_group, 'instance_groups', None)
|
||||
assert not omember_access.can_unattach(organization, default_instance_group, 'instance_groups', None)
|
||||
|
||||
objects = job_template_factory('jt', organization=organization, project='p',
|
||||
inventory='i', credential='c')
|
||||
admin_access = InventoryAccess(admin)
|
||||
auditor_access = InventoryAccess(system_auditor)
|
||||
oadmin_access = InventoryAccess(org_admin)
|
||||
omember_access = InventoryAccess(org_member)
|
||||
|
||||
assert admin_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
|
||||
assert oadmin_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
|
||||
assert not auditor_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
|
||||
assert not omember_access.can_attach(objects.inventory, default_instance_group, 'instance_groups', None)
|
||||
|
||||
admin_access = JobTemplateAccess(admin)
|
||||
auditor_access = JobTemplateAccess(system_auditor)
|
||||
oadmin_access = JobTemplateAccess(org_admin)
|
||||
omember_access = JobTemplateAccess(org_member)
|
||||
|
||||
assert admin_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
|
||||
assert oadmin_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
|
||||
assert not auditor_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
|
||||
assert not omember_access.can_attach(objects.job_template, default_instance_group, 'instance_groups', None)
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ from rest_framework.exceptions import PermissionDenied
|
||||
# AWX
|
||||
from awx.api.generics import (
|
||||
ParentMixin,
|
||||
SubListCreateAttachDetachAPIView,
|
||||
SubListCreateAttachDetachAPIView, SubListAttachDetachAPIView,
|
||||
DeleteLastUnattachLabelMixin,
|
||||
ResourceAccessList
|
||||
)
|
||||
@@ -140,6 +140,20 @@ class TestSubListCreateAttachDetachAPIView:
|
||||
view.unattach_by_id.assert_not_called()
|
||||
|
||||
|
||||
def test_attach_detatch_only(mocker):
|
||||
mock_request = mocker.MagicMock()
|
||||
mock_request.data = {'name': 'name for my new model'}
|
||||
view = SubListAttachDetachAPIView()
|
||||
view.model = mocker.MagicMock()
|
||||
view.model._meta = mocker.MagicMock()
|
||||
view.model._meta.verbose_name = "Foo Bar"
|
||||
|
||||
resp = view.post(mock_request)
|
||||
|
||||
assert 'Foo Bar' in resp.data['msg']
|
||||
assert 'field is missing' in resp.data['msg']
|
||||
|
||||
|
||||
class TestDeleteLastUnattachLabelMixin:
|
||||
@mock.patch('__builtin__.super')
|
||||
def test_unattach_ok(self, super, mocker):
|
||||
|
||||
@@ -1,265 +0,0 @@
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
from datetime import timedelta
|
||||
|
||||
# Django
|
||||
from django.utils.timezone import now as tz_now
|
||||
|
||||
# awx
|
||||
from awx.main.scheduler.partial import (
|
||||
JobDict,
|
||||
ProjectUpdateDict,
|
||||
InventoryUpdateDict,
|
||||
InventorySourceDict,
|
||||
)
|
||||
from awx.main.scheduler import TaskManager
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def epoch():
|
||||
return tz_now()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def scheduler_factory(mocker, epoch):
|
||||
mocker.patch('awx.main.models.Instance.objects.total_capacity', return_value=10000)
|
||||
|
||||
def fn(tasks=[], inventory_sources=[], latest_project_updates=[], latest_inventory_updates=[], create_project_update=None, create_inventory_update=None):
|
||||
sched = TaskManager()
|
||||
|
||||
sched.graph.get_now = lambda: epoch
|
||||
|
||||
def no_create_inventory_update(task, ignore):
|
||||
raise RuntimeError("create_inventory_update should not be called")
|
||||
|
||||
def no_create_project_update(task):
|
||||
raise RuntimeError("create_project_update should not be called")
|
||||
|
||||
mocker.patch.object(sched, 'capture_chain_failure_dependencies')
|
||||
mocker.patch.object(sched, 'get_tasks', return_value=tasks)
|
||||
mocker.patch.object(sched, 'get_running_workflow_jobs', return_value=[])
|
||||
mocker.patch.object(sched, 'get_inventory_source_tasks', return_value=inventory_sources)
|
||||
mocker.patch.object(sched, 'get_latest_project_update_tasks', return_value=latest_project_updates)
|
||||
mocker.patch.object(sched, 'get_latest_inventory_update_tasks', return_value=latest_inventory_updates)
|
||||
create_project_update_mock = mocker.patch.object(sched, 'create_project_update', return_value=create_project_update)
|
||||
create_inventory_update_mock = mocker.patch.object(sched, 'create_inventory_update', return_value=create_inventory_update)
|
||||
mocker.patch.object(sched, 'start_task')
|
||||
|
||||
if not create_project_update:
|
||||
create_project_update_mock.side_effect = no_create_project_update
|
||||
if not create_inventory_update:
|
||||
create_inventory_update_mock.side_effect = no_create_inventory_update
|
||||
return sched
|
||||
return fn
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def project_update_factory(epoch):
|
||||
def fn():
|
||||
return ProjectUpdateDict({
|
||||
'id': 1,
|
||||
'created': epoch - timedelta(seconds=100),
|
||||
'project_id': 1,
|
||||
'project__scm_update_cache_timeout': 0,
|
||||
'celery_task_id': '',
|
||||
'launch_type': 'dependency',
|
||||
'project__scm_update_on_launch': True,
|
||||
})
|
||||
return fn
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def pending_project_update(project_update_factory):
|
||||
project_update = project_update_factory()
|
||||
project_update['status'] = 'pending'
|
||||
return project_update
|
||||
|
||||
|
||||
@pytest.fixture
def waiting_project_update(epoch, project_update_factory):
    """Project update in the 'waiting' state."""
    update = project_update_factory()
    update['status'] = 'waiting'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def running_project_update(epoch, project_update_factory):
    """Project update in the 'running' state."""
    update = project_update_factory()
    update['status'] = 'running'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def successful_project_update(epoch, project_update_factory):
    """Project update that finished successfully 90 seconds before *epoch*."""
    update = project_update_factory()
    update['finished'] = epoch - timedelta(seconds=90)
    update['status'] = 'successful'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def successful_project_update_cache_expired(epoch, project_update_factory):
    """Successful project update whose 1-second scm cache has already expired."""
    update = project_update_factory()

    update['status'] = 'successful'
    # Finished 110s ago with a 1s cache timeout, so the cache is stale.
    update['created'] = epoch - timedelta(seconds=120)
    update['finished'] = epoch - timedelta(seconds=110)
    update['project__scm_update_cache_timeout'] = 1
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def failed_project_update(epoch, project_update_factory):
    """Project update that failed 90 seconds before *epoch*."""
    update = project_update_factory()
    update['finished'] = epoch - timedelta(seconds=90)
    update['status'] = 'failed'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def inventory_update_factory(epoch):
    """Return a callable building a pending, dependency-launched InventoryUpdateDict."""
    def make_update():
        fields = {
            'id': 1,
            'created': epoch - timedelta(seconds=101),
            'inventory_id': 1,
            'celery_task_id': '',
            'status': 'pending',
            'launch_type': 'dependency',
            'inventory_source_id': 1,
            'inventory_source__inventory_id': 1,
        }
        return InventoryUpdateDict(fields)
    return make_update
|
||||
|
||||
|
||||
@pytest.fixture
def inventory_update_latest_factory(epoch):
    """Return a callable building the 'latest' InventoryUpdateDict shape (unfinished)."""
    def make_latest():
        fields = {
            'id': 1,
            'created': epoch - timedelta(seconds=101),
            'inventory_id': 1,
            'celery_task_id': '',
            'status': 'pending',
            'launch_type': 'dependency',
            'inventory_source_id': 1,
            'finished': None,
        }
        return InventoryUpdateDict(fields)
    return make_latest
|
||||
|
||||
|
||||
@pytest.fixture
def inventory_update_latest(inventory_update_latest_factory):
    """A single default 'latest' inventory update."""
    return inventory_update_latest_factory()
|
||||
|
||||
|
||||
@pytest.fixture
def successful_inventory_update_latest(inventory_update_latest_factory):
    """Latest inventory update that finished successfully 10s after creation."""
    latest = inventory_update_latest_factory()
    latest['status'] = 'successful'
    latest['finished'] = latest['created'] + timedelta(seconds=10)
    return latest
|
||||
|
||||
|
||||
@pytest.fixture
def failed_inventory_update_latest(inventory_update_latest_factory):
    """Latest inventory update in the 'failed' state."""
    latest = inventory_update_latest_factory()
    latest['status'] = 'failed'
    return latest
|
||||
|
||||
|
||||
@pytest.fixture
def pending_inventory_update(epoch, inventory_update_factory):
    """Inventory update in the 'pending' state."""
    update = inventory_update_factory()
    update['status'] = 'pending'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def waiting_inventory_update(epoch, inventory_update_factory):
    """Inventory update in the 'waiting' state."""
    update = inventory_update_factory()
    update['status'] = 'waiting'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def failed_inventory_update(epoch, inventory_update_factory):
    """Inventory update in the 'failed' state."""
    update = inventory_update_factory()
    update['status'] = 'failed'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def running_inventory_update(epoch, inventory_update_factory):
    """Inventory update in the 'running' state."""
    update = inventory_update_factory()
    update['status'] = 'running'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def successful_inventory_update(epoch, inventory_update_factory):
    """Inventory update that finished successfully 90 seconds before *epoch*."""
    update = inventory_update_factory()
    update['finished'] = epoch - timedelta(seconds=90)
    update['status'] = 'successful'
    return update
|
||||
|
||||
|
||||
@pytest.fixture
def job_factory(epoch):
    '''
    Return a callable building a pending, manually-launched JobDict.

    All keyword arguments are optional overrides of individual fields;
    ``inventory__inventory_sources`` defaults to a fresh empty list.
    '''
    def fn(id=1, project__scm_update_on_launch=True, inventory__inventory_sources=None, allow_simultaneous=False):
        # The original default was a mutable ``[]`` shared across every call
        # (the classic mutable-default-argument pitfall); use a None sentinel
        # and create a fresh list per call instead.
        if inventory__inventory_sources is None:
            inventory__inventory_sources = []
        return JobDict({
            'id': id,
            'status': 'pending',
            'job_template_id': 1,
            'project_id': 1,
            'inventory_id': 1,
            'launch_type': 'manual',
            'allow_simultaneous': allow_simultaneous,
            'created': epoch - timedelta(seconds=99),
            'celery_task_id': '',
            'project__scm_update_on_launch': project__scm_update_on_launch,
            'inventory__inventory_sources': inventory__inventory_sources,
            'forks': 5,
            'dependent_jobs__id': None,
        })
    return fn
|
||||
|
||||
|
||||
@pytest.fixture
def pending_job(job_factory):
    """Job in the 'pending' state."""
    j = job_factory()
    j['status'] = 'pending'
    return j
|
||||
|
||||
|
||||
@pytest.fixture
def running_job(job_factory):
    """Job in the 'running' state."""
    j = job_factory()
    j['status'] = 'running'
    return j
|
||||
|
||||
|
||||
@pytest.fixture
def inventory_source_factory():
    '''
    Return a callable building an InventorySourceDict for a given id.
    '''
    def fn(id=1):
        return InventorySourceDict({'id': id})
    return fn
|
||||
|
||||
|
||||
@pytest.fixture
def inventory_id_sources(inventory_source_factory):
    """Inventory 1 mapped to two inventory sources (ids 1 and 2)."""
    sources = [inventory_source_factory(id=1), inventory_source_factory(id=2)]
    return [(1, sources)]
|
||||
@@ -1,198 +0,0 @@
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
|
||||
# AWX
|
||||
from awx.main.scheduler.dag_simple import SimpleDAG
|
||||
from awx.main.scheduler.dag_workflow import WorkflowDAG
|
||||
from awx.main.models import Job, JobTemplate
|
||||
from awx.main.models.workflow import WorkflowJobNode
|
||||
|
||||
|
||||
@pytest.fixture
def dag_root():
    """A SimpleDAG of six nodes wired into three disjoint parent->child pairs."""
    dag = SimpleDAG()
    data = [
        {1: 1},
        {2: 2},
        {3: 3},
        {4: 4},
        {5: 5},
        {6: 6},
    ]
    # Add all the nodes to the DAG; a plain loop instead of a throwaway
    # list comprehension used only for its side effect.
    for d in data:
        dag.add_node(d)

    dag.add_edge(data[0], data[1])
    dag.add_edge(data[2], data[3])
    dag.add_edge(data[4], data[5])

    return dag
|
||||
|
||||
|
||||
@pytest.fixture
def dag_simple_edge_labels():
    """Like dag_root, but each of the three edges carries a label."""
    dag = SimpleDAG()
    data = [
        {1: 1},
        {2: 2},
        {3: 3},
        {4: 4},
        {5: 5},
        {6: 6},
    ]
    # Add all the nodes to the DAG; a plain loop instead of a throwaway
    # list comprehension used only for its side effect.
    for d in data:
        dag.add_node(d)

    dag.add_edge(data[0], data[1], 'one')
    dag.add_edge(data[2], data[3], 'two')
    dag.add_edge(data[4], data[5], 'three')

    return dag
|
||||
|
||||
|
||||
'''
|
||||
class TestSimpleDAG(object):
|
||||
def test_get_root_nodes(self, dag_root):
|
||||
leafs = dag_root.get_leaf_nodes()
|
||||
|
||||
roots = dag_root.get_root_nodes()
|
||||
|
||||
def test_get_labeled_edges(self, dag_simple_edge_labels):
|
||||
dag = dag_simple_edge_labels
|
||||
nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'one')
|
||||
nodes = dag.get_dependencies(dag.nodes[0]['node_object'], 'two')
|
||||
'''
|
||||
|
||||
|
||||
@pytest.fixture
def factory_node():
    """Return a callable building a WorkflowJobNode, optionally with a Job in *status*."""
    def fn(id, status):
        node = WorkflowJobNode(id=id)
        if status:
            node.job = Job(status=status)
        node.unified_job_template = JobTemplate(name='JT{}'.format(id))
        return node
    return fn
|
||||
|
||||
|
||||
@pytest.fixture
def workflow_dag_level_2(factory_node):
    """Three successful roots, each with one unrun child; children should run next."""
    dag = WorkflowDAG()
    data = [
        factory_node(0, 'successful'),
        factory_node(1, 'successful'),
        factory_node(2, 'successful'),
        factory_node(3, None),
        factory_node(4, None),
        factory_node(5, None),
    ]
    # Plain loop instead of a list comprehension used only for side effects.
    for d in data:
        dag.add_node(d)

    dag.add_edge(data[0], data[3], 'success_nodes')
    dag.add_edge(data[1], data[4], 'success_nodes')
    dag.add_edge(data[2], data[5], 'success_nodes')

    # (dag, expected nodes to run, workflow-is-done flag)
    return (dag, data[3:6], False)
|
||||
|
||||
|
||||
@pytest.fixture
def workflow_dag_multiple_roots(factory_node):
    """Three unrun roots with one child each; the roots should run first."""
    dag = WorkflowDAG()
    data = [
        factory_node(1, None),
        factory_node(2, None),
        factory_node(3, None),
        factory_node(4, None),
        factory_node(5, None),
        factory_node(6, None),
    ]
    # Plain loop instead of a list comprehension used only for side effects.
    for d in data:
        dag.add_node(d)

    dag.add_edge(data[0], data[3], 'success_nodes')
    dag.add_edge(data[1], data[4], 'success_nodes')
    dag.add_edge(data[2], data[5], 'success_nodes')

    expected = data[0:3]
    return (dag, expected, False)
|
||||
|
||||
|
||||
@pytest.fixture
def workflow_dag_multiple_edges_labeled(factory_node):
    """A failure chain 0 -> 2 -> 4 whose final failure node (5) is next to run."""
    dag = WorkflowDAG()
    data = [
        factory_node(0, 'failed'),
        factory_node(1, None),
        factory_node(2, 'failed'),
        factory_node(3, None),
        factory_node(4, 'failed'),
        factory_node(5, None),
    ]
    # Plain loop instead of a list comprehension used only for side effects.
    for d in data:
        dag.add_node(d)

    dag.add_edge(data[0], data[1], 'success_nodes')
    dag.add_edge(data[0], data[2], 'failure_nodes')
    dag.add_edge(data[2], data[3], 'success_nodes')
    dag.add_edge(data[2], data[4], 'failure_nodes')
    dag.add_edge(data[4], data[5], 'failure_nodes')

    expected = data[5:6]
    return (dag, expected, False)
|
||||
|
||||
|
||||
@pytest.fixture
def workflow_dag_finished(factory_node):
    """Same topology as the labeled-edges DAG, but every reachable node ran; workflow is done."""
    dag = WorkflowDAG()
    data = [
        factory_node(0, 'failed'),
        factory_node(1, None),
        factory_node(2, 'failed'),
        factory_node(3, None),
        factory_node(4, 'failed'),
        factory_node(5, 'successful'),
    ]
    # Plain loop instead of a list comprehension used only for side effects.
    for d in data:
        dag.add_node(d)

    dag.add_edge(data[0], data[1], 'success_nodes')
    dag.add_edge(data[0], data[2], 'failure_nodes')
    dag.add_edge(data[2], data[3], 'success_nodes')
    dag.add_edge(data[2], data[4], 'failure_nodes')
    dag.add_edge(data[4], data[5], 'failure_nodes')

    expected = []
    return (dag, expected, True)
|
||||
|
||||
|
||||
@pytest.fixture
def workflow_dag_always(factory_node):
    """A chain of 'always_nodes' edges; the last (unrun) node is next to run."""
    dag = WorkflowDAG()
    data = [
        factory_node(0, 'failed'),
        factory_node(1, 'successful'),
        factory_node(2, None),
    ]
    # Plain loop instead of a list comprehension used only for side effects.
    for d in data:
        dag.add_node(d)

    dag.add_edge(data[0], data[1], 'always_nodes')
    dag.add_edge(data[1], data[2], 'always_nodes')

    expected = data[2:3]
    return (dag, expected, False)
|
||||
|
||||
|
||||
@pytest.fixture(params=['workflow_dag_multiple_roots', 'workflow_dag_level_2',
                        'workflow_dag_multiple_edges_labeled', 'workflow_dag_finished',
                        'workflow_dag_always'])
def workflow_dag(request):
    """Parametrized alias over every workflow DAG fixture above."""
    # request.getfuncargvalue() was deprecated in pytest 3.0 and removed in
    # 4.0; getfixturevalue() is the supported spelling with the same behavior.
    return request.getfixturevalue(request.param)
|
||||
|
||||
|
||||
class TestWorkflowDAG():
    """Exercise WorkflowDAG traversal against each parametrized fixture."""

    def test_bfs_nodes_to_run(self, workflow_dag):
        dag, expected, _ = workflow_dag
        assert dag.bfs_nodes_to_run() == expected

    def test_is_workflow_done(self, workflow_dag):
        dag, _, is_done = workflow_dag
        assert dag.is_workflow_done() == is_done
|
||||
@@ -1,121 +0,0 @@
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
from datetime import timedelta
|
||||
|
||||
# Django
|
||||
from django.utils.timezone import now as tz_now
|
||||
|
||||
# AWX
|
||||
from awx.main.scheduler.dependency_graph import DependencyGraph
|
||||
from awx.main.scheduler.partial import ProjectUpdateDict
|
||||
|
||||
|
||||
@pytest.fixture
def graph():
    """A fresh, empty DependencyGraph."""
    return DependencyGraph()
|
||||
|
||||
|
||||
@pytest.fixture
def job(job_factory):
    """A default job with project_id pinned to 1."""
    j = job_factory()
    # NOTE(review): attribute assignment, while other fixtures in this file
    # use item assignment (job['project_id'] = 1) — confirm JobDict treats
    # these the same before relying on this value.
    j.project_id = 1
    return j
|
||||
|
||||
|
||||
@pytest.fixture
def unsuccessful_last_project(graph, job):
    """Graph whose latest project update failed (despite a huge cache timeout)."""
    failed_update = ProjectUpdateDict(dict(id=1,
                                           project__scm_update_cache_timeout=999999,
                                           project_id=1,
                                           status='failed',
                                           created='3',
                                           finished='3',))
    graph.add_latest_project_update(failed_update)
    return graph
|
||||
|
||||
|
||||
@pytest.fixture
def last_dependent_project(graph, job):
    """Graph with a waiting dependency update created just before the job."""
    now = tz_now()

    job['project_id'] = 1
    job['created'] = now
    waiting_update = ProjectUpdateDict(dict(id=1, project_id=1, status='waiting',
                                            project__scm_update_cache_timeout=0,
                                            launch_type='dependency',
                                            created=now - timedelta(seconds=1),))
    graph.add_latest_project_update(waiting_update)
    return (graph, job)
|
||||
|
||||
|
||||
@pytest.fixture
def timedout_project_update(graph, job):
    """Graph whose latest successful update finished past its 10s cache window."""
    now = tz_now()

    job['project_id'] = 1
    job['created'] = now
    # Finished 11s ago with a 10s cache timeout -> cache expired.
    stale_update = ProjectUpdateDict(dict(id=1, project_id=1, status='successful',
                                          project__scm_update_cache_timeout=10,
                                          launch_type='dependency',
                                          created=now - timedelta(seconds=100),
                                          finished=now - timedelta(seconds=11),))
    graph.add_latest_project_update(stale_update)
    return (graph, job)
|
||||
|
||||
|
||||
@pytest.fixture
def not_timedout_project_update(graph, job):
    """Graph whose latest successful update is still inside its 3600s cache window."""
    now = tz_now()

    job['project_id'] = 1
    job['created'] = now
    # Finished 11s ago with a 3600s cache timeout -> cache still fresh.
    fresh_update = ProjectUpdateDict(dict(id=1, project_id=1, status='successful',
                                          project__scm_update_cache_timeout=3600,
                                          launch_type='dependency',
                                          created=now - timedelta(seconds=100),
                                          finished=now - timedelta(seconds=11),))
    graph.add_latest_project_update(fresh_update)
    return (graph, job)
|
||||
|
||||
|
||||
class TestShouldUpdateRelatedProject():
    """DependencyGraph.should_update_related_project cache-decision cases."""

    def test_no_project_updates(self, graph, job):
        # With no prior update recorded, an update is always needed.
        assert graph.should_update_related_project(job) is True

    def test_timedout_project_update(self, timedout_project_update):
        (graph, job) = timedout_project_update
        assert graph.should_update_related_project(job) is True

    def test_not_timedout_project_update(self, not_timedout_project_update):
        (graph, job) = not_timedout_project_update
        assert graph.should_update_related_project(job) is False

    def test_unsuccessful_last_project(self, unsuccessful_last_project, job):
        graph = unsuccessful_last_project
        # A failed last update forces a new one regardless of cache timeout.
        assert graph.should_update_related_project(job) is True

    def test_last_dependent_project(self, last_dependent_project):
        (graph, job) = last_dependent_project
        assert graph.should_update_related_project(job) is False
|
||||
@@ -1,132 +0,0 @@
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
from datetime import timedelta
|
||||
|
||||
|
||||
@pytest.fixture
def pending_job(job_factory):
    """Pending job with one inventory source and no scm-update-on-launch."""
    return job_factory(project__scm_update_on_launch=False, inventory__inventory_sources=['1'])
|
||||
|
||||
|
||||
@pytest.fixture
def successful_inventory_update_latest(inventory_update_latest_factory):
    """Latest inventory update: successful, finished within a 100s cache window."""
    latest = inventory_update_latest_factory()
    latest['inventory_source__update_cache_timeout'] = 100
    latest['status'] = 'successful'
    latest['finished'] = latest['created'] + timedelta(seconds=10)
    return latest
|
||||
|
||||
|
||||
@pytest.fixture
def successful_inventory_update_latest_cache_expired(inventory_update_latest_factory):
    """Latest inventory update whose 1s cache window has already lapsed."""
    latest = inventory_update_latest_factory()
    latest['inventory_source__update_cache_timeout'] = 1
    # NOTE(review): status is left at the factory default ('pending') even
    # though the fixture name says "successful" — confirm this is intended.
    latest['finished'] = latest['created'] + timedelta(seconds=2)
    return latest
|
||||
|
||||
|
||||
@pytest.fixture
def failed_inventory_update_latest_cache_zero(failed_inventory_update_latest):
    """Failed latest inventory update with update-on-launch and no cache window."""
    latest = failed_inventory_update_latest
    latest['inventory_source__update_cache_timeout'] = 0
    latest['inventory_source__update_on_launch'] = True
    latest['finished'] = latest['created'] + timedelta(seconds=2)
    latest['status'] = 'failed'
    return latest
|
||||
|
||||
|
||||
@pytest.fixture
def failed_inventory_update_latest_cache_non_zero(failed_inventory_update_latest_cache_zero):
    """Same failed latest update, but with an effectively infinite cache window."""
    latest = failed_inventory_update_latest_cache_zero
    latest['inventory_source__update_cache_timeout'] = 10000000
    return latest
|
||||
|
||||
|
||||
class TestStartInventoryUpdate():
    """A lone pending inventory update should be started directly."""

    def test_pending(self, scheduler_factory, pending_inventory_update):
        sched = scheduler_factory(tasks=[pending_inventory_update])

        sched._schedule()

        sched.start_task.assert_called_with(pending_inventory_update)
|
||||
|
||||
|
||||
class TestInventoryUpdateBlocked():
    """A newer pending update is blocked by an older running/waiting one."""
    # NOTE(review): neither test asserts on the scheduler mocks; they only
    # verify _schedule() completes without raising. Consider asserting that
    # start_task was not called for the pending update.

    def test_running_inventory_update(self, epoch, scheduler_factory, running_inventory_update, pending_inventory_update):
        running_inventory_update['created'] = epoch - timedelta(seconds=100)
        pending_inventory_update['created'] = epoch - timedelta(seconds=90)

        sched = scheduler_factory(tasks=[running_inventory_update, pending_inventory_update])

        sched._schedule()

    def test_waiting_inventory_update(self, epoch, scheduler_factory, waiting_inventory_update, pending_inventory_update):
        waiting_inventory_update['created'] = epoch - timedelta(seconds=100)
        pending_inventory_update['created'] = epoch - timedelta(seconds=90)

        sched = scheduler_factory(tasks=[waiting_inventory_update, pending_inventory_update])

        sched._schedule()
|
||||
|
||||
|
||||
class TestCreateDependentInventoryUpdate():
    """Cache hit/miss logic when a job depends on an inventory update."""

    def test(self, scheduler_factory, pending_job, waiting_inventory_update, inventory_id_sources):
        # No prior updates: a dependent inventory update is created and started.
        sched = scheduler_factory(tasks=[pending_job],
                                  create_inventory_update=waiting_inventory_update,
                                  inventory_sources=inventory_id_sources)

        sched._schedule()

        sched.start_task.assert_called_with(waiting_inventory_update, [pending_job])

    def test_cache_hit(self, scheduler_factory, pending_job, successful_inventory_update, successful_inventory_update_latest):
        # A fresh successful update satisfies the dependency; the job starts.
        sched = scheduler_factory(tasks=[successful_inventory_update, pending_job],
                                  latest_inventory_updates=[successful_inventory_update_latest])
        sched._schedule()

        sched.start_task.assert_called_with(pending_job)

    def test_cache_miss(self, scheduler_factory, pending_job, successful_inventory_update, successful_inventory_update_latest_cache_expired, waiting_inventory_update, inventory_id_sources):
        # Cache expired: a new dependent update is created and started.
        sched = scheduler_factory(tasks=[successful_inventory_update, pending_job],
                                  latest_inventory_updates=[successful_inventory_update_latest_cache_expired],
                                  create_inventory_update=waiting_inventory_update,
                                  inventory_sources=inventory_id_sources)
        sched._schedule()

        sched.start_task.assert_called_with(waiting_inventory_update, [pending_job])

    def test_last_update_timeout_zero_failed(self, scheduler_factory, pending_job, failed_inventory_update, failed_inventory_update_latest_cache_zero, waiting_inventory_update, inventory_id_sources):
        # Failed last update with no cache window: a new update is created.
        sched = scheduler_factory(tasks=[failed_inventory_update, pending_job],
                                  latest_inventory_updates=[failed_inventory_update_latest_cache_zero],
                                  create_inventory_update=waiting_inventory_update,
                                  inventory_sources=inventory_id_sources)
        sched._schedule()

        sched.start_task.assert_called_with(waiting_inventory_update, [pending_job])

    def test_last_update_timeout_non_zero_failed(self, scheduler_factory, pending_job, failed_inventory_update, failed_inventory_update_latest_cache_non_zero, waiting_inventory_update, inventory_id_sources):
        # A failed last update is never cache-eligible, whatever the timeout.
        sched = scheduler_factory(tasks=[failed_inventory_update, pending_job],
                                  latest_inventory_updates=[failed_inventory_update_latest_cache_non_zero],
                                  create_inventory_update=waiting_inventory_update,
                                  inventory_sources=inventory_id_sources)
        sched._schedule()

        sched.start_task.assert_called_with(waiting_inventory_update, [pending_job])
|
||||
|
||||
|
||||
class TestCaptureChainFailureDependencies():
    """Created dependencies are recorded for chain-failure propagation."""

    @pytest.fixture
    def inventory_id_sources(self, inventory_source_factory):
        # Narrow the module fixture to a single inventory source.
        return [(1, [inventory_source_factory(id=1)])]

    def test(self, scheduler_factory, pending_job, waiting_inventory_update, inventory_id_sources):
        sched = scheduler_factory(tasks=[pending_job],
                                  create_inventory_update=waiting_inventory_update,
                                  inventory_sources=inventory_id_sources)

        sched._schedule()

        sched.capture_chain_failure_dependencies.assert_called_with(pending_job, [waiting_inventory_update])
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
|
||||
# Python
|
||||
import pytest
|
||||
from datetime import timedelta
|
||||
|
||||
|
||||
class TestJobBlocked():
    """A pending job must not start while any dependency is in flight."""

    def test_inventory_update_waiting(self, scheduler_factory, waiting_inventory_update, pending_job):
        sched = scheduler_factory(tasks=[waiting_inventory_update, pending_job])

        sched._schedule()

        sched.start_task.assert_not_called()

    def test_inventory_update_running(self, scheduler_factory, running_inventory_update, pending_job, inventory_source_factory, inventory_id_sources):
        sched = scheduler_factory(tasks=[running_inventory_update, pending_job],
                                  inventory_sources=inventory_id_sources)

        sched._schedule()

        sched.start_task.assert_not_called()

    def test_project_update_running(self, scheduler_factory, pending_job, running_project_update):
        sched = scheduler_factory(tasks=[running_project_update, pending_job])

        sched._schedule()

        sched.start_task.assert_not_called()
        assert sched.create_project_update.call_count == 0

    def test_project_update_waiting(self, scheduler_factory, pending_job, waiting_project_update):
        sched = scheduler_factory(tasks=[waiting_project_update, pending_job],
                                  latest_project_updates=[waiting_project_update])

        sched._schedule()

        sched.start_task.assert_not_called()
        assert sched.create_project_update.call_count == 0
|
||||
|
||||
|
||||
class TestJob():
    """A job starts once all of its dependencies have already finished."""

    @pytest.fixture
    def successful_project_update(self, project_update_factory):
        """Fresh successful project update inside a 3600s cache window."""
        update = project_update_factory()
        update['status'] = 'successful'
        update['finished'] = update['created'] + timedelta(seconds=10)
        update['project__scm_update_cache_timeout'] = 3600
        return update

    def test_existing_dependencies_finished(self, scheduler_factory, successful_project_update, successful_inventory_update_latest, pending_job):
        sched = scheduler_factory(tasks=[successful_project_update, pending_job],
                                  latest_project_updates=[successful_project_update],
                                  latest_inventory_updates=[successful_inventory_update_latest])

        sched._schedule()

        sched.start_task.assert_called_with(pending_job)
|
||||
|
||||
|
||||
class TestCapacity():
    """Scheduler behavior when task impact meets or exceeds capacity."""

    @pytest.fixture
    def pending_job_high_impact(self, mocker, job_factory):
        """A pending job whose task_impact exceeds any realistic capacity."""
        pending_job = job_factory(project__scm_update_on_launch=False)
        mocker.patch.object(pending_job, 'task_impact', return_value=10001)
        return pending_job

    def test_no_capacity(self, scheduler_factory, pending_job_high_impact):
        scheduler = scheduler_factory(tasks=[pending_job_high_impact])

        scheduler._schedule()

        scheduler.start_task.assert_called_with(pending_job_high_impact)

    @pytest.fixture
    def pending_jobs_impactful(self, mocker, job_factory):
        """Three simultaneous-allowed pending jobs, each patched to impact 10."""
        # range() instead of Python-2-only xrange(); an explicit loop instead
        # of map() with a side-effecting lambda — map() is lazy on Python 3,
        # so the patches would silently never be applied there.
        pending_jobs = [job_factory(id=i + 1, project__scm_update_on_launch=False, allow_simultaneous=True)
                        for i in range(3)]
        for pending_job in pending_jobs:
            mocker.patch.object(pending_job, 'task_impact', return_value=10)
        return pending_jobs

    def test_capacity_exhausted(self, mocker, scheduler_factory, pending_jobs_impactful):
        scheduler = scheduler_factory(tasks=pending_jobs_impactful)

        scheduler._schedule()

        calls = [mocker.call(job) for job in pending_jobs_impactful]
        scheduler.start_task.assert_has_calls(calls)
|
||||
@@ -1,75 +0,0 @@
|
||||
|
||||
# TODO: wherever get_latest_project_update_tasks() is stubbed and returns a
# ProjectUpdateDict, we should instead return a ProjectUpdateLatestDict().
# For now, this is OK since the fields don't deviate that much.
|
||||
|
||||
|
||||
class TestStartProjectUpdate():
    """Explicitly launched project updates start without dependency creation."""

    def test(self, scheduler_factory, pending_project_update):
        sched = scheduler_factory(tasks=[pending_project_update])

        sched._schedule()

        sched.start_task.assert_called_with(pending_project_update)
        assert sched.create_project_update.call_count == 0

    def test_cache_oblivious(self, scheduler_factory, successful_project_update, pending_project_update):
        '''
        Explicit project update should always run. They should not use cache logic.
        '''
        sched = scheduler_factory(tasks=[pending_project_update],
                                  latest_project_updates=[successful_project_update])

        sched._schedule()

        sched.start_task.assert_called_with(pending_project_update)
        assert sched.create_project_update.call_count == 0
|
||||
|
||||
|
||||
class TestCreateDependentProjectUpdate():
    """Cache hit/miss logic when a job depends on a project update."""

    def test(self, scheduler_factory, pending_job, waiting_project_update):
        # No prior update: a dependent project update is created and started.
        sched = scheduler_factory(tasks=[pending_job],
                                  create_project_update=waiting_project_update)

        sched._schedule()

        sched.start_task.assert_called_with(waiting_project_update, [pending_job])

    def test_cache_hit(self, scheduler_factory, pending_job, successful_project_update):
        # A fresh successful update satisfies the dependency; the job starts.
        sched = scheduler_factory(tasks=[successful_project_update, pending_job],
                                  latest_project_updates=[successful_project_update])
        sched._schedule()

        sched.start_task.assert_called_with(pending_job)

    def test_cache_miss(self, scheduler_factory, pending_job, successful_project_update_cache_expired, waiting_project_update):
        # Cache expired: a new dependent update is created and started.
        sched = scheduler_factory(tasks=[successful_project_update_cache_expired, pending_job],
                                  latest_project_updates=[successful_project_update_cache_expired],
                                  create_project_update=waiting_project_update)
        sched._schedule()

        sched.start_task.assert_called_with(waiting_project_update, [pending_job])

    def test_last_update_failed(self, scheduler_factory, pending_job, failed_project_update, waiting_project_update):
        # A failed last update always triggers a new dependent update.
        sched = scheduler_factory(tasks=[failed_project_update, pending_job],
                                  latest_project_updates=[failed_project_update],
                                  create_project_update=waiting_project_update)
        sched._schedule()

        sched.start_task.assert_called_with(waiting_project_update, [pending_job])
|
||||
|
||||
|
||||
class TestProjectUpdateBlocked():
    """A pending project update must not start while related work runs."""

    # NOTE(review): "projct" typo in the method name is kept to avoid churn
    # in test selection / CI history.
    def test_projct_update_running(self, scheduler_factory, running_project_update, pending_project_update):
        sched = scheduler_factory(tasks=[running_project_update, pending_project_update])
        sched._schedule()

        sched.start_task.assert_not_called()
        assert sched.create_project_update.call_count == 0

    def test_job_running(self, scheduler_factory, running_job, pending_project_update):
        sched = scheduler_factory(tasks=[running_job, pending_project_update])

        sched._schedule()

        sched.start_task.assert_not_called()
|
||||
Reference in New Issue
Block a user