[last PR stuff] + Add warning if configs specified in 2 params (#5)

* Lean on API validation for tower_inventory_source arg errors

used for:
 - validating that a needed credential is given
 - catching a missing source_project for scm sources

* Add warning when config is specified in 2 places

Fix up unit tests, address multiple comments re: backwards compatibility, redundant methods, etc.

Update new_name and variables parameters, update unit tests
Alan Rominger 2020-02-18 16:02:05 -05:00 committed by beeankha
parent 2e4e687d69
commit 768280c9ba
16 changed files with 219 additions and 243 deletions
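For context, a rough sketch of the situation the new duplicate-config warning (described in the commit message above) targets: a module invocation that supplies a direct connection parameter and a tower_config_file at the same time. Values are illustrative and borrowed from the unit test added later in this diff.

```python
# Illustrative only: module params that would now trigger the duplicate-config
# warning; values mirror the new test_duplicate_config unit test below.
params = {
    'name': 'zigzoom',
    'tower_username': 'bob',           # direct connection parameter ...
    'tower_config_file': 'my_config',  # ... plus a config file at the same time
}

duplicated = [field for field in ('tower_host', 'tower_username', 'tower_password',
                                  'validate_certs', 'tower_oauthtoken')
              if params.get(field)]
print(duplicated)  # ['tower_username'] -> TowerModule.warn() would be called
```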

View File

@ -192,7 +192,7 @@ class APIView(views.APIView):
response.data['detail'] += ' To establish a login session, visit /api/login/.'
logger.info(status_msg)
else:
logger.warn(status_msg)
logger.warning(status_msg)
response = super(APIView, self).finalize_response(request, response, *args, **kwargs)
time_started = getattr(self, 'time_started', None)
response['X-API-Node'] = settings.CLUSTER_HOST_ID

View File

@ -2115,7 +2115,13 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
def get_field_from_model_or_attrs(fd):
return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
if get_field_from_model_or_attrs('source') != 'scm':
if get_field_from_model_or_attrs('source') == 'scm':
if (('source' in attrs or 'source_project' in attrs) and
get_field_from_model_or_attrs('source_project') is None):
raise serializers.ValidationError(
{"source_project": _("Project required for scm type sources.")}
)
else:
redundant_scm_fields = list(filter(
lambda x: attrs.get(x, None),
['source_project', 'source_path', 'update_on_project_update']
@ -3716,7 +3722,7 @@ class WorkflowJobNodeSerializer(LaunchConfigurationBaseSerializer):
class Meta:
model = WorkflowJobNode
fields = ('*', 'job', 'workflow_job', '-name', '-description', 'id', 'url', 'related',
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',
'unified_job_template', 'success_nodes', 'failure_nodes', 'always_nodes',
'all_parents_must_converge', 'do_not_run',)
def get_related(self, obj):

View File

@ -599,9 +599,9 @@ class TestControlledBySCM:
delete(inv_src.get_absolute_url(), admin_user, expect=204)
assert scm_inventory.inventory_sources.count() == 0
def test_adding_inv_src_ok(self, post, scm_inventory, admin_user):
def test_adding_inv_src_ok(self, post, scm_inventory, project, admin_user):
post(reverse('api:inventory_inventory_sources_list', kwargs={'pk': scm_inventory.id}),
{'name': 'new inv src', 'update_on_project_update': False, 'source': 'scm', 'overwrite_vars': True},
{'name': 'new inv src', 'source_project': project.pk, 'update_on_project_update': False, 'source': 'scm', 'overwrite_vars': True},
admin_user, expect=201)
def test_adding_inv_src_prohibited(self, post, scm_inventory, project, admin_user):

View File

@ -24,6 +24,8 @@ The following notes are changes that may require changes to playbooks.
- When the `extra_vars` parameter is used with the `tower_job_launch` module, the Job Template launch will fail unless `add_extra_vars` or `survey_enabled` is explicitly set to `True` on the Job Template.
- `tower_group` used to also service inventory sources; this functionality has been removed from the module. Use `tower_inventory_source` instead.
- A specified tower_config file used to handle k=v pairs on a single line. This is no longer supported; you may use a file formatted as yaml, json, or ini only.
- The `variables` parameter of the `tower_group` and `tower_host` modules is now in `dict` format and no longer supports the `C(@)` syntax for an external vars file (see the sketch below).
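As a minimal sketch of the new behavior (variable names here are illustrative; the `json.dumps` handling mirrors the tower_group/tower_host changes later in this diff), the modules now accept `variables` as a mapping and JSON-encode it before sending it to the API:

```python
# Sketch only: how tower_group/tower_host now handle the dict-valued
# "variables" parameter (see the module changes further down in this diff).
import json

variables = {"ansible_network_os": "iosxr"}  # passed as a dict; no "@file" support

group_fields = {"name": "Test Group"}
if variables:
    # the module JSON-encodes the dict before posting it to the API
    group_fields["variables"] = json.dumps(variables)

print(group_fields["variables"])  # {"ansible_network_os": "iosxr"}
```

Playbooks that previously pointed `variables` at a file with `@` now need to load the file themselves, for example with `"{{ lookup('file', 'vars.yml') | from_yaml }}"`, and pass the resulting dict to the module.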
## Running

View File

@ -59,20 +59,8 @@ class TowerModule(AnsibleModule):
self.json_output = {'changed': False}
# We have to take off mutually_exclusive_if in order to init with Ansible
mutually_exclusive_if = kwargs.pop('mutually_exclusive_if', None)
super(TowerModule, self).__init__(argument_spec=args, **kwargs)
# Eventually, we would like to push this as a feature to Ansible core for others to use...
# Test mutually_exclusive if
if mutually_exclusive_if:
for (var_name, var_value, exclusive_names) in mutually_exclusive_if:
if self.params.get(var_name) == var_value:
for excluded_param_name in exclusive_names:
if self.params.get(excluded_param_name) is not None:
self.fail_json(msg='Arguments {0} can not be set if source is {1}'.format(', '.join(exclusive_names), var_value))
self.load_config_files()
# Parameters specified on command line will override settings in any config
@ -125,7 +113,17 @@ class TowerModule(AnsibleModule):
# If we have a specified tower config, load it
if self.params.get('tower_config_file'):
duplicated_params = []
for direct_field in ('tower_host', 'tower_username', 'tower_password', 'validate_certs', 'tower_oauthtoken'):
if self.params.get(direct_field):
duplicated_params.append(direct_field)
if duplicated_params:
self.warn((
'The parameter(s) {0} were provided at the same time as tower_config_file. '
'Precedence may be unstable; we suggest using either the config file or the direct parameters.'
).format(', '.join(duplicated_params)))
try:
# TODO: warn if there are conflicts with other params
self.load_config(self.params.get('tower_config_file'))
except ConfigFileException as cfe:
# Since we were told specifically to load this we want it to fail if we have an error
@ -620,30 +618,3 @@ class TowerModule(AnsibleModule):
return False
else:
return True
def load_variables_if_file_specified(self, vars_value, var_name):
if not vars_value.startswith('@'):
return vars_value
if not HAS_YAML:
self.fail_json(msg=self.missing_required_lib('yaml'))
file_name = None
file_content = None
try:
file_name = expanduser(vars_value[1:])
with open(file_name, 'r') as f:
file_content = f.read()
except Exception as e:
self.fail_json(msg="Failed to load file {0} for {1} : {2}".format(file_name, var_name, e))
try:
vars_value = yaml.safe_load(file_content)
except yaml.YAMLError:
# Maybe it wasn't a YAML structure... lets try JSON
try:
vars_value = loads(file_content)
except ValueError:
self.fail_json(msg="Failed to load file {0} specifed by {1} as yaml or json".format(file_name, var_name))
return dumps(vars_value)

View File

@ -144,10 +144,6 @@ def main():
}
})
# Add entries to json_output to match old module
module.json_output['credential_type'] = name
module.json_output['state'] = state
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this
module.delete_if_needed(credential_type)

View File

@ -44,8 +44,8 @@ options:
type: str
variables:
description:
- Variables to use for the group, use C(@) for a file.
type: str
- Variables to use for the group.
type: dict
state:
description:
- Desired state of the resource.
@ -72,6 +72,7 @@ EXAMPLES = '''
'''
from ..module_utils.tower_api import TowerModule
import json
def main():
@ -81,7 +82,7 @@ def main():
new_name=dict(required=False),
description=dict(),
inventory=dict(required=True),
variables=dict(),
variables=dict(type='dict', required=False),
state=dict(choices=['present', 'absent'], default='present'),
)
@ -107,10 +108,6 @@ def main():
}
})
# If the variables were specified as a file, load them
if variables:
variables = module.load_variables_if_file_specified(variables, 'variables')
# Create the data that gets sent for create and update
group_fields = {
'name': new_name if new_name else name,
@ -119,7 +116,7 @@ def main():
if description:
group_fields['description'] = description
if variables:
group_fields['variables'] = variables
group_fields['variables'] = json.dumps(variables)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@ -49,8 +49,8 @@ options:
default: 'yes'
variables:
description:
- Variables to use for the host. Use C(@) for a file.
type: str
- Variables to use for the host.
type: dict
state:
description:
- Desired state of the resource.
@ -80,6 +80,7 @@ EXAMPLES = '''
from ..module_utils.tower_api import TowerModule
import json
def main():
@ -90,7 +91,7 @@ def main():
description=dict(default=''),
inventory=dict(required=True),
enabled=dict(type='bool', default=True),
variables=dict(default=''),
variables=dict(type='dict', required=False),
state=dict(choices=['present', 'absent'], default='present'),
)
@ -106,9 +107,6 @@ def main():
state = module.params.get('state')
variables = module.params.get('variables')
if variables:
variables = module.load_variables_if_file_specified(variables, 'variables')
# Attempt to look up the related items the user specified (these will fail the module if not found)
inventory_id = module.resolve_name_to_id('inventories', inventory)
@ -128,7 +126,7 @@ def main():
'enabled': enabled,
}
if variables:
host_fields['variables'] = variables
host_fields['variables'] = json.dumps(variables)
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@ -179,164 +179,20 @@ def main():
state=dict(choices=['present', 'absent'], default='present'),
)
# One question here is do we want to end up supporting this within the ansible module itself (i.e. required if, etc)
# Or do we want to let the API return issues with "this doesn't support that", etc.
#
# GUI OPTIONS:
# - - - - - - - manual: file: scm: ec2: gce azure_rm vmware sat cloudforms openstack rhv tower custom
# credential ? ? o o r r r r r r r r o
# source_project ? ? r - - - - - - - - - -
# source_path ? ? r - - - - - - - - - -
# verbosity ? ? o o o o o o o o o o o
# overwrite ? ? o o o o o o o o o o o
# overwrite_vars ? ? o o o o o o o o o o o
# update_on_launch ? ? o o o o o o o o o o o
# update_on_project_launch ? ? o - - - - - - - - - -
# source_regions ? ? - o o o - - - - - - -
# instance_filters ? ? - o - - o - - - - o -
# group_by ? ? - o - - o - - - - - -
# source_vars* ? ? - o - o o o o o - - -
# environmet vars* ? ? o - - - - - - - - - o
# source_script ? ? - - - - - - - - - - r
#
# * - source_vars are labeled environment_vars on project and custom sources
# Create a module for ourselves
module = TowerModule(argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
# We don't want to require source if state is present because
# you might be doing an update to an existing source.
# Later on in the code, we will do a test so that if state: present
# and if we don't have an object, we must have source.
('source', 'scm', ['source_project', 'source_path']),
('source', 'gce', ['credential']),
('source', 'azure_rm', ['credential']),
('source', 'vmware', ['credential']),
('source', 'satellite6', ['credential']),
('source', 'cloudforms', ['credential']),
('source', 'openstack', ['credential']),
('source', 'rhv', ['credential']),
('source', 'tower', ['credential']),
('source', 'custom', ['source_script']),
],
# This is provided by our module, it's not a core thing
mutually_exclusive_if=[
('source', 'scm', ['source_regions',
'instance_filters',
'group_by',
'source_script'
]),
('source', 'ec2', ['source_project',
'source_path',
'update_on_project_launch',
'source_script'
]),
('source', 'gce', ['source_project',
'source_path',
'update_on_project_launch',
'instance_filters',
'group_by',
'source_vars',
'source_script'
]),
('source', 'azure_rm', ['source_project',
'source_path',
'update_on_project_launch',
'instance_filters',
'group_by',
'source_script'
]),
('source', 'vmware', ['source_project', 'source_path', 'update_on_project_launch', 'source_regions', 'source_script']),
('source', 'satellite6', ['source_project',
'source_path',
'update_on_project_launch',
'source_regions',
'instance_filters',
'group_by',
'source_script'
]),
('source', 'cloudforms', ['source_project',
'source_path',
'update_on_project_launch',
'source_regions',
'instance_filters',
'group_by',
'source_script'
]),
('source', 'openstack', ['source_project',
'source_path',
'update_on_project_launch',
'source_regions',
'instance_filters',
'group_by',
'source_script'
]),
('source', 'rhv', ['source_project',
'source_path',
'update_on_project_launch',
'source_regions',
'instance_filters',
'group_by',
'source_vars',
'source_script'
]),
('source', 'tower', ['source_project',
'source_path',
'update_on_project_launch',
'source_regions',
'group_by',
'source_vars',
'source_script'
]),
('source', 'custom', ['source_project',
'source_path',
'update_on_project_launch',
'source_regions',
'instance_filters',
'group_by'
]),
])
module = TowerModule(argument_spec=argument_spec)
optional_vars = {}
# Extract our parameters
name = module.params.get('name')
new_name = module.params.get('new_name')
optional_vars['description'] = module.params.get('description')
inventory = module.params.get('inventory')
optional_vars['source'] = module.params.get('source')
optional_vars['source_path'] = module.params.get('source_path')
source_script = module.params.get('source_script')
optional_vars['source_vars'] = module.params.get('source_vars')
credential = module.params.get('credential')
optional_vars['source_regions'] = module.params.get('source_regions')
optional_vars['instance_filters'] = module.params.get('instance_filters')
optional_vars['group_by'] = module.params.get('group_by')
optional_vars['overwrite'] = module.params.get('overwrite')
optional_vars['overwrite_vars'] = module.params.get('overwrite_vars')
optional_vars['custom_virtualenv'] = module.params.get('custom_virtualenv')
optional_vars['timeout'] = module.params.get('timeout')
optional_vars['verbosity'] = module.params.get('verbosity')
optional_vars['update_on_launch'] = module.params.get('update_on_launch')
optional_vars['update_cache_timeout'] = module.params.get('update_cache_timeout')
source_project = module.params.get('source_project')
optional_vars['update_on_project_update'] = module.params.get('update_on_project_update')
state = module.params.get('state')
# Attempt to JSON encode source vars
if optional_vars['source_vars']:
optional_vars['source_vars'] = dumps(optional_vars['source_vars'])
# Attempt to look up the related items the user specified (these will fail the module if not found)
inventory_id = module.resolve_name_to_id('inventories', inventory)
if credential:
optional_vars['credential'] = module.resolve_name_to_id('credentials', credential)
if source_project:
optional_vars['source_project'] = module.resolve_name_to_id('projects', source_project)
if source_script:
optional_vars['source_script'] = module.resolve_name_to_id('inventory_scripts', source_script)
# Attempt to look up inventory source based on the provided name and inventory ID
inventory_id = module.resolve_name_to_id('inventories', inventory)
inventory_source = module.get_one('inventory_sources', **{
'data': {
'name': name,
@ -344,19 +200,41 @@ def main():
}
})
# Sanity check on arguments
if state == 'present' and not inventory_source and not optional_vars['source']:
module.fail_json(msg="If creating a new inventory source, the source param must be present")
# Create the data that gets sent for create and update
inventory_source_fields = {
'name': new_name if new_name else name,
'inventory': inventory_id,
}
# Attempt to look up the related items the user specified (these will fail the module if not found)
if credential:
inventory_source_fields['credential'] = module.resolve_name_to_id('credentials', credential)
if source_project:
inventory_source_fields['source_project'] = module.resolve_name_to_id('projects', source_project)
if source_script:
inventory_source_fields['source_script'] = module.resolve_name_to_id('inventory_scripts', source_script)
OPTIONAL_VARS = (
'description', 'source', 'source_path', 'source_vars',
'source_regions', 'instance_filters', 'group_by',
'overwrite', 'overwrite_vars', 'custom_virtualenv',
'timeout', 'verbosity', 'update_on_launch', 'update_cache_timeout',
'update_on_project_update'
)
# Layer in all remaining optional information
for field_name in optional_vars:
if optional_vars[field_name]:
inventory_source_fields[field_name] = optional_vars[field_name]
for field_name in OPTIONAL_VARS:
field_val = module.params.get(field_name)
if field_val:
inventory_source_fields[field_name] = field_val
# Attempt to JSON encode source vars
if inventory_source_fields.get('source_vars', None):
inventory_source_fields['source_vars'] = dumps(inventory_source_fields['source_vars'])
# Sanity check on arguments
if state == 'present' and not inventory_source and not inventory_source_fields.get('source'):
module.fail_json(msg="If creating a new inventory source, the source param must be present")
if state == 'absent':
# If the state was absent we can let the module delete it if needed, the module will handle exiting from this

View File

@ -45,7 +45,7 @@ options:
aliases: ['credential']
extra_vars:
description:
- extra_vars to use for the Job Template. Prepend C(@) if a file.
- extra_vars to use for the Job Template.
- ask_extra_vars needs to be set to True via tower_job_template module
when creating the Job Template.
type: dict

View File

@ -47,7 +47,19 @@ def sanitize_dict(din):
@pytest.fixture
def run_module(request):
def collection_import():
"""These tests run assuming that the awx_collection folder is inserted
into the PATH before-hand. But all imports internally to the collection
go through this fixture so that can be changed if needed.
For instance, we could switch to fully-qualified import paths.
"""
def rf(path):
return importlib.import_module(path)
return rf
@pytest.fixture
def run_module(request, collection_import):
def rf(module_name, module_params, request_user):
def new_request(self, method, url, **kwargs):
@ -97,7 +109,7 @@ def run_module(request):
# Note that a proper Ansiballz explosion of the modules will have an import path like:
# ansible_collections.awx.awx.plugins.modules.{}
# We should consider supporting that in the future
resource_module = importlib.import_module('plugins.modules.{0}'.format(module_name))
resource_module = collection_import('plugins.modules.{0}'.format(module_name))
if not isinstance(module_params, dict):
raise RuntimeError('Module params must be dict, got {0}'.format(type(module_params)))

View File

@ -78,10 +78,8 @@ def test_create_custom_credential_type(run_module, admin_user):
ct = CredentialType.objects.get(name='Nexus')
result.pop('invocation')
result.pop('name')
assert result == {
"credential_type": "Nexus",
"state": "present",
"name": "Nexus",
"id": ct.pk,
"changed": True,
}

View File

@ -10,18 +10,19 @@ from awx.main.models import Organization, Inventory, Group
def test_create_group(run_module, admin_user):
org = Organization.objects.create(name='test-org')
inv = Inventory.objects.create(name='test-inv', organization=org)
variables = {"ansible_network_os": "iosxr"}
result = run_module('tower_group', dict(
name='Test Group',
inventory='test-inv',
variables='ansible_network_os: iosxr',
variables=variables,
state='present'
), admin_user)
assert result.get('changed'), result
group = Group.objects.get(name='Test Group')
assert group.inventory == inv
assert group.variables == 'ansible_network_os: iosxr'
assert group.variables == '{"ansible_network_os": "iosxr"}'
result.pop('invocation')
assert result == {
@ -39,13 +40,11 @@ def test_tower_group_idempotent(run_module, admin_user):
group = Group.objects.create(
name='Test Group',
inventory=inv,
variables='ansible_network_os: iosxr'
)
result = run_module('tower_group', dict(
name='Test Group',
inventory='test-inv',
variables='ansible_network_os: iosxr',
state='present'
), admin_user)

View File

@ -10,25 +10,29 @@ from awx.main.models import Organization, Inventory, InventorySource, Project
def base_inventory():
org = Organization.objects.create(name='test-org')
inv = Inventory.objects.create(name='test-inv', organization=org)
Project.objects.create(
name='test-proj',
organization=org,
scm_type='git',
scm_url='https://github.com/ansible/test-playbooks.git',
)
return inv
@pytest.fixture
def project(base_inventory):
return Project.objects.create(
name='test-proj',
organization=base_inventory.organization,
scm_type='git',
scm_url='https://github.com/ansible/test-playbooks.git',
)
@pytest.mark.django_db
def test_inventory_source_create(run_module, admin_user, base_inventory):
def test_inventory_source_create(run_module, admin_user, base_inventory, project):
source_path = '/var/lib/awx/example_source_path/'
result = run_module('tower_inventory_source', dict(
name='foo',
inventory='test-inv',
inventory=base_inventory.name,
state='present',
source='scm',
source_path=source_path,
source_project='test-proj'
source_project=project.name
), admin_user)
assert result.pop('changed', None), result
@ -46,6 +50,7 @@ def test_create_inventory_source_implied_org(run_module, admin_user):
org = Organization.objects.create(name='test-org')
inv = Inventory.objects.create(name='test-inv', organization=org)
# Credential is not required for ec2 source, because of IAM roles
result = run_module('tower_inventory_source', dict(
name='Test Inventory Source',
inventory='test-inv',
@ -92,16 +97,16 @@ def test_create_inventory_source_multiple_orgs(run_module, admin_user):
@pytest.mark.django_db
def test_create_inventory_source_with_venv(run_module, admin_user, base_inventory, mocker):
def test_create_inventory_source_with_venv(run_module, admin_user, base_inventory, mocker, project):
path = '/var/lib/awx/venv/custom-venv/foobar13489435/'
source_path = '/var/lib/awx/example_source_path/'
with mocker.patch('awx.main.models.mixins.get_custom_venv_choices', return_value=[path]):
result = run_module('tower_inventory_source', dict(
name='foo',
inventory='test-inv',
inventory=base_inventory.name,
state='present',
source='scm',
source_project='test-proj',
source_project=project.name,
custom_virtualenv=path,
source_path=source_path
), admin_user)
@ -115,7 +120,7 @@ def test_create_inventory_source_with_venv(run_module, admin_user, base_inventor
@pytest.mark.django_db
def test_custom_venv_no_op(run_module, admin_user, base_inventory, mocker):
def test_custom_venv_no_op(run_module, admin_user, base_inventory, mocker, project):
"""If the inventory source is modified, then it should not blank fields
unrelated to the params that the user passed.
This enforces assumptions about the behavior of the AnsibleModule
@ -125,7 +130,7 @@ def test_custom_venv_no_op(run_module, admin_user, base_inventory, mocker):
inv_src = InventorySource.objects.create(
name='foo',
inventory=base_inventory,
source_project=Project.objects.get(name='test-proj'),
source_project=project,
source='scm',
custom_virtualenv='/venv/foobar/'
)
@ -134,13 +139,93 @@ def test_custom_venv_no_op(run_module, admin_user, base_inventory, mocker):
result = run_module('tower_inventory_source', dict(
name='foo',
description='this is the changed description',
inventory='test-inv',
inventory=base_inventory.name,
source='scm', # is required, but behavior is arguable
state='present',
source_project='test-proj',
source_project=project.name,
source_path=source_path
), admin_user)
assert result.pop('changed', None), result
inv_src.refresh_from_db()
assert inv_src.custom_virtualenv == '/venv/foobar/'
assert inv_src.description == 'this is the changed description'
# Tests related to source-specific parameters
#
# We want to let the API return issues with "this doesn't support that", etc.
#
# GUI OPTIONS:
# - - - - - - - manual: file: scm: ec2: gce azure_rm vmware sat cloudforms openstack rhv tower custom
# credential ? ? o o r r r r r r r r o
# source_project ? ? r - - - - - - - - - -
# source_path ? ? r - - - - - - - - - -
# verbosity ? ? o o o o o o o o o o o
# overwrite ? ? o o o o o o o o o o o
# overwrite_vars ? ? o o o o o o o o o o o
# update_on_launch ? ? o o o o o o o o o o o
# UoPL ? ? o - - - - - - - - - -
# source_regions ? ? - o o o - - - - - - -
# instance_filters ? ? - o - - o - - - - o -
# group_by ? ? - o - - o - - - - - -
# source_vars* ? ? - o - o o o o o - - -
# environment vars* ? ? o - - - - - - - - - o
# source_script ? ? - - - - - - - - - - r
#
# UoPL - update_on_project_launch
# * - source_vars are labeled environment_vars on project and custom sources
@pytest.mark.django_db
def test_missing_required_credential(run_module, admin_user, base_inventory):
result = run_module('tower_inventory_source', dict(
name='Test Azure Source',
inventory=base_inventory.name,
source='azure_rm',
state='present'
), admin_user)
assert result.pop('failed', None) is True, result
assert 'Credential is required for a cloud source' in result.get('msg', '')
@pytest.mark.django_db
def test_source_project_not_for_cloud(run_module, admin_user, base_inventory, project):
result = run_module('tower_inventory_source', dict(
name='Test ec2 Inventory Source',
inventory=base_inventory.name,
source='ec2',
state='present',
source_project=project.name
), admin_user)
assert result.pop('failed', None) is True, result
assert 'Cannot set source_project if not SCM type' in result.get('msg', '')
@pytest.mark.django_db
def test_source_path_not_for_cloud(run_module, admin_user, base_inventory):
result = run_module('tower_inventory_source', dict(
name='Test ec2 Inventory Source',
inventory=base_inventory.name,
source='ec2',
state='present',
source_path='where/am/I'
), admin_user)
assert result.pop('failed', None) is True, result
assert 'Cannot set source_path if not SCM type' in result.get('msg', '')
@pytest.mark.django_db
def test_scm_source_needs_project(run_module, admin_user, base_inventory):
result = run_module('tower_inventory_source', dict(
name='SCM inventory without project',
inventory=base_inventory.name,
state='present',
source='scm',
source_path='/var/lib/awx/example_source_path/'
), admin_user)
assert result.pop('failed', None), result
assert 'Project required for scm type sources' in result.get('msg', '')

View File

@ -0,0 +1,35 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from unittest import mock
import json
def test_duplicate_config(collection_import):
# imports done here because of PATH issues unique to this test suite
TowerModule = collection_import('plugins.module_utils.tower_api').TowerModule
data = {
'name': 'zigzoom',
'zig': 'zoom',
'tower_username': 'bob',
'tower_config_file': 'my_config'
}
cli_data = {'ANSIBLE_MODULE_ARGS': data}
testargs = ['module_file.py', json.dumps(cli_data)]
with mock.patch('ansible.module_utils.basic.AnsibleModule.warn') as mock_warn:
with mock.patch.object(sys, 'argv', testargs):
with mock.patch.object(TowerModule, 'load_config') as mock_load:
argument_spec = dict(
name=dict(required=True),
zig=dict(type='str'),
)
TowerModule(argument_spec=argument_spec)
assert mock_load.mock_calls[-1] == mock.call('my_config')
mock_warn.assert_called_once_with(
'The parameter(s) tower_username were provided at the same time as '
'tower_config_file. Precedence may be unstable; '
'we suggest using either the config file or the direct parameters.'
)

View File

@ -1 +0,0 @@
plugins/modules/tower_host.py use-argspec-type-path