diff --git a/awx/api/serializers.py b/awx/api/serializers.py index a571190199..9310d19a5d 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -1950,7 +1950,7 @@ class InventorySourceOptionsSerializer(BaseSerializer): class Meta: fields = ('*', 'source', 'source_path', 'source_script', 'source_vars', 'credential', 'source_regions', 'instance_filters', 'group_by', 'overwrite', 'overwrite_vars', - 'timeout', 'verbosity') + 'compatibility_mode', 'timeout', 'verbosity') def get_related(self, obj): res = super(InventorySourceOptionsSerializer, self).get_related(obj) diff --git a/awx/main/constants.py b/awx/main/constants.py index 99b2f6301e..1fbfa3ef53 100644 --- a/awx/main/constants.py +++ b/awx/main/constants.py @@ -25,7 +25,9 @@ STANDARD_INVENTORY_UPDATE_ENV = { # Failure to parse inventory should always be fatal 'ANSIBLE_INVENTORY_UNPARSED_FAILED': 'True', # Always use the --export option for ansible-inventory - 'ANSIBLE_INVENTORY_EXPORT': 'True' + 'ANSIBLE_INVENTORY_EXPORT': 'True', + # Redirecting output to stderr allows JSON parsing to still work with -vvv + 'ANSIBLE_VERBOSE_TO_STDERR': 'True' } CAN_CANCEL = ('new', 'pending', 'waiting', 'running') ACTIVE_STATES = CAN_CANCEL diff --git a/awx/main/management/commands/inventory_import.py b/awx/main/management/commands/inventory_import.py index c15d7c82c6..ade2478788 100644 --- a/awx/main/management/commands/inventory_import.py +++ b/awx/main/management/commands/inventory_import.py @@ -27,7 +27,6 @@ from awx.main.models.inventory import ( Host ) from awx.main.utils.mem_inventory import MemInventory, dict_to_mem_data -from awx.main.utils.ansible import filter_non_json_lines # other AWX imports from awx.main.models.rbac import batch_role_ancestor_rebuilding @@ -126,12 +125,12 @@ class AnsibleInventoryLoader(object): def get_base_args(self): # get ansible-inventory absolute path for running in bubblewrap/proot, in Popen - # NOTE: why do we add "python" to the start of these args? 
+ # NOTE: why do we add "python" to the start of these args? # the script that runs ansible-inventory specifies a python interpreter # that makes no sense in light of the fact that we put all the dependencies # inside of /venv/ansible, so we override the specified interpreter # https://github.com/ansible/ansible/issues/50714 - bargs= ['python', self.get_path_to_ansible_inventory(), '-i', self.source] + bargs = ['python', self.get_path_to_ansible_inventory(), '-i', self.source] logger.debug('Using base command: {}'.format(' '.join(bargs))) return bargs @@ -174,21 +173,15 @@ class AnsibleInventoryLoader(object): cmd = self.get_proot_args(cmd, env) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env) - raw_stdout, stderr = proc.communicate() - raw_stdout = smart_text(raw_stdout) + stdout, stderr = proc.communicate() + stdout = smart_text(stdout) stderr = smart_text(stderr) if self.tmp_private_dir: shutil.rmtree(self.tmp_private_dir, True) if proc.returncode != 0: raise RuntimeError('%s failed (rc=%d) with stdout:\n%s\nstderr:\n%s' % ( - self.method, proc.returncode, raw_stdout, stderr)) - - # Openstack inventory plugin gives non-JSON lines - # Also, running with higher verbosity gives non-JSON lines - stdout = filter_non_json_lines(raw_stdout) - if stdout is not raw_stdout: - logger.warning('Output had lines stripped to obtain JSON format.') + self.method, proc.returncode, stdout, stderr)) for line in stderr.splitlines(): logger.error(line) @@ -315,12 +308,6 @@ class Command(BaseCommand): raise NotImplementedError('Value of enabled {} not understood.'.format(enabled)) def get_source_absolute_path(self, source): - # Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow - # good naming conventions - source = source.replace('rhv.py', 'ovirt4.py') - source = source.replace('satellite6.py', 'foreman.py') - source = source.replace('vmware.py', 'vmware_inventory.py') - source = 
source.replace('openstack.py', 'openstack_inventory.py') if not os.path.exists(source): raise IOError('Source does not exist: %s' % source) source = os.path.join(os.getcwd(), os.path.dirname(source), @@ -893,12 +880,24 @@ class Command(BaseCommand): self._create_update_group_children() self._create_update_group_hosts() + def remote_tower_license_compare(self, local_license_type): + # this requires https://github.com/ansible/ansible/pull/52747 + source_vars = self.all_group.variables + remote_license_type = source_vars.get('tower_metadata', {}).get('license_type', None) + if remote_license_type is None: + raise CommandError('Unexpected Error: Tower inventory plugin missing needed metadata!') + if local_license_type != remote_license_type: + raise CommandError('Tower server licenses must match: source: {} local: {}'.format( + remote_license_type, local_license_type + )) + def check_license(self): license_info = get_licenser().validate() + local_license_type = license_info.get('license_type', 'UNLICENSED') if license_info.get('license_key', 'UNLICENSED') == 'UNLICENSED': logger.error(LICENSE_NON_EXISTANT_MESSAGE) raise CommandError('No license found!') - elif license_info.get('license_type', 'UNLICENSED') == 'open': + elif local_license_type == 'open': return available_instances = license_info.get('available_instances', 0) free_instances = license_info.get('free_instances', 0) @@ -907,6 +906,13 @@ class Command(BaseCommand): if time_remaining <= 0 and not license_info.get('demo', False): logger.error(LICENSE_EXPIRED_MESSAGE) raise CommandError("License has expired!") + # special check for tower-type inventory sources + # but only if running the plugin + TOWER_SOURCE_FILES = ['tower.yml', 'tower.yaml'] + if self.inventory_source.source == 'tower' and any(f in self.source for f in TOWER_SOURCE_FILES): + # only if this is the 2nd call to license check, we cannot compare before running plugin + if hasattr(self, 'all_group'): + 
self.remote_tower_license_compare(local_license_type) if free_instances < 0: d = { 'new_count': new_count, diff --git a/awx/main/migrations/0064_v350_inventory_compat_mode.py b/awx/main/migrations/0064_v350_inventory_compat_mode.py new file mode 100644 index 0000000000..7e80c17639 --- /dev/null +++ b/awx/main/migrations/0064_v350_inventory_compat_mode.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.11.16 on 2019-02-28 16:22 +from __future__ import unicode_literals + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0063_v350_org_host_limits'), + ] + + operations = [ + migrations.AddField( + model_name='inventorysource', + name='compatibility_mode', + field=models.BooleanField(default=True, help_text='This field is deprecated and will be removed in a future release. Restore old hostvars and names from before the transition to inventory plugins.'), + ), + migrations.AddField( + model_name='inventoryupdate', + name='compatibility_mode', + field=models.BooleanField(default=True, help_text='This field is deprecated and will be removed in a future release. 
Restore old hostvars and names from before the transition to inventory plugins.'), + ) + ] diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 4551dfc4a0..ae25d9f497 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -24,6 +24,7 @@ from django.utils.translation import ugettext_lazy as _ from django.db import transaction from django.core.exceptions import ValidationError from django.utils.timezone import now +from django.utils.encoding import iri_to_uri from django.db.models import Q # REST Framework @@ -1180,6 +1181,11 @@ class InventorySourceOptions(BaseModel): default=False, help_text=_('Overwrite local variables from remote inventory source.'), ) + compatibility_mode = models.BooleanField( + default=True, + help_text=_('This field is deprecated and will be removed in a future release. ' + 'Restore old hostvars and names from before the transition to inventory plugins.'), + ) timeout = models.IntegerField( blank=True, default=0, @@ -1219,21 +1225,12 @@ class InventorySourceOptions(BaseModel): ('ami_id', _('Image ID')), ('availability_zone', _('Availability Zone')), ('aws_account', _('Account')), - # These should have been added, but plugins do not support them - # so we will avoid introduction, because it would regress anyway - # ('elasticache_cluster', _('ElastiCache Cluster')), - # ('elasticache_engine', _('ElastiCache Engine')), - # ('elasticache_parameter_group', _('ElastiCache Parameter Group')), - # ('elasticache_replication_group', _('ElastiCache Replication Group')), ('instance_id', _('Instance ID')), ('instance_state', _('Instance State')), ('platform', _('Platform')), ('instance_type', _('Instance Type')), ('key_pair', _('Key Name')), - # ('rds_engine', _('RDS Engine')), - # ('rds_parameter_group', _('RDP Parameter Group')), ('region', _('Region')), - # ('route53_names', _('Route53 Names')), ('security_group', _('Security Group')), ('tag_keys', _('Tags')), ('tag_none', _('Tag None')), @@ -1341,7 
+1338,7 @@ class InventorySourceOptions(BaseModel): primary_cred = self.get_cloud_credential() extra_creds = [] for cred in self.credentials.all(): - if primary_cred and cred.pk != primary_cred.pk: + if primary_cred is None or cred.pk != primary_cred.pk: extra_creds.append(cred) return extra_creds @@ -1555,7 +1552,8 @@ class InventorySource(UnifiedJobTemplate, InventorySourceOptions, RelatedJobsMix return True elif self.source == 'gce': # These updates will hang if correct credential is not supplied - return bool(self.get_cloud_credential().kind == 'gce') + credential = self.get_cloud_credential() + return bool(credential and credential.kind == 'gce') return True def create_inventory_update(self, **kwargs): @@ -1820,11 +1818,17 @@ class CustomInventoryScript(CommonModelNameNotUnique, ResourceMixin): return reverse('api:inventory_script_detail', kwargs={'pk': self.pk}, request=request) -# TODO: move these to their own file somewhere? +# TODO: move to awx/main/models/inventory/injectors.py class PluginFileInjector(object): + # if plugin_name is not given, no inventory plugin functionality exists plugin_name = None # Ansible core name used to reference plugin + # if initial_version is None, but we have plugin name, injection logic exists, + # but it is vaporware, meaning we do not use it for some reason in Ansible core initial_version = None # at what version do we switch to the plugin ini_env_reference = None # env var name that points to old ini config file + # base injector should be one of None, "managed", or "template" + # this dictates which logic to borrow from playbook injectors + base_injector = None def __init__(self, ansible_version): # This is InventoryOptions instance, could be source or inventory update @@ -1832,25 +1836,42 @@ class PluginFileInjector(object): @property def filename(self): + """Inventory filename for using the inventory plugin + This is created dynamically, but the auto plugin requires this exact naming + """ return 
'{0}.yml'.format(self.plugin_name) + @property + def script_name(self): + """Name of the script located in awx/plugins/inventory + """ + return '{0}.py'.format(self.__class__.__name__) + + def inventory_as_dict(self, inventory_update, private_data_dir): + """Default implementation of inventory plugin file contents. + There are some valid cases when all parameters can be obtained from + the environment variables, example "plugin: linode" is valid + ideally, however, some options should be filled from the inventory source data + """ + if self.plugin_name is None: + raise NotImplementedError('At minimum the plugin name is needed for inventory plugin use.') + return {'plugin': self.plugin_name} + def inventory_contents(self, inventory_update, private_data_dir): - return yaml.safe_dump(self.inventory_as_dict(inventory_update, private_data_dir), default_flow_style=False) + """Returns a string that is the content for the inventory file for the inventory plugin + """ + return yaml.safe_dump( + self.inventory_as_dict(inventory_update, private_data_dir), + default_flow_style=False, + width=1000 + ) def should_use_plugin(self): return bool( - self.initial_version and + self.plugin_name and self.initial_version and Version(self.ansible_version) >= Version(self.initial_version) ) - @staticmethod - def get_builtin_injector(source): - from awx.main.models.credential import injectors as builtin_injectors - cred_kind = source.replace('ec2', 'aws') - if cred_kind not in dir(builtin_injectors): - return None - return getattr(builtin_injectors, cred_kind) - def build_env(self, inventory_update, env, private_data_dir, private_data_files): if self.should_use_plugin(): injector_env = self.get_plugin_env(inventory_update, private_data_dir, private_data_files) @@ -1865,12 +1886,27 @@ class PluginFileInjector(object): """ injected_env = {} credential = inventory_update.get_cloud_credential() - builtin_injector = self.get_builtin_injector(inventory_update.source) - if builtin_injector is not 
None: - builtin_injector(credential, injected_env, private_data_dir) + # some sources may have no credential, specifically ec2 + if credential is None: + return injected_env + if self.base_injector == 'managed': + from awx.main.models.credential import injectors as builtin_injectors + cred_kind = inventory_update.source.replace('ec2', 'aws') + if cred_kind in dir(builtin_injectors): + getattr(builtin_injectors, cred_kind)(credential, injected_env, private_data_dir) + if safe: + from awx.main.models.credential import build_safe_env + return build_safe_env(injected_env) + elif self.base_injector == 'template': + injected_env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk) # so injector knows this is inventory + safe_env = injected_env.copy() + args = [] + safe_args = [] + credential.credential_type.inject_credential( + credential, injected_env, safe_env, args, safe_args, private_data_dir + ) if safe: - from awx.main.models.credential import build_safe_env - injected_env = build_safe_env(injected_env) + return safe_env return injected_env def get_plugin_env(self, inventory_update, private_data_dir, private_data_files, safe=False): @@ -1916,32 +1952,103 @@ class PluginFileInjector(object): class azure_rm(PluginFileInjector): plugin_name = 'azure_rm' - initial_version = '2.7' + initial_version = '2.8' # Driven by unsafe group names issue, hostvars ini_env_reference = 'AZURE_INI_PATH' + base_injector = 'managed' + + def get_plugin_env(self, *args, **kwargs): + ret = super(azure_rm, self).get_plugin_env(*args, **kwargs) + # We need native jinja2 types so that tags can give JSON null value + ret['ANSIBLE_JINJA2_NATIVE'] = str(True) + return ret def inventory_as_dict(self, inventory_update, private_data_dir): - ret = dict( - plugin=self.plugin_name, + ret = super(azure_rm, self).inventory_as_dict(inventory_update, private_data_dir) + + source_vars = inventory_update.source_vars_dict + + group_by_hostvar = { + 'location': {'prefix': '', 'separator': '', 'key': 'location'}, 
+ 'tag': {'prefix': '', 'separator': '', 'key': 'tags.keys() | list if tags else []'}, + # Introduced with https://github.com/ansible/ansible/pull/53046 + 'security_group': {'prefix': '', 'separator': '', 'key': 'security_group'}, + 'resource_group': {'prefix': '', 'separator': '', 'key': 'resource_group'}, + # Note, os_family was not documented correctly in script, but defaulted to grouping by it + 'os_family': {'prefix': '', 'separator': '', 'key': 'os_disk.operating_system_type'} + } + # by default group by everything + # always respect user setting, if they gave it + group_by = [ + grouping_name for grouping_name in group_by_hostvar + if source_vars.get('group_by_{}'.format(grouping_name), True) + ] + ret['keyed_groups'] = [group_by_hostvar[grouping_name] for grouping_name in group_by] + if 'tag' in group_by: + # Nasty syntax to reproduce "key_value" group names in addition to "key" + ret['keyed_groups'].append({ + 'prefix': '', 'separator': '', + 'key': r'dict(tags.keys() | map("regex_replace", "^(.*)$", "\1_") | list | zip(tags.values() | list)) if tags else []' + }) + + if inventory_update.compatibility_mode: + # Dashes actually were not configurable in azure_rm.py script + # however, we do not want unicode, so we use this + ret['use_contrib_script_compatible_sanitization'] = True # By default the script did not filter hosts - default_host_filters=[], - # Groups that the script returned - keyed_groups=[ - {'prefix': '', 'separator': '', 'key': 'location'}, - {'prefix': '', 'separator': '', 'key': 'powerstate'}, - {'prefix': '', 'separator': '', 'key': 'name'} - ], - hostvar_expressions={ + ret['default_host_filters'] = [] + # User-given host filters + user_filters = [] + old_filterables = [ + ('resource_groups', 'resource_group'), + ('tags', 'tags') + # locations / location would be an entry + # but this would conflict with source_regions + ] + for key, loc in old_filterables: + value = source_vars.get(key, None) + if value and isinstance(value, str): + 
user_filters.append('{} not in {}'.format( + loc, value.split(',') + )) + if user_filters: + ret.setdefault('exclude_host_filters', []) + ret['exclude_host_filters'].extend(user_filters) + + # One static group that was returned by script + ret['conditional_groups'] = {'azure': True} + # Compatibility hostvars + ret['hostvar_expressions'] = { 'provisioning_state': 'provisioning_state | title', 'computer_name': 'name', 'type': 'resource_type', - 'private_ip': 'private_ipv4_addresses | json_query("[0]")' + 'private_ip': 'private_ipv4_addresses | json_query("[0]")', + 'public_ip': 'public_ipv4_addresses | json_query("[0]")', + 'tags': 'tags if tags else None' } - ) + # Special functionality from script + if source_vars.get('use_private_ip', False): + ret['hostvar_expressions']['ansible_host'] = 'private_ipv4_addresses | json_query("[0]")' + else: + # Hopefully no one is using this after moving to plugins, but applying this + # setting will at least trigger the global redactor to warn user + if 'replace_dash_in_groups' in source_vars: + ret['use_contrib_script_compatible_sanitization'] = not source_vars['replace_dash_in_groups'] + if inventory_update.instance_filters: + ret.setdefault('exclude_host_filters', []) + for filter in inventory_update.instance_filters.split(','): + if not filter: + continue + ret['exclude_host_filters'].append(filter) - # TODO: all regions currently failing due to: - # https://github.com/ansible/ansible/pull/48079 if inventory_update.source_regions and 'all' not in inventory_update.source_regions: - ret['regions'] = inventory_update.source_regions.split(',') + # initialize a list for this section in inventory file + ret.setdefault('exclude_host_filters', []) + # make a python list of the regions we will use + python_regions = [x.strip() for x in inventory_update.source_regions.split(',')] + # convert that list in memory to python syntax in a string + # now put that in jinja2 syntax operating on hostvar key "location" + # and put that as an 
entry in the exclusions list + ret['exclude_host_filters'].append("location not in {}".format(repr(python_regions))) return ret def build_script_private_data(self, inventory_update, private_data_dir): @@ -1967,11 +2074,17 @@ class azure_rm(PluginFileInjector): class ec2(PluginFileInjector): plugin_name = 'aws_ec2' - initial_version = '2.6' # 2.5 has bugs forming keyed groups + initial_version = '2.8' # Driven by unsafe group names issue, parent_group templating, hostvars ini_env_reference = 'EC2_INI_PATH' + base_injector = 'managed' + + def get_plugin_env(self, *args, **kwargs): + ret = super(ec2, self).get_plugin_env(*args, **kwargs) + # We need native jinja2 types so that ec2_state_code will give integer + ret['ANSIBLE_JINJA2_NATIVE'] = str(True) + return ret def _compat_compose_vars(self): - # https://gist.github.com/s-hertel/089c613914c051f443b53ece6995cc77 return { # vars that change 'ec2_block_devices': ( @@ -1990,10 +2103,11 @@ class ec2(PluginFileInjector): 'ec2_reason': 'state_transition_reason', 'ec2_security_group_ids': "security_groups | map(attribute='group_id') | list | join(',')", 'ec2_security_group_names': "security_groups | map(attribute='group_name') | list | join(',')", + 'ec2_tag_Name': 'tags.Name', 'ec2_state': 'state.name', 'ec2_state_code': 'state.code', 'ec2_state_reason': 'state_reason.message if state_reason is defined else ""', - 'ec2_sourceDestCheck': 'source_dest_check | lower | string', # butchered snake_case case not a typo. + 'ec2_sourceDestCheck': 'source_dest_check | default(false) | lower | string', # butchered snake_case case not a typo. 
'ec2_account_id': 'network_interfaces | json_query("[0].owner_id")', # vars that just need ec2_ prefix 'ec2_ami_launch_index': 'ami_launch_index | string', @@ -2004,7 +2118,7 @@ class ec2(PluginFileInjector): 'ec2_image_id': 'image_id', 'ec2_instance_type': 'instance_type', 'ec2_key_name': 'key_name', - 'ec2_launch_time': 'launch_time', + 'ec2_launch_time': r'launch_time | regex_replace(" ", "T") | regex_replace("(\+)(\d\d):(\d)(\d)$", ".\g<2>\g<3>Z")', 'ec2_platform': 'platform | default("")', 'ec2_private_dns_name': 'private_dns_name', 'ec2_private_ip_address': 'private_ip_address', @@ -2012,57 +2126,133 @@ class ec2(PluginFileInjector): 'ec2_region': 'placement.region', 'ec2_root_device_name': 'root_device_name', 'ec2_root_device_type': 'root_device_type', - 'ec2_spot_instance_request_id': 'spot_instance_request_id', - 'ec2_subnet_id': 'subnet_id', + # many items need blank defaults because the script tended to keep a common schema + 'ec2_spot_instance_request_id': 'spot_instance_request_id | default("")', + 'ec2_subnet_id': 'subnet_id | default("")', 'ec2_virtualization_type': 'virtualization_type', - 'ec2_vpc_id': 'vpc_id' + 'ec2_vpc_id': 'vpc_id | default("")', + # same as ec2_ip_address, the script provided this + 'ansible_host': 'public_ip_address', + # new with https://github.com/ansible/ansible/pull/53645 + 'ec2_eventsSet': 'events | default("")', + 'ec2_persistent': 'persistent | default(false)', + 'ec2_requester_id': 'requester_id | default("")' } def inventory_as_dict(self, inventory_update, private_data_dir): + ret = super(ec2, self).inventory_as_dict(inventory_update, private_data_dir) + keyed_groups = [] group_by_hostvar = { - 'ami_id': {'prefix': '', 'separator': '', 'key': 'image_id'}, - 'availability_zone': {'prefix': '', 'separator': '', 'key': 'placement.availability_zone'}, - 'aws_account': None, # not an option with plugin - 'instance_id': {'prefix': '', 'separator': '', 'key': 'instance_id'}, # normally turned off - 'instance_state': 
{'prefix': 'instance_state', 'key': 'state.name'}, - 'platform': {'prefix': 'platform', 'key': 'platform'}, - 'instance_type': {'prefix': 'type', 'key': 'instance_type'}, - 'key_pair': {'prefix': 'key', 'key': 'key_name'}, - 'region': {'prefix': '', 'separator': '', 'key': 'placement.region'}, + 'ami_id': {'prefix': '', 'separator': '', 'key': 'image_id', 'parent_group': 'images'}, + # 2 entries for zones for same groups to establish 2 parentage trees + 'availability_zone': {'prefix': '', 'separator': '', 'key': 'placement.availability_zone', 'parent_group': 'zones'}, + 'aws_account': {'prefix': '', 'separator': '', 'key': 'network_interfaces | json_query("[0].owner_id")', 'parent_group': 'accounts'}, + 'instance_id': {'prefix': '', 'separator': '', 'key': 'instance_id', 'parent_group': 'instances'}, # normally turned off + 'instance_state': {'prefix': 'instance_state', 'key': 'state.name', 'parent_group': 'instance_states'}, + 'platform': {'prefix': 'platform', 'key': 'platform | default("undefined")', 'parent_group': 'platforms'}, + 'instance_type': {'prefix': 'type', 'key': 'instance_type', 'parent_group': 'types'}, + 'key_pair': {'prefix': 'key', 'key': 'key_name', 'parent_group': 'keys'}, + 'region': {'prefix': '', 'separator': '', 'key': 'placement.region', 'parent_group': 'regions'}, # Security requires some ninja jinja2 syntax, credit to s-hertel - 'security_group': {'prefix': 'security_group', 'key': 'security_groups | json_query("[].group_name")'}, - 'tag_keys': {'prefix': 'tag', 'key': 'tags'}, - 'tag_none': None, # grouping by no tags isn't a different thing with plugin + 'security_group': {'prefix': 'security_group', 'key': 'security_groups | json_query("[].group_name")', 'parent_group': 'security_groups'}, + 'tag_keys': [ + {'prefix': 'tag', 'key': 'tags', 'parent_group': 'tags'}, + {'prefix': 'tag', 'key': 'tags.keys()', 'parent_group': 'tags'} + ], + # 'tag_none': None, # grouping by no tags isn't a different thing with plugin # naming is redundant, 
like vpc_id_vpc_8c412cea, but intended - 'vpc_id': {'prefix': 'vpc_id', 'key': 'vpc_id'}, + 'vpc_id': {'prefix': 'vpc_id', 'key': 'vpc_id', 'parent_group': 'vpcs'}, } - # -- same as script here -- + # -- same-ish as script here -- group_by = [x.strip().lower() for x in inventory_update.group_by.split(',') if x.strip()] for choice in inventory_update.get_ec2_group_by_choices(): - value = bool((group_by and choice[0] in group_by) or (not group_by and choice[0] != 'instance_id')) + value = bool( + ( + group_by and choice[0] in group_by + ) or ( + (not group_by) and choice[0] != 'instance_id' + ) + ) # -- end sameness to script -- if value: this_keyed_group = group_by_hostvar.get(choice[0], None) # If a keyed group syntax does not exist, there is nothing we can do to get this group if this_keyed_group is not None: - keyed_groups.append(this_keyed_group) + if isinstance(this_keyed_group, list): + keyed_groups.extend(this_keyed_group) + else: + keyed_groups.append(this_keyed_group) + # special case, this parentage is only added if both zones and regions are present + if 'region' in group_by and 'availability_zone' in group_by: + keyed_groups.append({'prefix': '', 'separator': '', 'key': 'placement.availability_zone', 'parent_group': '{{ placement.region }}'}) + + source_vars = inventory_update.source_vars_dict + # This is a setting from the script, hopefully no one used it + # if true, it replaces dashes, but not in region / loc names + replace_dash = bool(source_vars.get('replace_dash_in_groups', True)) + if inventory_update.compatibility_mode: + legacy_regex = { + True: r"[^A-Za-z0-9\_]", + False: r"[^A-Za-z0-9\_\-]" # do not replace dash, dash is whitelisted + }[replace_dash] + list_replacer = 'map("regex_replace", "{rx}", "_") | list'.format(rx=legacy_regex) + # this option, a plugin option, will allow dashes, but not unicode + # when set to False, unicode will be allowed, but it was not allowed by script + # thus, we always have to use this option, and always use 
our custom regex + ret['use_contrib_script_compatible_sanitization'] = True + for grouping_data in keyed_groups: + if grouping_data['key'] in ('placement.region', 'placement.availability_zone'): + # us-east-2 is always us-east-2 according to ec2.py + # no sanitization in region-ish groups for the script standards, ever ever + continue + if grouping_data['key'] == 'tags': + # dict jinja2 transformation + grouping_data['key'] = 'dict(tags.keys() | {replacer} | zip(tags.values() | {replacer}))'.format( + replacer=list_replacer + ) + elif grouping_data['key'] == 'tags.keys()' or grouping_data['prefix'] == 'security_group': + # list jinja2 transformation + grouping_data['key'] += ' | {replacer}'.format(replacer=list_replacer) + else: + # string transformation + grouping_data['key'] += ' | regex_replace("{rx}", "_")'.format(rx=legacy_regex) + + # This was an allowed ec2.ini option, also plugin option, so pass through + if source_vars.get('boto_profile', None): + ret['boto_profile'] = source_vars['boto_profile'] + + elif not replace_dash: + # Using the plugin, but still want dashes whitelisted + ret['use_contrib_script_compatible_sanitization'] = True + + if keyed_groups: + ret['keyed_groups'] = keyed_groups # Instance ID not part of compat vars, because of settings.EC2_INSTANCE_ID_VAR # remove this variable at your own peril, there be dragons compose_dict = {'ec2_id': 'instance_id'} - # TODO: add an ability to turn this off - compose_dict.update(self._compat_compose_vars()) + inst_filters = {} - inst_filters = { - # The script returned all states by default, the plugin does not + if inventory_update.compatibility_mode: + # TODO: add an ability to turn this off + compose_dict.update(self._compat_compose_vars()) + # plugin provides "aws_ec2", but not this which the script gave + ret['groups'] = {'ec2': True} + # public_ip as hostname is non-default plugin behavior, script behavior + ret['hostnames'] = [ + 'network-interface.addresses.association.public-ip', + 'dns-name', + 
'private-dns-name' + ] + # The script returned only running state by default, the plugin does not # https://docs.aws.amazon.com/cli/latest/reference/ec2/describe-instances.html#options # options: pending | running | shutting-down | terminated | stopping | stopped - 'instance-state-name': [ - 'running' - # 'pending', 'running', 'shutting-down', 'terminated', 'stopping', 'stopped' - ] - } + inst_filters['instance-state-name'] = ['running'] + + if compose_dict: + ret['compose'] = compose_dict + if inventory_update.instance_filters: # logic used to live in ec2.py, now it belongs to us. Yay more code? filter_sets = [f for f in inventory_update.instance_filters.split(',') if f] @@ -2077,22 +2267,12 @@ class ec2(PluginFileInjector): continue inst_filters[filter_key] = filter_value - ret = dict( - plugin=self.plugin_name, - hostnames=[ - 'network-interface.addresses.association.public-ip', # non-default - 'dns-name', - 'private-dns-name' - ], - keyed_groups=keyed_groups, - groups={'ec2': True}, # plugin provides "aws_ec2", but not this - compose=compose_dict, - filters=inst_filters - ) - # TODO: all regions currently failing due to: - # https://github.com/ansible/ansible/pull/48079 + if inst_filters: + ret['filters'] = inst_filters + if inventory_update.source_regions and 'all' not in inventory_update.source_regions: ret['regions'] = inventory_update.source_regions.split(',') + return ret def build_script_private_data(self, inventory_update, private_data_dir): @@ -2133,7 +2313,8 @@ class ec2(PluginFileInjector): class gce(PluginFileInjector): plugin_name = 'gcp_compute' - initial_version = '2.6' + initial_version = '2.8' # Driven by unsafe group names issue, hostvars + base_injector = 'managed' def get_script_env(self, inventory_update, private_data_dir, private_data_files): env = super(gce, self).get_script_env(inventory_update, private_data_dir, private_data_files) @@ -2154,8 +2335,7 @@ class gce(PluginFileInjector): # missing: gce_image, gce_uuid # 
https://github.com/ansible/ansible/issues/51884 return { - 'gce_id': 'id', - 'gce_description': 'description | default(None)', + 'gce_description': 'description if description else None', 'gce_machine_type': 'machineType', 'gce_name': 'name', 'gce_network': 'networkInterfaces | json_query("[0].network.name")', @@ -2164,20 +2344,31 @@ class gce(PluginFileInjector): 'gce_status': 'status', 'gce_subnetwork': 'networkInterfaces | json_query("[0].subnetwork.name")', 'gce_tags': 'tags | json_query("items")', - 'gce_zone': 'zone' + 'gce_zone': 'zone', + 'gce_metadata': 'metadata.get("items", []) | items2dict(key_name="key", value_name="value")', + # We need this as long as hostnames is non-default, otherwise hosts + # will not be addressed correctly, was returned in script + 'ansible_ssh_host': 'networkInterfaces | json_query("[0].accessConfigs[0].natIP")' } def inventory_as_dict(self, inventory_update, private_data_dir): + ret = super(gce, self).inventory_as_dict(inventory_update, private_data_dir) credential = inventory_update.get_cloud_credential() - builtin_injector = self.get_builtin_injector(inventory_update.source) - creds_path = builtin_injector(credential, {}, private_data_dir) - # gce never processed ther group_by options, if it had, we would selectively - # apply those options here, but it didn't, so they are added here - # and we may all hope that one day they can die, and rest in peace + # auth related items + from awx.main.models.credential.injectors import gce as builtin_injector + ret['service_account_file'] = builtin_injector(credential, {}, private_data_dir) + ret['projects'] = [credential.get_input('project', default='')] + ret['auth_kind'] = "serviceaccount" + + filters = [] + # TODO: implement gce group_by options + # gce never processed the group_by field, if it had, we would selectively + # apply those options here, but it did not, so all groups are added here keyed_groups = [ # the jinja2 syntax is duplicated with compose # 
https://github.com/ansible/ansible/issues/51883 + {'prefix': 'network', 'key': 'networkInterfaces | json_query("[0].subnetwork.name")'}, # gce_subnetwork {'prefix': '', 'separator': '', 'key': 'networkInterfaces | json_query("[0].networkIP")'}, # gce_private_ip {'prefix': '', 'separator': '', 'key': 'networkInterfaces | json_query("[0].accessConfigs[0].natIP")'}, # gce_public_ip {'prefix': '', 'separator': '', 'key': 'machineType'}, @@ -2185,34 +2376,46 @@ class gce(PluginFileInjector): {'prefix': 'tag', 'key': 'tags | json_query("items")'}, # gce_tags {'prefix': 'status', 'key': 'status | lower'} ] + # This will be used as the gce instance_id, must be universal, non-compat + compose_dict = {'gce_id': 'id'} - # We need this as long as hostnames is non-default, otherwise hosts - # will not be addressed correctly, so not considered a "compat" change - compose_dict = {'ansible_ssh_host': 'networkInterfaces | json_query("[0].accessConfigs[0].natIP")'} - # These are only those necessary to emulate old hostvars - compose_dict.update(self._compat_compose_vars()) + if inventory_update.compatibility_mode: + # The gce.py script never sanitized any names in any way + ret['use_contrib_script_compatible_sanitization'] = True + # Add in old hostvars aliases + compose_dict.update(self._compat_compose_vars()) + # Non-default names to match script + ret['hostnames'] = ['name', 'public_ip', 'private_ip'] + elif inventory_update.instance_filters: + for filter in inventory_update.instance_filters.split(','): + if not filter: + continue + filters.append(filter) - ret = dict( - plugin=self.plugin_name, - projects=[credential.get_input('project', default='')], - filters=None, # necessary cruft, see: https://github.com/ansible/ansible/pull/50025 - service_account_file=creds_path, - auth_kind="serviceaccount", - hostnames=['name', 'public_ip', 'private_ip'], # need names to match with script - keyed_groups=keyed_groups, - compose=compose_dict, - ) + if keyed_groups: + ret['keyed_groups'] = 
keyed_groups + if filters: + ret['filters'] = filters + if compose_dict: + ret['compose'] = compose_dict if inventory_update.source_regions and 'all' not in inventory_update.source_regions: ret['zones'] = inventory_update.source_regions.split(',') return ret def get_plugin_env(self, inventory_update, private_data_dir, private_data_files, safe=False): # gce wants everything defined in inventory & cred files + # this explicitly turns off injection of environment variables return {} class vmware(PluginFileInjector): + # plugin_name = 'vmware_vm_inventory' # FIXME: implement me ini_env_reference = 'VMWARE_INI_PATH' + base_injector = 'managed' + + @property + def script_name(self): + return 'vmware_inventory.py' # exception def build_script_private_data(self, inventory_update, private_data_dir): cp = configparser.RawConfigParser() @@ -2242,7 +2445,12 @@ class vmware(PluginFileInjector): class openstack(PluginFileInjector): ini_env_reference = 'OS_CLIENT_CONFIG_FILE' plugin_name = 'openstack' - initial_version = '2.5' + # minimum version of 2.7.8 may be theoretically possible + initial_version = '2.8' # Driven by consistency with other sources + + @property + def script_name(self): + return 'openstack_inventory.py' # exception def _get_clouds_dict(self, inventory_update, credential, private_data_dir, mk_cache=True): openstack_auth = dict(auth_url=credential.get_input('host', default=''), @@ -2347,23 +2555,25 @@ class openstack(PluginFileInjector): class rhv(PluginFileInjector): + """ovirt uses the custom credential templating, and that is all + """ + # plugin_name = 'FIXME' # contribute inventory plugin to Ansible + base_injector = 'template' - def get_script_env(self, inventory_update, private_data_dir, private_data_files): - """Unlike the others, ovirt uses the custom credential templating - """ - env = {'INVENTORY_UPDATE_ID': inventory_update.pk} - safe_env = env.copy() - args = [] - safe_args = [] - credential = inventory_update.get_cloud_credential() - 
credential.credential_type.inject_credential( - credential, env, safe_env, args, safe_args, private_data_dir - ) - return env + @property + def script_name(self): + return 'ovirt4.py' # exception class satellite6(PluginFileInjector): + plugin_name = 'foreman' ini_env_reference = 'FOREMAN_INI_PATH' + # initial_version = '2.8' # FIXME: turn on after plugin is validated + # No base injector, because this does not work in playbooks. Bug?? + + @property + def script_name(self): + return 'foreman.py' # exception def build_script_private_data(self, inventory_update, private_data_dir): cp = configparser.RawConfigParser() @@ -2406,9 +2616,22 @@ class satellite6(PluginFileInjector): return self.dump_cp(cp, credential) + def get_plugin_env(self, inventory_update, private_data_dir, private_data_files, safe=False): + # this assumes that this is merged + # https://github.com/ansible/ansible/pull/52693 + credential = inventory_update.get_cloud_credential() + ret = {} + if credential: + ret['FOREMAN_SERVER'] = credential.get_input('host', default='') + ret['FOREMAN_USER'] = credential.get_input('username', default='') + ret['FOREMAN_PASSWORD'] = credential.get_input('password', default='') + return ret + class cloudforms(PluginFileInjector): + # plugin_name = 'FIXME' # contribute inventory plugin to Ansible ini_env_reference = 'CLOUDFORMS_INI_PATH' + # Also no base_injector because this does not work in playbooks def build_script_private_data(self, inventory_update, private_data_dir): cp = configparser.RawConfigParser() @@ -2441,6 +2664,9 @@ class cloudforms(PluginFileInjector): class tower(PluginFileInjector): + plugin_name = 'tower' + base_injector = 'template' + initial_version = '2.8' # Driven by "include_metadata" hostvars def get_script_env(self, inventory_update, private_data_dir, private_data_files): env = super(tower, self).get_script_env(inventory_update, private_data_dir, private_data_files) @@ -2448,6 +2674,20 @@ class tower(PluginFileInjector): 
env['TOWER_LICENSE_TYPE'] = get_licenser().validate().get('license_type', 'unlicensed') return env + def inventory_as_dict(self, inventory_update, private_data_dir): + # Credentials injected as env vars, same as script + try: + # plugin can take an actual int type + identifier = int(inventory_update.instance_filters) + except ValueError: + # inventory_id could be a named URL + identifier = iri_to_uri(inventory_update.instance_filters) + return { + 'plugin': self.plugin_name, + 'inventory_id': identifier, + 'include_metadata': True # used for license check + } + for cls in PluginFileInjector.__subclasses__(): InventorySourceOptions.injectors[cls.__name__] = cls diff --git a/awx/main/tasks.py b/awx/main/tasks.py index 7775362a6b..6c625e81a1 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -1988,15 +1988,21 @@ class RunInventoryUpdate(BaseTask): if inventory_update.source in InventorySource.injectors: injector = InventorySource.injectors[inventory_update.source](self.get_ansible_version(inventory_update)) - env = injector.build_env(inventory_update, env, private_data_dir, private_data_files) - if injector is not None: + env = injector.build_env(inventory_update, env, private_data_dir, private_data_files) # All CLOUD_PROVIDERS sources implement as either script or auto plugin if injector.should_use_plugin(): env['ANSIBLE_INVENTORY_ENABLED'] = 'auto' else: env['ANSIBLE_INVENTORY_ENABLED'] = 'script' + # Automatic transformation of group names + # https://github.com/ansible/ansible/pull/52748 + if inventory_update.compatibility_mode: + env['ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS'] = 'never' + else: + env['ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS'] = 'always' + if inventory_update.source in ['scm', 'custom']: for env_k in inventory_update.source_vars_dict: if str(env_k) not in env and str(env_k) not in settings.INV_ENV_VARIABLE_BLACKLIST: @@ -2090,7 +2096,7 @@ class RunInventoryUpdate(BaseTask): os.chmod(inventory_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) 
else: # Use the vendored script path - inventory_path = self.get_path_to('..', 'plugins', 'inventory', '%s.py' % src) + inventory_path = self.get_path_to('..', 'plugins', 'inventory', injector.script_name) elif src == 'scm': inventory_path = inventory_update.get_actual_source_path() elif src == 'custom': diff --git a/awx/main/tests/data/inventory/plugins/azure_rm/azure_rm.yml b/awx/main/tests/data/inventory/plugins/azure_rm/azure_rm.yml deleted file mode 100644 index 188a7cbb6a..0000000000 --- a/awx/main/tests/data/inventory/plugins/azure_rm/azure_rm.yml +++ /dev/null @@ -1,4 +0,0 @@ -plugin: azure_rm -regions: -- southcentralus -- westus diff --git a/awx/main/tests/data/inventory/plugins/azure_rm/env.json b/awx/main/tests/data/inventory/plugins/azure_rm/env.json new file mode 100644 index 0000000000..244256f83c --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/azure_rm/env.json @@ -0,0 +1,8 @@ +{ + "AZURE_SUBSCRIPTION_ID": "fooo", + "AZURE_CLIENT_ID": "fooo", + "AZURE_TENANT": "fooo", + "AZURE_SECRET": "fooo", + "AZURE_CLOUD_ENVIRONMENT": "fooo", + "ANSIBLE_JINJA2_NATIVE": "True" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/plugins/azure_rm/files/azure_rm.yml b/awx/main/tests/data/inventory/plugins/azure_rm/files/azure_rm.yml new file mode 100644 index 0000000000..1907b795e6 --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/azure_rm/files/azure_rm.yml @@ -0,0 +1,35 @@ +conditional_groups: + azure: true +default_host_filters: [] +exclude_host_filters: +- resource_group not in ['foo_resources', 'bar_resources'] +- location not in ['southcentralus', 'westus'] +hostvar_expressions: + ansible_host: private_ipv4_addresses | json_query("[0]") + computer_name: name + private_ip: private_ipv4_addresses | json_query("[0]") + provisioning_state: provisioning_state | title + public_ip: public_ipv4_addresses | json_query("[0]") + tags: tags if tags else None + type: resource_type +keyed_groups: +- key: location + prefix: '' + 
separator: '' +- key: tags.keys() if tags else [] + prefix: '' + separator: '' +- key: security_group + prefix: '' + separator: '' +- key: resource_group + prefix: '' + separator: '' +- key: os_disk.operating_system_type + prefix: '' + separator: '' +- key: dict(tags.keys() | map("regex_replace", "^(.*)$", "\1_") | list | zip(tags.values() | list)) if tags else [] + prefix: '' + separator: '' +plugin: azure_rm +use_contrib_script_compatible_sanitization: true diff --git a/awx/main/tests/data/inventory/plugins/ec2/aws_ec2.yml b/awx/main/tests/data/inventory/plugins/ec2/aws_ec2.yml deleted file mode 100644 index 10237c8077..0000000000 --- a/awx/main/tests/data/inventory/plugins/ec2/aws_ec2.yml +++ /dev/null @@ -1,4 +0,0 @@ -plugin: aws_ec2 -regions: -- us-east-2 -- ap-south-1 diff --git a/awx/main/tests/data/inventory/plugins/ec2/env.json b/awx/main/tests/data/inventory/plugins/ec2/env.json new file mode 100644 index 0000000000..f57b4748c3 --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/ec2/env.json @@ -0,0 +1,6 @@ +{ + "AWS_ACCESS_KEY_ID": "fooo", + "AWS_SECRET_ACCESS_KEY": "fooo", + "AWS_SECURITY_TOKEN": "fooo", + "ANSIBLE_JINJA2_NATIVE": "True" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/plugins/ec2/files/aws_ec2.yml b/awx/main/tests/data/inventory/plugins/ec2/files/aws_ec2.yml new file mode 100644 index 0000000000..537c2b9ddc --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/ec2/files/aws_ec2.yml @@ -0,0 +1,82 @@ +boto_profile: /tmp/my_boto_stuff +compose: + ansible_host: public_ip_address + ec2_account_id: network_interfaces | json_query("[0].owner_id") + ec2_ami_launch_index: ami_launch_index | string + ec2_architecture: architecture + ec2_block_devices: dict(block_device_mappings | map(attribute='device_name') | list | zip(block_device_mappings | map(attribute='ebs.volume_id') | list)) + ec2_client_token: client_token + ec2_dns_name: public_dns_name + ec2_ebs_optimized: ebs_optimized + ec2_eventsSet: events | 
default("") + ec2_group_name: placement.group_name + ec2_hypervisor: hypervisor + ec2_id: instance_id + ec2_image_id: image_id + ec2_instance_profile: iam_instance_profile | default("") + ec2_instance_type: instance_type + ec2_ip_address: public_ip_address + ec2_kernel: kernel_id | default("") + ec2_key_name: key_name + ec2_launch_time: launch_time | regex_replace(" ", "T") | regex_replace("(\+)(\d\d):(\d)(\d)$", ".\g<2>\g<3>Z") + ec2_monitored: monitoring.state in ['enabled', 'pending'] + ec2_monitoring_state: monitoring.state + ec2_persistent: persistent | default(false) + ec2_placement: placement.availability_zone + ec2_platform: platform | default("") + ec2_private_dns_name: private_dns_name + ec2_private_ip_address: private_ip_address + ec2_public_dns_name: public_dns_name + ec2_ramdisk: ramdisk_id | default("") + ec2_reason: state_transition_reason + ec2_region: placement.region + ec2_requester_id: requester_id | default("") + ec2_root_device_name: root_device_name + ec2_root_device_type: root_device_type + ec2_security_group_ids: security_groups | map(attribute='group_id') | list | join(',') + ec2_security_group_names: security_groups | map(attribute='group_name') | list | join(',') + ec2_sourceDestCheck: source_dest_check | default(false) | lower | string + ec2_spot_instance_request_id: spot_instance_request_id | default("") + ec2_state: state.name + ec2_state_code: state.code + ec2_state_reason: state_reason.message if state_reason is defined else "" + ec2_subnet_id: subnet_id | default("") + ec2_tag_Name: tags.Name + ec2_virtualization_type: virtualization_type + ec2_vpc_id: vpc_id | default("") +filters: + instance-state-name: + - running +groups: + ec2: true +hostnames: +- network-interface.addresses.association.public-ip +- dns-name +- private-dns-name +keyed_groups: +- key: placement.availability_zone + parent_group: zones + prefix: '' + separator: '' +- key: instance_type | regex_replace("[^A-Za-z0-9\_]", "_") + parent_group: types + prefix: type +- 
key: placement.region + parent_group: regions + prefix: '' + separator: '' +- key: dict(tags.keys() | map("regex_replace", "[^A-Za-z0-9\_]", "_") | list | zip(tags.values() | map("regex_replace", "[^A-Za-z0-9\_]", "_") | list)) + parent_group: tags + prefix: tag +- key: tags.keys() | map("regex_replace", "[^A-Za-z0-9\_]", "_") | list + parent_group: tags + prefix: tag +- key: placement.availability_zone + parent_group: '{{ placement.region }}' + prefix: '' + separator: '' +plugin: aws_ec2 +regions: +- us-east-2 +- ap-south-1 +use_contrib_script_compatible_sanitization: true diff --git a/awx/main/tests/data/inventory/plugins/gce/file_reference b/awx/main/tests/data/inventory/plugins/gce/files/file_reference similarity index 100% rename from awx/main/tests/data/inventory/plugins/gce/file_reference rename to awx/main/tests/data/inventory/plugins/gce/files/file_reference diff --git a/awx/main/tests/data/inventory/plugins/gce/files/gcp_compute.yml b/awx/main/tests/data/inventory/plugins/gce/files/gcp_compute.yml new file mode 100644 index 0000000000..50a6753384 --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/gce/files/gcp_compute.yml @@ -0,0 +1,46 @@ +auth_kind: serviceaccount +compose: + ansible_ssh_host: networkInterfaces | json_query("[0].accessConfigs[0].natIP") + gce_description: description if description else None + gce_id: id + gce_machine_type: machineType + gce_metadata: metadata.get("items", []) | items2dict(key_name="key", value_name="value") + gce_name: name + gce_network: networkInterfaces | json_query("[0].network.name") + gce_private_ip: networkInterfaces | json_query("[0].networkIP") + gce_public_ip: networkInterfaces | json_query("[0].accessConfigs[0].natIP") + gce_status: status + gce_subnetwork: networkInterfaces | json_query("[0].subnetwork.name") + gce_tags: tags | json_query("items") + gce_zone: zone +hostnames: +- name +- public_ip +- private_ip +keyed_groups: +- key: networkInterfaces | json_query("[0].subnetwork.name") + prefix: 
network +- key: networkInterfaces | json_query("[0].networkIP") + prefix: '' + separator: '' +- key: networkInterfaces | json_query("[0].accessConfigs[0].natIP") + prefix: '' + separator: '' +- key: machineType + prefix: '' + separator: '' +- key: zone + prefix: '' + separator: '' +- key: tags | json_query("items") + prefix: tag +- key: status | lower + prefix: status +plugin: gcp_compute +projects: +- fooo +service_account_file: {{ file_reference }} +use_contrib_script_compatible_sanitization: true +zones: +- us-east4-a +- us-west1-b diff --git a/awx/main/tests/data/inventory/plugins/gce/gcp_compute.yml b/awx/main/tests/data/inventory/plugins/gce/gcp_compute.yml deleted file mode 100644 index d94cc9cb8f..0000000000 --- a/awx/main/tests/data/inventory/plugins/gce/gcp_compute.yml +++ /dev/null @@ -1,9 +0,0 @@ -auth_kind: serviceaccount -filters: null -plugin: gcp_compute -projects: -- fooo -service_account_file: {{ file_reference }} -zones: -- us-east4-a -- us-west1-b diff --git a/awx/main/tests/data/inventory/plugins/openstack/file_reference b/awx/main/tests/data/inventory/plugins/openstack/files/file_reference similarity index 93% rename from awx/main/tests/data/inventory/plugins/openstack/file_reference rename to awx/main/tests/data/inventory/plugins/openstack/files/file_reference index 4023ac2b9d..daf13976f4 100644 --- a/awx/main/tests/data/inventory/plugins/openstack/file_reference +++ b/awx/main/tests/data/inventory/plugins/openstack/files/file_reference @@ -11,3 +11,4 @@ clouds: project_name: fooo username: fooo private: false + verify: false diff --git a/awx/main/tests/data/inventory/plugins/openstack/openstack.yml b/awx/main/tests/data/inventory/plugins/openstack/files/openstack.yml similarity index 80% rename from awx/main/tests/data/inventory/plugins/openstack/openstack.yml rename to awx/main/tests/data/inventory/plugins/openstack/files/openstack.yml index 69a9c3c982..2356b3c4ed 100644 --- a/awx/main/tests/data/inventory/plugins/openstack/openstack.yml 
+++ b/awx/main/tests/data/inventory/plugins/openstack/files/openstack.yml @@ -2,5 +2,5 @@ clouds_yaml_path: - {{ file_reference }} expand_hostvars: true fail_on_errors: true -inventory_hostname: name +inventory_hostname: uuid plugin: openstack diff --git a/awx/main/tests/data/inventory/plugins/satellite6/env.json b/awx/main/tests/data/inventory/plugins/satellite6/env.json new file mode 100644 index 0000000000..ae1941a8dc --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/satellite6/env.json @@ -0,0 +1,5 @@ +{ + "FOREMAN_SERVER": "https://foo.invalid", + "FOREMAN_USER": "fooo", + "FOREMAN_PASSWORD": "fooo" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml b/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml new file mode 100644 index 0000000000..7528dbc9e9 --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/satellite6/files/foreman.yml @@ -0,0 +1 @@ +plugin: foreman diff --git a/awx/main/tests/data/inventory/plugins/tower/env.json b/awx/main/tests/data/inventory/plugins/tower/env.json new file mode 100644 index 0000000000..d46d7e22ae --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/tower/env.json @@ -0,0 +1,6 @@ +{ + "TOWER_HOST": "https://foo.invalid", + "TOWER_USERNAME": "fooo", + "TOWER_PASSWORD": "fooo", + "TOWER_VERIFY_SSL": "False" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/plugins/tower/files/tower.yml b/awx/main/tests/data/inventory/plugins/tower/files/tower.yml new file mode 100644 index 0000000000..d8d0efdc9a --- /dev/null +++ b/awx/main/tests/data/inventory/plugins/tower/files/tower.yml @@ -0,0 +1,3 @@ +include_metadata: true +inventory_id: 42 +plugin: tower diff --git a/awx/main/tests/data/inventory/scripts/azure_rm/env.json b/awx/main/tests/data/inventory/scripts/azure_rm/env.json new file mode 100644 index 0000000000..acab1810f9 --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/azure_rm/env.json @@ -0,0 +1,8 @@ +{ + 
"AZURE_SUBSCRIPTION_ID": "fooo", + "AZURE_CLIENT_ID": "fooo", + "AZURE_TENANT": "fooo", + "AZURE_SECRET": "fooo", + "AZURE_CLOUD_ENVIRONMENT": "fooo", + "AZURE_INI_PATH": "{{ file_reference }}" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/azure_rm/AZURE_INI_PATH b/awx/main/tests/data/inventory/scripts/azure_rm/files/AZURE_INI_PATH similarity index 71% rename from awx/main/tests/data/inventory/scripts/azure_rm/AZURE_INI_PATH rename to awx/main/tests/data/inventory/scripts/azure_rm/files/AZURE_INI_PATH index 9e39d5560b..78f56e0e7a 100644 --- a/awx/main/tests/data/inventory/scripts/azure_rm/AZURE_INI_PATH +++ b/awx/main/tests/data/inventory/scripts/azure_rm/files/AZURE_INI_PATH @@ -5,4 +5,6 @@ group_by_location = yes group_by_tag = yes locations = southcentralus,westus base_source_var = value_of_var +use_private_ip = True +resource_groups = foo_resources,bar_resources diff --git a/awx/main/tests/data/inventory/scripts/cloudforms/env.json b/awx/main/tests/data/inventory/scripts/cloudforms/env.json new file mode 100644 index 0000000000..9d6bf03bda --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/cloudforms/env.json @@ -0,0 +1,3 @@ +{ + "CLOUDFORMS_INI_PATH": "{{ file_reference }}" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/cloudforms/CLOUDFORMS_INI_PATH b/awx/main/tests/data/inventory/scripts/cloudforms/files/CLOUDFORMS_INI_PATH similarity index 100% rename from awx/main/tests/data/inventory/scripts/cloudforms/CLOUDFORMS_INI_PATH rename to awx/main/tests/data/inventory/scripts/cloudforms/files/CLOUDFORMS_INI_PATH diff --git a/awx/main/tests/data/inventory/scripts/cloudforms/cache_dir b/awx/main/tests/data/inventory/scripts/cloudforms/files/cache_dir similarity index 100% rename from awx/main/tests/data/inventory/scripts/cloudforms/cache_dir rename to awx/main/tests/data/inventory/scripts/cloudforms/files/cache_dir diff --git a/awx/main/tests/data/inventory/scripts/ec2/env.json 
b/awx/main/tests/data/inventory/scripts/ec2/env.json new file mode 100644 index 0000000000..d8dc466709 --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/ec2/env.json @@ -0,0 +1,6 @@ +{ + "AWS_ACCESS_KEY_ID": "fooo", + "AWS_SECRET_ACCESS_KEY": "fooo", + "AWS_SECURITY_TOKEN": "fooo", + "EC2_INI_PATH": "{{ file_reference }}" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/ec2/EC2_INI_PATH b/awx/main/tests/data/inventory/scripts/ec2/files/EC2_INI_PATH similarity index 82% rename from awx/main/tests/data/inventory/scripts/ec2/EC2_INI_PATH rename to awx/main/tests/data/inventory/scripts/ec2/files/EC2_INI_PATH index d43f16ab06..aef5c1441c 100644 --- a/awx/main/tests/data/inventory/scripts/ec2/EC2_INI_PATH +++ b/awx/main/tests/data/inventory/scripts/ec2/files/EC2_INI_PATH @@ -1,5 +1,6 @@ [ec2] base_source_var = value_of_var +boto_profile = /tmp/my_boto_stuff regions = us-east-2,ap-south-1 regions_exclude = us-gov-west-1,cn-north-1 destination_variable = public_dns_name @@ -14,16 +15,16 @@ elasticache = False stack_filters = False instance_filters = foobaa group_by_ami_id = False -group_by_availability_zone = False +group_by_availability_zone = True group_by_aws_account = False group_by_instance_id = False group_by_instance_state = False group_by_platform = False -group_by_instance_type = False +group_by_instance_type = True group_by_key_pair = False -group_by_region = False +group_by_region = True group_by_security_group = False -group_by_tag_keys = False +group_by_tag_keys = True group_by_tag_none = False group_by_vpc_id = False cache_path = {{ cache_dir }} diff --git a/awx/main/tests/data/inventory/scripts/ec2/cache_dir b/awx/main/tests/data/inventory/scripts/ec2/files/cache_dir similarity index 100% rename from awx/main/tests/data/inventory/scripts/ec2/cache_dir rename to awx/main/tests/data/inventory/scripts/ec2/files/cache_dir diff --git a/awx/main/tests/data/inventory/scripts/gce/env.json 
b/awx/main/tests/data/inventory/scripts/gce/env.json new file mode 100644 index 0000000000..76ccfa623b --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/gce/env.json @@ -0,0 +1,7 @@ +{ + "GCE_EMAIL": "fooo", + "GCE_PROJECT": "fooo", + "GCE_CREDENTIALS_FILE_PATH": "{{ file_reference }}", + "GCE_ZONE": "us-east4-a,us-west1-b", + "GCE_INI_PATH": "{{ file_reference }}" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/gce/GCE_CREDENTIALS_FILE_PATH b/awx/main/tests/data/inventory/scripts/gce/files/GCE_CREDENTIALS_FILE_PATH similarity index 100% rename from awx/main/tests/data/inventory/scripts/gce/GCE_CREDENTIALS_FILE_PATH rename to awx/main/tests/data/inventory/scripts/gce/files/GCE_CREDENTIALS_FILE_PATH diff --git a/awx/main/tests/data/inventory/scripts/gce/GCE_INI_PATH b/awx/main/tests/data/inventory/scripts/gce/files/GCE_INI_PATH similarity index 100% rename from awx/main/tests/data/inventory/scripts/gce/GCE_INI_PATH rename to awx/main/tests/data/inventory/scripts/gce/files/GCE_INI_PATH diff --git a/awx/main/tests/data/inventory/scripts/openstack/env.json b/awx/main/tests/data/inventory/scripts/openstack/env.json new file mode 100644 index 0000000000..71903398b7 --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/openstack/env.json @@ -0,0 +1,3 @@ +{ + "OS_CLIENT_CONFIG_FILE": "{{ file_reference }}" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/openstack/OS_CLIENT_CONFIG_FILE b/awx/main/tests/data/inventory/scripts/openstack/files/OS_CLIENT_CONFIG_FILE similarity index 94% rename from awx/main/tests/data/inventory/scripts/openstack/OS_CLIENT_CONFIG_FILE rename to awx/main/tests/data/inventory/scripts/openstack/files/OS_CLIENT_CONFIG_FILE index fd93f64817..92a624965e 100644 --- a/awx/main/tests/data/inventory/scripts/openstack/OS_CLIENT_CONFIG_FILE +++ b/awx/main/tests/data/inventory/scripts/openstack/files/OS_CLIENT_CONFIG_FILE @@ -13,3 +13,4 @@ clouds: project_name: fooo username: 
fooo private: false + verify: false diff --git a/awx/main/tests/data/inventory/scripts/openstack/cache_dir b/awx/main/tests/data/inventory/scripts/openstack/files/cache_dir similarity index 100% rename from awx/main/tests/data/inventory/scripts/openstack/cache_dir rename to awx/main/tests/data/inventory/scripts/openstack/files/cache_dir diff --git a/awx/main/tests/data/inventory/scripts/rhv/env.json b/awx/main/tests/data/inventory/scripts/rhv/env.json new file mode 100644 index 0000000000..6896ec0f5f --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/rhv/env.json @@ -0,0 +1,6 @@ +{ + "OVIRT_INI_PATH": "{{ file_reference }}", + "OVIRT_URL": "https://foo.invalid", + "OVIRT_USERNAME": "fooo", + "OVIRT_PASSWORD": "fooo" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/rhv/OVIRT_INI_PATH b/awx/main/tests/data/inventory/scripts/rhv/files/OVIRT_INI_PATH similarity index 100% rename from awx/main/tests/data/inventory/scripts/rhv/OVIRT_INI_PATH rename to awx/main/tests/data/inventory/scripts/rhv/files/OVIRT_INI_PATH diff --git a/awx/main/tests/data/inventory/scripts/satellite6/env.json b/awx/main/tests/data/inventory/scripts/satellite6/env.json new file mode 100644 index 0000000000..73a0cd0d79 --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/satellite6/env.json @@ -0,0 +1,3 @@ +{ + "FOREMAN_INI_PATH": "{{ file_reference }}" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/satellite6/FOREMAN_INI_PATH b/awx/main/tests/data/inventory/scripts/satellite6/files/FOREMAN_INI_PATH similarity index 100% rename from awx/main/tests/data/inventory/scripts/satellite6/FOREMAN_INI_PATH rename to awx/main/tests/data/inventory/scripts/satellite6/files/FOREMAN_INI_PATH diff --git a/awx/main/tests/data/inventory/scripts/tower/env.json b/awx/main/tests/data/inventory/scripts/tower/env.json new file mode 100644 index 0000000000..e951d3484a --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/tower/env.json @@ 
-0,0 +1,8 @@ +{ + "TOWER_HOST": "https://foo.invalid", + "TOWER_USERNAME": "fooo", + "TOWER_PASSWORD": "fooo", + "TOWER_VERIFY_SSL": "False", + "TOWER_INVENTORY": "42", + "TOWER_LICENSE_TYPE": "open" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/vmware/env.json b/awx/main/tests/data/inventory/scripts/vmware/env.json new file mode 100644 index 0000000000..15d41e90b5 --- /dev/null +++ b/awx/main/tests/data/inventory/scripts/vmware/env.json @@ -0,0 +1,7 @@ +{ + "VMWARE_USER": "fooo", + "VMWARE_PASSWORD": "fooo", + "VMWARE_HOST": "https://foo.invalid", + "VMWARE_VALIDATE_CERTS": "False", + "VMWARE_INI_PATH": "{{ file_reference }}" +} \ No newline at end of file diff --git a/awx/main/tests/data/inventory/scripts/vmware/VMWARE_INI_PATH b/awx/main/tests/data/inventory/scripts/vmware/files/VMWARE_INI_PATH similarity index 100% rename from awx/main/tests/data/inventory/scripts/vmware/VMWARE_INI_PATH rename to awx/main/tests/data/inventory/scripts/vmware/files/VMWARE_INI_PATH diff --git a/awx/main/tests/functional/commands/test_inventory_import.py b/awx/main/tests/functional/commands/test_inventory_import.py index 979d97ebc9..630eca7b05 100644 --- a/awx/main/tests/functional/commands/test_inventory_import.py +++ b/awx/main/tests/functional/commands/test_inventory_import.py @@ -11,7 +11,8 @@ from django.core.management.base import CommandError # AWX from awx.main.management.commands import inventory_import -from awx.main.models import Inventory, Host, Group +from awx.main.models import Inventory, Host, Group, InventorySource +from awx.main.utils.mem_inventory import MemGroup TEST_INVENTORY_CONTENT = { @@ -306,3 +307,21 @@ class TestEnabledVar: def test_enabled_var_is_enabled_value(self, cmd): assert cmd._get_enabled({'foo': {'bar': 'barfoo'}}) is True + + +def test_tower_version_compare(): + cmd = inventory_import.Command() + cmd.inventory_source = InventorySource(source='tower') + cmd.all_group = MemGroup('all') + # mimic example from 
https://github.com/ansible/ansible/pull/52747 + # until that is merged, this is the best testing we can do + cmd.all_group.variables = { + 'tower_metadata': { + "ansible_version": "2.7.5", + "license_type": "open", + "version": "2.0.1-1068-g09684e2c41" + } + } + with pytest.raises(CommandError): + cmd.remote_tower_license_compare('very_supported') + cmd.remote_tower_license_compare('open') diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 1f761e3f84..8837339783 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -464,8 +464,9 @@ def group(inventory): @pytest.fixture def inventory_source(inventory): + # by making it ec2, the credential is not required return InventorySource.objects.create(name='single-inv-src', - inventory=inventory, source='gce') + inventory=inventory, source='ec2') @pytest.fixture diff --git a/awx/main/tests/functional/models/test_inventory.py b/awx/main/tests/functional/models/test_inventory.py index 1519c3b28e..37ac254c65 100644 --- a/awx/main/tests/functional/models/test_inventory.py +++ b/awx/main/tests/functional/models/test_inventory.py @@ -2,6 +2,7 @@ import pytest from unittest import mock +import json from django.core.exceptions import ValidationError @@ -13,6 +14,8 @@ from awx.main.models import ( InventoryUpdate, Job ) +from awx.main.constants import CLOUD_PROVIDERS +from awx.main.models.inventory import PluginFileInjector from awx.main.utils.filters import SmartFilter @@ -206,6 +209,103 @@ class TestSCMClean: inv_src2.clean_update_on_project_update() +@pytest.mark.django_db +class TestInventorySourceInjectors: + def test_should_use_plugin(self): + class foo(PluginFileInjector): + plugin_name = 'foo_compute' + initial_version = '2.7.8' + assert not foo('2.7.7').should_use_plugin() + assert foo('2.8').should_use_plugin() + + def test_extra_credentials(self, project, credential): + inventory_source = InventorySource.objects.create( + name='foo', 
source='custom', source_project=project + ) + inventory_source.credentials.add(credential) + assert inventory_source.get_cloud_credential() is None + assert inventory_source.get_extra_credentials() == [credential] + + inventory_source.source = 'ec2' + assert inventory_source.get_cloud_credential() == credential + assert inventory_source.get_extra_credentials() == [] + + def test_all_cloud_sources_covered(self): + """Code in several places relies on the fact that the older + CLOUD_PROVIDERS constant contains the same names as what are + defined within the injectors + """ + assert set(CLOUD_PROVIDERS) == set(InventorySource.injectors.keys()) + + @pytest.mark.parametrize('source,filename', [ + ('ec2', 'aws_ec2.yml'), + ('openstack', 'openstack.yml'), + ('gce', 'gcp_compute.yml') + ]) + def test_plugin_filenames(self, source, filename): + """It is important that the filenames for inventory plugin files + are named correctly, because Ansible will reject files that do + not have these exact names + """ + injector = InventorySource.injectors[source]('2.7.7') + assert injector.filename == filename + + @pytest.mark.parametrize('source,script_name', [ + ('ec2', 'ec2.py'), + ('rhv', 'ovirt4.py'), + ('satellite6', 'foreman.py'), + ('openstack', 'openstack_inventory.py') + ], ids=['ec2', 'rhv', 'satellite6', 'openstack']) + def test_script_filenames(self, source, script_name): + """Ansible has several exceptions in naming of scripts + """ + injector = InventorySource.injectors[source]('2.7.7') + assert injector.script_name == script_name + + def test_group_by_azure(self): + injector = InventorySource.injectors['azure_rm']('2.9') + inv_src = InventorySource( + name='azure source', source='azure_rm', + compatibility_mode=True, + source_vars={'group_by_os_family': True} + ) + group_by_on = injector.inventory_as_dict(inv_src, '/tmp/foo') + # suspicious, yes, that is just what the script did + expected_groups = 6 + assert len(group_by_on['keyed_groups']) == expected_groups + 
inv_src.source_vars = json.dumps({'group_by_os_family': False}) + group_by_off = injector.inventory_as_dict(inv_src, '/tmp/foo') + # much better, everyone should turn off the flag and live in the future + assert len(group_by_off['keyed_groups']) == expected_groups - 1 + + @pytest.mark.parametrize('source', ['ec2', 'azure_rm']) + def test_default_groupings_same(self, source): + """Just a sanity check, the number of groupings should be the same + with or without compatibility mode turned on. + This was a change made during feature development. + """ + injector = InventorySource.injectors[source]('2.9') + inv_src = InventorySource( + name='test source', source=source, compatibility_mode=True) + compat_on = injector.inventory_as_dict(inv_src, '/tmp/foo') + inv_src = InventorySource( + name='test source', source=source, compatibility_mode=False) + compat_off = injector.inventory_as_dict(inv_src, '/tmp/foo') + # Both default uses should give the same number of groups + assert len(compat_on['keyed_groups']) > 0 + assert len(compat_on['keyed_groups']) == len(compat_off['keyed_groups']) + + def test_tower_plugin_named_url(self): + injector = InventorySource.injectors['tower']('2.9') + inv_src = InventorySource( + name='my tower source', source='tower', + # named URL pattern "inventory++organization" + instance_filters='Designer hair 읰++Cosmetic_products䵆' + ) + result = injector.inventory_as_dict(inv_src, '/tmp/foo') + assert result['inventory_id'] == 'Designer%20hair%20%EC%9D%B0++Cosmetic_products%E4%B5%86' + + @pytest.fixture def setup_ec2_gce(organization): ec2_inv = Inventory.objects.create(name='test_ec2', organization=organization) diff --git a/awx/main/tests/functional/test_inventory_source_injectors.py b/awx/main/tests/functional/test_inventory_source_injectors.py index d47ccde831..322204153a 100644 --- a/awx/main/tests/functional/test_inventory_source_injectors.py +++ b/awx/main/tests/functional/test_inventory_source_injectors.py @@ -6,9 +6,11 @@ import re from 
awx.main.tasks import RunInventoryUpdate from awx.main.models import InventorySource, Credential, CredentialType, UnifiedJob -from awx.main.constants import CLOUD_PROVIDERS +from awx.main.constants import CLOUD_PROVIDERS, STANDARD_INVENTORY_UPDATE_ENV from awx.main.tests import data +from django.conf import settings + DATA = os.path.join(os.path.dirname(data.__file__), 'inventory') TEST_SOURCE_FIELDS = { @@ -18,7 +20,8 @@ TEST_SOURCE_FIELDS = { }, 'ec2': { 'instance_filters': 'foobaa', - 'group_by': 'fouo', + # group_by selected to capture some non-trivial cross-interactions + 'group_by': 'availability_zone,instance_type,tag_keys,region', 'source_regions': 'us-east-2,ap-south-1' }, 'gce': { @@ -27,10 +30,15 @@ TEST_SOURCE_FIELDS = { 'azure_rm': { 'source_regions': 'southcentralus,westus' }, + 'tower': { + 'instance_filters': '42' + } } INI_TEST_VARS = { - 'ec2': {}, + 'ec2': { + 'boto_profile': '/tmp/my_boto_stuff' + }, 'gce': {}, 'openstack': { 'private': False, @@ -43,7 +51,10 @@ INI_TEST_VARS = { 'vmware': { # setting VMWARE_VALIDATE_CERTS is duplicated with env var }, - 'azure_rm': {}, # there are none + 'azure_rm': { + 'use_private_ip': True, + 'resource_groups': 'foo_resources,bar_resources' + }, 'satellite6': { 'satellite6_group_patterns': 'foo_group_patterns', 'satellite6_group_prefix': 'foo_group_prefix', @@ -111,12 +122,23 @@ def fake_credential_factory(source): ) -def read_content(private_data_dir, env, inventory_update): +def read_content(private_data_dir, raw_env, inventory_update): """Read the environmental data laid down by the task system template out private and secret data so they will be readable and predictable return a dictionary `content` with file contents, keyed off environment variable that references the file """ + # Filter out environment variables which come from runtime environment + env = {} + exclude_keys = set(('PATH', 'INVENTORY_SOURCE_ID', 'INVENTORY_UPDATE_ID')) + for key in dir(settings): + if key.startswith('ANSIBLE_'): + 
exclude_keys.add(key) + for k, v in raw_env.items(): + if k in STANDARD_INVENTORY_UPDATE_ENV or k in exclude_keys: + continue + if k not in os.environ or v != os.environ[k]: + env[k] = v inverse_env = {} for key, value in env.items(): inverse_env[value] = key @@ -131,7 +153,9 @@ def read_content(private_data_dir, env, inventory_update): for filename in os.listdir(private_data_dir): abs_file_path = os.path.join(private_data_dir, filename) if abs_file_path in inverse_env: - references[abs_file_path] = inverse_env[abs_file_path] + env_key = inverse_env[abs_file_path] + references[abs_file_path] = env_key + env[env_key] = '{{ file_reference }}' try: with open(abs_file_path, 'r') as f: dir_contents[abs_file_path] = f.read() @@ -181,21 +205,28 @@ def read_content(private_data_dir, env, inventory_update): file_content = private_key_regex.sub('{{private_key}}', file_content) content[reference_key] = file_content - return content + return (env, content) -def create_reference_data(ref_dir, content): - if not os.path.exists(ref_dir): - os.mkdir(ref_dir) - for env_name, content in content.items(): - with open(os.path.join(ref_dir, env_name), 'w') as f: - f.write(content) +def create_reference_data(source_dir, env, content): + if not os.path.exists(source_dir): + os.mkdir(source_dir) + if content: + files_dir = os.path.join(source_dir, 'files') + if not os.path.exists(files_dir): + os.mkdir(files_dir) + for env_name, content in content.items(): + with open(os.path.join(files_dir, env_name), 'w') as f: + f.write(content) + if env: + with open(os.path.join(source_dir, 'env.json'), 'w') as f: + f.write(json.dumps(env, indent=4)) @pytest.mark.django_db @pytest.mark.parametrize('this_kind', CLOUD_PROVIDERS) @pytest.mark.parametrize('script_or_plugin', ['scripts', 'plugins']) -def test_inventory_script_structure(this_kind, script_or_plugin, inventory): +def test_inventory_update_injected_content(this_kind, script_or_plugin, inventory): src_vars = dict(base_source_var='value_of_var') 
if this_kind in INI_TEST_VARS: src_vars.update(INI_TEST_VARS[this_kind]) @@ -206,6 +237,7 @@ def test_inventory_script_structure(this_kind, script_or_plugin, inventory): inventory=inventory, source=this_kind, source_vars=src_vars, + compatibility_mode=True, **extra_kwargs ) inventory_source.credentials.add(fake_credential_factory(this_kind)) @@ -213,44 +245,57 @@ def test_inventory_script_structure(this_kind, script_or_plugin, inventory): task = RunInventoryUpdate() use_plugin = bool(script_or_plugin == 'plugins') - if use_plugin: - if this_kind not in InventorySource.injectors: - pytest.skip('Injector class for this source is not written yet') - elif InventorySource.injectors[this_kind].initial_version is None: - pytest.skip('Use of inventory plugin is not enabled for this source') + if use_plugin and InventorySource.injectors[this_kind].plugin_name is None: + pytest.skip('Use of inventory plugin is not enabled for this source') - def substitute_run(args, cwd, env, stdout_handle, **_kw): + def substitute_run(args, cwd, call_env, stdout_handle, **_kw): """This method will replace run_pexpect instead of running, it will read the private data directory contents It will make assertions that the contents are correct If MAKE_INVENTORY_REFERENCE_FILES is set, it will produce reference files """ - private_data_dir = env['AWX_PRIVATE_DATA_DIR'] + private_data_dir = call_env.pop('AWX_PRIVATE_DATA_DIR') + assert call_env.pop('ANSIBLE_INVENTORY_ENABLED') == ('auto' if use_plugin else 'script') + assert call_env.pop('ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS') == 'never' set_files = bool(os.getenv("MAKE_INVENTORY_REFERENCE_FILES", 'false').lower()[0] not in ['f', '0']) - content = read_content(private_data_dir, env, inventory_update) + env, content = read_content(private_data_dir, call_env, inventory_update) base_dir = os.path.join(DATA, script_or_plugin) if not os.path.exists(base_dir): os.mkdir(base_dir) - ref_dir = os.path.join(base_dir, this_kind) # this_kind is a global + 
source_dir = os.path.join(base_dir, this_kind) # this_kind is a global if set_files: - create_reference_data(ref_dir, content) + create_reference_data(source_dir, env, content) pytest.skip('You set MAKE_INVENTORY_REFERENCE_FILES, so this created files, unset to run actual test.') else: - try: - expected_file_list = os.listdir(ref_dir) - except FileNotFoundError as e: + if not os.path.exists(source_dir): raise FileNotFoundError( 'Maybe you never made reference files? ' - 'MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}'.format(e)) + 'MAKE_INVENTORY_REFERENCE_FILES=true py.test ...\noriginal: {}') + files_dir = os.path.join(source_dir, 'files') + try: + expected_file_list = os.listdir(files_dir) + except FileNotFoundError: + expected_file_list = [] assert set(expected_file_list) == set(content.keys()), ( 'Inventory update runtime environment does not have expected files' ) for f_name in expected_file_list: - with open(os.path.join(ref_dir, f_name), 'r') as f: + with open(os.path.join(files_dir, f_name), 'r') as f: ref_content = f.read() - assert content[f_name] == ref_content + assert ref_content == content[f_name] + try: + with open(os.path.join(source_dir, 'env.json'), 'r') as f: + ref_env_text = f.read() + ref_env = json.loads(ref_env_text) + except FileNotFoundError: + ref_env = {} + assert ref_env == env return ('successful', 0) + mock_licenser = mock.Mock(return_value=mock.Mock( + validate=mock.Mock(return_value={'license_type': 'open'}) + )) + # Mock this so that it will not send events to the callback receiver # because doing so in pytest land creates large explosions with mock.patch('awx.main.queue.CallbackQueueDispatcher.dispatch', lambda self, obj: None): @@ -260,5 +305,7 @@ def test_inventory_script_structure(this_kind, script_or_plugin, inventory): with mock.patch.object(UnifiedJob, 'websocket_emit_status', mock.Mock()): # The point of this test is that we replace run_pexpect with assertions with 
mock.patch('awx.main.expect.run.run_pexpect', substitute_run): - # so this sets up everything for a run and then yields control over to substitute_run - task.run(inventory_update.pk) + # mocking the licenser is necessary for the tower source + with mock.patch('awx.main.models.inventory.get_licenser', mock_licenser): + # so this sets up everything for a run and then yields control over to substitute_run + task.run(inventory_update.pk) diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index e442643b8c..e92fa6a42e 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -167,7 +167,8 @@ def test_openstack_client_config_generation(mocker, source, expected, private_da inventory_update = mocker.Mock(**{ 'source': 'openstack', 'source_vars_dict': {}, - 'get_cloud_credential': cred_method + 'get_cloud_credential': cred_method, + 'get_extra_credentials': lambda x: [] }) cloud_config = update.build_private_data(inventory_update, private_data_dir) cloud_credential = yaml.load( @@ -208,7 +209,8 @@ def test_openstack_client_config_generation_with_private_source_vars(mocker, sou inventory_update = mocker.Mock(**{ 'source': 'openstack', 'source_vars_dict': {'private': source}, - 'get_cloud_credential': cred_method + 'get_cloud_credential': cred_method, + 'get_extra_credentials': lambda x: [] }) cloud_config = update.build_private_data(inventory_update, private_data_dir) cloud_credential = yaml.load( @@ -1759,6 +1761,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): task = tasks.RunInventoryUpdate() inventory_update.source = 'ec2' inventory_update.get_cloud_credential = mocker.Mock(return_value=None) + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) private_data_files = task.build_private_data_files(inventory_update, private_data_dir) env = task.build_env(inventory_update, private_data_dir, False, private_data_files) @@ -1781,7 +1784,7 @@ class 
TestInventoryUpdateCredentials(TestJobExecution): if with_credential: azure_rm = CredentialType.defaults['azure_rm']() - def get_cred(): + def get_creds(): cred = Credential( pk=1, credential_type=azure_rm, @@ -1792,10 +1795,11 @@ class TestInventoryUpdateCredentials(TestJobExecution): 'subscription': 'some-subscription', } ) - return cred - inventory_update.get_cloud_credential = get_cred + return [cred] + inventory_update.get_extra_credentials = get_creds else: - inventory_update.get_cloud_credential = mocker.Mock(return_value=None) + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) + inventory_update.get_cloud_credential = mocker.Mock(return_value=None) env = task.build_env(inventory_update, private_data_dir, False) args = task.build_args(inventory_update, private_data_dir, {}) @@ -1818,7 +1822,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['AZURE_TENANT'] == 'some-tenant' assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription' - def test_ec2_source(self, private_data_dir, inventory_update): + def test_ec2_source(self, private_data_dir, inventory_update, mocker): task = tasks.RunInventoryUpdate() aws = CredentialType.defaults['aws']() inventory_update.source = 'ec2' @@ -1832,6 +1836,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): cred.inputs['password'] = encrypt_field(cred, 'password') return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) private_data_files = task.build_private_data_files(inventory_update, private_data_dir) env = task.build_env(inventory_update, private_data_dir, False, private_data_files) @@ -1854,7 +1859,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AWS_SECRET_ACCESS_KEY'] == tasks.HIDDEN_PASSWORD - def test_vmware_source(self, inventory_update, private_data_dir): + def test_vmware_source(self, inventory_update, private_data_dir, mocker): task = tasks.RunInventoryUpdate() 
vmware = CredentialType.defaults['vmware']() inventory_update.source = 'vmware' @@ -1868,6 +1873,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): cred.inputs['password'] = encrypt_field(cred, 'password') return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) private_data_files = task.build_private_data_files(inventory_update, private_data_dir) env = task.build_env(inventory_update, private_data_dir, False, private_data_files) @@ -1886,7 +1892,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert config.get('vmware', 'password') == 'secret' assert config.get('vmware', 'server') == 'https://example.org' - def test_azure_rm_source_with_tenant(self, private_data_dir, inventory_update): + def test_azure_rm_source_with_tenant(self, private_data_dir, inventory_update, mocker): task = tasks.RunInventoryUpdate() azure_rm = CredentialType.defaults['azure_rm']() inventory_update.source = 'azure_rm' @@ -1906,6 +1912,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): ) return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) inventory_update.source_vars = { 'include_powerstate': 'yes', 'group_by_resource_group': 'no' @@ -1939,7 +1946,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert safe_env['AZURE_SECRET'] == tasks.HIDDEN_PASSWORD - def test_azure_rm_source_with_password(self, private_data_dir, inventory_update): + def test_azure_rm_source_with_password(self, private_data_dir, inventory_update, mocker): task = tasks.RunInventoryUpdate() azure_rm = CredentialType.defaults['azure_rm']() inventory_update.source = 'azure_rm' @@ -1958,6 +1965,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): ) return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) inventory_update.source_vars = { 
'include_powerstate': 'yes', 'group_by_resource_group': 'no', @@ -1990,7 +1998,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert 'locations' not in config.items('azure') assert safe_env['AZURE_PASSWORD'] == tasks.HIDDEN_PASSWORD - def test_gce_source(self, inventory_update, private_data_dir): + def test_gce_source(self, inventory_update, private_data_dir, mocker): task = tasks.RunInventoryUpdate() gce = CredentialType.defaults['gce']() inventory_update.source = 'gce' @@ -2011,6 +2019,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): ) return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) def run(expected_gce_zone): private_data_files = task.build_private_data_files(inventory_update, private_data_dir) @@ -2042,7 +2051,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): self.instance.source_regions = 'us-east-4' run('us-east-4') - def test_openstack_source(self, inventory_update, private_data_dir): + def test_openstack_source(self, inventory_update, private_data_dir, mocker): task = tasks.RunInventoryUpdate() openstack = CredentialType.defaults['openstack']() inventory_update.source = 'openstack' @@ -2064,6 +2073,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): ) return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) private_data_files = task.build_private_data_files(inventory_update, private_data_dir) env = task.build_env(inventory_update, private_data_dir, False, private_data_files) @@ -2080,7 +2090,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): '' ]) in shade_config - def test_satellite6_source(self, inventory_update, private_data_dir): + def test_satellite6_source(self, inventory_update, private_data_dir, mocker): task = tasks.RunInventoryUpdate() satellite6 = CredentialType.defaults['satellite6']() inventory_update.source = 'satellite6' @@ 
-2100,6 +2110,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): ) return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) inventory_update.source_vars = '{"satellite6_group_patterns": "[a,b,c]", "satellite6_group_prefix": "hey_", "satellite6_want_hostcollections": True}' @@ -2115,7 +2126,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert config.get('ansible', 'group_prefix') == 'hey_' assert config.get('ansible', 'want_hostcollections') == 'True' - def test_cloudforms_source(self, inventory_update, private_data_dir): + def test_cloudforms_source(self, inventory_update, private_data_dir, mocker): task = tasks.RunInventoryUpdate() cloudforms = CredentialType.defaults['cloudforms']() inventory_update.source = 'cloudforms' @@ -2135,6 +2146,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): ) return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) inventory_update.source_vars = '{"prefer_ipv4": True}' @@ -2154,7 +2166,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert os.path.isdir(cache_path) @pytest.mark.parametrize('verify', [True, False]) - def test_tower_source(self, verify, inventory_update, private_data_dir): + def test_tower_source(self, verify, inventory_update, private_data_dir, mocker): task = tasks.RunInventoryUpdate() tower = CredentialType.defaults['tower']() inventory_update.source = 'tower' @@ -2171,6 +2183,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): cred.inputs['password'] = encrypt_field(cred, 'password') return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) env = task.build_env(inventory_update, private_data_dir, False) @@ -2192,7 +2205,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['TOWER_VERIFY_SSL'] == 'False' assert 
safe_env['TOWER_PASSWORD'] == tasks.HIDDEN_PASSWORD - def test_tower_source_ssl_verify_empty(self, inventory_update, private_data_dir): + def test_tower_source_ssl_verify_empty(self, inventory_update, private_data_dir, mocker): task = tasks.RunInventoryUpdate() tower = CredentialType.defaults['tower']() inventory_update.source = 'tower' @@ -2208,6 +2221,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): cred.inputs['password'] = encrypt_field(cred, 'password') return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) env = task.build_env(inventory_update, private_data_dir, False) safe_env = {} @@ -2220,7 +2234,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert env['TOWER_VERIFY_SSL'] == 'False' - def test_awx_task_env(self, inventory_update, private_data_dir, settings): + def test_awx_task_env(self, inventory_update, private_data_dir, settings, mocker): task = tasks.RunInventoryUpdate() gce = CredentialType.defaults['gce']() inventory_update.source = 'gce' @@ -2236,6 +2250,7 @@ class TestInventoryUpdateCredentials(TestJobExecution): ) return cred inventory_update.get_cloud_credential = get_cred + inventory_update.get_extra_credentials = mocker.Mock(return_value=[]) settings.AWX_TASK_ENV = {'FOO': 'BAR'} env = task.build_env(inventory_update, private_data_dir, False) diff --git a/awx/main/tests/unit/utils/test_ansible.py b/awx/main/tests/unit/utils/test_ansible.py index 7c0cb4f101..40b3e6ac1e 100644 --- a/awx/main/tests/unit/utils/test_ansible.py +++ b/awx/main/tests/unit/utils/test_ansible.py @@ -1,6 +1,5 @@ import os import os.path -import json import pytest @@ -32,10 +31,3 @@ def test_could_be_inventory(filename): def test_is_not_inventory(filename): path = os.path.join(DATA, 'inventories', 'invalid') assert could_be_inventory(DATA, path, filename) is None - - -def test_filter_non_json_lines(): - data = {'foo': 'bar', 'bar': 'foo'} - dumped_data = 
json.dumps(data, indent=2) - output = 'Openstack does this\nOh why oh why\n{}\ntrailing lines\nneed testing too'.format(dumped_data) - assert filter_non_json_lines(output) == dumped_data diff --git a/awx/main/utils/ansible.py b/awx/main/utils/ansible.py index 4854163d36..7e68d88189 100644 --- a/awx/main/utils/ansible.py +++ b/awx/main/utils/ansible.py @@ -11,7 +11,7 @@ from itertools import islice from django.utils.encoding import smart_str -__all__ = ['skip_directory', 'could_be_playbook', 'could_be_inventory', 'filter_non_json_lines'] +__all__ = ['skip_directory', 'could_be_playbook', 'could_be_inventory'] valid_playbook_re = re.compile(r'^\s*?-?\s*?(?:hosts|include|import_playbook):\s*?.*?$') @@ -97,67 +97,3 @@ def could_be_inventory(project_path, dir_path, filename): except IOError: return None return inventory_rel_path - - -# This method is copied directly from Ansible core code base -# lib/ansible/module_utils/json_utils.py -# For purpose, see: https://github.com/ansible/ansible/issues/50100 -# Any patches to this method should sync from that version -# NB: a copy of this function exists in ../../modules/core/async_wrapper.py. Ensure any -# changes are propagated there. -def _filter_non_json_lines(data): - ''' - Used to filter unrelated output around module JSON output, like messages from - tcagetattr, or where dropbear spews MOTD on every single command (which is nuts). - Filters leading lines before first line-starting occurrence of '{' or '[', and filter all - trailing lines after matching close character (working from the bottom of output). 
- ''' - warnings = [] - - # Filter initial junk - lines = data.splitlines() - - for start, line in enumerate(lines): - line = line.strip() - if line.startswith(u'{'): - endchar = u'}' - break - elif line.startswith(u'['): - endchar = u']' - break - else: - raise ValueError('No start of json char found') - - # Filter trailing junk - lines = lines[start:] - - for reverse_end_offset, line in enumerate(reversed(lines)): - if line.strip().endswith(endchar): - break - else: - raise ValueError('No end of json char found') - - if reverse_end_offset > 0: - # Trailing junk is uncommon and can point to things the user might - # want to change. So print a warning if we find any - trailing_junk = lines[len(lines) - reverse_end_offset:] - for line in trailing_junk: - if line.strip(): - warnings.append('Module invocation had junk after the JSON data: %s' % '\n'.join(trailing_junk)) - break - - lines = lines[:(len(lines) - reverse_end_offset)] - - # NOTE: warnings are undesired (would prevent JSON parsing) - # so this change diverges from the source by not using the warnings - # original: - # return ('\n'.join(lines), warnings) - return '\n'.join(lines) - - -def filter_non_json_lines(data): - # Optimization on top of Ansible's method to avoid operations on large - # strings when it is given in standard ansible-inventory form - if data.startswith(u'{') and data.endswith(u'}'): - return data - return _filter_non_json_lines(data) diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index a3279aa094..34f58a0b41 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -168,8 +168,8 @@ def _get_ansible_version(ansible_path): @memoize() -def get_ansible_version(ansible_path='ansible'): - return _get_ansible_version(ansible_path) +def get_ansible_version(): + return _get_ansible_version('ansible') @memoize() diff --git a/awx/ui/client/features/output/details.component.js b/awx/ui/client/features/output/details.component.js index 67f3bee4a0..1ccb2047e1 100644 --- 
a/awx/ui/client/features/output/details.component.js +++ b/awx/ui/client/features/output/details.component.js @@ -225,6 +225,17 @@ function getOverwriteVarsDetails () { return { label, value }; } +function getCompatibilityModeDetails () { + if (!resource.model.has('compatibility_mode')) { + return null; + } + + const label = strings.get('labels.COMPATIBILITY_MODE'); + const value = resource.model.get('compatibility_mode'); + + return { label, value }; +} + function getLicenseErrorDetails () { if (!resource.model.has('license_error')) { return null; @@ -815,6 +826,7 @@ function JobDetailsController ( vm.inventorySource = getInventorySourceDetails(); vm.overwrite = getOverwriteDetails(); vm.overwriteVars = getOverwriteVarsDetails(); + vm.compatibilityMode = getCompatibilityModeDetails(); vm.licenseError = getLicenseErrorDetails(); vm.hostLimitError = getHostLimitErrorDetails(); diff --git a/awx/ui/client/features/output/details.partial.html b/awx/ui/client/features/output/details.partial.html index 5ad6dde62e..0691841b22 100644 --- a/awx/ui/client/features/output/details.partial.html +++ b/awx/ui/client/features/output/details.partial.html @@ -291,6 +291,14 @@ + +
+ +
+ {{ vm.compatibilityMode.value }} +
+
+
diff --git a/awx/ui/client/features/output/output.strings.js b/awx/ui/client/features/output/output.strings.js index 982070837e..de0cd5db59 100644 --- a/awx/ui/client/features/output/output.strings.js +++ b/awx/ui/client/features/output/output.strings.js @@ -76,6 +76,7 @@ function OutputStrings (BaseString) { NAME: t.s('Name'), OVERWRITE: t.s('Overwrite'), OVERWRITE_VARS: t.s('Overwrite Vars'), + COMPATIBILITY_MODE: t.s('Compatibility Mode'), PLAYBOOK: t.s('Playbook'), PROJECT: t.s('Project'), RESULT_TRACEBACK: t.s('Error Details'), diff --git a/awx/ui/client/src/inventories-hosts/inventories/related/sources/add/sources-add.controller.js b/awx/ui/client/src/inventories-hosts/inventories/related/sources/add/sources-add.controller.js index c4c7521859..51bf440d36 100644 --- a/awx/ui/client/src/inventories-hosts/inventories/related/sources/add/sources-add.controller.js +++ b/awx/ui/client/src/inventories-hosts/inventories/related/sources/add/sources-add.controller.js @@ -198,6 +198,7 @@ export default ['$state', '$stateParams', '$scope', 'SourcesFormDefinition', $scope.group_by = null; $scope.group_by_choices = []; $scope.overwrite_vars = false; + $scope.compatibility_mode = true; initRegionSelect(); }; // region / source options callback @@ -276,6 +277,7 @@ export default ['$state', '$stateParams', '$scope', 'SourcesFormDefinition', credential: $scope.credential, overwrite: $scope.overwrite, overwrite_vars: $scope.overwrite_vars, + compatibility_mode: $scope.compatibility_mode, update_on_launch: $scope.update_on_launch, verbosity: $scope.verbosity.value, update_cache_timeout: $scope.update_cache_timeout || 0, diff --git a/awx/ui/client/src/inventories-hosts/inventories/related/sources/edit/sources-edit.controller.js b/awx/ui/client/src/inventories-hosts/inventories/related/sources/edit/sources-edit.controller.js index ac1a315882..e35589a29f 100644 --- a/awx/ui/client/src/inventories-hosts/inventories/related/sources/edit/sources-edit.controller.js +++ 
b/awx/ui/client/src/inventories-hosts/inventories/related/sources/edit/sources-edit.controller.js @@ -25,6 +25,7 @@ export default ['$state', '$scope', 'ParseVariableString', 'ParseTypeChange', {credential: inventorySourceData.credential}, {overwrite: inventorySourceData.overwrite}, {overwrite_vars: inventorySourceData.overwrite_vars}, + {compatibility_mode: inventorySourceData.compatibility_mode}, {update_on_launch: inventorySourceData.update_on_launch}, {update_cache_timeout: inventorySourceData.update_cache_timeout}, {instance_filters: inventorySourceData.instance_filters}, @@ -326,6 +327,7 @@ export default ['$state', '$scope', 'ParseVariableString', 'ParseTypeChange', credential: $scope.credential, overwrite: $scope.overwrite, overwrite_vars: $scope.overwrite_vars, + compatibility_mode: $scope.compatibility_mode, update_on_launch: $scope.update_on_launch, update_cache_timeout: $scope.update_cache_timeout || 0, verbosity: $scope.verbosity.value, @@ -402,6 +404,7 @@ export default ['$state', '$scope', 'ParseVariableString', 'ParseTypeChange', $scope.group_by = null; $scope.group_by_choices = []; $scope.overwrite_vars = false; + $scope.compatibility_mode = true; initRegionSelect(); diff --git a/awx/ui/client/src/inventories-hosts/inventories/related/sources/sources.form.js b/awx/ui/client/src/inventories-hosts/inventories/related/sources/sources.form.js index 2a2c01d389..a2ad54bdef 100644 --- a/awx/ui/client/src/inventories-hosts/inventories/related/sources/sources.form.js +++ b/awx/ui/client/src/inventories-hosts/inventories/related/sources/sources.form.js @@ -354,6 +354,17 @@ export default ['NotificationsList', 'i18n', function(NotificationsList, i18n){ dataContainer: 'body', dataPlacement: 'right', ngDisabled: "(!(inventory_source_obj.summary_fields.user_capabilities.edit || canAdd))" + }, { + name: 'compatibility_mode', + label: i18n._('Compatibility Mode'), + type: 'checkbox', + ngShow: "source.value !== '' && source.value !== null", + awPopOver: "

" + i18n._("If checked, additional duplicate host variables will be added to obtain compatibility with the old inventory scripts.") + '

' + + i18n._("When not checked and running as inventory plugins, only modern variable names will be used.") + "

", + dataTitle: i18n._('Compatibility Mode'), + dataContainer: 'body', + dataPlacement: 'right', + ngDisabled: "(!(inventory_source_obj.summary_fields.user_capabilities.edit || canAdd))" }, { name: 'update_on_launch', label: i18n._('Update on Launch'), diff --git a/docs/inventory_plugins.md b/docs/inventory_plugins.md new file mode 100644 index 0000000000..5980c214de --- /dev/null +++ b/docs/inventory_plugins.md @@ -0,0 +1,181 @@ +# Transition to Ansible Inventory Plugins +Inventory updates change from using scripts which are vendored as executable +python scripts in the AWX folder `awx/plugins/inventory` (taken originally from +Ansible folder `contrib/inventory`) to using dynamically-generated +YAML files which conform to the specifications of the `auto` inventory plugin +which are then parsed by their respective inventory plugin. + +The major organizational change is that the inventory plugins are +part of the Ansible core distribution, whereas the same logic used to +be a part of AWX source. + +## Prior Background for Transition + +AWX used to maintain logic that parsed `.ini` inventory file contents, +in addition to interpreting the JSON output of scripts, re-calling with +the `--host` option in the case the `_meta.hostvars` key was not provided. + +### Switch to Ansible Inventory + +The CLI entry point `ansible-inventory` was introduced in Ansible 2.4. +In Tower 3.2, inventory imports began running this command +as an intermediary between the inventory and +the import's logic to save content to database. Using `ansible-inventory` +eliminates the need to maintain source-specific logic, +relying on Ansible's code instead. This also allows us to +count on a consistent data structure outputted from `ansible-inventory`. +There are many valid structures that a script can provide, but the output +from `ansible-inventory` will always be the same, +thus the AWX logic to parse the content is simplified. 
+This is why even scripts must be run through the `ansible-inventory` CLI. + +Along with this switchover, a backported version of +`ansible-inventory` was provided that supported Ansible versions 2.2 and 2.3. + +### Removal of Backport + +In AWX 3.0.0 (and Tower 3.5), the backport of `ansible-inventory` +was removed, and support for using custom virtual environments was added. +This set the minimum version of Ansible necessary to run _any_ +inventory update to 2.4. + +## Inventory Plugin Versioning + +Beginning in Ansible 2.5, inventory sources in Ansible started migrating +away from "contrib" scripts (meaning they lived in the contrib folder) +to the inventory plugin model. + +In AWX 4.0.0 (and Tower 3.5) inventory source types start to switch over +to plugins, provided that sufficient compatibility is in place for +the version of Ansible present in the local virtualenv. + +To see at what version the plugin transition will happen, see +`awx/main/models/inventory.py` and look for the source name as a +subclass of `PluginFileInjector`, and there should be an `initial_version` +which is the first version that testing deemed to have sufficient parity +in the content its inventory plugin returns. For example, `openstack` will +begin using the inventory plugin in Ansible version 2.8. +If you run an openstack inventory update with Ansible +2.7.x or lower, it will use the script. + +### Sunsetting the scripts + +Eventually, it is intended that all source types will have moved to +plugins. For any given source, after the `initial_version` for plugin use +is higher than the lowest supported Ansible version, the script can be +removed and the logic for script credential injection will also be removed. + +For example, after AWX no longer supports Ansible 2.7, the script +`awx/plugins/openstack_inventory.py` will be removed. + +## Changes to Expect in Imports + +An effort was made to keep imports working in the exact same way after +the switchover.
However, the inventory plugins are a fundamental rewrite +and many elements of default behavior have changed. Because of that, +a `compatibility_mode` toggle was added. This defaults to True. + +Turning off compatibility mode will be more future-proof. +Keeping it on will be more stable and consistent. + +### Changes with Compatibility Mode Off + +The set of `hostvars` will be almost completely different, using new names +for data which is mostly the same content. You can see the jinja2 keyed_groups +construction used in compatibility mode to help get a sense of what +new names replace old names. + +If you turn compatibility mode off or downgrade Ansible, you should +consider turning on `overwrite` and `overwrite_vars` to get rid of stale +variables (and potentially groups) no longer returned by the import. + +In many cases, the host names will change. In all cases, accurate host +tracking will still be maintained via the host `instance_id`. +(after: https://github.com/ansible/awx/pull/3362) + +Group names will be sanitized with compatibility mode turned off. +That means that characters such as "-" will +be replaced by underscores "\_". In some cases, this means that a large +fraction of groups get renamed as you move from scripts to plugins. +This will become the default Ansible behavior on the CLI eventually. + +### Changes with Compatibility Mode On + +Programmatically-generated examples of inventory file syntax used in +updates (with dummy data) can be found in `awx/main/tests/data/inventory/scripts`; +these demonstrate the inventory file syntax used to restore old behavior +from the inventory scripts. + +#### hostvar keys and values + +More hostvars will appear if the inventory plugins are used with compatibility +mode on. To maintain backward compatibility, +the old names are added back where they have the same meaning as a +variable returned by the plugin. New names are not removed. + +Some hostvars will be lost, because of general deprecation needs.
+ + - ec2 (see https://github.com/ansible/ansible/issues/52358) + - gce (see https://github.com/ansible/ansible/issues/51884) + - `gce_uuid`: this came from libcloud and isn't a true GCP field; + inventory plugins have moved away from libcloud + + The syntax of some hostvars, for some values, will change. + + - ec2 + - old: "ec2_block_devices": {"sda1": "vol-xxxxxx"} + - new: "ec2_block_devices": {"/dev/sda1": "vol-xxxxxx"} + +#### Host names + +Host names might change, but tracking host identity via `instance_id` +will still be reliable. + +## How do I write my own Inventory File? + +If you do not want any of this compatibility-related functionality, then +you can add an SCM inventory source that points to your own file. +You can also apply a credential of a `managed_by_tower` type to that inventory +source that matches the credential you are using, as long as that is +not `gce` or `openstack`. + +All other sources provide _secrets_ via environment variables, so this +can be re-used without any problems for SCM-based inventory, and your +inventory file can be used securely to specify non-sensitive configuration +details such as the keyed_groups to provide, or hostvars to construct. + +## Notes on Technical Implementation of Injectors + +For an inventory source with a given value of the `source` field that is +of the built-in sources, a credential of the corresponding +credential type is required in most cases (exception being ec2 IAM roles). +This privileged credential is obtained by the method `get_cloud_credential`. + +The `inputs` for this credential constitute one source of data for running +inventory updates. The following fields from the +`InventoryUpdate` model are also data sources, including: + + - `source_vars` + - `source_regions` + - `instance_filters` + - `group_by` + +The way these data are applied to the environment (including files and +environment vars) is highly dependent on the specific source.
 + +With plugins, the inventory file may reference files that contain secrets +from the credential. With scripts, typically an environment variable +will reference a filename that contains a ConfigParser format file with +parameters for the update, and possibly including fields from the credential. + +Caution: Please do not put secrets from the credential into the +inventory file for the plugin. Right now there appears to be no need to do +this, and by using environment variables to specify secrets, this keeps +open the possibility of showing the inventory file contents to the user +as a later enhancement. + +Logic for setup for inventory updates using both plugins and scripts lives +in the inventory injector class, specific to the source type. + +Any credentials which are not source-specific will use the generic +injection logic which is also used in playbook runs. diff --git a/docs/licenses/cachetools.txt b/docs/licenses/cachetools.txt new file mode 100644 index 0000000000..7da84f4c63 --- /dev/null +++ b/docs/licenses/cachetools.txt @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014-2019 Thomas Kemmer + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/docs/licenses/futures.txt b/docs/licenses/futures.txt new file mode 100644 index 0000000000..a8d65b16b6 --- /dev/null +++ b/docs/licenses/futures.txt @@ -0,0 +1,48 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python +alone or in any derivative version, provided, however, that PSF's +License Agreement and PSF's notice of copyright, i.e., "Copyright (c) +2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights +Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. 
BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. diff --git a/docs/licenses/os-client-config.txt b/docs/licenses/google-auth.txt similarity index 89% rename from docs/licenses/os-client-config.txt rename to docs/licenses/google-auth.txt index 67db858821..261eeb9e9f 100644 --- a/docs/licenses/os-client-config.txt +++ b/docs/licenses/google-auth.txt @@ -1,4 +1,3 @@ - Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ @@ -173,3 +172,30 @@ defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) 
The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/docs/licenses/rsa.txt b/docs/licenses/rsa.txt new file mode 100644 index 0000000000..67589cbb86 --- /dev/null +++ b/docs/licenses/rsa.txt @@ -0,0 +1,13 @@ +Copyright 2011 Sybren A. Stüvel + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/docs/licenses/shade.txt b/docs/licenses/shade.txt deleted file mode 100644 index 67db858821..0000000000 --- a/docs/licenses/shade.txt +++ /dev/null @@ -1,175 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
diff --git a/requirements/requirements_ansible.in b/requirements/requirements_ansible.in index 78cd44124d..f5a212c9ba 100644 --- a/requirements/requirements_ansible.in +++ b/requirements/requirements_ansible.in @@ -30,7 +30,8 @@ azure-graphrbac==0.40.0 # AWS boto==2.47.0 # last which does not break ec2 scripts boto3==1.6.2 -google-auth==1.6.2 # needed for gce inventory imports +google-auth==1.6.2 # needed for gce inventory imports +jinja2==2.10 # required for native jinja2 types for inventory compat mode # netconf for network modules ncclient==0.6.3 # netaddr filter diff --git a/requirements/requirements_ansible.txt b/requirements/requirements_ansible.txt index b575a21a09..f784719888 100644 --- a/requirements/requirements_ansible.txt +++ b/requirements/requirements_ansible.txt @@ -58,11 +58,12 @@ idna==2.6 # via cryptography, requests ipaddress==1.0.19 # via cryptography, openstacksdk iso8601==0.1.12 # via keystoneauth1, openstacksdk isodate==0.6.0 # via msrest +jinja2==2.10 jmespath==0.9.3 # via azure-cli-core, boto3, botocore, knack, openstacksdk jsonpatch==1.21 # via openstacksdk jsonpointer==2.0 # via jsonpatch keyring==15.1.0 # via msrestazure -keystoneauth1==3.4.0 # via openstacksdk, os-client-config +keystoneauth1==3.11.2 # via openstacksdk, os-client-config knack==0.3.3 # via azure-cli-core lxml==4.1.1 # via ncclient, pyvmomi monotonic==1.4 # via humanfriendly