Mirror of https://github.com/ansible/awx.git (synced 2026-01-09 23:12:08 -03:30)
Merge branch 'release_3.2.0' into devel
* release_3.2.0: (66 commits)
  - fix workflow maker lookup issues
  - adding extra logic check for ansible_facts in smart search
  - adding "admin_role" as a default query param for insights cred lookup
  - changing insights cred lookup to not use hard coded cred type
  - fix rounding of capacity percentage
  - Catch potential unicode errors when looking up addrinfo
  - fixing typo with adding query params for instance groups modal
  - move percentage capacity to variable
  - Add unit test for inventory_sources_already_updated
  - Check for inventory sources already updated from start args
  - Fixed inventory completed jobs pagination bug by setting default page size
  - Remove the logic blocking dependent inventory updates on callbacks
  - fix instance group percentage
  - Remove host-filter-modal import
  - Fix partial hover highlight of host filter modal row
  - Removed leading slash on basePath
  - Fixed host nested groups pagination
  - Added trailing slash to basePath
  - Fixed nested groups pagination
  - Fixed host_filter searching related fields
  - ...
commit 6fa283fc98
@@ -89,7 +89,7 @@ class Metadata(metadata.SimpleMetadata):
         # Special handling of inventory source_region choices that vary based on
         # selected inventory source.
         if field.field_name == 'source_regions':
-            for cp in ('azure', 'ec2', 'gce'):
+            for cp in ('azure_rm', 'ec2', 'gce'):
                 get_regions = getattr(InventorySource, 'get_%s_region_choices' % cp)
                 field_info['%s_region_choices' % cp] = get_regions()
@@ -3087,6 +3087,8 @@ class JobTemplateCallback(GenericAPIView):
                     matches.update(host_mappings[host_name])
                 except socket.gaierror:
                     pass
+                except UnicodeError:
+                    pass
         return matches

    def get(self, request, *args, **kwargs):
@@ -2,13 +2,16 @@
 # All Rights Reserved.

 # Django
+from django.core.signals import setting_changed
+from django.dispatch import receiver
 from django.utils.translation import ugettext_lazy as _

 # Django REST Framework
 from rest_framework.exceptions import APIException

 # Tower
-from awx.main.utils.common import get_licenser, memoize
+from awx.main.utils.common import get_licenser
+from awx.main.utils import memoize, memoize_delete

 __all__ = ['LicenseForbids', 'get_license', 'get_licensed_features',
            'feature_enabled', 'feature_exists']
@@ -23,6 +26,13 @@ def _get_validated_license_data():
     return get_licenser().validate()


+@receiver(setting_changed)
+def _on_setting_changed(sender, **kwargs):
+    # Clear cached result above when license changes.
+    if kwargs.get('setting', None) == 'LICENSE':
+        memoize_delete('feature_enabled')
+
+
 def get_license(show_key=False):
     """Return a dictionary representing the active license on this Tower instance."""
     license_data = _get_validated_license_data()
@@ -40,7 +50,7 @@ def get_licensed_features():
     return features


-@memoize(cache_name='ephemeral')
+@memoize(track_function=True)
 def feature_enabled(name):
     """Return True if the requested feature is enabled, False otherwise."""
     validated_license_data = _get_validated_license_data()
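Taken together, these hunks change feature_enabled from a TTL-only cache into one that can also be invalidated on demand: with track_function=True the results are cached under the function's own name, so the setting_changed receiver can drop them the moment LICENSE is rewritten. A minimal sketch of the same pattern outside AWX (names here are illustrative, and a configured Django cache is assumed):

    # Sketch: per-function result cache plus signal-driven invalidation.
    from django.core.cache import cache
    from django.core.signals import setting_changed
    from django.dispatch import receiver

    def feature_check(name):                        # stands in for feature_enabled
        results = cache.get('feature_check') or {}
        if name not in results:
            results[name] = name in ('ldap', 'ha')  # stand-in for the license lookup
            cache.set('feature_check', results, 60)
        return results[name]

    @receiver(setting_changed)
    def _invalidate(sender, **kwargs):
        # All cached answers become stale at once when the license changes.
        if kwargs.get('setting') == 'LICENSE':
            cache.delete('feature_check')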
@@ -54,6 +54,13 @@ class Command(BaseCommand):
             default=False,
             help=_('Skip commenting out settings in files.'),
         )
+        parser.add_argument(
+            '--comment-only',
+            action='store_true',
+            dest='comment_only',
+            default=False,
+            help=_('Skip migrating and only comment out settings in files.'),
+        )
         parser.add_argument(
             '--backup-suffix',
             dest='backup_suffix',
@@ -67,6 +74,7 @@ class Command(BaseCommand):
         self.dry_run = bool(options.get('dry_run', False))
         self.skip_errors = bool(options.get('skip_errors', False))
         self.no_comment = bool(options.get('no_comment', False))
+        self.comment_only = bool(options.get('comment_only', False))
         self.backup_suffix = options.get('backup_suffix', '')
         self.categories = options.get('category', None) or ['all']
         self.style.HEADING = self.style.MIGRATE_HEADING
@@ -103,7 +111,7 @@ class Command(BaseCommand):
     def _get_settings_file_patterns(self):
         if MODE == 'development':
             return [
                 '/etc/tower/settings.py',
                 '/etc/tower/conf.d/*.py',
                 os.path.join(os.path.dirname(__file__), '..', '..', '..', 'settings', 'local_*.py')
             ]
@@ -360,14 +368,15 @@ class Command(BaseCommand):
         if filename:
             self._display_diff_summary(filename, lines_added, lines_removed)

-    def _migrate_settings(self, registered_settings):
-        patterns = self._get_settings_file_patterns()
-
-        # Determine which settings need to be commented/migrated.
+    def _discover_settings(self, registered_settings):
+        if self.verbosity >= 1:
+            self.stdout.write(self.style.HEADING('Discovering settings to be migrated and commented:'))
+
+        # Determine which settings need to be commented/migrated.
         to_migrate = collections.OrderedDict()
         to_comment = collections.OrderedDict()
+        patterns = self._get_settings_file_patterns()

         for name in registered_settings:
             comment_error, migrate_error = None, None
             files_to_comment = []
@@ -398,8 +407,9 @@ class Command(BaseCommand):
             self._display_tbd(name, files_to_comment, migrate_value, comment_error, migrate_error)
         if self.verbosity == 1 and not to_migrate and not to_comment:
             self.stdout.write('  No settings found to migrate or comment!')
+        return (to_migrate, to_comment)

-        # Now migrate those settings to the database.
+    def _migrate(self, to_migrate):
         if self.verbosity >= 1:
             if self.dry_run:
                 self.stdout.write(self.style.HEADING('Migrating settings to database (dry-run):'))
@@ -407,6 +417,8 @@ class Command(BaseCommand):
             else:
                 self.stdout.write(self.style.HEADING('Migrating settings to database:'))
+        if not to_migrate:
+            self.stdout.write('  No settings to migrate!')

         # Now migrate those settings to the database.
         for name, db_value in to_migrate.items():
             display_value = json.dumps(db_value, indent=4)
             setting = Setting.objects.filter(key=name, user__isnull=True).order_by('pk').first()
@@ -422,7 +434,7 @@ class Command(BaseCommand):
                 setting.save(update_fields=['value'])
             self._display_migrate(name, action, display_value)

-        # Now comment settings in settings files.
+    def _comment(self, to_comment):
         if self.verbosity >= 1:
             if bool(self.dry_run or self.no_comment):
                 self.stdout.write(self.style.HEADING('Commenting settings in files (dry-run):'))
@@ -430,6 +442,8 @@ class Command(BaseCommand):
             else:
                 self.stdout.write(self.style.HEADING('Commenting settings in files:'))
+        if not to_comment:
+            self.stdout.write('  No settings to comment!')

         # Now comment settings in settings files.
         if to_comment:
             to_comment_patterns = []
             license_file_to_comment = None
@@ -457,3 +471,10 @@ class Command(BaseCommand):
         if custom_logo_file_to_comment:
             diffs.extend(self._comment_custom_logo_file(dry_run=False))
         self._display_comment(diffs)
+
+    def _migrate_settings(self, registered_settings):
+        to_migrate, to_comment = self._discover_settings(registered_settings)
+
+        if not bool(self.comment_only):
+            self._migrate(to_migrate)
+        self._comment(to_comment)
@@ -5,7 +5,7 @@ msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
 "POT-Creation-Date: 2017-08-27 19:27+0000\n"
-"PO-Revision-Date: 2017-08-29 04:04+0000\n"
+"PO-Revision-Date: 2017-09-15 11:22+0000\n"
 "Last-Translator: asasaki <asasaki@redhat.com>\n"
 "Language-Team: Japanese\n"
 "MIME-Version: 1.0\n"
@@ -1751,7 +1751,7 @@ msgstr ""

 #: awx/main/conf.py:419
 msgid "Log System Tracking Facts Individually"
-msgstr "ログシステムトラッキングの個別ファクト"
+msgstr "ログシステムによるファクトの個別トラッキング"

 #: awx/main/conf.py:420
 msgid ""
@@ -1876,11 +1876,11 @@ msgstr "%s に必須です"

 #: awx/main/fields.py:571
 msgid "must be set when SSH key is encrypted."
-msgstr "SSH 鍵が暗号化されている場合に設定する必要があります。"
+msgstr "SSH キーが暗号化されている場合に設定する必要があります。"

 #: awx/main/fields.py:577
 msgid "should not be set when SSH key is not encrypted."
-msgstr "SSH 鍵が暗号化されていない場合は設定できません。"
+msgstr "SSH キーが暗号化されていない場合は設定できません。"

 #: awx/main/fields.py:635
 msgid "'dependencies' is not supported for custom credentials."
@@ -2116,7 +2116,7 @@ msgstr "パスワードの代わりに使用される RSA または DSA 秘密

 #: awx/main/models/credential.py:131
 msgid "SSH key unlock"
-msgstr "SSH 鍵のロック解除"
+msgstr "SSH キーのロック解除"

 #: awx/main/models/credential.py:132
 msgid ""
@@ -2147,11 +2147,11 @@ msgstr "Vault パスワード (またはユーザーにプロンプトを出す

 #: awx/main/models/credential.py:162
 msgid "Whether to use the authorize mechanism."
-msgstr "承認メカニズムを使用するかどうか。"
+msgstr "認証メカニズムを使用するかどうか。"

 #: awx/main/models/credential.py:168
 msgid "Password used by the authorize mechanism."
-msgstr "承認メカニズムで使用されるパスワード。"
+msgstr "認証メカニズムで使用されるパスワード。"

 #: awx/main/models/credential.py:174
 msgid "Client Id or Application Id for the credential"
@@ -5,7 +5,7 @@ import re

 from django.utils.translation import ugettext_lazy as _

-CLOUD_PROVIDERS = ('azure', 'azure_rm', 'ec2', 'gce', 'rax', 'vmware', 'openstack', 'satellite6', 'cloudforms')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'satellite6', 'cloudforms')
 SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom', 'scm',)
 PRIVILEGE_ESCALATION_METHODS = [ ('sudo', _('Sudo')), ('su', _('Su')), ('pbrun', _('Pbrun')), ('pfexec', _('Pfexec')), ('dzdo', _('DZDO')), ('pmrun', _('Pmrun')), ('runas', _('Runas'))]
 ANSI_SGR_PATTERN = re.compile(r'\x1b\[[0-9;]*m')
@@ -185,10 +185,20 @@ class AnsibleInventoryLoader(object):
             data.setdefault('_meta', {})
             data['_meta'].setdefault('hostvars', {})
             logger.warning('Re-calling script for hostvars individually.')
-            for group_name, group_dict in data.iteritems():
+            for group_name, group_data in data.iteritems():
                 if group_name == '_meta':
                     continue
-                for hostname in group_dict.get('hosts', []):
+
+                if isinstance(group_data, dict):
+                    group_host_list = group_data.get('hosts', [])
+                elif isinstance(group_data, list):
+                    group_host_list = group_data
+                else:
+                    logger.warning('Group data for "%s" is not a dict or list',
+                                   group_name)
+                    group_host_list = []
+
+                for hostname in group_host_list:
                     logger.debug('Obtaining hostvars for %s' % hostname.encode('utf-8'))
                     hostdata = self.command_to_json(
                         base_args + ['--host', hostname.encode("utf-8")]
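The new isinstance branching exists because an inventory script's --list output may describe a group either as a dict carrying a 'hosts' key or as a bare list of host names; the old code assumed the dict form. Both shapes, side by side (hypothetical hosts):

    # Two shapes of `--list` output the loader now tolerates.
    import json

    dict_style = json.loads('{"web": {"hosts": ["web1", "web2"], "vars": {}}}')
    list_style = json.loads('{"web": ["web1", "web2"]}')

    for data in (dict_style, list_style):
        group_data = data['web']
        if isinstance(group_data, dict):
            hosts = group_data.get('hosts', [])
        elif isinstance(group_data, list):
            hosts = group_data
        else:
            hosts = []
        assert hosts == ['web1', 'web2']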
@@ -196,7 +206,7 @@ class AnsibleInventoryLoader(object):
                     if isinstance(hostdata, dict):
                         data['_meta']['hostvars'][hostname] = hostdata
                     else:
-                        self.logger.warning(
+                        logger.warning(
                             'Expected dict of vars for host "%s" when '
                             'calling with `--host`, got %s instead',
                             k, str(type(data))
@@ -218,7 +228,6 @@ def load_inventory_source(source, group_filter_re=None,
     '''
     # Sanity check: We sanitize these module names for our API but Ansible proper doesn't follow
     # good naming conventions
-    source = source.replace('azure.py', 'windows_azure.py')
     source = source.replace('satellite6.py', 'foreman.py')
     source = source.replace('vmware.py', 'vmware_inventory.py')
     if not os.path.exists(source):
@@ -504,6 +513,12 @@ class Command(NoArgsCommand):
             group_names = all_group_names[offset:(offset + self._batch_size)]
             for group_pk in groups_qs.filter(name__in=group_names).values_list('pk', flat=True):
                 del_group_pks.discard(group_pk)
+        if self.inventory_source.deprecated_group_id in del_group_pks:  # TODO: remove in 3.3
+            logger.warning(
+                'Group "%s" from v1 API is not deleted by overwrite',
+                self.inventory_source.deprecated_group.name
+            )
+            del_group_pks.discard(self.inventory_source.deprecated_group_id)
         # Now delete all remaining groups in batches.
         all_del_pks = sorted(list(del_group_pks))
         for offset in xrange(0, len(all_del_pks), self._batch_size):
@@ -532,6 +547,12 @@ class Command(NoArgsCommand):
         group_host_count = 0
         db_groups = self.inventory_source.groups
         for db_group in db_groups.all():
+            if self.inventory_source.deprecated_group_id == db_group.id:  # TODO: remove in 3.3
+                logger.info(
+                    'Group "%s" from v1 API child group/host connections preserved',
+                    db_group.name
+                )
+                continue
             # Delete child group relationships not present in imported data.
             db_children = db_group.children
             db_children_name_pk_map = dict(db_children.values_list('name', 'pk'))
@@ -145,12 +145,12 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='inventorysource',
             name='source',
-            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
         ),
         migrations.AlterField(
             model_name='inventoryupdate',
             name='source',
-            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure', 'Microsoft Azure Classic (deprecated)'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
+            field=models.CharField(default=b'', max_length=32, blank=True, choices=[(b'', 'Manual'), (b'file', 'File, Directory or Script'), (b'scm', 'Sourced from a Project'), (b'ec2', 'Amazon EC2'), (b'gce', 'Google Compute Engine'), (b'azure_rm', 'Microsoft Azure Resource Manager'), (b'vmware', 'VMware vCenter'), (b'satellite6', 'Red Hat Satellite 6'), (b'cloudforms', 'Red Hat CloudForms'), (b'openstack', 'OpenStack'), (b'custom', 'Custom Script')]),
         ),
         migrations.AlterField(
             model_name='inventorysource',
@@ -11,6 +11,7 @@ from awx.main.migrations import _migration_utils as migration_utils
 from awx.main.migrations import _reencrypt as reencrypt
 from awx.main.migrations import _scan_jobs as scan_jobs
 from awx.main.migrations import _credentialtypes as credentialtypes
+from awx.main.migrations import _azure_credentials as azurecreds
 import awx.main.fields


@@ -24,6 +25,8 @@ class Migration(migrations.Migration):
         # Inventory Refresh
         migrations.RunPython(migration_utils.set_current_apps_for_migrations),
         migrations.RunPython(invsrc.remove_rax_inventory_sources),
+        migrations.RunPython(azurecreds.remove_azure_credentials),
+        migrations.RunPython(invsrc.remove_azure_inventory_sources),
         migrations.RunPython(invsrc.remove_inventory_source_with_no_inventory_link),
         migrations.RunPython(invsrc.rename_inventory_sources),
         migrations.RunPython(reencrypt.replace_aesecb_fernet),
awx/main/migrations/_azure_credentials.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+import logging
+
+from django.db.models import Q
+
+logger = logging.getLogger('awx.main.migrations')
+
+
+def remove_azure_credentials(apps, schema_editor):
+    '''Azure is not supported as of 3.2 and greater. Instead, azure_rm is
+    supported.
+    '''
+    Credential = apps.get_model('main', 'Credential')
+    logger.debug("Removing all Azure Credentials from database.")
+    Credential.objects.filter(kind='azure').delete()
+
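One detail worth noting in this new helper: it resolves Credential through apps.get_model() rather than importing awx.main.models directly, because a data migration must operate on the historical model state at this point in the migration graph, not the live model. The general shape of such a migration, with a placeholder dependency name:

    from django.db import migrations

    def remove_azure_credentials(apps, schema_editor):
        # Historical model state, not the live import:
        Credential = apps.get_model('main', 'Credential')
        Credential.objects.filter(kind='azure').delete()

    class Migration(migrations.Migration):
        dependencies = [('main', '00xx_previous')]   # placeholder name
        operations = [migrations.RunPython(remove_azure_credentials)]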
@@ -51,3 +51,12 @@ def remove_inventory_source_with_no_inventory_link(apps, schema_editor):
     InventorySource = apps.get_model('main', 'InventorySource')
     logger.debug("Removing all InventorySource that have no link to an Inventory from database.")
     InventorySource.objects.filter(Q(inventory__organization=None) & Q(deprecated_group__inventory=None)).delete()
+
+
+def remove_azure_inventory_sources(apps, schema_editor):
+    '''Azure inventory sources are not supported since 3.2, remove them.
+    '''
+    InventorySource = apps.get_model('main', 'InventorySource')
+    logger.debug("Removing all Azure InventorySource from database.")
+    InventorySource.objects.filter(source='azure').delete()
+
@@ -52,7 +52,7 @@ PROJECT_UPDATE_JOB_TYPE_CHOICES = [
     (PERM_INVENTORY_CHECK, _('Check')),
 ]

-CLOUD_INVENTORY_SOURCES = ['ec2', 'rax', 'vmware', 'gce', 'azure', 'azure_rm', 'openstack', 'custom', 'satellite6', 'cloudforms', 'scm',]
+CLOUD_INVENTORY_SOURCES = ['ec2', 'vmware', 'gce', 'azure_rm', 'openstack', 'custom', 'satellite6', 'cloudforms', 'scm',]

 VERBOSITY_CHOICES = [
     (0, '0 (Normal)'),
@@ -57,7 +57,6 @@ class V1Credential(object):
         ('satellite6', 'Red Hat Satellite 6'),
         ('cloudforms', 'Red Hat CloudForms'),
         ('gce', 'Google Compute Engine'),
-        ('azure', 'Microsoft Azure Classic (deprecated)'),
         ('azure_rm', 'Microsoft Azure Resource Manager'),
         ('openstack', 'OpenStack'),
         ('insights', 'Insights'),
@@ -934,35 +933,6 @@ def gce(cls):
     )


-@CredentialType.default
-def azure(cls):
-    return cls(
-        kind='cloud',
-        name='Microsoft Azure Classic (deprecated)',
-        managed_by_tower=True,
-        inputs={
-            'fields': [{
-                'id': 'username',
-                'label': 'Subscription ID',
-                'type': 'string',
-                'help_text': ('Subscription ID is an Azure construct, which is '
-                              'mapped to a username.')
-            }, {
-                'id': 'ssh_key_data',
-                'label': 'Management Certificate',
-                'type': 'string',
-                'format': 'ssh_private_key',
-                'secret': True,
-                'multiline': True,
-                'help_text': ('Paste the contents of the PEM file that corresponds '
-                              'to the certificate you uploaded in the Microsoft '
-                              'Azure console.')
-            }],
-            'required': ['username', 'ssh_key_data'],
-        }
-    )
-
-
 @CredentialType.default
 def azure_rm(cls):
     return cls(
@@ -867,7 +867,6 @@ class InventorySourceOptions(BaseModel):
         ('scm', _('Sourced from a Project')),
         ('ec2', _('Amazon EC2')),
         ('gce', _('Google Compute Engine')),
-        ('azure', _('Microsoft Azure Classic (deprecated)')),
         ('azure_rm', _('Microsoft Azure Resource Manager')),
         ('vmware', _('VMware vCenter')),
         ('satellite6', _('Red Hat Satellite 6')),
@@ -1087,7 +1086,7 @@ class InventorySourceOptions(BaseModel):
         return regions

     @classmethod
-    def get_azure_region_choices(self):
+    def get_azure_rm_region_choices(self):
         """Return a complete list of regions in Microsoft Azure, as a list of
         two-tuples.
         """
@@ -1095,14 +1094,10 @@ class InventorySourceOptions(BaseModel):
         # authenticating first (someone reading these might think there's
         # a pattern here!). Therefore, you guessed it, use a list from
         # settings.
-        regions = list(getattr(settings, 'AZURE_REGION_CHOICES', []))
+        regions = list(getattr(settings, 'AZURE_RM_REGION_CHOICES', []))
         regions.insert(0, ('all', 'All'))
         return regions

-    @classmethod
-    def get_azure_rm_region_choices(self):
-        return InventorySourceOptions.get_azure_region_choices()
-
     @classmethod
     def get_vmware_region_choices(self):
         """Return a complete list of regions in VMware, as a list of two-tuples
@@ -236,7 +236,9 @@ class AuthToken(BaseModel):
         valid_n_tokens_qs = self.user.auth_tokens.filter(
             expires__gt=now,
             reason='',
-        ).order_by('-created')[0:settings.AUTH_TOKEN_PER_USER]
+        ).order_by('-created')
+        if settings.AUTH_TOKEN_PER_USER != -1:
+            valid_n_tokens_qs = valid_n_tokens_qs[0:settings.AUTH_TOKEN_PER_USER]
         valid_n_tokens = valid_n_tokens_qs.values_list('key', flat=True)

         return bool(self.key in valid_n_tokens)
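The rewrite gives AUTH_TOKEN_PER_USER = -1 a defined meaning: the queryset is only sliced when a real cap is configured, so -1 now reads as "unlimited concurrent tokens". The conditional-slice idiom in isolation (a toy illustration):

    # A cap of -1 disables the limit; anything else keeps the newest N.
    def newest(keys, cap):
        keys = sorted(keys, reverse=True)
        if cap != -1:
            keys = keys[0:cap]
        return keys

    assert newest([1, 2, 3], 2) == [3, 2]
    assert newest([1, 2, 3], -1) == [3, 2, 1]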
@@ -5,6 +5,7 @@
 from datetime import datetime, timedelta
 import logging
 import uuid
+import json
 from sets import Set

 # Django
@@ -37,6 +38,7 @@ from awx.main.signals import disable_activity_stream

 from awx.main.scheduler.dependency_graph import DependencyGraph
 from awx.main import tasks as awx_tasks
+from awx.main.utils import decrypt_field

 # Celery
 from celery.task.control import inspect
@@ -97,7 +99,7 @@ class TaskManager():
                                          ~Q(polymorphic_ctype_id=workflow_ctype_id))
         for j in jobs:
             if j.execution_node:
-                execution_nodes.setdefault(j.execution_node, [j]).append(j)
+                execution_nodes.setdefault(j.execution_node, []).append(j)
             else:
                 waiting_jobs.append(j)
         return (execution_nodes, waiting_jobs)
@@ -142,10 +144,10 @@ class TaskManager():
                 active_tasks = set()
                 map(lambda at: active_tasks.add(at['id']), active_task_queues[queue])

-            # celery worker name is of the form celery@myhost.com
-            queue_name = queue.split('@')
-            queue_name = queue_name[1 if len(queue_name) > 1 else 0]
-            queues[queue_name] = active_tasks
+                # celery worker name is of the form celery@myhost.com
+                queue_name = queue.split('@')
+                queue_name = queue_name[1 if len(queue_name) > 1 else 0]
+                queues[queue_name] = active_tasks
         else:
             if not hasattr(settings, 'CELERY_UNIT_TEST'):
                 return (None, None)
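The last hunk is an indentation fix with real behavior behind it: the queue-name parsing now runs inside the loop over active queues, so every worker is recorded in `queues` instead of only the last one (the new sanity-check test further down exercises exactly this). The parsing itself is simple:

    # celery worker names look like 'celery@myhost.com'; fall back to the
    # whole string when no '@' is present.
    for queue in ('celery@host1', 'host2'):
        parts = queue.split('@')
        queue_name = parts[1 if len(parts) > 1 else 0]
        print(queue_name)    # -> host1, then host2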
@@ -390,17 +392,22 @@ class TaskManager():
             dependencies.append(latest_project_update)

         # Inventory created 2 seconds behind job
-        if task.launch_type != 'callback':
-            for inventory_source in [invsrc for invsrc in self.all_inventory_sources if invsrc.inventory == task.inventory]:
-                if not inventory_source.update_on_launch:
-                    continue
-                latest_inventory_update = self.get_latest_inventory_update(inventory_source)
-                if self.should_update_inventory_source(task, latest_inventory_update):
-                    inventory_task = self.create_inventory_update(task, inventory_source)
-                    dependencies.append(inventory_task)
-                else:
-                    if latest_inventory_update.status in ['waiting', 'pending', 'running']:
-                        dependencies.append(latest_inventory_update)
+        try:
+            start_args = json.loads(decrypt_field(task, field_name="start_args"))
+        except ValueError:
+            start_args = dict()
+        for inventory_source in [invsrc for invsrc in self.all_inventory_sources if invsrc.inventory == task.inventory]:
+            if "inventory_sources_already_updated" in start_args and inventory_source.id in start_args['inventory_sources_already_updated']:
+                continue
+            if not inventory_source.update_on_launch:
+                continue
+            latest_inventory_update = self.get_latest_inventory_update(inventory_source)
+            if self.should_update_inventory_source(task, latest_inventory_update):
+                inventory_task = self.create_inventory_update(task, inventory_source)
+                dependencies.append(inventory_task)
+            else:
+                if latest_inventory_update.status in ['waiting', 'pending', 'running']:
+                    dependencies.append(latest_inventory_update)

         if len(dependencies) > 0:
             self.capture_chain_failure_dependencies(task, dependencies)
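This removes the blanket "callback launches never trigger inventory updates" rule: every launch now consults update_on_launch, and a callback launch instead advertises which sources it already refreshed via an inventory_sources_already_updated list carried (encrypted) in the job's start_args. The payload is plain JSON underneath, e.g.:

    import json

    # What a callback-launched job carries in start_args before encryption:
    start_args = json.dumps({'inventory_sources_already_updated': [42, 43]})

    parsed = json.loads(start_args)
    already = parsed.get('inventory_sources_already_updated', [])
    assert 42 in already    # source 42 is skipped, not updated again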
@@ -527,7 +534,9 @@ class TaskManager():
           - instance is unknown to tower, system is improperly configured
           - instance is reported as down, then fail all jobs on the node
           - instance is an isolated node, then check running tasks
-            among all allowed controller nodes for management process
+              among all allowed controller nodes for management process
+          - valid healthy instance not included in celery task list
+              probably a netsplit case, leave it alone
        '''
        instance = Instance.objects.filter(hostname=node).first()

@@ -33,7 +33,7 @@ from celery.signals import celeryd_init, worker_process_init, worker_shutdown

 # Django
 from django.conf import settings
-from django.db import transaction, DatabaseError, IntegrityError, OperationalError
+from django.db import transaction, DatabaseError, IntegrityError
 from django.utils.timezone import now, timedelta
 from django.utils.encoding import smart_str
 from django.core.mail import send_mail
@@ -455,12 +455,12 @@ def delete_inventory(self, inventory_id, user_id):
             {'group_name': 'inventories', 'inventory_id': inventory_id, 'status': 'deleted'}
         )
         logger.debug('Deleted inventory %s as user %s.' % (inventory_id, user_id))
-    except OperationalError:
-        logger.warning('Database error deleting inventory {}, but will retry.'.format(inventory_id))
-        self.retry(countdown=10)
     except Inventory.DoesNotExist:
         logger.error("Delete Inventory failed due to missing inventory: " + str(inventory_id))
         return
+    except DatabaseError:
+        logger.warning('Database error deleting inventory {}, but will retry.'.format(inventory_id))
+        self.retry(countdown=10)


 def with_path_cleanup(f):
@@ -769,7 +769,10 @@ class BaseTask(LogErrorsTask):
         '''
         Run the job/task and capture its output.
         '''
-        instance = self.update_model(pk, status='running')
+        execution_node = settings.CLUSTER_HOST_ID
+        if isolated_host is not None:
+            execution_node = isolated_host
+        instance = self.update_model(pk, status='running', execution_node=execution_node)

         instance.websocket_emit_status("running")
         status, rc, tb = 'error', None, ''
@@ -856,12 +859,7 @@ class BaseTask(LogErrorsTask):
                 pexpect_timeout=getattr(settings, 'PEXPECT_TIMEOUT', 5),
                 proot_cmd=getattr(settings, 'AWX_PROOT_CMD', 'bwrap'),
             )
-            execution_node = settings.CLUSTER_HOST_ID
-            if isolated_host is not None:
-                execution_node = isolated_host
-            instance = self.update_model(instance.pk, status='running',
-                                         execution_node=execution_node,
-                                         output_replacements=output_replacements)
+            instance = self.update_model(instance.pk, output_replacements=output_replacements)
             if isolated_host:
                 manager_instance = isolated_manager.IsolatedManager(
                     args, cwd, env, stdout_handle, ssh_key_path, **_kw
@@ -1057,9 +1055,6 @@ class RunJob(BaseTask):
                 env['GCE_EMAIL'] = cloud_cred.username
                 env['GCE_PROJECT'] = cloud_cred.project
                 env['GCE_PEM_FILE_PATH'] = cred_files.get(cloud_cred, '')
-            elif cloud_cred and cloud_cred.kind == 'azure':
-                env['AZURE_SUBSCRIPTION_ID'] = cloud_cred.username
-                env['AZURE_CERT_PATH'] = cred_files.get(cloud_cred, '')
             elif cloud_cred and cloud_cred.kind == 'azure_rm':
                 if len(cloud_cred.client) and len(cloud_cred.tenant):
                     env['AZURE_CLIENT_ID'] = cloud_cred.client
@@ -1630,8 +1625,8 @@ class RunInventoryUpdate(BaseTask):
         If no private data is needed, return None.
         """
         private_data = {'credentials': {}}
-        # If this is Microsoft Azure or GCE, return the RSA key
-        if inventory_update.source in ('azure', 'gce'):
+        # If this is GCE, return the RSA key
+        if inventory_update.source == 'gce':
             credential = inventory_update.credential
             private_data['credentials'][credential] = decrypt_field(credential, 'ssh_key_data')
             return private_data
@@ -1719,7 +1714,7 @@ class RunInventoryUpdate(BaseTask):
             section = 'vmware'
             cp.add_section(section)
             cp.set('vmware', 'cache_max_age', 0)
-
+            cp.set('vmware', 'validate_certs', str(settings.VMWARE_VALIDATE_CERTS))
             cp.set('vmware', 'username', credential.username)
             cp.set('vmware', 'password', decrypt_field(credential, 'password'))
             cp.set('vmware', 'server', credential.host)
@@ -1793,7 +1788,7 @@ class RunInventoryUpdate(BaseTask):
             cp.set(section, 'group_by_resource_group', 'yes')
             cp.set(section, 'group_by_location', 'yes')
             cp.set(section, 'group_by_tag', 'yes')
-            if inventory_update.source_regions:
+            if inventory_update.source_regions and 'all' not in inventory_update.source_regions:
                 cp.set(
                     section, 'locations',
                     ','.join([x.strip() for x in inventory_update.source_regions.split(',')])
@@ -1861,9 +1856,6 @@ class RunInventoryUpdate(BaseTask):
             env['EC2_INI_PATH'] = cloud_credential
         elif inventory_update.source == 'vmware':
             env['VMWARE_INI_PATH'] = cloud_credential
-        elif inventory_update.source == 'azure':
-            env['AZURE_SUBSCRIPTION_ID'] = passwords.get('source_username', '')
-            env['AZURE_CERT_PATH'] = cloud_credential
         elif inventory_update.source == 'azure_rm':
             if len(passwords.get('source_client', '')) and \
                len(passwords.get('source_tenant', '')):
@@ -1071,43 +1071,6 @@ def test_gce_create_ok(post, organization, admin, version, params):
     assert decrypt_field(cred, 'ssh_key_data') == EXAMPLE_PRIVATE_KEY


-#
-# Azure Classic
-#
-@pytest.mark.django_db
-@pytest.mark.parametrize('version, params', [
-    ['v1', {
-        'kind': 'azure',
-        'name': 'Best credential ever',
-        'username': 'some_username',
-        'ssh_key_data': EXAMPLE_PRIVATE_KEY
-    }],
-    ['v2', {
-        'credential_type': 1,
-        'name': 'Best credential ever',
-        'inputs': {
-            'username': 'some_username',
-            'ssh_key_data': EXAMPLE_PRIVATE_KEY
-        }
-    }]
-])
-def test_azure_create_ok(post, organization, admin, version, params):
-    azure = CredentialType.defaults['azure']()
-    azure.save()
-    params['organization'] = organization.id
-    response = post(
-        reverse('api:credential_list', kwargs={'version': version}),
-        params,
-        admin
-    )
-    assert response.status_code == 201
-
-    assert Credential.objects.count() == 1
-    cred = Credential.objects.all()[:1].get()
-    assert cred.inputs['username'] == 'some_username'
-    assert decrypt_field(cred, 'ssh_key_data') == EXAMPLE_PRIVATE_KEY
-
-
 #
 # Azure Resource Manager
 #
@@ -1,11 +1,13 @@
 import pytest
 import mock
+import json
 from datetime import timedelta, datetime

 from django.core.cache import cache
 from django.utils.timezone import now as tz_now

 from awx.main.scheduler import TaskManager
+from awx.main.utils import encrypt_field
 from awx.main.models import (
     Job,
     Instance,
@@ -154,7 +156,36 @@ def test_single_job_dependencies_inventory_update_launch(default_instance_group,
     with mock.patch("awx.main.scheduler.TaskManager.start_task"):
         TaskManager().schedule()
         TaskManager.start_task.assert_called_once_with(j, default_instance_group, [])


+@pytest.mark.django_db
+def test_job_dependency_with_already_updated(default_instance_group, job_template_factory, mocker, inventory_source_factory):
+    objects = job_template_factory('jt', organization='org1', project='proj',
+                                   inventory='inv', credential='cred',
+                                   jobs=["job_should_start"])
+    j = objects.jobs["job_should_start"]
+    j.status = 'pending'
+    j.save()
+    i = objects.inventory
+    ii = inventory_source_factory("ec2")
+    ii.source = "ec2"
+    ii.update_on_launch = True
+    ii.update_cache_timeout = 0
+    ii.save()
+    i.inventory_sources.add(ii)
+    j.start_args = json.dumps(dict(inventory_sources_already_updated=[ii.id]))
+    j.save()
+    j.start_args = encrypt_field(j, field_name="start_args")
+    j.save()
+    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
+        tm = TaskManager()
+        with mock.patch.object(TaskManager, "create_inventory_update", wraps=tm.create_inventory_update) as mock_iu:
+            tm.schedule()
+        mock_iu.assert_not_called()
+    with mock.patch("awx.main.scheduler.TaskManager.start_task"):
+        TaskManager().schedule()
+        TaskManager.start_task.assert_called_once_with(j, default_instance_group, [])
+
+
 @pytest.mark.django_db
 def test_shared_dependencies_launch(default_instance_group, job_template_factory, mocker, inventory_source_factory):
@@ -19,7 +19,6 @@ EXAMPLE_ENCRYPTED_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nProc-Type: 4,ENCRY
 def test_default_cred_types():
     assert sorted(CredentialType.defaults.keys()) == [
         'aws',
-        'azure',
         'azure_rm',
         'cloudforms',
         'gce',
@@ -269,22 +269,6 @@ def test_gce_migration():
     assert Credential.objects.count() == 1


-@pytest.mark.django_db
-def test_azure_classic_migration():
-    cred = Credential(name='My Credential')
-    with migrate(cred, 'azure'):
-        cred.__dict__.update({
-            'username': 'bob',
-            'ssh_key_data': EXAMPLE_PRIVATE_KEY
-        })
-
-    assert cred.credential_type.name == 'Microsoft Azure Classic (deprecated)'
-    assert cred.inputs['username'] == 'bob'
-    assert cred.inputs['ssh_key_data'].startswith('$encrypted$')
-    assert decrypt_field(cred, 'ssh_key_data') == EXAMPLE_PRIVATE_KEY
-    assert Credential.objects.count() == 1
-
-
 @pytest.mark.django_db
 def test_azure_rm_migration():
     cred = Credential(name='My Credential')
@@ -35,3 +35,13 @@ def test_inv_src_rename(inventory_source_factory):
     inv_src01.refresh_from_db()
     # inv-is-t1 is generated in the inventory_source_factory
     assert inv_src01.name == 't1 - inv-is-t1 - 0'
+
+
+@pytest.mark.django_db
+def test_azure_inv_src_removal(inventory_source):
+    inventory_source.source = 'azure'
+    inventory_source.save()
+
+    assert InventorySource.objects.filter(pk=inventory_source.pk).exists()
+    invsrc.remove_azure_inventory_sources(apps, None)
+    assert not InventorySource.objects.filter(pk=inventory_source.pk).exists()
@@ -50,3 +50,20 @@ class TestCleanupInconsistentCeleryTasks():
         tm.cleanup_inconsistent_celery_tasks()
         job.save.assert_called_once()
         logger_mock.error.assert_called_once_with("Task job 2 (failed) DB error in marking failed. Job possibly deleted.")
+
+    @mock.patch.object(InstanceGroup.objects, 'prefetch_related', return_value=[])
+    @mock.patch('awx.main.scheduler.task_manager.inspect')
+    def test_multiple_active_instances_sanity_check(self, inspect_mock, *args):
+        class MockInspector:
+            pass
+
+        mock_inspector = MockInspector()
+        mock_inspector.active = lambda: {
+            'celery@host1': [],
+            'celery@host2': []
+        }
+        inspect_mock.return_value = mock_inspector
+        tm = TaskManager()
+        active_task_queues, queues = tm.get_active_tasks()
+        assert 'host1' in queues
+        assert 'host2' in queues
@@ -259,7 +259,7 @@ class TestGenericRun(TestJobExecution):
         with pytest.raises(Exception):
             self.task.run(self.pk)
         for c in [
-            mock.call(self.pk, status='running'),
+            mock.call(self.pk, execution_node=settings.CLUSTER_HOST_ID, status='running'),
             mock.call(self.pk, output_replacements=[], result_traceback=mock.ANY, status='canceled')
         ]:
             assert c in self.task.update_model.call_args_list
@@ -544,29 +544,6 @@ class TestJobCredentials(TestJobExecution):
         self.run_pexpect.side_effect = run_pexpect_side_effect
         self.task.run(self.pk)

-    def test_azure_credentials(self):
-        azure = CredentialType.defaults['azure']()
-        credential = Credential(
-            pk=1,
-            credential_type=azure,
-            inputs = {
-                'username': 'bob',
-                'ssh_key_data': self.EXAMPLE_PRIVATE_KEY
-            }
-        )
-        credential.inputs['ssh_key_data'] = encrypt_field(credential, 'ssh_key_data')
-        self.instance.extra_credentials.add(credential)
-
-        def run_pexpect_side_effect(*args, **kwargs):
-            args, cwd, env, stdout = args
-            assert env['AZURE_SUBSCRIPTION_ID'] == 'bob'
-            ssh_key_data = env['AZURE_CERT_PATH']
-            assert open(ssh_key_data, 'rb').read() == self.EXAMPLE_PRIVATE_KEY
-            return ['successful', 0]
-
-        self.run_pexpect.side_effect = run_pexpect_side_effect
-        self.task.run(self.pk)
-
     def test_azure_rm_with_tenant(self):
         azure = CredentialType.defaults['azure_rm']()
         credential = Credential(
@@ -1038,29 +1015,25 @@ class TestJobCredentials(TestJobExecution):
         gce_credential.inputs['ssh_key_data'] = encrypt_field(gce_credential, 'ssh_key_data')
         self.instance.extra_credentials.add(gce_credential)

-        azure = CredentialType.defaults['azure']()
-        azure_credential = Credential(
+        azure_rm = CredentialType.defaults['azure_rm']()
+        azure_rm_credential = Credential(
             pk=2,
-            credential_type=azure,
+            credential_type=azure_rm,
             inputs = {
-                'username': 'joe',
-                'ssh_key_data': 'AZURE: %s' % self.EXAMPLE_PRIVATE_KEY
+                'subscription': 'some-subscription',
+                'username': 'bob',
+                'password': 'secret'
             }
         )
-        azure_credential.inputs['ssh_key_data'] = encrypt_field(azure_credential, 'ssh_key_data')
-        self.instance.extra_credentials.add(azure_credential)
+        azure_rm_credential.inputs['secret'] = encrypt_field(azure_rm_credential, 'secret')
+        self.instance.extra_credentials.add(azure_rm_credential)

         def run_pexpect_side_effect(*args, **kwargs):
             args, cwd, env, stdout = args

             assert env['GCE_EMAIL'] == 'bob'
             assert env['GCE_PROJECT'] == 'some-project'
             ssh_key_data = env['GCE_PEM_FILE_PATH']
             assert open(ssh_key_data, 'rb').read() == 'GCE: %s' % self.EXAMPLE_PRIVATE_KEY

-            assert env['AZURE_SUBSCRIPTION_ID'] == 'joe'
-            ssh_key_data = env['AZURE_CERT_PATH']
-            assert open(ssh_key_data, 'rb').read() == 'AZURE: %s' % self.EXAMPLE_PRIVATE_KEY
+            assert env['AZURE_SUBSCRIPTION_ID'] == 'some-subscription'
+            assert env['AZURE_AD_USER'] == 'bob'
+            assert env['AZURE_PASSWORD'] == 'secret'

             return ['successful', 0]
@@ -1278,31 +1251,6 @@ class TestInventoryUpdateCredentials(TestJobExecution):
         self.run_pexpect.side_effect = run_pexpect_side_effect
         self.task.run(self.pk)

-    def test_azure_source(self):
-        azure = CredentialType.defaults['azure']()
-        self.instance.source = 'azure'
-        self.instance.credential = Credential(
-            pk=1,
-            credential_type=azure,
-            inputs = {
-                'username': 'bob',
-                'ssh_key_data': self.EXAMPLE_PRIVATE_KEY
-            }
-        )
-        self.instance.credential.inputs['ssh_key_data'] = encrypt_field(
-            self.instance.credential, 'ssh_key_data'
-        )
-
-        def run_pexpect_side_effect(*args, **kwargs):
-            args, cwd, env, stdout = args
-            assert env['AZURE_SUBSCRIPTION_ID'] == 'bob'
-            ssh_key_data = env['AZURE_CERT_PATH']
-            assert open(ssh_key_data, 'rb').read() == self.EXAMPLE_PRIVATE_KEY
-            return ['successful', 0]
-
-        self.run_pexpect.side_effect = run_pexpect_side_effect
-        self.task.run(self.pk)
-
     def test_gce_source(self):
         gce = CredentialType.defaults['gce']()
         self.instance.source = 'gce'
@@ -6,6 +6,8 @@ import os
 import pytest
 from uuid import uuid4

+from django.core.cache import cache
+
 from awx.main.utils import common

 from awx.main.models import (
@@ -18,6 +20,14 @@ from awx.main.models import (
 )


+@pytest.fixture(autouse=True)
+def clear_cache():
+    '''
+    Clear cache (local memory) for each test to prevent using cached settings.
+    '''
+    cache.clear()
+
+
 @pytest.mark.parametrize('input_, output', [
     ({"foo": "bar"}, {"foo": "bar"}),
     ('{"foo": "bar"}', {"foo": "bar"}),
@@ -49,3 +59,59 @@ def test_set_environ():
 ])
 def test_get_type_for_model(model, name):
     assert common.get_type_for_model(model) == name
+
+
+@pytest.fixture
+def memoized_function(mocker):
+    @common.memoize(track_function=True)
+    def myfunction(key, value):
+        if key not in myfunction.calls:
+            myfunction.calls[key] = 0
+
+        myfunction.calls[key] += 1
+
+        if myfunction.calls[key] == 1:
+            return value
+        else:
+            return '%s called %s times' % (value, myfunction.calls[key])
+    myfunction.calls = dict()
+    return myfunction
+
+
+def test_memoize_track_function(memoized_function):
+    assert memoized_function('scott', 'scotterson') == 'scotterson'
+    assert cache.get('myfunction') == {u'scott-scotterson': 'scotterson'}
+    assert memoized_function('scott', 'scotterson') == 'scotterson'
+
+    assert memoized_function.calls['scott'] == 1
+
+    assert memoized_function('john', 'smith') == 'smith'
+    assert cache.get('myfunction') == {u'scott-scotterson': 'scotterson', u'john-smith': 'smith'}
+    assert memoized_function('john', 'smith') == 'smith'
+
+    assert memoized_function.calls['john'] == 1
+
+
+def test_memoize_delete(memoized_function):
+    assert memoized_function('john', 'smith') == 'smith'
+    assert memoized_function('john', 'smith') == 'smith'
+    assert memoized_function.calls['john'] == 1
+
+    assert cache.get('myfunction') == {u'john-smith': 'smith'}
+
+    common.memoize_delete('myfunction')
+
+    assert cache.get('myfunction') is None
+
+    assert memoized_function('john', 'smith') == 'smith called 2 times'
+    assert memoized_function.calls['john'] == 2
+
+
+def test_memoize_parameter_error():
+    @common.memoize(cache_key='foo', track_function=True)
+    def fn():
+        return
+
+    with pytest.raises(common.IllegalArgumentError):
+        fn()
@@ -35,7 +35,7 @@ from django.apps import apps

 logger = logging.getLogger('awx.main.utils')

-__all__ = ['get_object_or_400', 'get_object_or_403', 'camelcase_to_underscore', 'memoize',
+__all__ = ['get_object_or_400', 'get_object_or_403', 'camelcase_to_underscore', 'memoize', 'memoize_delete',
            'get_ansible_version', 'get_ssh_version', 'get_licenser', 'get_awx_version', 'update_scm_url',
            'get_type_for_model', 'get_model_for_type', 'copy_model_by_class',
            'copy_m2m_relationships' ,'cache_list_capabilities', 'to_python_boolean',
@@ -45,7 +45,7 @@ __all__ = ['get_object_or_400', 'get_object_or_403', 'camelcase_to_underscore',
            'callback_filter_out_ansible_extra_vars', 'get_search_fields', 'get_system_task_capacity',
            'wrap_args_with_proot', 'build_proot_temp_dir', 'check_proot_installed', 'model_to_dict',
            'model_instance_diff', 'timestamp_apiformat', 'parse_yaml_or_json', 'RequireDebugTrueOrTest',
-           'has_model_field_prefetched', 'set_environ']
+           'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError',]


 def get_object_or_400(klass, *args, **kwargs):
@@ -108,23 +108,48 @@ class RequireDebugTrueOrTest(logging.Filter):
         return settings.DEBUG or 'test' in sys.argv


-def memoize(ttl=60, cache_key=None, cache_name='default'):
+class IllegalArgumentError(ValueError):
+    pass
+
+
+def memoize(ttl=60, cache_key=None, track_function=False):
     '''
     Decorator to wrap a function and cache its result.
     '''
-    from django.core.cache import caches
+    from django.core.cache import cache

     def _memoizer(f, *args, **kwargs):
-        cache = caches[cache_name]
-        key = cache_key or slugify('%s %r %r' % (f.__name__, args, kwargs))
-        value = cache.get(key)
-        if value is None:
-            value = f(*args, **kwargs)
-            cache.set(key, value, ttl)
+        if cache_key and track_function:
+            raise IllegalArgumentError("Can not specify cache_key when track_function is True")
+
+        if track_function:
+            cache_dict_key = slugify('%r %r' % (args, kwargs))
+            key = slugify("%s" % f.__name__)
+            cache_dict = cache.get(key) or dict()
+            if cache_dict_key not in cache_dict:
+                value = f(*args, **kwargs)
+                cache_dict[cache_dict_key] = value
+                cache.set(key, cache_dict, ttl)
+            else:
+                value = cache_dict[cache_dict_key]
+        else:
+            key = cache_key or slugify('%s %r %r' % (f.__name__, args, kwargs))
+            value = cache.get(key)
+            if value is None:
+                value = f(*args, **kwargs)
+                cache.set(key, value, ttl)

         return value
     return decorator(_memoizer)


+def memoize_delete(function_name):
+    from django.core.cache import cache
+
+    return cache.delete(function_name)
+
+
 @memoize()
 def get_ansible_version():
     '''
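With track_function=True, all of a function's cached results live in one dict stored under the slugified function name, and the per-call arguments merely select an entry inside it; that single well-known key is what makes memoize_delete(function_name) possible, and why combining cache_key with track_function is rejected. Usage in brief (a sketch mirroring the new tests; a configured Django cache is assumed):

    from awx.main.utils import memoize, memoize_delete

    @memoize(track_function=True)
    def region_count(provider):
        return len(provider)           # stand-in for an expensive lookup

    region_count('ec2')                # computed, stored in cache['region_count']
    region_count('ec2')                # served from the cached dict
    memoize_delete('region_count')     # drops every cached result at once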
@ -1,284 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
'''
|
||||
Windows Azure external inventory script
|
||||
=======================================
|
||||
|
||||
Generates inventory that Ansible can understand by making API request to
|
||||
Windows Azure using the azure python library.
|
||||
|
||||
NOTE: This script assumes Ansible is being executed where azure is already
|
||||
installed.
|
||||
|
||||
pip install azure
|
||||
|
||||
Adapted from the ansible Linode plugin by Dan Slimmon.
|
||||
'''
|
||||
|
||||
# (c) 2013, John Whitbeck
|
||||
#
|
||||
# This file is part of Ansible,
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
######################################################################
|
||||
|
||||
# Standard imports
|
||||
import re
|
||||
import sys
|
||||
import argparse
|
||||
import os
|
||||
from urlparse import urlparse
|
||||
from time import time
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
import simplejson as json
|
||||
|
||||
try:
|
||||
from azure.servicemanagement import ServiceManagementService
|
||||
except ImportError as e:
|
||||
sys.exit("ImportError: {0}".format(str(e)))
|
||||
|
||||
# Imports for ansible
|
||||
import ConfigParser
|
||||
|
||||
class AzureInventory(object):
|
||||
def __init__(self):
|
||||
"""Main execution path."""
|
||||
# Inventory grouped by display group
|
||||
self.inventory = {}
|
||||
# Index of deployment name -> host
|
||||
self.index = {}
|
||||
self.host_metadata = {}
|
||||
|
||||
# Cache setting defaults.
|
||||
# These can be overridden in settings (see `read_settings`).
|
||||
cache_dir = os.path.expanduser('~')
|
||||
self.cache_path_cache = os.path.join(cache_dir, '.ansible-azure.cache')
|
||||
self.cache_path_index = os.path.join(cache_dir, '.ansible-azure.index')
|
||||
self.cache_max_age = 0
|
||||
|
||||
# Read settings and parse CLI arguments
|
||||
self.read_settings()
|
||||
self.read_environment()
|
||||
self.parse_cli_args()
|
||||
|
||||
# Initialize Azure ServiceManagementService
|
||||
self.sms = ServiceManagementService(self.subscription_id, self.cert_path)
|
||||
|
||||
# Cache
|
||||
if self.args.refresh_cache:
|
||||
self.do_api_calls_update_cache()
|
||||
elif not self.is_cache_valid():
|
||||
self.do_api_calls_update_cache()
|
||||
|
||||
if self.args.list_images:
|
||||
data_to_print = self.json_format_dict(self.get_images(), True)
|
||||
elif self.args.list or self.args.host:
|
||||
# Display list of nodes for inventory
|
||||
if len(self.inventory) == 0:
|
||||
data = json.loads(self.get_inventory_from_cache())
|
||||
else:
|
||||
data = self.inventory
|
||||
|
||||
if self.args.host:
|
||||
data_to_print = self.get_host(self.args.host)
|
||||
else:
|
||||
# Add the `['_meta']['hostvars']` information.
|
||||
hostvars = {}
|
||||
if len(data) > 0:
|
||||
for host in set([h for hosts in data.values() for h in hosts if h]):
|
||||
hostvars[host] = self.get_host(host, jsonify=False)
|
||||
data['_meta'] = {'hostvars': hostvars}
|
||||
|
||||
# JSONify the data.
|
||||
data_to_print = self.json_format_dict(data, pretty=True)
|
||||
print(data_to_print)
|
||||
|
||||
def get_host(self, hostname, jsonify=True):
|
||||
"""Return information about the given hostname, based on what
|
||||
the Windows Azure API provides.
|
||||
"""
|
||||
if hostname not in self.host_metadata:
|
||||
return "No host found: %s" % json.dumps(self.host_metadata)
|
||||
if jsonify:
|
||||
return json.dumps(self.host_metadata[hostname])
|
||||
return self.host_metadata[hostname]
|
||||
|
||||
def get_images(self):
|
||||
images = []
|
||||
for image in self.sms.list_os_images():
|
||||
if str(image.label).lower().find(self.args.list_images.lower()) >= 0:
|
||||
images.append(vars(image))
|
||||
return json.loads(json.dumps(images, default=lambda o: o.__dict__))
|
||||
|
||||
def is_cache_valid(self):
|
||||
"""Determines if the cache file has expired, or if it is still valid."""
|
||||
if os.path.isfile(self.cache_path_cache):
|
||||
mod_time = os.path.getmtime(self.cache_path_cache)
|
||||
current_time = time()
|
||||
if (mod_time + self.cache_max_age) > current_time:
|
||||
if os.path.isfile(self.cache_path_index):
|
||||
return True
|
||||
return False
|
||||
|
||||
def read_settings(self):
|
||||
"""Reads the settings from the .ini file."""
|
||||
config = ConfigParser.SafeConfigParser()
|
||||
config.read(os.path.dirname(os.path.realpath(__file__)) + '/windows_azure.ini')
|
||||
|
||||
# Credentials related
|
||||
if config.has_option('azure', 'subscription_id'):
|
||||
self.subscription_id = config.get('azure', 'subscription_id')
|
||||
if config.has_option('azure', 'cert_path'):
|
||||
self.cert_path = config.get('azure', 'cert_path')
|
||||
|
||||
# Cache related
|
||||
if config.has_option('azure', 'cache_path'):
|
||||
cache_path = os.path.expandvars(os.path.expanduser(config.get('azure', 'cache_path')))
|
||||
self.cache_path_cache = os.path.join(cache_path, 'ansible-azure.cache')
|
||||
self.cache_path_index = os.path.join(cache_path, 'ansible-azure.index')
|
||||
if config.has_option('azure', 'cache_max_age'):
|
||||
self.cache_max_age = config.getint('azure', 'cache_max_age')
|
||||
|
||||
def read_environment(self):
|
||||
''' Reads the settings from environment variables '''
|
||||
# Credentials
|
||||
if os.getenv("AZURE_SUBSCRIPTION_ID"):
        self.subscription_id = os.getenv("AZURE_SUBSCRIPTION_ID")

        if os.getenv("AZURE_CERT_PATH"):
            self.cert_path = os.getenv("AZURE_CERT_PATH")

    def parse_cli_args(self):
        """Command line argument processing"""
        parser = argparse.ArgumentParser(
            description='Produce an Ansible Inventory file based on Azure',
        )
        parser.add_argument('--list', action='store_true', default=True,
                            help='List nodes (default: True)')
        parser.add_argument('--list-images', action='store',
                            help='Get all available images.')
        parser.add_argument('--refresh-cache',
                            action='store_true', default=False,
                            help='Force refresh of the cache by making API requests to Azure '
                                 '(default: False - use cache files)',
                            )
        parser.add_argument('--host', action='store',
                            help='Get all information about an instance.')
        self.args = parser.parse_args()

    def do_api_calls_update_cache(self):
        """Do API calls, and save data in cache files."""
        self.add_cloud_services()
        self.write_to_cache(self.inventory, self.cache_path_cache)
        self.write_to_cache(self.index, self.cache_path_index)

    def add_cloud_services(self):
        """Makes an Azure API call to get the list of cloud services."""
        try:
            for cloud_service in self.sms.list_hosted_services():
                self.add_deployments(cloud_service)
        except Exception as e:
            sys.exit("Error: Failed to access cloud services - {0}".format(str(e)))

    def add_deployments(self, cloud_service):
        """Makes an Azure API call to get the list of virtual machines
        associated with a cloud service.
        """
        try:
            for deployment in self.sms.get_hosted_service_properties(cloud_service.service_name, embed_detail=True).deployments.deployments:
                self.add_deployment(cloud_service, deployment)
        except Exception as e:
            sys.exit("Error: Failed to access deployments - {0}".format(str(e)))

    def add_deployment(self, cloud_service, deployment):
        """Adds a deployment to the inventory and index"""
        for role in deployment.role_instance_list.role_instances:
            try:
                # Default to port 22 unless an endpoint named 'SSH' is found
                port = '22'
                for ie in role.instance_endpoints.instance_endpoints:
                    if ie.name == 'SSH':
                        port = ie.public_port
                        break
            except AttributeError:
                pass
            finally:
                self.add_instance(role.instance_name, deployment, port, cloud_service, role.instance_status)

    def add_instance(self, hostname, deployment, ssh_port, cloud_service, status):
        """Adds an instance to the inventory and index"""

        dest = urlparse(deployment.url).hostname

        # Add to index
        self.index[hostname] = deployment.name

        self.host_metadata[hostname] = dict(ansible_ssh_host=dest,
                                            ansible_ssh_port=int(ssh_port),
                                            instance_status=status,
                                            private_id=deployment.private_id)

        # List of all azure deployments
        self.push(self.inventory, "azure", hostname)

        # Inventory: Group by service name
        self.push(self.inventory, self.to_safe(cloud_service.service_name), hostname)

        if int(ssh_port) == 22:
            self.push(self.inventory, "Cloud_services", hostname)

        # Inventory: Group by region
        self.push(self.inventory, self.to_safe(cloud_service.hosted_service_properties.location), hostname)

    def push(self, my_dict, key, element):
        """Pushes an element onto an array that may not have been defined in the dict."""
        if key in my_dict:
            my_dict[key].append(element)
        else:
            my_dict[key] = [element]

    def get_inventory_from_cache(self):
        """Reads the inventory from the cache file and returns it as a JSON object."""
        cache = open(self.cache_path_cache, 'r')
        json_inventory = cache.read()
        return json_inventory

    def load_index_from_cache(self):
        """Reads the index from the cache file and sets self.index."""
        cache = open(self.cache_path_index, 'r')
        json_index = cache.read()
        self.index = json.loads(json_index)

    def write_to_cache(self, data, filename):
        """Writes data in JSON format to a file."""
        json_data = self.json_format_dict(data, True)
        cache = open(filename, 'w')
        cache.write(json_data)
        cache.close()

    def to_safe(self, word):
        """Escapes any characters that would be invalid in an ansible group name."""
        return re.sub(r"[^A-Za-z0-9\-]", "_", word)

    def json_format_dict(self, data, pretty=False):
        """Converts a dict to a JSON object and dumps it as a formatted string."""
        if pretty:
            return json.dumps(data, sort_keys=True, indent=2)
        else:
            return json.dumps(data)


AzureInventory()
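The push() helper above is the classic grow-a-list-in-a-dict idiom. A minimal standalone sketch of the same grouping behavior (host and group names are hypothetical), using dict.setdefault as the idiomatic equivalent:

import json

inventory = {}

def push(my_dict, key, element):
    # Equivalent to the push() method: create the list on first use.
    my_dict.setdefault(key, []).append(element)

push(inventory, "azure", "web-vm-0")             # global group
push(inventory, "my_cloud_service", "web-vm-0")  # group by service
push(inventory, "eastus", "web-vm-0")            # group by region

# {"azure": ["web-vm-0"], "eastus": ["web-vm-0"], "my_cloud_service": ["web-vm-0"]}
print(json.dumps(inventory, sort_keys=True))
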
@ -481,9 +481,6 @@ if is_testing():
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
        'ephemeral': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    }
else:
    CACHES = {
@ -491,9 +488,6 @@ else:
            'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
            'LOCATION': 'memcached:11211',
        },
        'ephemeral': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    }

# Social Auth configuration.
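Both removals drop the 'ephemeral' locmem backend, which held short-lived memoized values. One plausible in-process replacement (purely a sketch; all names here are hypothetical, not the project's actual implementation) is a registry-keyed memoizer whose cached results can be dropped per function name:

import functools

_memo_registry = {}

def memoize(func):
    @functools.wraps(func)
    def wrapper(*args):
        # Look the cache up on every call so deletion takes effect immediately.
        cache = _memo_registry.setdefault(func.__name__, {})
        if args not in cache:
            cache[args] = func(*args)
        return cache[args]
    return wrapper

def memoize_delete(function_name):
    # Drop every cached result for the named function.
    _memo_registry.pop(function_name, None)
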
@ -776,14 +770,12 @@ GCE_HOST_FILTER = r'^.+$'
GCE_EXCLUDE_EMPTY_GROUPS = True
GCE_INSTANCE_ID_VAR = None


# -------------------
# -- Microsoft Azure --
# -------------------

# --------------------------------------
# -- Microsoft Azure Resource Manager --
# --------------------------------------
# It's not possible to get zones in Azure without authenticating, so we
# provide a list here.
AZURE_REGION_CHOICES = [
AZURE_RM_REGION_CHOICES = [
    ('eastus', _('US East')),
    ('eastus2', _('US East 2')),
    ('centralus', _('US Central')),
@ -810,23 +802,8 @@ AZURE_REGION_CHOICES = [
    ('koreacentral', _('Korea Central')),
    ('koreasouth', _('Korea South')),
]
AZURE_REGIONS_BLACKLIST = []
AZURE_RM_REGIONS_BLACKLIST = []

# Inventory variable name/value for determining whether a host is active
# in Microsoft Azure.
AZURE_ENABLED_VAR = 'instance_status'
AZURE_ENABLED_VALUE = 'ReadyRole'

# Filter for allowed group and host names when importing inventory from
# Microsoft Azure.
AZURE_GROUP_FILTER = r'^.+$'
AZURE_HOST_FILTER = r'^.+$'
AZURE_EXCLUDE_EMPTY_GROUPS = True
AZURE_INSTANCE_ID_VAR = 'private_id'

# --------------------------------------
# -- Microsoft Azure Resource Manager --
# --------------------------------------
AZURE_RM_GROUP_FILTER = r'^.+$'
AZURE_RM_HOST_FILTER = r'^.+$'
AZURE_RM_ENABLED_VAR = 'powerstate'
@ -136,8 +136,7 @@ class LDAPBackend(BaseLDAPBackend):
def _decorate_enterprise_user(user, provider):
    user.set_unusable_password()
    user.save()
    enterprise_auth = UserEnterpriseAuth(user=user, provider=provider)
    enterprise_auth.save()
    enterprise_auth, _ = UserEnterpriseAuth.objects.get_or_create(user=user, provider=provider)
    return enterprise_auth

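The switch to get_or_create makes decoration idempotent: a second login can no longer insert a duplicate UserEnterpriseAuth row. A self-contained sketch of the pattern, with a plain dict standing in for the table:

_enterprise_auth = {}

def get_or_create(user, provider):
    # Mirrors Django's get_or_create: only insert when the (user, provider)
    # pair is missing, and report whether an insert happened.
    key = (user, provider)
    created = key not in _enterprise_auth
    if created:
        _enterprise_auth[key] = {'user': user, 'provider': provider}
    return _enterprise_auth[key], created

record, created = get_or_create('alice', 'saml')   # created is True
record, created = get_or_create('alice', 'saml')   # created is False; no duplicate
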
@ -269,16 +268,12 @@ class SAMLAuth(BaseSAMLAuth):
        if not feature_enabled('enterprise_auth'):
            logger.error("Unable to authenticate, license does not support SAML authentication")
            return None
        created = False
        try:
            user = User.objects.get(username=kwargs.get('username', ''))
            if user and not user.is_in_enterprise_category('saml'):
                return None
        except User.DoesNotExist:
            created = True
        user = super(SAMLAuth, self).authenticate(*args, **kwargs)
        if user and created:
        # Comes from https://github.com/omab/python-social-auth/blob/v0.2.21/social/backends/base.py#L91
        if getattr(user, 'is_new', False):
            _decorate_enterprise_user(user, 'saml')
        elif user and not user.is_in_enterprise_category('saml'):
            return None
        return user

    def get_user(self, user_id):

@ -71,6 +71,8 @@ function ComponentsStrings (BaseString) {
        SETTINGS: t.s('Settings'),
        FOOTER_ABOUT: t.s('About'),
        FOOTER_COPYRIGHT: t.s('Copyright © 2017 Red Hat, Inc.')
    };

    ns.capacityBar = {
        IS_OFFLINE: t.s('Unavailable to run jobs.')
    };
}

@ -76,7 +76,6 @@
@import '../../src/inventories-hosts/inventories/insights/insights.block.less';
@import '../../src/inventories-hosts/inventories/list/host-summary-popover/host-summary-popover.block.less';
@import '../../src/inventories-hosts/inventories/related/hosts/related-groups-labels/relatedGroupsLabelsList.block.less';
@import '../../src/inventories-hosts/inventories/smart-inventory/smart-inventory-host-filter/host-filter-modal/host-filter-modal.block.less';
@import '../../src/inventories-hosts/inventories/inventories.block.less';
@import '../../src/inventories-hosts/shared/associate-groups/associate-groups.block.less';
@import '../../src/inventories-hosts/shared/associate-hosts/associate-hosts.block.less';

@ -277,9 +277,13 @@ angular
    $(this).remove();
});

$('.ui-dialog-content').each(function() {
    $(this).dialog('close');
});
if (next.name !== "templates.editWorkflowJobTemplate.workflowMaker" &&
    next.name !== "templates.editWorkflowJobTemplate.workflowMaker.inventory" &&
    next.name !== "templates.editWorkflowJobTemplate.workflowMaker.credential") {
    $('.ui-dialog-content').each(function() {
        $(this).dialog('close');
    });
}

try {
    $('#help-modal').dialog('close');

@ -150,7 +150,7 @@ export default ['i18n', function(i18n) {
    "subscription": {
        label: i18n._("Subscription ID"),
        type: 'text',
        ngShow: "kind.value == 'azure' || kind.value == 'azure_rm'",
        ngShow: "kind.value == 'azure_rm'",
        awRequiredWhen: {
            reqExpression: 'subscription_required',
            init: false
@ -169,7 +169,7 @@ export default ['i18n', function(i18n) {
        labelBind: 'usernameLabel',
        type: 'text',
        ngShow: "kind.value && kind.value !== 'aws' && " +
                "kind.value !== 'gce' && kind.value!=='azure'",
                "kind.value !== 'gce'",
        awRequiredWhen: {
            reqExpression: 'username_required',
            init: false
@ -241,7 +241,7 @@ export default ['i18n', function(i18n) {
        labelBind: 'sshKeyDataLabel',
        type: 'textarea',
        ngShow: "kind.value == 'ssh' || kind.value == 'scm' || " +
                "kind.value == 'gce' || kind.value == 'azure' || kind.value == 'net'",
                "kind.value == 'gce' || kind.value == 'net'",
        awRequiredWhen: {
            reqExpression: 'key_required',
            init: true

@ -34,12 +34,6 @@ export default
            "two words followed by a three digit number. Such " +
            "as: ") + "</p><p>adjective-noun-000</p>";
        break;
    case 'azure':
        scope.sshKeyDataLabel = i18n._('Management Certificate');
        scope.subscription_required = true;
        scope.key_required = true;
        scope.key_description = i18n._("Paste the contents of the PEM file that corresponds to the certificate you uploaded in the Microsoft Azure console.");
        break;
    case 'azure_rm':
        scope.usernameLabel = i18n._("Username");
        scope.subscription_required = true;

@ -91,12 +91,6 @@ export default
            "two words followed by a three digit number. Such " +
            "as: ") + "</p><p>adjective-noun-000</p>";
        break;
    case 'azure':
        scope.sshKeyDataLabel = i18n._('Management Certificate');
        scope.subscription_required = true;
        scope.key_required = true;
        scope.key_description = i18n._("Paste the contents of the PEM file that corresponds to the certificate you uploaded in the Microsoft Azure console.");
        break;
    case 'azure_rm':
        scope.usernameLabel = i18n._("Username");
        scope.subscription_required = true;

@ -1,8 +1,10 @@
capacity-bar {

    width: 50%;
    margin-right: 10px;
    margin-right: 25px;
    min-width: 100px;
    display: flex;
    align-items: center;

    .CapacityBar {
        background-color: @default-bg;
@ -13,6 +15,7 @@ capacity-bar {
        width: 100%;
        border-radius: 100vw;
        overflow: hidden;
        margin-right: 10px;
    }

    .CapacityBar-remaining {
@ -23,4 +26,16 @@ capacity-bar {
    .CapacityBar-consumed {
        flex: 0 0 auto;
    }

    .CapacityBar--offline {
        border-color: @d7grey;

        .CapacityBar-remaining {
            background-color: @d7grey;
        }
    }

    .Capacity-details--percentage {
        color: @default-data-txt;
    }
}

@ -1,16 +1,42 @@
export default ['templateUrl',
    function (templateUrl) {
export default ['templateUrl', 'ComponentsStrings',
    function (templateUrl, strings) {
        return {
            scope: {
                capacity: '='
                capacity: '=',
                totalCapacity: '='
            },
            templateUrl: templateUrl('instance-groups/capacity-bar/capacity-bar'),
            restrict: 'E',
            link: function(scope) {
                scope.isOffline = false;

                scope.$watch('totalCapacity', function(val) {
                    if (val === 0) {
                        scope.isOffline = true;
                        scope.offlineTip = strings.get(`capacityBar.IS_OFFLINE`);
                    } else {
                        scope.isOffline = false;
                        scope.offlineTip = null;
                    }
                }, true);

                scope.$watch('capacity', function() {
                    scope.CapacityStyle = {
                        'flex-grow': scope.capacity * 0.01
                    };
                    if (scope.totalCapacity !== 0) {
                        var percentageCapacity = Math
                            .round(scope.capacity / scope.totalCapacity * 1000) / 10;

                        scope.CapacityStyle = {
                            'flex-grow': percentageCapacity * 0.01
                        };

                        scope.consumedCapacity = `${percentageCapacity}%`;
                    } else {
                        scope.CapacityStyle = {
                            'flex-grow': 1
                        };

                        scope.consumedCapacity = null;
                    }
                }, true);
            }
        };
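The percentage math in the new directive rounds to one decimal place (multiply by 1000, round, divide by 10) and treats zero total capacity as offline rather than dividing by it. The same arithmetic as a quick sketch (values hypothetical):

def consumed_percentage(consumed, total):
    # Round a consumed/total ratio to a one-decimal percentage,
    # guarding total == 0 (a zero-capacity instance is offline).
    if total == 0:
        return None
    return round(consumed / total * 1000) / 10

print(consumed_percentage(42, 128))  # 32.8
print(consumed_percentage(0, 0))     # None
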
@ -1,4 +1,11 @@
<div class="CapacityBar">
    <div class="CapacityBar-remaining" ng-style="CapacityStyle"></div>
    <div class="CapacityBar-consumed"></div>
</div>
<div class="CapacityBar"
    ng-class="{'CapacityBar--offline': isOffline}"
    aw-tool-tip="{{ offlineTip }}"
    data-tip-watch="offlineTip"
    data-placement="top"
    data-trigger="hover"
    data-container="body">
    <div class="CapacityBar-remaining" ng-style="CapacityStyle"></div>
    <div class="CapacityBar-consumed"></div>
</div>
<span class="Capacity-details--percentage" ng-show="consumedCapacity">{{ consumedCapacity }}</span>

@ -24,10 +24,6 @@
        margin: 0 10px 0 0;
        width: 100px;
    }

    .Capacity-details--percentage {
        color: @default-data-txt;
    }
}

.RunningJobs-details {

@ -8,8 +8,7 @@
    <div class="List-details">
        <div class="Capacity-details">
            <p class="Capacity-details--label" translate>Used Capacity</p>
            <capacity-bar capacity="instanceGroupCapacity"></capacity-bar>
            <span class="Capacity-details--percentage">{{ instanceGroupCapacity }}%</span>
            <capacity-bar capacity="instanceGroupCapacity" total-capacity="instanceGroupTotalCapacity"></capacity-bar>
        </div>
        <div class="RunningJobs-details">
            <p class="RunningJobs-details--label" translate>Running Jobs</p>
@ -31,4 +30,4 @@
        </div>
    </div>
    <div ui-view="list"></div>
</div>
</div>

@ -8,8 +8,7 @@
    <div class="List-details">
        <div class="Capacity-details">
            <p class="Capacity-details--label" translate>Used Capacity</p>
            <capacity-bar capacity="instanceCapacity"></capacity-bar>
            <span class="Capacity-details--percentage">{{ instanceCapacity }}%</span>
            <capacity-bar capacity="instanceCapacity" total-capacity="instanceTotalCapacity"></capacity-bar>
        </div>
        <div class="RunningJobs-details">
            <p class="RunningJobs-details--label" translate>Running Jobs</p>
@ -30,4 +29,4 @@
        </div>
    </div>
    <div class="instance-jobs-list" ui-view="list"></div>
</div>
</div>

@ -13,6 +13,7 @@ export default {
    controller: function($scope, $rootScope, instance) {
        $scope.instanceName = instance.hostname;
        $scope.instanceCapacity = instance.consumed_capacity;
        $scope.instanceTotalCapacity = instance.capacity;
        $scope.instanceJobsRunning = instance.jobs_running;
        $rootScope.breadcrumb.instance_name = instance.hostname;
    }
@ -34,4 +35,4 @@ export default {
        });
    }]
}
};
};

@ -35,7 +35,7 @@
        </a>
    </td>
    <td class="List-tableCell List-tableCell--capacityColumn ng-binding">
        <capacity-bar capacity="instance.consumed_capacity"></capacity-bar><span>{{ instance.consumed_capacity }}%</span>
        <capacity-bar capacity="instance.consumed_capacity" total-capacity="instance.capacity"></capacity-bar>
    </td>
</tr>
</tbody>

@ -10,6 +10,7 @@ export default {
    controller: function($scope, $rootScope, instanceGroup) {
        $scope.instanceGroupName = instanceGroup.name;
        $scope.instanceGroupCapacity = instanceGroup.consumed_capacity;
        $scope.instanceGroupTotalCapacity = instanceGroup.capacity;
        $scope.instanceGroupJobsRunning = instanceGroup.jobs_running;
        $rootScope.breadcrumb.instance_group_name = instanceGroup.name;
    }
@ -31,4 +32,4 @@ export default {
        });
    }]
}
};
};

@ -47,7 +47,7 @@
        </a>
    </td>
    <td class="List-tableCell List-tableCell--capacityColumn ng-binding">
        <capacity-bar capacity="instance_group.consumed_capacity"></capacity-bar><span>{{ instance_group.consumed_capacity }}%</span>
        <capacity-bar capacity="instance_group.consumed_capacity" total-capacity="instance_group.capacity"></capacity-bar>
    </td>
</tr>
</tbody>

@ -20,8 +20,7 @@
<div class="Modal-body">
    <div>
        <div class="Prompt-bodyQuery">
            <span translate>Are you sure you want to disassociate the host below from</span> {{disassociateGroup.name}}?<br /><br />
            <span translate>Note that only hosts directly in this group can be disassociated. Hosts in sub-groups must be disassociated directly from the sub-group level to which they belong.</span>
            <span translate>Are you sure you want to disassociate the host below from</span> {{disassociateGroup.name}}?
        </div>
        <div class="Prompt-bodyTarget">{{ host.name }}</div>
    </div>

@ -5,6 +5,7 @@ export default {
    params: {
        completed_job_search: {
            value: {
                page_size: '20',
                or__job__inventory: "",
                or__adhoccommand__inventory: "",
                or__inventoryupdate__inventory_source__inventory: "",

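The added page_size: '20' gives the completed-jobs list a sane default page length, and the or__ params OR three related-model filters together server-side so jobs, ad hoc commands, and inventory updates all show up for one inventory. A sketch of the query string these defaults produce (endpoint and inventory id hypothetical):

from urllib.parse import urlencode

inventory_id = 42  # hypothetical
params = {
    'page_size': 20,
    'or__job__inventory': inventory_id,
    'or__adhoccommand__inventory': inventory_id,
    'or__inventoryupdate__inventory_source__inventory': inventory_id,
}
# Assumed endpoint for illustration only.
print('/api/v2/unified_jobs/?' + urlencode(params))
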
@ -15,7 +15,7 @@
    hover: true,
    multiSelect: true,
    trackBy: 'nested_group.id',
    basePath: 'api/v2/inventories/{{$stateParams.inventory_id}}/root_groups/',
    basePath: 'api/v2/groups/{{$stateParams.group_id}}/children/',

    fields: {
        failed_hosts: {

@ -22,7 +22,7 @@
    </div>
    <div class="Modal-body">
        <div>
            <div class="Prompt-bodyQuery"><translate>Are you sure you want to disassociate the host below from</translate> {{disassociateFrom.name}}?</div>
            <div class="Prompt-bodyQuery"><translate>Are you sure you want to disassociate the host below from</translate> {{disassociateFrom.name}}?<br /><br /><span translate>Note that only hosts directly in this group can be disassociated. Hosts in sub-groups must be disassociated directly from the sub-group level to which they belong.</span></div>
            <div class="Prompt-bodyTarget">{{ toDisassociate.name }}</div>
        </div>
        <div class="Modal-footer">

@ -22,7 +22,8 @@
    </div>
    <div class="Modal-body">
        <div>
            <div class="Prompt-bodyQuery"><translate>Are you sure you want to disassociate the host below from</translate> {{disassociateFrom.name}}?</div>
            <div class="Prompt-bodyQuery">
                <translate>Are you sure you want to disassociate the host below from</translate> {{disassociateFrom.name}}?</div>
            <div class="Prompt-bodyTarget">{{ toDisassociate.name }}</div>
        </div>
        <div class="Modal-footer">

@ -15,7 +15,7 @@
    hover: true,
    multiSelect: true,
    trackBy: 'nested_group.id',
    basePath: 'api/v2/inventories/{{$stateParams.inventory_id}}/root_groups/',
    basePath: 'api/v2/hosts/{{$stateParams.host_id}}/all_groups/',

    fields: {
        failed_hosts: {

@ -251,7 +251,7 @@ export default ['$state', '$stateParams', '$scope', 'SourcesFormDefinition',
    scope: $scope,
    field: 'source_regions',
    variable: 'azure_regions',
    choice_name: 'azure_region_choices',
    choice_name: 'azure_rm_region_choices',
    options: inventorySourcesOptions
});


@ -218,7 +218,7 @@ export default ['$state', '$stateParams', '$scope', 'ParseVariableString',
    scope: $scope,
    field: 'source_regions',
    variable: 'azure_regions',
    choice_name: 'azure_region_choices',
    choice_name: 'azure_rm_region_choices',
    options: inventorySourcesOptions
});
GetChoices({

@ -132,7 +132,7 @@ return {
    type: 'select',
    ngOptions: 'source.label for source in source_region_choices track by source.value',
    multiSelect: true,
    ngShow: "source && (source.value == 'rax' || source.value == 'ec2' || source.value == 'gce' || source.value == 'azure' || source.value == 'azure_rm')",
    ngShow: "source && (source.value == 'rax' || source.value == 'ec2' || source.value == 'gce' || source.value == 'azure_rm')",
    dataTitle: i18n._('Source Regions'),
    dataPlacement: 'right',
    awPopOver: "<p>" + i18n._("Click on the regions field to see a list of regions for your cloud provider. You can select multiple regions, or choose") +
@ -328,7 +328,7 @@ return {
    label: i18n._('Overwrite'),
    type: 'checkbox',
    ngShow: "source.value !== '' && source.value !== null",
    awPopOver: "<p>" + i18n._("If checked, all child groups and hosts not found on the external source will be deleted from the local inventory.") + '</p><p>' +
    awPopOver: "<p>" + i18n._("If checked, any hosts and groups that were previously present on the external source but are now removed will be removed from the Tower inventory. Hosts and groups that were not managed by the inventory source will be promoted to the next manually created group, or if there is no manually created group to promote them into, they will be left in the \"all\" default group for the inventory.") + '</p><p>' +
              i18n._("When not checked, local child hosts and groups not found on the external source will remain untouched by the inventory update process.") + "</p>",
    dataTitle: i18n._('Overwrite'),
    dataContainer: 'body',

@ -1,3 +0,0 @@
.HostFilterModal-tableRow:hover {
    background-color: @default-bg;
}

@ -72,9 +72,6 @@ function(i18n, InventoryCompletedJobsList) {
    basePath: 'credentials',
    sourceModel: 'insights_credential',
    sourceField: 'name',
    search: {
        credential_type: '13' //insights
    },
    ngDisabled: '!(inventory_obj.summary_fields.user_capabilities.edit || canAdd) || !canEditOrg',
},
instance_groups: {

@ -5,8 +5,8 @@
*************************************************/


export default ['$q', 'Prompt', '$filter', 'Wait', 'Rest', '$state', 'ProcessErrors', 'InitiatePlaybookRun', 'GetBasePath', 'Alert', '$rootScope',
    function ($q, Prompt, $filter, Wait, Rest, $state, ProcessErrors, InitiatePlaybookRun, GetBasePath, Alert, $rootScope) {
export default ['$q', 'Prompt', '$filter', 'Wait', 'Rest', '$state', 'ProcessErrors', 'InitiatePlaybookRun', 'GetBasePath', 'Alert', '$rootScope', 'i18n',
    function ($q, Prompt, $filter, Wait, Rest, $state, ProcessErrors, InitiatePlaybookRun, GetBasePath, Alert, $rootScope, i18n) {
        var val = {
            // the playbook_on_stats event returns the count data in a weird format.
            // format to what we need!
@ -90,9 +90,9 @@ function ($q, Prompt, $filter, Wait, Rest, $state, ProcessErrors, InitiatePlaybo
    },
    deleteJob: function(job) {
        Prompt({
            hdr: 'Delete Job',
            hdr: i18n._("Delete Job"),
            body: `<div class='Prompt-bodyQuery'>
                Are you sure you want to delete the job below?
                ${i18n._("Are you sure you want to delete the job below?")}
            </div>
            <div class='Prompt-bodyTarget'>
                #${job.id} ${$filter('sanitize')(job.name)}
@ -116,7 +116,7 @@ function ($q, Prompt, $filter, Wait, Rest, $state, ProcessErrors, InitiatePlaybo
                    });
                });
            },
            actionText: 'DELETE'
            actionText: i18n._('DELETE')
        });
    },
    cancelJob: function(job) {
@ -139,9 +139,9 @@ function ($q, Prompt, $filter, Wait, Rest, $state, ProcessErrors, InitiatePlaybo
        };

        Prompt({
            hdr: 'Cancel Job',
            body: `<div class='Prompt-bodyQuery'>
                Are you sure you want to cancel the job below?
            hdr: i18n._('Cancel Job'),
            body: `<div class='Prompt-bodyQuery' translate>
                ${i18n._("Are you sure you want to cancel the job below?")}
            </div>
            <div class='Prompt-bodyTarget'>
                #${job.id} ${$filter('sanitize')(job.name)}
@ -163,7 +163,7 @@ function ($q, Prompt, $filter, Wait, Rest, $state, ProcessErrors, InitiatePlaybo
                }
            });
        },
        actionText: 'PROCEED'
        actionText: i18n._('PROCEED')
    });
},
relaunchJob: function(scope) {

@ -1,4 +1,4 @@
export default
export default
    function DeleteJob($state, Find, Rest, Wait, ProcessErrors, Prompt, Alert,
        $filter, i18n) {
        return function(params) {
@ -76,7 +76,7 @@ export default
    reloadListStateParams = _.cloneDeep($state.params);
    reloadListStateParams.job_search.page = (parseInt(reloadListStateParams.job_search.page) - 1).toString();
}

$state.go('.', reloadListStateParams, {reload: true});
Wait('stop');
}
@ -107,12 +107,12 @@ export default
}
scope.removeCancelJob = scope.$on('CancelJob', function() {
    var cancelBody = "<div class=\"Prompt-bodyQuery\">" + i18n._("Submit the request to cancel?") + "</div>";
    var deleteBody = "<div class=\"Prompt-bodyQuery\">" + i18n._("Are you sure you want to delete the job below?") + "</div><div class=\"Prompt-bodyTarget\">#" + id + " " + $filter('sanitize')(job.name) + "</div>";
    var deleteBody = "<div class=\"Prompt-bodyQuery\">" + i18n._("Are you sure you want to delete the job below?") + "</div><div class=\"Prompt-bodyTarget\" translate>#" + id + " " + $filter('sanitize')(job.name) + "</div>";
    Prompt({
        hdr: hdr,
        body: (action_label === 'cancel' || job.status === 'new') ? cancelBody : deleteBody,
        action: action,
        actionText: (action_label === 'cancel' || job.status === 'new') ? "OK" : "DELETE"
        actionText: (action_label === 'cancel' || job.status === 'new') ? i18n._("OK") : i18n._("DELETE")
    });
});

@ -25,7 +25,13 @@ export default
    $cookies.remove('token');
    $cookies.remove('token_expires');
    $cookies.remove('userLoggedIn');
    $cookies.put('token', token);

    if (token && !(/^"[a-f0-9]+"$/ig.test(token))) {
        $cookies.put('token', `"${token}"`);
    } else {
        $cookies.put('token', token);
    }

    $cookies.put('token_expires', expires);
    $cookies.put('userLoggedIn', true);
    $cookies.put('sessionExpired', false);
@ -45,7 +51,13 @@ export default
},

getToken: function () {
    return ($rootScope.token) ? $rootScope.token : $cookies.get('token');
    if ($rootScope.token) {
        return $rootScope.token;
    }

    let token = $cookies.get('token');

    return token ? token.replace(/"/g, '') : undefined;
},

retrieveToken: function (username, password) {
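Together, the two changes normalize cookie quoting: the setter wraps a bare hex token in double quotes before storing it, and getToken() strips any quotes on the way out. A small sketch of that round trip (token value hypothetical):

import re

def store_form(token):
    # Wrap a bare hex token in quotes, mirroring the cookie setter above.
    if token and not re.fullmatch(r'"[0-9a-f]+"', token, re.IGNORECASE):
        return '"%s"' % token
    return token

def read_form(stored):
    # Strip quotes on read, mirroring getToken().
    return stored.replace('"', '') if stored else None

raw = 'deadbeef1234'  # hypothetical token
assert read_form(store_form(raw)) == raw
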
@ -101,7 +113,7 @@ export default
    $rootScope.lastPath = '/home';
}
x = Store('sessionTime');
if ($rootScope.current_user) {
if ($rootScope.current_user && x && x[$rootScope.current_user.id]) {
    x[$rootScope.current_user.id].loggedIn = false;
}
Store('sessionTime', x);

@ -535,11 +535,11 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat
    html += (field.flag) ? field.flag : 'enabled';
    html += "' ";
    html += (field.ngDisabled) ? `ng-disabled="${field.ngDisabled}" ` : "";
    html += " class='ScheduleToggle-switch is-on' ng-click='" + field.ngClick + "'>" + i18n._("ON") + "</button><button ng-show='!" + form.iterator + "." ;
    html += " class='ScheduleToggle-switch is-on' ng-click='" + field.ngClick + "' translate>" + i18n._("ON") + "</button><button ng-show='!" + form.iterator + "." ;
    html += (field.flag) ? field.flag : "enabled";
    html += "' ";
    html += (field.ngDisabled) ? `ng-disabled="${field.ngDisabled}" ` : "";
    html += " class='ScheduleToggle-switch' ng-click='" + field.ngClick + "'>" + i18n._("OFF") + "</button></div></div>";
    html += " class='ScheduleToggle-switch' ng-click='" + field.ngClick + "' translate>" + i18n._("OFF") + "</button></div></div>";
}
return html;
},
@ -706,9 +706,9 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat
    html += (field.ngDisabled) ? ', "ScheduleToggle--disabled": ' + field.ngDisabled : '';
    html += "\}' aw-tool-tip='" + field.awToolTip + "' data-placement='" + field.dataPlacement + "' data-tip-watch='" + field.dataTipWatch + "'><div ng-show='" + form.iterator + "." ;
    html += (field.flag) ? field.flag : 'enabled';
    html += "' class='ScheduleToggle-switch is-on' ng-click='" + field.ngClick + "'>ON</div><div ng-show='!" + form.iterator + "." ;
    html += "' class='ScheduleToggle-switch is-on' ng-click='" + field.ngClick + "' translate>ON</div><div ng-show='!" + form.iterator + "." ;
    html += (field.flag) ? field.flag : "enabled";
    html += "' class='ScheduleToggle-switch' ng-click='" + field.ngClick + "'>OFF</div></div></td>";
    html += "' class='ScheduleToggle-switch' ng-click='" + field.ngClick + "' translate>OFF</div></div></td>";
}

if (field.type === 'alertblock') {
@ -768,9 +768,9 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat
    html += (field.ngShow) ? "ng-show=\"" + field.ngShow + "\" " : "";
    html += `data-placement="top">`;
    html += `<button ng-show="${field.toggleSource}" class="ScheduleToggle-switch is-on" ng-click="toggleForm('${field.toggleSource}')"
        ng-disabled="${field.ngDisabled}">ON</button>
        ng-disabled="${field.ngDisabled}" translate>${i18n._("ON")}</button>
        <button ng-show="!${field.toggleSource}" class="ScheduleToggle-switch" ng-click="toggleForm('${field.toggleSource}')"
        ng-disabled="${field.ngDisabled}">OFF</button>
        ng-disabled="${field.ngDisabled}" translate>${i18n._("OFF")}</button>
    </div>`;
}

@ -1875,7 +1875,7 @@ angular.module('FormGenerator', [GeneratorHelpers.name, 'Utilities', listGenerat
    <div
        class="row"
        ng-show="${itm}.length === 0 && !(searchTags | isEmpty)">
        <div class="col-lg-12 List-searchNoResults">`;
        <div class="col-lg-12 List-searchNoResults" translate>`;
    html += i18n._('No records matched your search.');
    html += `</div>
    </div>

@ -36,7 +36,7 @@ export default ['templateUrl', '$window', function(templateUrl, $window) {
    page_size: 5
};

qs.search(GetBasePath('instance_groups'), $scope.instance_groups_queryset)
qs.search(GetBasePath('instance_groups'), $scope.instance_group_queryset)
    .then(res => {
        $scope.instance_group_dataset = res.data;
        $scope.instance_groups = $scope.instance_group_dataset.results;

@ -237,7 +237,7 @@ export default ['$compile', 'Attr', 'Icon',
    // Message for when a search returns no results. This should only get shown after a search is executed with no results.
    html += `
        <div class="row" ng-show="${list.name}.length === 0 && !(searchTags | isEmpty)">
            <div class="col-lg-12 List-searchNoResults">No records matched your search.</div>
            <div class="col-lg-12 List-searchNoResults" translate>No records matched your search.</div>
        </div>
    `;
}

@ -93,7 +93,7 @@ export default ['$q', 'Rest', 'ProcessErrors', '$rootScope', 'Wait', 'DjangoSear
    }
    else if (params.relatedSearchTerm) {
        if (params.singleSearchParam) {
            paramString += keySplit[0] + '__search';
            paramString += keySplit[0];
        }
        else {
            paramString += keySplit[0] + '__search_DEFAULT';

@ -193,7 +193,25 @@ export default ['$stateParams', '$scope', '$state', 'GetBasePath', 'QuerySet', '
        params = _.merge(params, searchWithoutKey(term), combineSameSearches);
    }
    else {
        params = _.merge(params, qs.encodeParam({term: term, searchTerm: true, singleSearchParam: $scope.singleSearchParam ? $scope.singleSearchParam : false}), combineSameSearches);
        let root = termParts[0].split(".")[0].replace(/^-/, '');
        if (_.has($scope.models[$scope.list.name].base, root) || root === "ansible_facts") {
            if (_.has($scope.models[$scope.list.name].base[root], "type") && $scope.models[$scope.list.name].base[root].type === 'field') {
                // Intent is to land here for searching on the base model.
                params = _.merge(params, qs.encodeParam({term: term, relatedSearchTerm: true, singleSearchParam: $scope.singleSearchParam ? $scope.singleSearchParam : false}), combineSameSearches);
            }
            else {
                // Intent is to land here when performing ansible_facts searches.
                params = _.merge(params, qs.encodeParam({term: term, searchTerm: true, singleSearchParam: $scope.singleSearchParam ? $scope.singleSearchParam : false}), combineSameSearches);
            }
        }
        else if (_.contains($scope.models[$scope.list.name].related, root)) {
            // Intent is to land here for related searches.
            params = _.merge(params, qs.encodeParam({term: term, relatedSearchTerm: true, singleSearchParam: $scope.singleSearchParam ? $scope.singleSearchParam : false}), combineSameSearches);
        }
        // It's not a search term or a related search term - treat it as a string.
        else {
            params = _.merge(params, searchWithoutKey(term), combineSameSearches);
        }
    }
}
}
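The new branch classifies a search term by the root of its key: base-model fields and related models get the related-style encoding, while ansible_facts paths keep the plain encoding and unknown roots fall back to free text. A reduced sketch of the classification (the model metadata here is hypothetical):

base = {'name': {'type': 'field'}, 'ansible_facts': {'type': 'json'}}  # hypothetical
related = ['groups', 'inventory_sources']                              # hypothetical

def classify(term):
    # Mirror the branching above: take the key before ':', then its root
    # before '.', dropping a leading '-' (descending sort prefix).
    key = term.split(':', 1)[0]
    root = key.split('.')[0]
    root = root[1:] if root.startswith('-') else root
    if root in base or root == 'ansible_facts':
        if base.get(root, {}).get('type') == 'field':
            return 'related-style encoding (base model field)'
        return 'plain search term (e.g. ansible_facts path)'
    if root in related:
        return 'related search term'
    return 'treat as free-text string'

print(classify('name:web'))               # base model field
print(classify('ansible_facts.os:RHEL'))  # ansible_facts path
print(classify('groups.name:db'))         # related search
print(classify('foo:bar'))                # free text
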
@ -27,15 +27,13 @@ export default
if (!$rootScope.sessionTimer || ($rootScope.sessionTimer && !$rootScope.sessionTimer.isExpired())) {

    $log.debug('Socket connecting to: ' + url);

    self.socket = new ReconnectingWebSocket(url, null, {
        timeoutInterval: 3000,
        maxReconnectAttempts: 10 });

    self.socket.onopen = function () {
        $log.debug("Websocket connection opened.");
        $log.debug("Websocket connection opened. Socket readyState: " + self.socket.readyState);
        socketPromise.resolve();
        console.log('promise resolved, and readyState: ' + self.readyState);
        self.checkStatus();
        if (needsResubscribing) {
            self.subscribe(self.getLast());
@ -118,7 +116,6 @@ export default
    if (this.socket) {
        this.socket.close();
        delete this.socket;
        console.log("Socket deleted: " + this.socket);
    }
},
subscribe: function(state) {
@ -187,13 +184,14 @@ export default
    // Function used for sending objects to the API over the
    // websocket.
    var self = this;
    $log.debug('Sent to Websocket Server: ' + data);
    socketPromise.promise.then(function() {
        console.log("socket readyState at emit: " + self.socket.readyState);
        // if(self.socket.readyState === 0){
        //     self.subscribe(self.getLast());
        // }
        if (self.socket.readyState === 1) {
        if (self.socket.readyState === 0) {
            $log.debug('Unable to send message, waiting 500ms to resend. Socket readyState: ' + self.socket.readyState);
            setTimeout(function() {
                self.subscribe(self.getLast());
            }, 500);
        }
        else if (self.socket.readyState === 1) {
            self.socket.send(data, function () {
                var args = arguments;
                self.scope.$apply(function () {
@ -202,6 +200,7 @@ export default
            }
        });
    });
        $log.debug('Sent to Websocket Server: ' + data);
    }
});
},
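The emit path now distinguishes readyState 0 (CONNECTING: wait 500ms and resubscribe) from readyState 1 (OPEN: send), instead of silently dropping messages sent before the socket opens. A minimal sketch of that dispatch; the socket object here is a stand-in, not a real WebSocket client:

import threading

CONNECTING, OPEN = 0, 1  # standard WebSocket readyState values

def emit(socket, data, resubscribe):
    if socket.ready_state == CONNECTING:
        # Not open yet: retry shortly rather than losing the message.
        threading.Timer(0.5, resubscribe).start()
    elif socket.ready_state == OPEN:
        socket.send(data)

class FakeSocket:
    def __init__(self, state):
        self.ready_state = state
    def send(self, data):
        print('sent:', data)

emit(FakeSocket(OPEN), 'hello', lambda: None)  # sent: hello
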
@ -722,7 +722,8 @@ function($injector, $stateExtender, $log, i18n) {
function buildFieldDefinition(field) {

    // Some lookup modals require some additional default params,
    // namely organization and inventory_script. If these params
    // namely organization, inventory_script, and insights
    // credentials. If these params
    // aren't set as default params out of the gate, then smart
    // search will think they need to be set as search tags.
    var params;
@ -739,6 +740,13 @@ function($injector, $stateExtender, $log, i18n) {
        organization: null
    };
}
else if (field.sourceModel === "insights_credential") {
    params = {
        page_size: '5',
        role_level: 'admin_role',
        credential_type: null
    };
}
else if (field.sourceModel === 'host') {
    params = {
        page_size: '5'
@ -805,8 +813,24 @@ function($injector, $stateExtender, $log, i18n) {
        return;
    }
}],
Dataset: ['ListDefinition', 'QuerySet', '$stateParams', 'GetBasePath', '$interpolate', '$rootScope', '$state', 'OrganizationId',
    (list, qs, $stateParams, GetBasePath, $interpolate, $rootScope, $state, OrganizationId) => {
InsightsCredTypePK: ['ListDefinition', 'Rest', 'GetBasePath', 'ProcessErrors',
    function(list, Rest, GetBasePath, ProcessErrors) {
        if (list.iterator === 'insights_credential') {
            Rest.setUrl(GetBasePath('credential_types') + '?name=Insights');
            return Rest.get()
                .then(({data}) => {
                    return data.results[0].id;
                })
                .catch(({data, status}) => {
                    ProcessErrors(null, data, status, null, {
                        hdr: 'Error!',
                        msg: 'Failed to get credential type data: ' + status
                    });
                });
        }
    }],
Dataset: ['ListDefinition', 'QuerySet', '$stateParams', 'GetBasePath', '$interpolate', '$rootScope', '$state', 'OrganizationId', 'InsightsCredTypePK',
    (list, qs, $stateParams, GetBasePath, $interpolate, $rootScope, $state, OrganizationId, InsightsCredTypePK) => {
        // allow lookup field definitions to use interpolated $stateParams / $rootScope in basePath field
        // the basePath on a form's lookup field will take precedence over the general model list's basepath
        let path, interpolator;
@ -830,6 +854,11 @@ function($injector, $stateExtender, $log, i18n) {
        $stateParams[`${list.iterator}_search`].role_level = "admin_role";
        $stateParams[`${list.iterator}_search`].organization = OrganizationId;
    }
    if (list.iterator === "insights_credential") {
        $stateParams[`${list.iterator}_search`].role_level = "admin_role";
        $stateParams[`${list.iterator}_search`].credential_type = InsightsCredTypePK.toString();
    }

    return qs.search(path, $stateParams[`${list.iterator}_search`]);
}
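Instead of hard-coding the Insights credential type as PK 13, the new resolver asks the API for the type named 'Insights' and threads its id into the lookup's default search params. A sketch of the equivalent lookup with requests (host and auth are hypothetical):

import requests

base = 'https://tower.example.com/api/v2'  # hypothetical host
auth = ('admin', 'password')               # hypothetical credentials

# Resolve the credential type PK by name rather than hard-coding it.
resp = requests.get(base + '/credential_types/', params={'name': 'Insights'}, auth=auth)
resp.raise_for_status()
cred_type_pk = resp.json()['results'][0]['id']

# Use the resolved PK to filter the credential lookup.
creds = requests.get(base + '/credentials/',
                     params={'credential_type': cred_type_pk, 'role_level': 'admin_role'},
                     auth=auth)
print(creds.json()['count'])
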
@ -18,7 +18,7 @@ export default
    'Empty', 'Prompt', 'ToJSON', 'GetChoices', 'CallbackHelpInit',
    'InitiatePlaybookRun', 'initSurvey', '$state', 'CreateSelect2',
    'ToggleNotification', '$q', 'InstanceGroupsService', 'InstanceGroupsData', 'MultiCredentialService', 'availableLabels',
    'canGetProject', 'canGetInventory', 'jobTemplateData', 'ParseVariableString',
    'projectGetPermissionDenied', 'inventoryGetPermissionDenied', 'jobTemplateData', 'ParseVariableString',
    function(
        $filter, $scope, $rootScope,
        $location, $stateParams, JobTemplateForm, GenerateForm, Rest, Alert,
@ -26,7 +26,7 @@ export default
        ParseTypeChange, Wait, selectedLabels, i18n,
        Empty, Prompt, ToJSON, GetChoices, CallbackHelpInit, InitiatePlaybookRun, SurveyControllerInit, $state,
        CreateSelect2, ToggleNotification, $q, InstanceGroupsService, InstanceGroupsData, MultiCredentialService, availableLabels,
        canGetProject, canGetInventory, jobTemplateData, ParseVariableString
        projectGetPermissionDenied, inventoryGetPermissionDenied, jobTemplateData, ParseVariableString
    ) {

        $scope.$watch('job_template_obj.summary_fields.user_capabilities.edit', function(val) {
@ -360,7 +360,7 @@ export default
    MultiCredentialService.loadCredentials(jobTemplateData)
        .then(([selectedCredentials, credTypes, credTypeOptions,
            credTags, credentialGetPermissionDenied]) => {
            $scope.canGetAllRelatedResources = canGetProject && canGetInventory && !credentialGetPermissionDenied ? true : false;
            $scope.canGetAllRelatedResources = !projectGetPermissionDenied && !inventoryGetPermissionDenied && !credentialGetPermissionDenied ? true : false;
            $scope.selectedCredentials = selectedCredentials;
            $scope.credential_types = credTypes;
            $scope.credentialTypeOptions = credTypeOptions;

@ -8,12 +8,12 @@ export default ['$scope', '$rootScope',
    'Alert', 'TemplateList', 'Prompt', 'ProcessErrors',
    'GetBasePath', 'InitiatePlaybookRun', 'Wait', '$state', '$filter',
    'Dataset', 'rbacUiControlService', 'TemplatesService', 'QuerySet',
    'TemplateCopyService',
    'TemplateCopyService', 'i18n',
    function(
        $scope, $rootScope, Alert,
        TemplateList, Prompt, ProcessErrors, GetBasePath,
        InitiatePlaybookRun, Wait, $state, $filter, Dataset, rbacUiControlService, TemplatesService,
        qs, TemplateCopyService
        qs, TemplateCopyService, i18n
    ) {

        var list = TemplateList;
@ -99,8 +99,8 @@ export default ['$scope', '$rootScope',
    $scope.deleteJobTemplate = function(template) {
        if (template) {
            Prompt({
                hdr: 'Delete',
                body: '<div class="Prompt-bodyQuery">Are you sure you want to delete the template below?</div><div class="Prompt-bodyTarget">' + $filter('sanitize')(template.name) + '</div>',
                hdr: i18n._('Delete'),
                body: `<div class="Prompt-bodyQuery">${i18n._("Are you sure you want to delete the template below?")}</div><div class="Prompt-bodyTarget">${$filter('sanitize')(template.name)}</div>`,
                action: function() {

                    function handleSuccessfulDelete(isWorkflow) {
@ -151,7 +151,7 @@ export default ['$scope', '$rootScope',
        Alert('Error: Unable to determine template type', 'We were unable to determine this template\'s type while deleting.');
    }
},
actionText: 'DELETE'
actionText: i18n._('DELETE')
});
}
else {

@ -150,47 +150,57 @@ angular.module('templates', [surveyMaker.name, templatesList.name, jobTemplates.
            });
        });
    }],
    canGetProject: ['Rest', 'ProcessErrors', 'jobTemplateData',
    projectGetPermissionDenied: ['Rest', 'ProcessErrors', 'jobTemplateData',
        function(Rest, ProcessErrors, jobTemplateData) {
            Rest.setUrl(jobTemplateData.related.project);
            return Rest.get()
                .then(() => {
                    return true;
                })
                .catch(({data, status}) => {
                    if (status === 403) {
                        /* User doesn't have read access to the project, no problem. */
                    } else {
                        ProcessErrors(null, data, status, null, {
                            hdr: 'Error!',
                            msg: 'Failed to get project. GET returned ' +
                                 'status: ' + status
                        });
                    }

                    return false;
                });
            if (jobTemplateData.related.project) {
                Rest.setUrl(jobTemplateData.related.project);
                return Rest.get()
                    .then(() => {
                        return false;
                    })
                    .catch(({data, status}) => {
                        if (status !== 403) {
                            ProcessErrors(null, data, status, null, {
                                hdr: 'Error!',
                                msg: 'Failed to get project. GET returned ' +
                                     'status: ' + status
                            });
                            return false;
                        }
                        else {
                            return true;
                        }
                    });
            }
            else {
                return false;
            }
        }],
    canGetInventory: ['Rest', 'ProcessErrors', 'jobTemplateData',
    inventoryGetPermissionDenied: ['Rest', 'ProcessErrors', 'jobTemplateData',
        function(Rest, ProcessErrors, jobTemplateData) {
            Rest.setUrl(jobTemplateData.related.inventory);
            return Rest.get()
                .then(() => {
                    return true;
                })
                .catch(({data, status}) => {
                    if (status === 403) {
                        /* User doesn't have read access to the inventory, no problem. */
                    } else {
                        ProcessErrors(null, data, status, null, {
                            hdr: 'Error!',
                            msg: 'Failed to get inventory. GET returned ' +
                                 'status: ' + status
                        });
                    }

                    return false;
                });
            if (jobTemplateData.related.inventory) {
                Rest.setUrl(jobTemplateData.related.inventory);
                return Rest.get()
                    .then(() => {
                        return false;
                    })
                    .catch(({data, status}) => {
                        if (status !== 403) {
                            ProcessErrors(null, data, status, null, {
                                hdr: 'Error!',
                                msg: 'Failed to get inventory. GET returned ' +
                                     'status: ' + status
                            });
                            return false;
                        }
                        else {
                            return true;
                        }
                    });
            }
            else {
                return false;
            }
        }],
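The inverted resolvers answer "was access denied?" instead of "can I read this?", so a job template with no related project or inventory short-circuits to false rather than issuing a doomed request. A sketch of the same probe in Python (URL and session are hypothetical):

import requests

def get_permission_denied(session, url):
    # True only when the resource exists and the GET comes back 403;
    # a missing URL or a successful read both mean "not denied".
    if not url:
        return False
    resp = session.get(url)
    if resp.status_code == 403:
        return True
    resp.raise_for_status()  # surface unexpected errors
    return False

session = requests.Session()  # hypothetical authenticated session
# denied = get_permission_denied(session, 'https://tower.example.com/api/v2/projects/1/')
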
    InstanceGroupsData: ['$stateParams', 'Rest', 'GetBasePath', 'ProcessErrors',
        function($stateParams, Rest, GetBasePath, ProcessErrors) {

awx/ui/po/es.po (838 changes): diff suppressed because it is too large
awx/ui/po/fr.po (838 changes): diff suppressed because it is too large
awx/ui/po/ja.po (852 changes): diff suppressed because it is too large
awx/ui/po/nl.po (857 changes): diff suppressed because it is too large
@ -14,7 +14,7 @@
# W391 - Blank line at end of file
# W293 - Blank line contains whitespace
ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E303,E501,W291,W391,W293
exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins/inventory/gce.py,awx/plugins/inventory/vmware.py,awx/plugins/inventory/windows_azure.py,awx/plugins/inventory/openstack.py,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/south_migrations,awx/main/tests/data,installer/openshift/settings.py
exclude=.tox,venv,awx/lib/site-packages,awx/plugins/inventory/ec2.py,awx/plugins/inventory/gce.py,awx/plugins/inventory/vmware.py,awx/plugins/inventory/openstack.py,awx/ui,awx/api/urls.py,awx/main/migrations,awx/main/south_migrations,awx/main/tests/data,installer/openshift/settings.py

[flake8]
ignore=E201,E203,E221,E225,E231,E241,E251,E261,E265,E303,E501,W291,W391,W293,E731,F405

setup.py (5 changes)
@ -52,6 +52,7 @@ else:

class sdist_isolated(sdist):
    includes = [
        'include VERSION',
        'include Makefile',
        'include awx/__init__.py',
        'include awx/main/expect/run.py',
@ -60,6 +61,10 @@ class sdist_isolated(sdist):
        'recursive-include awx/lib *.py',
    ]

    def __init__(self, dist):
        sdist.__init__(self, dist)
        dist.metadata.version = get_version()

    def get_file_list(self):
        self.filelist.process_template_line('include setup.py')
        for line in self.includes: