Repository: https://github.com/ansible/awx.git

Commit 2afa406b7f: Merge pull request #10490 from shanemcd/downstream-fixes

Downstream fixes

Reviewed-by: Rebeccah Hunter <rhunter@redhat.com>
Reviewed-by: Alan Rominger <arominge@redhat.com>
Reviewed-by: Tiago Góes <tiago.goes2009@gmail.com>
Reviewed-by: Christian Adams <rooftopcellist@gmail.com>
@@ -144,7 +144,6 @@ SUMMARIZABLE_FK_FIELDS = {
'inventory_sources_with_failures',
'organization_id',
'kind',
'insights_credential_id',
),
'host': DEFAULT_SUMMARY_FIELDS,
'group': DEFAULT_SUMMARY_FIELDS,
@@ -171,7 +170,6 @@ SUMMARIZABLE_FK_FIELDS = {
'role': ('id', 'role_field'),
'notification_template': DEFAULT_SUMMARY_FIELDS,
'instance_group': ('id', 'name', 'is_container_group'),
'insights_credential': DEFAULT_SUMMARY_FIELDS,
'source_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'target_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
'webhook_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'),
@@ -768,6 +766,7 @@ class UnifiedJobSerializer(BaseSerializer):
'result_traceback',
'event_processing_finished',
'launched_by',
'work_unit_id',
)

extra_kwargs = {
@@ -1410,11 +1409,11 @@ class ProjectOptionsSerializer(BaseSerializer):

class ExecutionEnvironmentSerializer(BaseSerializer):
show_capabilities = ['edit', 'delete', 'copy']
managed_by_tower = serializers.ReadOnlyField()
managed = serializers.ReadOnlyField()

class Meta:
model = ExecutionEnvironment
fields = ('*', 'organization', 'image', 'managed_by_tower', 'credential', 'pull')
fields = ('*', 'organization', 'image', 'managed', 'credential', 'pull')

def get_related(self, obj):
res = super(ExecutionEnvironmentSerializer, self).get_related(obj)
@ -1660,7 +1659,6 @@ class InventorySerializer(BaseSerializerWithVariables):
|
||||
'has_inventory_sources',
|
||||
'total_inventory_sources',
|
||||
'inventory_sources_with_failures',
|
||||
'insights_credential',
|
||||
'pending_deletion',
|
||||
)
|
||||
|
||||
@ -1685,8 +1683,6 @@ class InventorySerializer(BaseSerializerWithVariables):
|
||||
copy=self.reverse('api:inventory_copy', kwargs={'pk': obj.pk}),
|
||||
)
|
||||
)
|
||||
if obj.insights_credential:
|
||||
res['insights_credential'] = self.reverse('api:credential_detail', kwargs={'pk': obj.insights_credential.pk})
|
||||
if obj.organization:
|
||||
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
|
||||
return res
|
||||
@ -2485,14 +2481,14 @@ class ResourceAccessListElementSerializer(UserSerializer):
|
||||
|
||||
class CredentialTypeSerializer(BaseSerializer):
|
||||
show_capabilities = ['edit', 'delete']
|
||||
managed_by_tower = serializers.ReadOnlyField()
|
||||
managed = serializers.ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = CredentialType
|
||||
fields = ('*', 'kind', 'namespace', 'name', 'managed_by_tower', 'inputs', 'injectors')
|
||||
fields = ('*', 'kind', 'namespace', 'name', 'managed', 'inputs', 'injectors')
|
||||
|
||||
def validate(self, attrs):
|
||||
if self.instance and self.instance.managed_by_tower:
|
||||
if self.instance and self.instance.managed:
|
||||
raise PermissionDenied(detail=_("Modifications not allowed for managed credential types"))
|
||||
|
||||
old_inputs = {}
|
||||
@ -2524,8 +2520,8 @@ class CredentialTypeSerializer(BaseSerializer):
|
||||
def to_representation(self, data):
|
||||
value = super(CredentialTypeSerializer, self).to_representation(data)
|
||||
|
||||
# translate labels and help_text for credential fields "managed by Tower"
|
||||
if value.get('managed_by_tower'):
|
||||
# translate labels and help_text for credential fields "managed"
|
||||
if value.get('managed'):
|
||||
value['name'] = _(value['name'])
|
||||
for field in value.get('inputs', {}).get('fields', []):
|
||||
field['label'] = _(field['label'])
|
||||
@ -2544,11 +2540,11 @@ class CredentialTypeSerializer(BaseSerializer):
|
||||
class CredentialSerializer(BaseSerializer):
|
||||
show_capabilities = ['edit', 'delete', 'copy', 'use']
|
||||
capabilities_prefetch = ['admin', 'use']
|
||||
managed_by_tower = serializers.ReadOnlyField()
|
||||
managed = serializers.ReadOnlyField()
|
||||
|
||||
class Meta:
|
||||
model = Credential
|
||||
fields = ('*', 'organization', 'credential_type', 'managed_by_tower', 'inputs', 'kind', 'cloud', 'kubernetes')
|
||||
fields = ('*', 'organization', 'credential_type', 'managed', 'inputs', 'kind', 'cloud', 'kubernetes')
|
||||
extra_kwargs = {'credential_type': {'label': _('Credential Type')}}
|
||||
|
||||
def to_representation(self, data):
|
||||
@ -2615,7 +2611,7 @@ class CredentialSerializer(BaseSerializer):
|
||||
return summary_dict
|
||||
|
||||
def validate(self, attrs):
|
||||
if self.instance and self.instance.managed_by_tower:
|
||||
if self.instance and self.instance.managed:
|
||||
raise PermissionDenied(detail=_("Modifications not allowed for managed credentials"))
|
||||
return super(CredentialSerializer, self).validate(attrs)
|
||||
|
||||
@ -2627,7 +2623,7 @@ class CredentialSerializer(BaseSerializer):
|
||||
return ret
|
||||
|
||||
def validate_organization(self, org):
|
||||
if self.instance and self.instance.credential_type.kind == 'galaxy' and org is None:
|
||||
if self.instance and (not self.instance.managed) and self.instance.credential_type.kind == 'galaxy' and org is None:
|
||||
raise serializers.ValidationError(_("Galaxy credentials must be owned by an Organization."))
|
||||
return org
|
||||
|
||||
@ -2635,7 +2631,6 @@ class CredentialSerializer(BaseSerializer):
|
||||
if self.instance and credential_type.pk != self.instance.credential_type.pk:
|
||||
for related_objects in (
|
||||
'ad_hoc_commands',
|
||||
'insights_inventories',
|
||||
'unifiedjobs',
|
||||
'unifiedjobtemplates',
|
||||
'projects',
|
||||
@ -4194,7 +4189,7 @@ class JobLaunchSerializer(BaseSerializer):
|
||||
elif field_name == 'credentials':
|
||||
for cred in obj.credentials.all():
|
||||
cred_dict = dict(id=cred.id, name=cred.name, credential_type=cred.credential_type.pk, passwords_needed=cred.passwords_needed)
|
||||
if cred.credential_type.managed_by_tower and 'vault_id' in cred.credential_type.defined_fields:
|
||||
if cred.credential_type.managed and 'vault_id' in cred.credential_type.defined_fields:
|
||||
cred_dict['vault_id'] = cred.get_input('vault_id', default=None)
|
||||
defaults_dict.setdefault(field_name, []).append(cred_dict)
|
||||
else:
|
||||
@ -4993,7 +4988,7 @@ class ActivityStreamSerializer(BaseSerializer):
|
||||
('notification', ('id', 'status', 'notification_type', 'notification_template_id')),
|
||||
('o_auth2_access_token', ('id', 'user_id', 'description', 'application_id', 'scope')),
|
||||
('o_auth2_application', ('id', 'name', 'description')),
|
||||
('credential_type', ('id', 'name', 'description', 'kind', 'managed_by_tower')),
|
||||
('credential_type', ('id', 'name', 'description', 'kind', 'managed')),
|
||||
('ad_hoc_command', ('id', 'name', 'status', 'limit')),
|
||||
('workflow_approval', ('id', 'name', 'unified_job_id')),
|
||||
]
|
||||
|
||||
@@ -708,7 +708,7 @@ class ExecutionEnvironmentDetail(RetrieveUpdateDestroyAPIView):
def update(self, request, *args, **kwargs):
instance = self.get_object()
fields_to_check = ['name', 'description', 'organization', 'image', 'credential']
if instance.managed_by_tower and request.user.can_access(models.ExecutionEnvironment, 'change', instance):
if instance.managed and request.user.can_access(models.ExecutionEnvironment, 'change', instance):
for field in fields_to_check:
left = getattr(instance, field, None)
right = request.data.get(field, None)
@@ -1306,7 +1306,7 @@ class CredentialTypeDetail(RetrieveUpdateDestroyAPIView):

def destroy(self, request, *args, **kwargs):
instance = self.get_object()
if instance.managed_by_tower:
if instance.managed:
raise PermissionDenied(detail=_("Deletion not allowed for managed credential types"))
if instance.credentials.exists():
raise PermissionDenied(detail=_("Credential types that are in use cannot be deleted"))
@@ -1421,7 +1421,7 @@ class CredentialDetail(RetrieveUpdateDestroyAPIView):

def destroy(self, request, *args, **kwargs):
instance = self.get_object()
if instance.managed_by_tower:
if instance.managed:
raise PermissionDenied(detail=_("Deletion not allowed for managed credentials"))
return super(CredentialDetail, self).destroy(request, *args, **kwargs)

@ -867,13 +867,11 @@ class InventoryAccess(BaseAccess):
|
||||
# If no data is specified, just checking for generic add permission?
|
||||
if not data:
|
||||
return Organization.accessible_objects(self.user, 'inventory_admin_role').exists()
|
||||
return self.check_related('organization', Organization, data, role_field='inventory_admin_role') and self.check_related(
|
||||
'insights_credential', Credential, data, role_field='use_role'
|
||||
)
|
||||
return self.check_related('organization', Organization, data, role_field='inventory_admin_role')
|
||||
|
||||
@check_superuser
|
||||
def can_change(self, obj, data):
|
||||
return self.can_admin(obj, data) and self.check_related('insights_credential', Credential, data, obj=obj, role_field='use_role')
|
||||
return self.can_admin(obj, data)
|
||||
|
||||
@check_superuser
|
||||
def can_admin(self, obj, data):
|
||||
@ -1038,7 +1036,7 @@ class InventorySourceAccess(NotificationAttachMixin, BaseAccess):
|
||||
|
||||
def can_add(self, data):
|
||||
if not data or 'inventory' not in data:
|
||||
return Organization.accessible_objects(self.user, 'admin_role').exists()
|
||||
return Inventory.accessible_objects(self.user, 'admin_role').exists()
|
||||
|
||||
if not self.check_related('source_project', Project, data, role_field='use_role'):
|
||||
return False
|
||||
@ -1121,7 +1119,7 @@ class CredentialTypeAccess(BaseAccess):
|
||||
I can create when:
|
||||
- I'm a superuser:
|
||||
I can change when:
|
||||
- I'm a superuser and the type is not "managed by Tower"
|
||||
- I'm a superuser and the type is not "managed"
|
||||
"""
|
||||
|
||||
model = CredentialType
|
||||
@ -1207,7 +1205,7 @@ class CredentialAccess(BaseAccess):
|
||||
def get_user_capabilities(self, obj, **kwargs):
|
||||
user_capabilities = super(CredentialAccess, self).get_user_capabilities(obj, **kwargs)
|
||||
user_capabilities['use'] = self.can_use(obj)
|
||||
if getattr(obj, 'managed_by_tower', False) is True:
|
||||
if getattr(obj, 'managed', False) is True:
|
||||
user_capabilities['edit'] = user_capabilities['delete'] = False
|
||||
return user_capabilities
|
||||
|
||||
@ -1370,7 +1368,7 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
||||
return self.check_related('organization', Organization, data, obj=obj, mandatory=True, role_field='execution_environment_admin_role')
|
||||
|
||||
def can_delete(self, obj):
|
||||
if obj.managed_by_tower:
|
||||
if obj.managed:
|
||||
raise PermissionDenied
|
||||
return self.can_change(obj, None)
|
||||
|
||||
|
||||
@ -175,12 +175,12 @@ def org_counts(since, **kwargs):
|
||||
def cred_type_counts(since, **kwargs):
|
||||
counts = {}
|
||||
for cred_type in models.CredentialType.objects.annotate(num_credentials=Count('credentials', distinct=True)).values(
|
||||
'name', 'id', 'managed_by_tower', 'num_credentials'
|
||||
'name', 'id', 'managed', 'num_credentials'
|
||||
):
|
||||
counts[cred_type['id']] = {
|
||||
'name': cred_type['name'],
|
||||
'credential_count': cred_type['num_credentials'],
|
||||
'managed_by_tower': cred_type['managed_by_tower'],
|
||||
'managed': cred_type['managed'],
|
||||
}
|
||||
return counts
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ __all__ = [
'STANDARD_INVENTORY_UPDATE_ENV',
]

CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'tower', 'insights')
CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
PRIVILEGE_ESCALATION_METHODS = [
('sudo', _('Sudo')),
('su', _('Su')),

@ -642,7 +642,7 @@ class CredentialInputField(JSONSchemaField):
|
||||
|
||||
# `ssh_key_unlock` requirements are very specific and can't be
|
||||
# represented without complicated JSON schema
|
||||
if model_instance.credential_type.managed_by_tower is True and 'ssh_key_unlock' in defined_fields:
|
||||
if model_instance.credential_type.managed is True and 'ssh_key_unlock' in defined_fields:
|
||||
|
||||
# in order to properly test the necessity of `ssh_key_unlock`, we
|
||||
# need to know the real value of `ssh_key_data`; for a payload like:
|
||||
@ -711,7 +711,7 @@ class CredentialTypeInputField(JSONSchemaField):
|
||||
}
|
||||
|
||||
def validate(self, value, model_instance):
|
||||
if isinstance(value, dict) and 'dependencies' in value and not model_instance.managed_by_tower:
|
||||
if isinstance(value, dict) and 'dependencies' in value and not model_instance.managed:
|
||||
raise django_exceptions.ValidationError(
|
||||
_("'dependencies' is not supported for custom credentials."),
|
||||
code='invalid',
|
||||
|
||||
@ -44,7 +44,7 @@ class Command(BaseCommand):
|
||||
|
||||
public_galaxy_credential = Credential(
|
||||
name='Ansible Galaxy',
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
credential_type=CredentialType.objects.get(kind='galaxy'),
|
||||
inputs={'url': 'https://galaxy.ansible.com/'},
|
||||
)
|
||||
|
||||
@ -76,7 +76,7 @@ class Command(BaseCommand):
|
||||
}
|
||||
registry_cred, cred_created = Credential.objects.get_or_create(
|
||||
name="Default Execution Environment Registry Credential",
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
credential_type=registry_cred_type[0],
|
||||
defaults={'inputs': inputs},
|
||||
)
|
||||
@ -114,7 +114,7 @@ class Command(BaseCommand):
|
||||
# Create the control plane execution environment that is used for project updates and system jobs
|
||||
ee = settings.CONTROL_PLANE_EXECUTION_ENVIRONMENT
|
||||
_this_ee, cp_created = ExecutionEnvironment.objects.get_or_create(
|
||||
name="Control Plane Execution Environment", defaults={'image': ee, 'managed_by_tower': True, 'credential': registry_cred}
|
||||
name="Control Plane Execution Environment", defaults={'image': ee, 'managed': True, 'credential': registry_cred}
|
||||
)
|
||||
if cp_created:
|
||||
changed = True
|
||||
|
||||
@ -33,7 +33,7 @@ class HostManager(models.Manager):
|
||||
- Only consider results that are unique
|
||||
- Return the count of this query
|
||||
"""
|
||||
return self.order_by().exclude(inventory_sources__source='tower').values('name').distinct().count()
|
||||
return self.order_by().exclude(inventory_sources__source='controller').values('name').distinct().count()
|
||||
|
||||
def org_active_count(self, org_id):
|
||||
"""Return count of active, unique hosts used by an organization.
|
||||
@ -45,7 +45,7 @@ class HostManager(models.Manager):
|
||||
- Only consider results that are unique
|
||||
- Return the count of this query
|
||||
"""
|
||||
return self.order_by().exclude(inventory_sources__source='tower').filter(inventory__organization=org_id).values('name').distinct().count()
|
||||
return self.order_by().exclude(inventory_sources__source='controller').filter(inventory__organization=org_id).values('name').distinct().count()
|
||||
|
||||
def get_queryset(self):
|
||||
"""When the parent instance of the host query set has a `kind=smart` and a `host_filter`
|
||||
|
||||
@ -10,7 +10,7 @@ from awx.main.utils.common import set_current_apps
|
||||
|
||||
def migrate_to_static_inputs(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
CredentialType.setup_tower_managed_defaults(apps)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -14,7 +14,7 @@ from awx.main.utils.common import set_current_apps
|
||||
|
||||
def setup_tower_managed_defaults(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
CredentialType.setup_tower_managed_defaults(apps)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -8,7 +8,7 @@ from awx.main.utils.common import set_current_apps
|
||||
|
||||
def setup_tower_managed_defaults(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
CredentialType.setup_tower_managed_defaults(apps)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -9,7 +9,7 @@ from awx.main.utils.common import set_current_apps
|
||||
|
||||
def create_new_credential_types(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
CredentialType.setup_tower_managed_defaults(apps)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -5,7 +5,7 @@ from awx.main.models import CredentialType
|
||||
|
||||
|
||||
def update_cyberark_aim_name(apps, schema_editor):
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
CredentialType.setup_tower_managed_defaults(apps)
|
||||
aim_types = apps.get_model('main', 'CredentialType').objects.filter(namespace='aim').order_by('id')
|
||||
|
||||
if aim_types.count() == 2:
|
||||
|
||||
@ -6,7 +6,7 @@ from awx.main.utils.common import set_current_apps
|
||||
|
||||
def setup_tower_managed_defaults(apps, schema_editor):
|
||||
set_current_apps(apps)
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
CredentialType.setup_tower_managed_defaults(apps)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
@ -1,19 +1,25 @@
|
||||
# Generated by Django 2.2.16 on 2021-04-21 15:02
|
||||
|
||||
from django.db import migrations, models
|
||||
from django.db import migrations, models, transaction
|
||||
|
||||
|
||||
def remove_iso_instances(apps, schema_editor):
|
||||
Instance = apps.get_model('main', 'Instance')
|
||||
Instance.objects.filter(rampart_groups__controller__isnull=False).delete()
|
||||
with transaction.atomic():
|
||||
Instance.objects.filter(rampart_groups__controller__isnull=False).delete()
|
||||
|
||||
|
||||
def remove_iso_groups(apps, schema_editor):
|
||||
InstanceGroup = apps.get_model('main', 'InstanceGroup')
|
||||
InstanceGroup.objects.filter(controller__isnull=False).delete()
|
||||
UnifiedJob = apps.get_model('main', 'UnifiedJob')
|
||||
with transaction.atomic():
|
||||
for ig in InstanceGroup.objects.filter(controller__isnull=False):
|
||||
UnifiedJob.objects.filter(instance_group=ig).update(instance_group=None)
|
||||
ig.delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
('main', '0138_custom_inventory_scripts_removal'),
|
||||
|
||||
awx/main/migrations/0148_unifiedjob_receptor_unit_id.py (new file, 20 lines)
@ -0,0 +1,20 @@
|
||||
# Generated by Django 2.2.16 on 2021-06-11 04:50
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0147_validate_ee_image_field'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='unifiedjob',
|
||||
name='work_unit_id',
|
||||
field=models.CharField(
|
||||
blank=True, default=None, editable=False, help_text='The Receptor work unit ID associated with this job.', max_length=255, null=True
|
||||
),
|
||||
),
|
||||
]
|
||||
@ -0,0 +1,17 @@
|
||||
# Generated by Django 2.2.16 on 2021-06-16 21:00
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0148_unifiedjob_receptor_unit_id'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name='inventory',
|
||||
name='insights_credential',
|
||||
),
|
||||
]
|
||||
awx/main/migrations/0150_rename_inv_sources_inv_updates.py (new file, 113 lines)
@ -0,0 +1,113 @@
|
||||
# Generated by Django 2.2.16 on 2021-06-17 13:12
|
||||
import logging
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
from awx.main.models.credential import ManagedCredentialType, CredentialType as ModernCredentialType
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def forwards(apps, schema_editor):
|
||||
InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
|
||||
InventorySource = apps.get_model('main', 'InventorySource')
|
||||
|
||||
r = InventoryUpdate.objects.filter(source='tower').update(source='controller')
|
||||
if r:
|
||||
logger.warn(f'Renamed {r} tower inventory updates to controller')
|
||||
r = InventorySource.objects.filter(source='tower').update(source='controller')
|
||||
if r:
|
||||
logger.warn(f'Renamed {r} tower inventory sources to controller')
|
||||
|
||||
CredentialType = apps.get_model('main', 'CredentialType')
|
||||
|
||||
tower_type = CredentialType.objects.filter(managed_by_tower=True, namespace='tower').first()
|
||||
if tower_type is not None:
|
||||
controller_type = CredentialType.objects.filter(managed_by_tower=True, namespace='controller', kind='cloud').first()
|
||||
if controller_type:
|
||||
# this gets created by prior migrations in upgrade scenarios
|
||||
controller_type.delete()
|
||||
|
||||
registry_type = ManagedCredentialType.registry.get('controller')
|
||||
if not registry_type:
|
||||
raise RuntimeError('Expected to find controller credential, this may need to be edited in the future!')
|
||||
logger.warn('Renaming the Ansible Tower credential type for existing install')
|
||||
tower_type.name = registry_type.name # sensitive to translations
|
||||
tower_type.namespace = 'controller' # if not done, will error setup_tower_managed_defaults
|
||||
tower_type.save(update_fields=['name', 'namespace'])
|
||||
|
||||
ModernCredentialType.setup_tower_managed_defaults(apps)
|
||||
|
||||
|
||||
def backwards(apps, schema_editor):
|
||||
InventoryUpdate = apps.get_model('main', 'InventoryUpdate')
|
||||
InventorySource = apps.get_model('main', 'InventorySource')
|
||||
|
||||
r = InventoryUpdate.objects.filter(source='controller').update(source='tower')
|
||||
if r:
|
||||
logger.warn(f'Renamed {r} controller inventory updates to tower')
|
||||
r = InventorySource.objects.filter(source='controller').update(source='tower')
|
||||
if r:
|
||||
logger.warn(f'Renamed {r} controller inventory sources to tower')
|
||||
|
||||
CredentialType = apps.get_model('main', 'CredentialType')
|
||||
|
||||
tower_type = CredentialType.objects.filter(managed_by_tower=True, namespace='controller', kind='cloud').first()
|
||||
if tower_type is not None and not CredentialType.objects.filter(managed_by_tower=True, namespace='tower').exists():
|
||||
logger.info('Renaming the controller credential type back')
|
||||
tower_type.namespace = 'tower'
|
||||
tower_type.name = 'Ansible Tower'
|
||||
tower_type.save(update_fields=['namespace', 'name'])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('main', '0149_remove_inventory_insights_credential'),
|
||||
]
|
||||
operations = [
|
||||
migrations.RunPython(migrations.RunPython.noop, backwards),
|
||||
migrations.AlterField(
|
||||
model_name='inventorysource',
|
||||
name='source',
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
('file', 'File, Directory or Script'),
|
||||
('scm', 'Sourced from a Project'),
|
||||
('ec2', 'Amazon EC2'),
|
||||
('gce', 'Google Compute Engine'),
|
||||
('azure_rm', 'Microsoft Azure Resource Manager'),
|
||||
('vmware', 'VMware vCenter'),
|
||||
('satellite6', 'Red Hat Satellite 6'),
|
||||
('openstack', 'OpenStack'),
|
||||
('rhv', 'Red Hat Virtualization'),
|
||||
('controller', 'Red Hat Ansible Automation Platform'),
|
||||
('insights', 'Red Hat Insights'),
|
||||
],
|
||||
default=None,
|
||||
max_length=32,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='inventoryupdate',
|
||||
name='source',
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
('file', 'File, Directory or Script'),
|
||||
('scm', 'Sourced from a Project'),
|
||||
('ec2', 'Amazon EC2'),
|
||||
('gce', 'Google Compute Engine'),
|
||||
('azure_rm', 'Microsoft Azure Resource Manager'),
|
||||
('vmware', 'VMware vCenter'),
|
||||
('satellite6', 'Red Hat Satellite 6'),
|
||||
('openstack', 'OpenStack'),
|
||||
('rhv', 'Red Hat Virtualization'),
|
||||
('controller', 'Red Hat Ansible Automation Platform'),
|
||||
('insights', 'Red Hat Insights'),
|
||||
],
|
||||
default=None,
|
||||
max_length=32,
|
||||
),
|
||||
),
|
||||
migrations.RunPython(forwards, migrations.RunPython.noop),
|
||||
]
|
||||
awx/main/migrations/0151_rename_managed_by_tower.py (new file, 28 lines)
@ -0,0 +1,28 @@
|
||||
# Generated by Django 2.2.16 on 2021-06-17 18:32
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0150_rename_inv_sources_inv_updates'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name='credential',
|
||||
old_name='managed_by_tower',
|
||||
new_name='managed',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='credentialtype',
|
||||
old_name='managed_by_tower',
|
||||
new_name='managed',
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name='executionenvironment',
|
||||
old_name='managed_by_tower',
|
||||
new_name='managed',
|
||||
),
|
||||
]
|
||||
@ -19,7 +19,7 @@ def migrate_galaxy_settings(apps, schema_editor):
|
||||
# nothing to migrate
|
||||
return
|
||||
set_current_apps(apps)
|
||||
ModernCredentialType.setup_tower_managed_defaults()
|
||||
ModernCredentialType.setup_tower_managed_defaults(apps)
|
||||
CredentialType = apps.get_model('main', 'CredentialType')
|
||||
Credential = apps.get_model('main', 'Credential')
|
||||
Setting = apps.get_model('conf', 'Setting')
|
||||
@ -34,10 +34,21 @@ def migrate_galaxy_settings(apps, schema_editor):
|
||||
if public_galaxy_setting and public_galaxy_setting.value is False:
|
||||
# ...UNLESS this behavior was explicitly disabled via this setting
|
||||
public_galaxy_enabled = False
|
||||
|
||||
public_galaxy_credential = Credential(
|
||||
created=now(), modified=now(), name='Ansible Galaxy', managed_by_tower=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
|
||||
)
|
||||
try:
|
||||
# Needed for old migrations
|
||||
public_galaxy_credential = Credential(
|
||||
created=now(),
|
||||
modified=now(),
|
||||
name='Ansible Galaxy',
|
||||
managed_by_tower=True,
|
||||
credential_type=galaxy_type,
|
||||
inputs={'url': 'https://galaxy.ansible.com/'},
|
||||
)
|
||||
except:
|
||||
# Needed for new migrations, tests
|
||||
public_galaxy_credential = Credential(
|
||||
created=now(), modified=now(), name='Ansible Galaxy', managed=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
|
||||
)
|
||||
public_galaxy_credential.save()
|
||||
|
||||
for org in Organization.objects.all():
|
||||
|
||||
@ -19,6 +19,7 @@ from django.utils.translation import ugettext_lazy as _, ugettext_noop
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.encoding import force_text
|
||||
from django.utils.functional import cached_property
|
||||
from django.utils.timezone import now
|
||||
|
||||
# AWX
|
||||
from awx.api.versioning import reverse
|
||||
@ -92,7 +93,7 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
on_delete=models.CASCADE,
|
||||
help_text=_('Specify the type of credential you want to create. Refer ' 'to the documentation for details on each type.'),
|
||||
)
|
||||
managed_by_tower = models.BooleanField(default=False, editable=False)
|
||||
managed = models.BooleanField(default=False, editable=False)
|
||||
organization = models.ForeignKey(
|
||||
'Organization',
|
||||
null=True,
|
||||
@ -341,7 +342,7 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
)
|
||||
|
||||
kind = models.CharField(max_length=32, choices=KIND_CHOICES)
|
||||
managed_by_tower = models.BooleanField(default=False, editable=False)
|
||||
managed = models.BooleanField(default=False, editable=False)
|
||||
namespace = models.CharField(max_length=1024, null=True, default=None, editable=False)
|
||||
inputs = CredentialTypeInputField(
|
||||
blank=True, default=dict, help_text=_('Enter inputs using either JSON or YAML syntax. ' 'Refer to the documentation for example syntax.')
|
||||
@ -355,7 +356,7 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
@classmethod
|
||||
def from_db(cls, db, field_names, values):
|
||||
instance = super(CredentialType, cls).from_db(db, field_names, values)
|
||||
if instance.managed_by_tower and instance.namespace:
|
||||
if instance.managed and instance.namespace:
|
||||
native = ManagedCredentialType.registry[instance.namespace]
|
||||
instance.inputs = native.inputs
|
||||
instance.injectors = native.injectors
|
||||
@@ -395,9 +396,13 @@ class CredentialType(CommonModelNameNotUnique):
return dict((k, functools.partial(v.create)) for k, v in ManagedCredentialType.registry.items())

@classmethod
def setup_tower_managed_defaults(cls):
def setup_tower_managed_defaults(cls, apps=None):
if apps is not None:
ct_class = apps.get_model('main', 'CredentialType')
else:
ct_class = CredentialType
for default in ManagedCredentialType.registry.values():
existing = CredentialType.objects.filter(name=default.name, kind=default.kind).first()
existing = ct_class.objects.filter(name=default.name, kind=default.kind).first()
if existing is not None:
existing.namespace = default.namespace
existing.inputs = {}
@@ -405,7 +410,11 @@ class CredentialType(CommonModelNameNotUnique):
existing.save()
continue
logger.debug(_("adding %s credential type" % default.name))
created = default.create()
params = default.get_creation_params()
if 'managed' not in [f.name for f in ct_class._meta.get_fields()]:
params['managed_by_tower'] = params.pop('managed')
params['created'] = params['modified'] = now() # CreatedModifiedModel service
created = ct_class(**params)
created.inputs = created.injectors = {}
created.save()

@ -439,7 +448,7 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
files)
|
||||
"""
|
||||
if not self.injectors:
|
||||
if self.managed_by_tower and credential.credential_type.namespace in dir(builtin_injectors):
|
||||
if self.managed and credential.credential_type.namespace in dir(builtin_injectors):
|
||||
injected_env = {}
|
||||
getattr(builtin_injectors, credential.credential_type.namespace)(credential, injected_env, private_data_dir)
|
||||
env.update(injected_env)
|
||||
@@ -556,16 +565,19 @@ class ManagedCredentialType(SimpleNamespace):
)
ManagedCredentialType.registry[namespace] = self

def create(self):
return CredentialType(
def get_creation_params(self):
return dict(
namespace=self.namespace,
kind=self.kind,
name=self.name,
managed_by_tower=True,
managed=True,
inputs=self.inputs,
injectors=self.injectors,
)

def create(self):
return CredentialType(**self.get_creation_params())


ManagedCredentialType(
namespace='ssh',
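Splitting get_creation_params() out of create() lets setup_tower_managed_defaults() above build rows through a historical model class during data migrations, falling back to the old column name when the managed rename (migration 0151) has not been applied yet. A minimal sketch of that call pattern, not part of this commit; ct_class stands for either the live CredentialType or a historical model obtained inside a migration:

# Sketch only; mirrors the helper above. Inside a data migration, ct_class
# would come from apps.get_model('main', 'CredentialType').
params = ManagedCredentialType.registry['scm'].get_creation_params()
if 'managed' not in [f.name for f in ct_class._meta.get_fields()]:
    # Schema predates the managed_by_tower -> managed rename.
    params['managed_by_tower'] = params.pop('managed')
ct = ct_class(**params)
ct.inputs = ct.injectors = {}
ct.save()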
@ -606,7 +618,7 @@ ManagedCredentialType(
|
||||
namespace='scm',
|
||||
kind='scm',
|
||||
name=ugettext_noop('Source Control'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
|
||||
@ -621,7 +633,7 @@ ManagedCredentialType(
|
||||
namespace='vault',
|
||||
kind='vault',
|
||||
name=ugettext_noop('Vault'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{'id': 'vault_password', 'label': ugettext_noop('Vault Password'), 'type': 'string', 'secret': True, 'ask_at_runtime': True},
|
||||
@ -647,7 +659,7 @@ ManagedCredentialType(
|
||||
namespace='net',
|
||||
kind='net',
|
||||
name=ugettext_noop('Network'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
|
||||
@ -687,7 +699,7 @@ ManagedCredentialType(
|
||||
namespace='aws',
|
||||
kind='cloud',
|
||||
name=ugettext_noop('Amazon Web Services'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{'id': 'username', 'label': ugettext_noop('Access Key'), 'type': 'string'},
|
||||
@ -718,7 +730,7 @@ ManagedCredentialType(
|
||||
namespace='openstack',
|
||||
kind='cloud',
|
||||
name=ugettext_noop('OpenStack'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
|
||||
@ -776,7 +788,7 @@ ManagedCredentialType(
|
||||
namespace='vmware',
|
||||
kind='cloud',
|
||||
name=ugettext_noop('VMware vCenter'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{
|
||||
@ -801,7 +813,7 @@ ManagedCredentialType(
|
||||
namespace='satellite6',
|
||||
kind='cloud',
|
||||
name=ugettext_noop('Red Hat Satellite 6'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{
|
||||
@ -826,7 +838,7 @@ ManagedCredentialType(
|
||||
namespace='gce',
|
||||
kind='cloud',
|
||||
name=ugettext_noop('Google Compute Engine'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{
|
||||
@ -864,7 +876,7 @@ ManagedCredentialType(
|
||||
namespace='azure_rm',
|
||||
kind='cloud',
|
||||
name=ugettext_noop('Microsoft Azure Resource Manager'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{
|
||||
@ -903,7 +915,7 @@ ManagedCredentialType(
|
||||
namespace='github_token',
|
||||
kind='token',
|
||||
name=ugettext_noop('GitHub Personal Access Token'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{
|
||||
@ -922,7 +934,7 @@ ManagedCredentialType(
|
||||
namespace='gitlab_token',
|
||||
kind='token',
|
||||
name=ugettext_noop('GitLab Personal Access Token'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{
|
||||
@ -941,7 +953,7 @@ ManagedCredentialType(
|
||||
namespace='insights',
|
||||
kind='insights',
|
||||
name=ugettext_noop('Insights'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{'id': 'username', 'label': ugettext_noop('Username'), 'type': 'string'},
|
||||
@ -965,7 +977,7 @@ ManagedCredentialType(
|
||||
namespace='rhv',
|
||||
kind='cloud',
|
||||
name=ugettext_noop('Red Hat Virtualization'),
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{'id': 'host', 'label': ugettext_noop('Host (Authentication URL)'), 'type': 'string', 'help_text': ugettext_noop('The host to authenticate with.')},
|
||||
@ -1006,23 +1018,25 @@ ManagedCredentialType(
|
||||
)
|
||||
|
||||
ManagedCredentialType(
|
||||
namespace='tower',
|
||||
namespace='controller',
|
||||
kind='cloud',
|
||||
name=ugettext_noop('Ansible Tower'),
|
||||
managed_by_tower=True,
|
||||
name=ugettext_noop('Red Hat Ansible Automation Platform'),
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{
|
||||
'id': 'host',
|
||||
'label': ugettext_noop('Ansible Tower Hostname'),
|
||||
'label': ugettext_noop('Red Hat Ansible Automation Platform'),
|
||||
'type': 'string',
|
||||
'help_text': ugettext_noop('The Ansible Tower base URL to authenticate with.'),
|
||||
'help_text': ugettext_noop('Red Hat Ansible Automation Platform base URL to authenticate with.'),
|
||||
},
|
||||
{
|
||||
'id': 'username',
|
||||
'label': ugettext_noop('Username'),
|
||||
'type': 'string',
|
||||
'help_text': ugettext_noop('The Ansible Tower user to authenticate as.' 'This should not be set if an OAuth token is being used.'),
|
||||
'help_text': ugettext_noop(
|
||||
'Red Hat Ansible Automation Platform username id to authenticate as.' 'This should not be set if an OAuth token is being used.'
|
||||
),
|
||||
},
|
||||
{
|
||||
'id': 'password',
|
||||
@ -1048,6 +1062,11 @@ ManagedCredentialType(
|
||||
'TOWER_PASSWORD': '{{password}}',
|
||||
'TOWER_VERIFY_SSL': '{{verify_ssl}}',
|
||||
'TOWER_OAUTH_TOKEN': '{{oauth_token}}',
|
||||
'CONTROLLER_HOST': '{{host}}',
|
||||
'CONTROLLER_USERNAME': '{{username}}',
|
||||
'CONTROLLER_PASSWORD': '{{password}}',
|
||||
'CONTROLLER_VERIFY_SSL': '{{verify_ssl}}',
|
||||
'CONTROLLER_OAUTH_TOKEN': '{{oauth_token}}',
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
@ -34,7 +34,7 @@ class ExecutionEnvironment(CommonModel):
|
||||
help_text=_("The full image location, including the container registry, image name, and version tag."),
|
||||
validators=[validate_container_image_name],
|
||||
)
|
||||
managed_by_tower = models.BooleanField(default=False, editable=False)
|
||||
managed = models.BooleanField(default=False, editable=False)
|
||||
credential = models.ForeignKey(
|
||||
'Credential',
|
||||
related_name='%(class)ss',
|
||||
|
||||
@ -165,15 +165,6 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
||||
'admin_role',
|
||||
]
|
||||
)
|
||||
insights_credential = models.ForeignKey(
|
||||
'Credential',
|
||||
related_name='insights_inventories',
|
||||
help_text=_('Credentials to be used by hosts belonging to this inventory when accessing Red Hat Insights API.'),
|
||||
on_delete=models.SET_NULL,
|
||||
blank=True,
|
||||
null=True,
|
||||
default=None,
|
||||
)
|
||||
pending_deletion = models.BooleanField(
|
||||
default=False,
|
||||
editable=False,
|
||||
@@ -315,7 +306,12 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
for host in hosts:
data['_meta']['hostvars'][host.name] = host.variables_dict
if towervars:
tower_dict = dict(remote_tower_enabled=str(host.enabled).lower(), remote_tower_id=host.id)
tower_dict = dict(
remote_tower_enabled=str(host.enabled).lower(),
remote_tower_id=host.id,
remote_host_enabled=str(host.enabled).lower(),
remote_host_id=host.id,
)
data['_meta']['hostvars'][host.name].update(tower_dict)

return data
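With towervars=True the script data now carries both the legacy remote_tower_* keys and the new remote_host_* keys for every host. For an enabled host named 'ahost' (the name used by the updated unit test later in this diff), the hostvars entry comes out as:

inventory.get_script_data(hostvars=True, towervars=True)['_meta']['hostvars']['ahost'] == {
    'remote_tower_enabled': 'true',
    'remote_tower_id': host.id,
    'remote_host_enabled': 'true',
    'remote_host_id': host.id,
}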
@ -368,13 +364,6 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
||||
group_pks = self.groups.values_list('pk', flat=True)
|
||||
return self.groups.exclude(parents__pk__in=group_pks).distinct()
|
||||
|
||||
def clean_insights_credential(self):
|
||||
if self.kind == 'smart' and self.insights_credential:
|
||||
raise ValidationError(_("Assignment not allowed for Smart Inventory"))
|
||||
if self.insights_credential and self.insights_credential.credential_type.kind != 'insights':
|
||||
raise ValidationError(_("Credential kind must be 'insights'."))
|
||||
return self.insights_credential
|
||||
|
||||
@transaction.atomic
|
||||
def schedule_deletion(self, user_id=None):
|
||||
from awx.main.tasks import delete_inventory
|
||||
@ -821,7 +810,7 @@ class InventorySourceOptions(BaseModel):
|
||||
('satellite6', _('Red Hat Satellite 6')),
|
||||
('openstack', _('OpenStack')),
|
||||
('rhv', _('Red Hat Virtualization')),
|
||||
('tower', _('Ansible Tower')),
|
||||
('controller', _('Red Hat Ansible Automation Platform')),
|
||||
('insights', _('Red Hat Insights')),
|
||||
]
|
||||
|
||||
@ -1384,7 +1373,7 @@ class PluginFileInjector(object):
|
||||
return env
|
||||
|
||||
def _get_shared_env(self, inventory_update, private_data_dir, private_data_files):
|
||||
"""By default, we will apply the standard managed_by_tower injectors"""
|
||||
"""By default, we will apply the standard managed injectors"""
|
||||
injected_env = {}
|
||||
credential = inventory_update.get_cloud_credential()
|
||||
# some sources may have no credential, specifically ec2
|
||||
@ -1403,7 +1392,7 @@ class PluginFileInjector(object):
|
||||
args = []
|
||||
credential.credential_type.inject_credential(credential, injected_env, safe_env, args, private_data_dir)
|
||||
# NOTE: safe_env is handled externally to injector class by build_safe_env static method
|
||||
# that means that managed_by_tower injectors must only inject detectable env keys
|
||||
# that means that managed injectors must only inject detectable env keys
|
||||
# enforcement of this is accomplished by tests
|
||||
return injected_env
|
||||
|
||||
@ -1545,8 +1534,8 @@ class satellite6(PluginFileInjector):
|
||||
return ret
|
||||
|
||||
|
||||
class tower(PluginFileInjector):
|
||||
plugin_name = 'tower'
|
||||
class controller(PluginFileInjector):
|
||||
plugin_name = 'tower' # TODO: relying on routing for now, update after EEs pick up revised collection
|
||||
base_injector = 'template'
|
||||
namespace = 'awx'
|
||||
collection = 'awx'
|
||||
@ -1561,13 +1550,7 @@ class insights(PluginFileInjector):
|
||||
collection = 'insights'
|
||||
downstream_namespace = 'redhat'
|
||||
downstream_collection = 'insights'
|
||||
use_fqcn = 'true'
|
||||
|
||||
def inventory_as_dict(self, inventory_update, private_data_dir):
|
||||
ret = super(insights, self).inventory_as_dict(inventory_update, private_data_dir)
|
||||
# this inventory plugin requires the fully qualified inventory plugin name
|
||||
ret['plugin'] = f'{self.namespace}.{self.collection}.{self.plugin_name}'
|
||||
return ret
|
||||
use_fqcn = True
|
||||
|
||||
|
||||
for cls in PluginFileInjector.__subclasses__():
|
||||
|
||||
@ -117,7 +117,7 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi
|
||||
def create_default_galaxy_credential(self):
|
||||
from awx.main.models import Credential
|
||||
|
||||
public_galaxy_credential = Credential.objects.filter(managed_by_tower=True, name='Ansible Galaxy').first()
|
||||
public_galaxy_credential = Credential.objects.filter(managed=True, name='Ansible Galaxy').first()
|
||||
if public_galaxy_credential not in self.galaxy_credentials.all():
|
||||
self.galaxy_credentials.add(public_galaxy_credential)
|
||||
|
||||
|
||||
@ -717,6 +717,9 @@ class UnifiedJob(
|
||||
editable=False,
|
||||
help_text=_("The version of Ansible Core installed in the execution environment."),
|
||||
)
|
||||
work_unit_id = models.CharField(
|
||||
max_length=255, blank=True, default=None, editable=False, null=True, help_text=_("The Receptor work unit ID associated with this job.")
|
||||
)
|
||||
|
||||
def get_absolute_url(self, request=None):
|
||||
RealClass = self.get_real_instance_class()
|
||||
|
||||
@@ -472,6 +472,33 @@ def cluster_node_heartbeat():
logger.exception('Error marking {} as lost'.format(other_inst.hostname))


@task(queue=get_local_queuename)
def awx_receptor_workunit_reaper():
"""
When an AWX job is launched via receptor, files such as status, stdin, and stdout are created
in a specific receptor directory. This directory on disk is a random 8-character string, e.g. qLL2JFNT.
This is also called the work unit ID in receptor, and is used in various receptor commands,
e.g. "work results qLL2JFNT"
After an AWX job executes, the receptor work unit directory is cleaned up by
issuing the work release command. In some cases the release process might fail, or
if AWX crashes during a job's execution, the work release command is never issued to begin with.
As such, this periodic task will obtain a list of all receptor work units, and find which ones
belong to AWX jobs that are in a completed state (status is canceled, error, or succeeded).
This task will call "work release" on each of these work units to clean up the files on disk.
"""
if not settings.RECEPTOR_RELEASE_WORK:
return
logger.debug("Checking for unreleased receptor work units")
receptor_ctl = get_receptor_ctl()
receptor_work_list = receptor_ctl.simple_command("work list")

unit_ids = [id for id in receptor_work_list]
jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
for job in jobs_with_unreleased_receptor_units:
logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
receptor_ctl.simple_command(f"work release {job.work_unit_id}")


@task(queue=get_local_queuename)
def awx_k8s_reaper():
if not settings.RECEPTOR_RELEASE_WORK:
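The reaper above only needs two receptor control commands: "work list" to enumerate work units and "work release <unitid>" to remove a unit's files on disk. A minimal sketch of issuing the same calls manually against the local receptor socket; the import path is an assumption (this hunk does not show the tasks.py imports) and qLL2JFNT is the illustrative unit ID from the docstring:

from receptorctl.socket_interface import ReceptorControl  # assumed import path

receptor_ctl = ReceptorControl('/var/run/receptor/receptor.sock')
work_list = receptor_ctl.simple_command("work list")  # iterating it yields work unit IDs, as the task above does
if 'qLL2JFNT' in work_list:
    receptor_ctl.simple_command("work release qLL2JFNT")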
@ -729,6 +756,10 @@ def with_path_cleanup(f):
|
||||
return _wrapped
|
||||
|
||||
|
||||
def get_receptor_ctl():
|
||||
return ReceptorControl('/var/run/receptor/receptor.sock')
|
||||
|
||||
|
||||
class BaseTask(object):
|
||||
model = None
|
||||
event_model = None
|
||||
@ -1370,8 +1401,8 @@ class BaseTask(object):
|
||||
)
|
||||
else:
|
||||
receptor_job = AWXReceptorJob(self, params)
|
||||
self.unit_id = receptor_job.unit_id
|
||||
res = receptor_job.run()
|
||||
self.unit_id = receptor_job.unit_id
|
||||
|
||||
if not res:
|
||||
return
|
||||
@ -2890,7 +2921,7 @@ class AWXReceptorJob:
|
||||
|
||||
def run(self):
|
||||
# We establish a connection to the Receptor socket
|
||||
receptor_ctl = ReceptorControl('/var/run/receptor/receptor.sock')
|
||||
receptor_ctl = get_receptor_ctl()
|
||||
|
||||
try:
|
||||
return self._run_internal(receptor_ctl)
|
||||
@ -2912,6 +2943,7 @@ class AWXReceptorJob:
|
||||
# in the right side of our socketpair for reading.
|
||||
result = receptor_ctl.submit_work(worktype=self.work_type, payload=sockout.makefile('rb'), params=self.receptor_params)
|
||||
self.unit_id = result['unitid']
|
||||
self.task.update_model(self.task.instance.pk, work_unit_id=result['unitid'])
|
||||
|
||||
sockin.close()
|
||||
sockout.close()
|
||||
@ -3026,10 +3058,6 @@ class AWXReceptorJob:
|
||||
result = namedtuple('result', ['status', 'rc'])
|
||||
return result('canceled', 1)
|
||||
|
||||
if hasattr(self, 'unit_id') and 'RECEPTOR_UNIT_ID' not in self.task.instance.job_env:
|
||||
self.task.instance.job_env['RECEPTOR_UNIT_ID'] = self.unit_id
|
||||
self.task.update_model(self.task.instance.pk, job_env=self.task.instance.job_env)
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
@property
|
||||
|
||||
awx/main/tests/data/inventory/plugins/controller/env.json (new file, 13 lines)
@ -0,0 +1,13 @@
|
||||
{
|
||||
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
|
||||
"TOWER_HOST": "https://foo.invalid",
|
||||
"TOWER_PASSWORD": "fooo",
|
||||
"TOWER_USERNAME": "fooo",
|
||||
"TOWER_OAUTH_TOKEN": "",
|
||||
"TOWER_VERIFY_SSL": "False",
|
||||
"CONTROLLER_HOST": "https://foo.invalid",
|
||||
"CONTROLLER_PASSWORD": "fooo",
|
||||
"CONTROLLER_USERNAME": "fooo",
|
||||
"CONTROLLER_OAUTH_TOKEN": "",
|
||||
"CONTROLLER_VERIFY_SSL": "False"
|
||||
}
|
||||
@ -1,8 +0,0 @@
|
||||
{
|
||||
"ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS": "never",
|
||||
"TOWER_HOST": "https://foo.invalid",
|
||||
"TOWER_PASSWORD": "fooo",
|
||||
"TOWER_USERNAME": "fooo",
|
||||
"TOWER_OAUTH_TOKEN": "",
|
||||
"TOWER_VERIFY_SSL": "False"
|
||||
}
|
||||
@ -5,7 +5,7 @@ import pytest
|
||||
|
||||
from django.utils.encoding import smart_str
|
||||
|
||||
from awx.main.models import AdHocCommand, Credential, CredentialType, Job, JobTemplate, Inventory, InventorySource, Project, WorkflowJobNode
|
||||
from awx.main.models import AdHocCommand, Credential, CredentialType, Job, JobTemplate, InventorySource, Project, WorkflowJobNode
|
||||
from awx.main.utils import decrypt_field
|
||||
from awx.api.versioning import reverse
|
||||
|
||||
@ -857,7 +857,6 @@ def test_field_removal(put, organization, admin, credentialtype_ssh):
|
||||
'relation, related_obj',
|
||||
[
|
||||
['ad_hoc_commands', AdHocCommand()],
|
||||
['insights_inventories', Inventory()],
|
||||
['unifiedjobs', Job()],
|
||||
['unifiedjobtemplates', JobTemplate()],
|
||||
['unifiedjobtemplates', InventorySource(source='ec2')],
|
||||
|
||||
@ -75,7 +75,7 @@ def test_update_as_unauthorized_xfail(patch, delete):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_update_managed_by_tower_xfail(patch, delete, admin):
|
||||
def test_update_managed_xfail(patch, delete, admin):
|
||||
ssh = CredentialType.defaults['ssh']()
|
||||
ssh.save()
|
||||
url = reverse('api:credential_type_detail', kwargs={'pk': ssh.pk})
|
||||
@ -161,19 +161,19 @@ def test_create_as_admin(get, post, admin):
|
||||
assert response.data['results'][0]['name'] == 'Custom Credential Type'
|
||||
assert response.data['results'][0]['inputs'] == {}
|
||||
assert response.data['results'][0]['injectors'] == {}
|
||||
assert response.data['results'][0]['managed_by_tower'] is False
|
||||
assert response.data['results'][0]['managed'] is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_create_managed_by_tower_readonly(get, post, admin):
|
||||
def test_create_managed_readonly(get, post, admin):
|
||||
response = post(
|
||||
reverse('api:credential_type_list'), {'kind': 'cloud', 'name': 'Custom Credential Type', 'inputs': {}, 'injectors': {}, 'managed_by_tower': True}, admin
|
||||
reverse('api:credential_type_list'), {'kind': 'cloud', 'name': 'Custom Credential Type', 'inputs': {}, 'injectors': {}, 'managed': True}, admin
|
||||
)
|
||||
assert response.status_code == 201
|
||||
|
||||
response = get(reverse('api:credential_type_list'), admin)
|
||||
assert response.data['count'] == 1
|
||||
assert response.data['results'][0]['managed_by_tower'] is False
|
||||
assert response.data['results'][0]['managed'] is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@ -592,23 +592,3 @@ class TestControlledBySCM:
|
||||
rando,
|
||||
expect=403,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestInsightsCredential:
|
||||
def test_insights_credential(self, patch, insights_inventory, admin_user, insights_credential):
|
||||
patch(insights_inventory.get_absolute_url(), {'insights_credential': insights_credential.id}, admin_user, expect=200)
|
||||
|
||||
def test_insights_credential_protection(self, post, patch, insights_inventory, alice, insights_credential):
|
||||
insights_inventory.organization.admin_role.members.add(alice)
|
||||
insights_inventory.admin_role.members.add(alice)
|
||||
post(
|
||||
reverse('api:inventory_list'),
|
||||
{"name": "test", "organization": insights_inventory.organization.id, "insights_credential": insights_credential.id},
|
||||
alice,
|
||||
expect=403,
|
||||
)
|
||||
patch(insights_inventory.get_absolute_url(), {'insights_credential': insights_credential.id}, alice, expect=403)
|
||||
|
||||
def test_non_insights_credential(self, patch, insights_inventory, admin_user, scm_credential):
|
||||
patch(insights_inventory.get_absolute_url(), {'insights_credential': scm_credential.id}, admin_user, expect=400)
|
||||
|
||||
@ -266,7 +266,7 @@ def credentialtype_external():
|
||||
|
||||
with mock.patch('awx.main.models.credential.CredentialType.plugin', new_callable=PropertyMock) as mock_plugin:
|
||||
mock_plugin.return_value = MockPlugin()
|
||||
external_type = CredentialType(kind='external', managed_by_tower=True, name='External Service', inputs=external_type_inputs)
|
||||
external_type = CredentialType(kind='external', managed=True, name='External Service', inputs=external_type_inputs)
|
||||
external_type.save()
|
||||
yield external_type
|
||||
|
||||
@ -825,9 +825,9 @@ def slice_job_factory(slice_jt_factory):
|
||||
|
||||
@pytest.fixture
|
||||
def control_plane_execution_environment():
|
||||
return ExecutionEnvironment.objects.create(name="Control Plane EE", managed_by_tower=True)
|
||||
return ExecutionEnvironment.objects.create(name="Control Plane EE", managed=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def default_job_execution_environment():
|
||||
return ExecutionEnvironment.objects.create(name="Default Job EE", managed_by_tower=False)
|
||||
return ExecutionEnvironment.objects.create(name="Default Job EE", managed=False)
|
||||
|
||||
@ -121,7 +121,7 @@ def somecloud_type():
|
||||
return CredentialType.objects.create(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string', 'secret': True}]},
|
||||
injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'}},
|
||||
)
|
||||
@ -271,10 +271,10 @@ def test_cluster_node_long_node_name(inventory, project):
|
||||
@pytest.mark.django_db
|
||||
def test_credential_defaults_idempotency():
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
old_inputs = CredentialType.objects.get(name='Ansible Tower', kind='cloud').inputs
|
||||
old_inputs = CredentialType.objects.get(name='Red Hat Ansible Automation Platform', kind='cloud').inputs
|
||||
prior_count = ActivityStream.objects.count()
|
||||
# this is commonly re-ran in migrations, and no changes should be shown
|
||||
# because inputs and injectors are not actually tracked in the database
|
||||
CredentialType.setup_tower_managed_defaults()
|
||||
assert CredentialType.objects.get(name='Ansible Tower', kind='cloud').inputs == old_inputs
|
||||
assert CredentialType.objects.get(name='Red Hat Ansible Automation Platform', kind='cloud').inputs == old_inputs
|
||||
assert ActivityStream.objects.count() == prior_count
|
||||
|
||||
@ -22,6 +22,8 @@ class TestInventoryScript:
|
||||
assert inventory.get_script_data(hostvars=True, towervars=True)['_meta']['hostvars']['ahost'] == {
|
||||
'remote_tower_enabled': 'true',
|
||||
'remote_tower_id': host.id,
|
||||
'remote_host_enabled': 'true',
|
||||
'remote_host_id': host.id,
|
||||
}
|
||||
|
||||
def test_all_group(self, inventory):
|
||||
@ -104,7 +106,7 @@ class TestActiveCount:
|
||||
|
||||
def test_active_count_minus_tower(self, inventory):
|
||||
inventory.hosts.create(name='locally-managed-host')
|
||||
source = inventory.inventory_sources.create(name='tower-source', source='tower')
|
||||
source = inventory.inventory_sources.create(name='tower-source', source='controller')
|
||||
source.hosts.create(name='remotely-managed-host', inventory=inventory)
|
||||
assert Host.objects.active_count() == 1
|
||||
|
||||
@ -210,7 +212,7 @@ class TestInventorySourceInjectors:
|
||||
('rhv', 'ovirt.ovirt.ovirt'),
|
||||
('satellite6', 'theforeman.foreman.foreman'),
|
||||
('insights', 'redhatinsights.insights.insights'),
|
||||
('tower', 'awx.awx.tower'),
|
||||
('controller', 'awx.awx.tower'),
|
||||
],
|
||||
)
|
||||
def test_plugin_proper_names(self, source, proper_name):
|
||||
|
||||
@ -81,6 +81,7 @@ def test_default_cred_types():
|
||||
'azure_rm',
|
||||
'centrify_vault_kv',
|
||||
'conjur',
|
||||
'controller',
|
||||
'galaxy_api_token',
|
||||
'gce',
|
||||
'github_token',
|
||||
@ -97,19 +98,19 @@ def test_default_cred_types():
|
||||
'scm',
|
||||
'ssh',
|
||||
'thycotic_dsv',
|
||||
'tower',
|
||||
'vault',
|
||||
'vmware',
|
||||
]
|
||||
|
||||
for type_ in CredentialType.defaults.values():
|
||||
assert type_().managed_by_tower is True
|
||||
assert type_().managed is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_credential_creation(organization_factory):
|
||||
org = organization_factory('test').organization
|
||||
type_ = CredentialType(
|
||||
kind='cloud', name='SomeCloud', managed_by_tower=True, inputs={'fields': [{'id': 'username', 'label': 'Username for SomeCloud', 'type': 'string'}]}
|
||||
kind='cloud', name='SomeCloud', managed=True, inputs={'fields': [{'id': 'username', 'label': 'Username for SomeCloud', 'type': 'string'}]}
|
||||
)
|
||||
type_.save()
|
||||
|
||||
@ -286,7 +287,7 @@ def test_credential_get_input(organization_factory):
|
||||
type_ = CredentialType(
|
||||
kind='vault',
|
||||
name='somevault',
|
||||
managed_by_tower=True,
|
||||
managed=True,
|
||||
inputs={
|
||||
'fields': [
|
||||
{
|
||||
|
||||
@ -1,117 +0,0 @@
import importlib

from django.conf import settings
from django.contrib.contenttypes.models import ContentType
import pytest

from awx.main.models import Credential, Organization
from awx.conf.models import Setting
from awx.main.migrations import _galaxy as galaxy


class FakeApps(object):
def get_model(self, app, model):
if app == 'contenttypes':
return ContentType
return getattr(importlib.import_module(f'awx.{app}.models'), model)


apps = FakeApps()


@pytest.mark.django_db
def test_default_public_galaxy():
org = Organization.objects.create()
assert org.galaxy_credentials.count() == 0
galaxy.migrate_galaxy_settings(apps, None)
assert org.galaxy_credentials.count() == 1
creds = org.galaxy_credentials.all()
assert creds[0].name == 'Ansible Galaxy'
assert creds[0].inputs['url'] == 'https://galaxy.ansible.com/'


@pytest.mark.django_db
def test_public_galaxy_disabled():
Setting.objects.create(key='PUBLIC_GALAXY_ENABLED', value=False)
org = Organization.objects.create()
assert org.galaxy_credentials.count() == 0
galaxy.migrate_galaxy_settings(apps, None)
assert org.galaxy_credentials.count() == 0


@pytest.mark.django_db
def test_rh_automation_hub():
Setting.objects.create(key='PRIMARY_GALAXY_URL', value='https://cloud.redhat.com/api/automation-hub/')
Setting.objects.create(key='PRIMARY_GALAXY_TOKEN', value='secret123')
org = Organization.objects.create()
assert org.galaxy_credentials.count() == 0
galaxy.migrate_galaxy_settings(apps, None)
assert org.galaxy_credentials.count() == 2
assert org.galaxy_credentials.first().name == 'Ansible Automation Hub (https://cloud.redhat.com/api/automation-hub/)' # noqa


@pytest.mark.django_db
def test_multiple_galaxies():
for i in range(5):
Organization.objects.create(name=f'Org {i}')

Setting.objects.create(key='PRIMARY_GALAXY_URL', value='https://example.org/')
Setting.objects.create(key='PRIMARY_GALAXY_AUTH_URL', value='https://auth.example.org/')
Setting.objects.create(key='PRIMARY_GALAXY_USERNAME', value='user')
Setting.objects.create(key='PRIMARY_GALAXY_PASSWORD', value='pass')
Setting.objects.create(key='PRIMARY_GALAXY_TOKEN', value='secret123')

for org in Organization.objects.all():
assert org.galaxy_credentials.count() == 0

galaxy.migrate_galaxy_settings(apps, None)

for org in Organization.objects.all():
assert org.galaxy_credentials.count() == 2
creds = org.galaxy_credentials.all()
assert creds[0].name == 'Private Galaxy (https://example.org/)'
assert creds[0].inputs['url'] == 'https://example.org/'
assert creds[0].inputs['auth_url'] == 'https://auth.example.org/'
assert creds[0].inputs['token'].startswith('$encrypted$')
assert creds[0].get_input('token') == 'secret123'

assert creds[1].name == 'Ansible Galaxy'
assert creds[1].inputs['url'] == 'https://galaxy.ansible.com/'

public_galaxy_creds = Credential.objects.filter(name='Ansible Galaxy')
assert public_galaxy_creds.count() == 1
assert public_galaxy_creds.first().managed_by_tower is True


@pytest.mark.django_db
def test_fallback_galaxies():
org = Organization.objects.create()
assert org.galaxy_credentials.count() == 0
Setting.objects.create(key='PRIMARY_GALAXY_URL', value='https://example.org/')
Setting.objects.create(key='PRIMARY_GALAXY_AUTH_URL', value='https://auth.example.org/')
Setting.objects.create(key='PRIMARY_GALAXY_TOKEN', value='secret123')
try:
settings.FALLBACK_GALAXY_SERVERS = [
{
'id': 'abc123',
'url': 'https://some-other-galaxy.example.org/',
'auth_url': 'https://some-other-galaxy.sso.example.org/',
'username': 'user',
'password': 'pass',
'token': 'fallback123',
}
]
galaxy.migrate_galaxy_settings(apps, None)
finally:
settings.FALLBACK_GALAXY_SERVERS = []
assert org.galaxy_credentials.count() == 3
creds = org.galaxy_credentials.all()
assert creds[0].name == 'Private Galaxy (https://example.org/)'
assert creds[0].inputs['url'] == 'https://example.org/'
assert creds[1].name == 'Ansible Galaxy (https://some-other-galaxy.example.org/)'
assert creds[1].inputs['url'] == 'https://some-other-galaxy.example.org/'
assert creds[1].inputs['auth_url'] == 'https://some-other-galaxy.sso.example.org/'
assert creds[1].inputs['token'].startswith('$encrypted$')
assert creds[1].get_input('token') == 'fallback123'
assert creds[2].name == 'Ansible Galaxy'
assert creds[2].inputs['url'] == 'https://galaxy.ansible.com/'
@ -62,7 +62,7 @@ def fake_credential_factory():
for var in var_specs.keys():
inputs[var] = generate_fake_var(var_specs[var])

if source == 'tower':
if source == 'controller':
inputs.pop('oauth_token') # mutually exclusive with user/pass

return Credential.objects.create(credential_type=ct, inputs=inputs)
@ -182,8 +182,8 @@ def create_reference_data(source_dir, env, content):
@pytest.mark.django_db
@pytest.mark.parametrize('this_kind', CLOUD_PROVIDERS)
def test_inventory_update_injected_content(this_kind, inventory, fake_credential_factory):
ExecutionEnvironment.objects.create(name='Control Plane EE', managed_by_tower=True)
ExecutionEnvironment.objects.create(name='Default Job EE', managed_by_tower=False)
ExecutionEnvironment.objects.create(name='Control Plane EE', managed=True)
ExecutionEnvironment.objects.create(name='Default Job EE', managed=False)

injector = InventorySource.injectors[this_kind]
if injector.plugin_name is None:

@ -37,7 +37,7 @@ def test_cloudforms_inventory_removal(inventory):
name='Red Hat CloudForms',
namespace='cloudforms',
kind='cloud',
managed_by_tower=True,
managed=True,
inputs={},
)
CredentialType.defaults['cloudforms']().save()

@ -12,7 +12,7 @@ from django.urls import URLResolver, URLPattern

@pytest.fixture()
def execution_environment():
return ExecutionEnvironment(name="test-ee", description="test-ee", managed_by_tower=True)
return ExecutionEnvironment(name="test-ee", description="test-ee", managed=True)


@pytest.fixture(autouse=True)

@ -1,15 +1,11 @@
import pytest
from unittest import mock
import json

from django.core.exceptions import ValidationError

from awx.main.models import (
UnifiedJob,
InventoryUpdate,
Inventory,
Credential,
CredentialType,
InventorySource,
)

@ -39,42 +35,6 @@ def test__build_job_explanation():
)


def test_valid_clean_insights_credential():
cred_type = CredentialType.defaults['insights']()
insights_cred = Credential(credential_type=cred_type)
inv = Inventory(insights_credential=insights_cred)

inv.clean_insights_credential()


def test_invalid_clean_insights_credential():
cred_type = CredentialType.defaults['scm']()
cred = Credential(credential_type=cred_type)
inv = Inventory(insights_credential=cred)

with pytest.raises(ValidationError) as e:
inv.clean_insights_credential()

assert json.dumps(str(e.value)) == json.dumps(str([u"Credential kind must be 'insights'."]))


def test_valid_kind_clean_insights_credential():
inv = Inventory(kind='smart')

inv.clean_insights_credential()


def test_invalid_kind_clean_insights_credential():
cred_type = CredentialType.defaults['insights']()
insights_cred = Credential(credential_type=cred_type)
inv = Inventory(kind='smart', insights_credential=insights_cred)

with pytest.raises(ValidationError) as e:
inv.clean_insights_credential()

assert json.dumps(str(e.value)) == json.dumps(str([u'Assignment not allowed for Smart Inventory']))


class TestControlledBySCM:
def test_clean_source_path_valid(self):
inv_src = InventorySource(source_path='/not_real/', source='scm')

@ -93,7 +93,7 @@ def test_custom_error_messages(schema, given, message):
],
)
def test_cred_type_input_schema_validity(input_, valid):
type_ = CredentialType(kind='cloud', name='SomeCloud', managed_by_tower=True, inputs=input_)
type_ = CredentialType(kind='cloud', name='SomeCloud', managed=True, inputs=input_)
field = CredentialType._meta.get_field('inputs')
if valid is False:
with pytest.raises(ValidationError):
@ -151,7 +151,7 @@ def test_cred_type_injectors_schema(injectors, valid):
type_ = CredentialType(
kind='cloud',
name='SomeCloud',
managed_by_tower=True,
managed=True,
inputs={
'fields': [
{'id': 'username', 'type': 'string', 'label': '_'},
@ -190,7 +190,7 @@ def test_credential_creation_validation_failure(inputs):
type_ = CredentialType(
kind='cloud',
name='SomeCloud',
managed_by_tower=True,
managed=True,
inputs={
'fields': [{'id': 'username', 'label': 'Username for SomeCloud', 'type': 'string'}, {'id': 'flag', 'label': 'Some Boolean Flag', 'type': 'boolean'}]
},

@ -588,8 +588,8 @@ class TestGenericRun:
|
||||
@pytest.mark.django_db
|
||||
class TestAdhocRun(TestJobExecution):
|
||||
def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper):
|
||||
ExecutionEnvironment.objects.create(name='Control Plane EE', managed_by_tower=True)
|
||||
ExecutionEnvironment.objects.create(name='Default Job EE', managed_by_tower=False)
|
||||
ExecutionEnvironment.objects.create(name='Control Plane EE', managed=True)
|
||||
ExecutionEnvironment.objects.create(name='Default Job EE', managed=False)
|
||||
|
||||
adhoc_job.module_args = '{{ ansible_ssh_pass }}'
|
||||
adhoc_job.websocket_emit_status = mock.Mock()
|
||||
@ -1095,7 +1095,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
|
||||
injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token.foo()}}'}},
|
||||
)
|
||||
@ -1108,7 +1108,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
|
||||
injectors={'env': {'MY_CLOUD_API_TOKEN': '{{api_token}}'}},
|
||||
)
|
||||
@ -1123,7 +1123,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
|
||||
injectors={'env': {'TURBO_BUTTON': '{{turbo_button}}'}},
|
||||
)
|
||||
@ -1140,7 +1140,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
|
||||
injectors={'env': {'JOB_ID': 'reserved'}},
|
||||
)
|
||||
@ -1155,7 +1155,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]},
|
||||
injectors={'env': {'MY_CLOUD_PRIVATE_VAR': '{{password}}'}},
|
||||
)
|
||||
@ -1175,7 +1175,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
|
||||
injectors={'extra_vars': {'api_token': '{{api_token}}'}},
|
||||
)
|
||||
@ -1194,7 +1194,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
|
||||
injectors={'extra_vars': {'turbo_button': '{{turbo_button}}'}},
|
||||
)
|
||||
@ -1213,7 +1213,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'turbo_button', 'label': 'Turbo Button', 'type': 'boolean'}]},
|
||||
injectors={'extra_vars': {'turbo_button': '{% if turbo_button %}FAST!{% else %}SLOW!{% endif %}'}},
|
||||
)
|
||||
@ -1234,7 +1234,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'password', 'label': 'Password', 'type': 'string', 'secret': True}]},
|
||||
injectors={'extra_vars': {'password': '{{password}}'}},
|
||||
)
|
||||
@ -1252,7 +1252,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'api_token', 'label': 'API Token', 'type': 'string'}]},
|
||||
injectors={'file': {'template': '[mycloud]\n{{api_token}}'}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}},
|
||||
)
|
||||
@ -1269,7 +1269,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': []},
|
||||
injectors={'file': {'template': value}, 'env': {'MY_CLOUD_INI_FILE': '{{tower.filename}}'}},
|
||||
)
|
||||
@ -1288,7 +1288,7 @@ class TestJobCredentials(TestJobExecution):
|
||||
some_cloud = CredentialType(
|
||||
kind='cloud',
|
||||
name='SomeCloud',
|
||||
managed_by_tower=False,
|
||||
managed=False,
|
||||
inputs={'fields': [{'id': 'cert', 'label': 'Certificate', 'type': 'string'}, {'id': 'key', 'label': 'Key', 'type': 'string'}]},
|
||||
injectors={
|
||||
'file': {'template.cert': '[mycert]\n{{cert}}', 'template.key': '[mykey]\n{{key}}'},
|
||||
@ -1778,8 +1778,8 @@ class TestInventoryUpdateCredentials(TestJobExecution):
def test_tower_source(self, verify, inventory_update, private_data_dir, mocker):
task = tasks.RunInventoryUpdate()
task.instance = inventory_update
tower = CredentialType.defaults['tower']()
inventory_update.source = 'tower'
tower = CredentialType.defaults['controller']()
inventory_update.source = 'controller'
inputs = {'host': 'https://tower.example.org', 'username': 'bob', 'password': 'secret', 'verify_ssl': verify}

def get_cred():
@ -1794,20 +1794,20 @@ class TestInventoryUpdateCredentials(TestJobExecution):

safe_env = build_safe_env(env)

assert env['TOWER_HOST'] == 'https://tower.example.org'
assert env['TOWER_USERNAME'] == 'bob'
assert env['TOWER_PASSWORD'] == 'secret'
assert env['CONTROLLER_HOST'] == 'https://tower.example.org'
assert env['CONTROLLER_USERNAME'] == 'bob'
assert env['CONTROLLER_PASSWORD'] == 'secret'
if verify:
assert env['TOWER_VERIFY_SSL'] == 'True'
assert env['CONTROLLER_VERIFY_SSL'] == 'True'
else:
assert env['TOWER_VERIFY_SSL'] == 'False'
assert safe_env['TOWER_PASSWORD'] == tasks.HIDDEN_PASSWORD
assert env['CONTROLLER_VERIFY_SSL'] == 'False'
assert safe_env['CONTROLLER_PASSWORD'] == tasks.HIDDEN_PASSWORD

def test_tower_source_ssl_verify_empty(self, inventory_update, private_data_dir, mocker):
task = tasks.RunInventoryUpdate()
task.instance = inventory_update
tower = CredentialType.defaults['tower']()
inventory_update.source = 'tower'
tower = CredentialType.defaults['controller']()
inventory_update.source = 'controller'
inputs = {
'host': 'https://tower.example.org',
'username': 'bob',
@ -1921,7 +1921,7 @@ def test_aquire_lock_acquisition_fail_logged(fcntl_lockf, logging_getLogger, os_
def test_managed_injector_redaction(injector_cls):
"""See awx.main.models.inventory.PluginFileInjector._get_shared_env
The ordering within awx.main.tasks.BaseTask and contract with build_env
requires that all managed_by_tower injectors are safely redacted by the
requires that all managed injectors are safely redacted by the
static method build_safe_env without having to employ the safe namespace
as in inject_credential


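The contract in this docstring is that build_safe_env masks anything injected by a managed credential type before the environment is logged or exposed. A rough sketch of that redaction idea, assuming a key-based heuristic; the regular expression and function body are illustrative, and only the HIDDEN_PASSWORD sentinel is referenced elsewhere in this diff:

import re

HIDDEN_PASSWORD = '**********'
# Illustrative heuristic: treat env keys that look credential-like as secret.
SENSITIVE_KEY = re.compile(r'API|TOKEN|KEY|SECRET|PASS', re.IGNORECASE)


def build_safe_env(env):
    # Return a copy of env with likely-sensitive values replaced by the sentinel.
    return {k: (HIDDEN_PASSWORD if SENSITIVE_KEY.search(k) else v) for k, v in env.items()}
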
@ -366,7 +366,7 @@ def get_allowed_fields(obj, serializer_mapping):
fields_excluded = ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(model_name, [])
# see definition of from_db for CredentialType
# injection logic of any managed types is incompatible with activity stream
if model_name == 'credentialtype' and obj.managed_by_tower and obj.namespace:
if model_name == 'credentialtype' and obj.managed and obj.namespace:
fields_excluded.extend(['inputs', 'injectors'])
if fields_excluded:
allowed_fields = [f for f in allowed_fields if f not in fields_excluded]

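In other words, when a managed credential type lands in the activity stream its inputs and injectors are dropped from the snapshot, because they are regenerated in code (see CredentialType.from_db) rather than stored. A compact sketch of that filtering step in isolation; the exclusion table contents and wrapper function are illustrative:

ACTIVITY_STREAM_FIELD_EXCLUSIONS = {'user': ['password']}  # illustrative contents


def filter_allowed_fields(obj, model_name, allowed_fields):
    fields_excluded = list(ACTIVITY_STREAM_FIELD_EXCLUSIONS.get(model_name, []))
    if model_name == 'credentialtype' and obj.managed and obj.namespace:
        # Managed types rebuild inputs/injectors in code, so snapshotting them adds noise.
        fields_excluded.extend(['inputs', 'injectors'])
    return [f for f in allowed_fields if f not in fields_excluded]
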
@ -7,18 +7,18 @@ from awx.main.models.execution_environments import ExecutionEnvironment


def get_control_plane_execution_environment():
return ExecutionEnvironment.objects.filter(organization=None, managed_by_tower=True).first()
return ExecutionEnvironment.objects.filter(organization=None, managed=True).first()


def get_default_execution_environment():
if settings.DEFAULT_EXECUTION_ENVIRONMENT is not None:
return settings.DEFAULT_EXECUTION_ENVIRONMENT
installed_default = ExecutionEnvironment.objects.filter(
image__in=[ee['image'] for ee in settings.GLOBAL_JOB_EXECUTION_ENVIRONMENTS], organization=None, managed_by_tower=False
image__in=[ee['image'] for ee in settings.GLOBAL_JOB_EXECUTION_ENVIRONMENTS], organization=None, managed=False
).first()
if installed_default:
return installed_default
return ExecutionEnvironment.objects.filter(organization=None, managed_by_tower=False).first()
return ExecutionEnvironment.objects.filter(organization=None, managed=False).first()


def get_default_pod_spec():

@ -185,7 +185,7 @@ DEFAULT_EXECUTION_ENVIRONMENT = None
GLOBAL_JOB_EXECUTION_ENVIRONMENTS = [{'name': 'AWX EE 0.3.0', 'image': 'quay.io/ansible/awx-ee:0.3.0'}]
# This setting controls which EE will be used for project updates.
# The awx-manage register_default_execution_environments command reads this setting and registers the EE
# This image is distinguished from others by having "managed_by_tower" set to True and users have limited
# This image is distinguished from others by having "managed" set to True and users have limited
# ability to modify it through the API.
# If a registry credential is needed to pull the image, that can be provided to the awx-manage command
CONTROL_PLANE_EXECUTION_ENVIRONMENT = 'quay.io/ansible/awx-ee:0.3.0'
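Taken together with the helper functions changed earlier in this diff, the control-plane image registered from this setting ends up as the one managed EE with no organization, while user jobs fall back to the default EE. A hedged usage sketch of that resolution; the wrapper function and its argument are illustrative, and the import path is assumed to be awx.main.utils.execution_environments:

from awx.main.utils.execution_environments import (
    get_control_plane_execution_environment,
    get_default_execution_environment,
)


def resolve_execution_environment(is_internal_job):
    # Internal work (project and inventory updates) runs on the managed control plane EE;
    # anything else falls back to the configured or installed default EE.
    if is_internal_job:
        return get_control_plane_execution_environment()
    return get_default_execution_environment()
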
@ -427,6 +427,7 @@ CELERYBEAT_SCHEDULE = {
'gather_analytics': {'task': 'awx.main.tasks.gather_analytics', 'schedule': timedelta(minutes=5)},
'task_manager': {'task': 'awx.main.scheduler.tasks.run_task_manager', 'schedule': timedelta(seconds=20), 'options': {'expires': 20}},
'k8s_reaper': {'task': 'awx.main.tasks.awx_k8s_reaper', 'schedule': timedelta(seconds=60), 'options': {'expires': 50}},
'receptor_reaper': {'task': 'awx.main.tasks.awx_receptor_workunit_reaper', 'schedule': timedelta(seconds=60)},
'send_subsystem_metrics': {'task': 'awx.main.analytics.analytics_tasks.send_subsystem_metrics', 'schedule': timedelta(seconds=20)},
'cleanup_images': {'task': 'awx.main.tasks.cleanup_execution_environment_images', 'schedule': timedelta(hours=3)},
}
@ -677,12 +678,12 @@ RHV_EXCLUDE_EMPTY_GROUPS = True
RHV_INSTANCE_ID_VAR = 'id'

# ---------------------
# ----- Tower -----
# ----- Controller -----
# ---------------------
TOWER_ENABLED_VAR = 'remote_tower_enabled'
TOWER_ENABLED_VALUE = 'true'
TOWER_EXCLUDE_EMPTY_GROUPS = True
TOWER_INSTANCE_ID_VAR = 'remote_tower_id'
CONTROLLER_ENABLED_VAR = 'remote_tower_enabled'
CONTROLLER_ENABLED_VALUE = 'true'
CONTROLLER_EXCLUDE_EMPTY_GROUPS = True
CONTROLLER_INSTANCE_ID_VAR = 'remote_tower_id'

# ---------------------
# ----- Foreman -----

@ -13,7 +13,7 @@ from awx.main.models import User, Team, Organization, Credential, CredentialType
def galaxy_credential():
galaxy_type = CredentialType.objects.create(kind='galaxy')
cred = Credential(
created=now(), modified=now(), name='Ansible Galaxy', managed_by_tower=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
created=now(), modified=now(), name='Ansible Galaxy', managed=True, credential_type=galaxy_type, inputs={'url': 'https://galaxy.ansible.com/'}
)
cred.save()


@ -2,7 +2,7 @@ import React, { useState, useRef, useEffect, Fragment } from 'react';
|
||||
import { t } from '@lingui/macro';
|
||||
import PropTypes from 'prop-types';
|
||||
import { Dropdown, DropdownPosition } from '@patternfly/react-core';
|
||||
import { ToolbarAddButton } from '../PaginatedDataList';
|
||||
import { ToolbarAddButton } from '../PaginatedTable';
|
||||
import { useKebabifiedMenu } from '../../contexts/Kebabified';
|
||||
|
||||
function AddDropDownButton({ dropdownItems, ouiaId }) {
|
||||
|
||||
@ -15,7 +15,6 @@ import { FormSelect, FormSelectOption } from '@patternfly/react-core';
|
||||
function AnsibleSelect({
|
||||
id,
|
||||
data,
|
||||
|
||||
isValid,
|
||||
onBlur,
|
||||
value,
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
import 'styled-components/macro';
|
||||
import React from 'react';
|
||||
import PropTypes from 'prop-types';
|
||||
import { t } from '@lingui/macro';
|
||||
@ -28,6 +29,7 @@ const CheckboxListItem = ({
|
||||
ouiaId={`list-item-${itemId}`}
|
||||
id={`list-item-${itemId}`}
|
||||
onClick={handleRowClick}
|
||||
css="cursor: default"
|
||||
>
|
||||
<Td
|
||||
id={`check-action-item-${itemId}`}
|
||||
|
||||
@ -25,7 +25,7 @@ function ArrayDetail({ label, value, dataCy }) {
|
||||
</DetailName>
|
||||
<Value component={TextListItemVariants.dd} data-cy={valueCy}>
|
||||
{vals.map(v => (
|
||||
<div>{v}</div>
|
||||
<div key={v}>{v}</div>
|
||||
))}
|
||||
</Value>
|
||||
</div>
|
||||
|
||||
@ -1,21 +1,29 @@
|
||||
import React from 'react';
|
||||
import { bool, string } from 'prop-types';
|
||||
import { Link } from 'react-router-dom';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { Tooltip } from '@patternfly/react-core';
|
||||
import { t, Trans } from '@lingui/macro';
|
||||
import { Popover, Tooltip } from '@patternfly/react-core';
|
||||
import styled from 'styled-components';
|
||||
|
||||
import { ExclamationTriangleIcon as PFExclamationTriangleIcon } from '@patternfly/react-icons';
|
||||
|
||||
import { Detail } from '../DetailList';
|
||||
import { ExecutionEnvironment } from '../../types';
|
||||
import getDocsBaseUrl from '../../util/getDocsBaseUrl';
|
||||
import { useConfig } from '../../contexts/Config';
|
||||
|
||||
const ExclamationTriangleIcon = styled(PFExclamationTriangleIcon)`
|
||||
color: var(--pf-global--warning-color--100);
|
||||
margin-left: 18px;
|
||||
cursor: pointer;
|
||||
`;
|
||||
|
||||
const ExclamationTrianglePopover = styled(PFExclamationTriangleIcon)`
|
||||
color: var(--pf-global--warning-color--100);
|
||||
margin-left: 18px;
|
||||
cursor: pointer;
|
||||
`;
|
||||
|
||||
ExclamationTrianglePopover.displayName = 'ExclamationTrianglePopover';
|
||||
|
||||
function ExecutionEnvironmentDetail({
|
||||
executionEnvironment,
|
||||
isDefaultEnvironment,
|
||||
@ -23,6 +31,10 @@ function ExecutionEnvironmentDetail({
|
||||
verifyMissingVirtualEnv,
|
||||
helpText,
|
||||
}) {
|
||||
const config = useConfig();
|
||||
const docsLink = `${getDocsBaseUrl(
|
||||
config
|
||||
)}/html/upgrade-migration-guide/upgrade_to_ees.html`;
|
||||
const label = isDefaultEnvironment
|
||||
? t`Default Execution Environment`
|
||||
: t`Execution Environment`;
|
||||
@ -51,12 +63,29 @@ function ExecutionEnvironmentDetail({
|
||||
<>
|
||||
{t`Missing resource`}
|
||||
<span>
|
||||
<Tooltip
|
||||
content={t`Custom virtual environment ${virtualEnvironment} must be replaced by an execution environment.`}
|
||||
<Popover
|
||||
className="missing-execution-environment"
|
||||
headerContent={<div>{t`Execution Environment Missing`}</div>}
|
||||
bodyContent={
|
||||
<div>
|
||||
<Trans>
|
||||
Custom virtual environment {virtualEnvironment} must be
|
||||
replaced by an execution environment. For more information
|
||||
about migrating to execution environments see{' '}
|
||||
<a
|
||||
href={docsLink}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
the documentation.
|
||||
</a>
|
||||
</Trans>
|
||||
</div>
|
||||
}
|
||||
position="right"
|
||||
>
|
||||
<ExclamationTriangleIcon />
|
||||
</Tooltip>
|
||||
<ExclamationTrianglePopover />
|
||||
</Popover>
|
||||
</span>
|
||||
</>
|
||||
}
|
||||
|
||||
@ -57,9 +57,8 @@ describe('<ExecutionEnvironmentDetail/>', () => {
|
||||
'Execution Environment'
|
||||
);
|
||||
expect(executionEnvironment.find('dd').text()).toEqual('Missing resource');
|
||||
expect(wrapper.find('Tooltip').prop('content')).toEqual(
|
||||
`Custom virtual environment ${virtualEnvironment} must be replaced by an execution environment.`
|
||||
);
|
||||
expect(wrapper.find('ExclamationTrianglePopover').length).toBe(1);
|
||||
expect(wrapper.find('Popover').length).toBe(1);
|
||||
});
|
||||
|
||||
test('should display warning deleted execution environment', async () => {
|
||||
|
||||
@ -6,8 +6,11 @@ import { Card } from '@patternfly/react-core';
|
||||
import AlertModal from '../AlertModal';
|
||||
import DatalistToolbar from '../DataListToolbar';
|
||||
import ErrorDetail from '../ErrorDetail';
|
||||
import { ToolbarDeleteButton } from '../PaginatedDataList';
|
||||
import PaginatedTable, { HeaderRow, HeaderCell } from '../PaginatedTable';
|
||||
import PaginatedTable, {
|
||||
HeaderRow,
|
||||
HeaderCell,
|
||||
ToolbarDeleteButton,
|
||||
} from '../PaginatedTable';
|
||||
import useRequest, {
|
||||
useDeleteItems,
|
||||
useDismissableError,
|
||||
@ -232,7 +235,10 @@ function JobList({ defaultParams, showTypeColumn = false }) {
|
||||
<ToolbarDeleteButton
|
||||
key="delete"
|
||||
onDelete={handleJobDelete}
|
||||
itemsToDelete={selected}
|
||||
itemsToDelete={selected.map(({ ...item }) => {
|
||||
item.name = `${item.id} - ${item.name}`;
|
||||
return item;
|
||||
})}
|
||||
pluralizedItemName={t`Jobs`}
|
||||
cannotDelete={item =>
|
||||
isJobRunning(item.status) ||
|
||||
|
||||
@ -37,7 +37,6 @@ function LaunchButton({ resource, children, history }) {
|
||||
const [launchConfig, setLaunchConfig] = useState(null);
|
||||
const [surveyConfig, setSurveyConfig] = useState(null);
|
||||
const [isLaunching, setIsLaunching] = useState(false);
|
||||
const [resourceCredentials, setResourceCredentials] = useState([]);
|
||||
const [error, setError] = useState(null);
|
||||
|
||||
const handleLaunch = async () => {
|
||||
@ -60,17 +59,6 @@ function LaunchButton({ resource, children, history }) {
|
||||
setSurveyConfig(data);
|
||||
}
|
||||
|
||||
if (
|
||||
launch.ask_credential_on_launch &&
|
||||
resource.type === 'workflow_job_template'
|
||||
) {
|
||||
const {
|
||||
data: { results: jobTemplateCredentials },
|
||||
} = await JobTemplatesAPI.readCredentials(resource.id);
|
||||
|
||||
setResourceCredentials(jobTemplateCredentials);
|
||||
}
|
||||
|
||||
if (canLaunchWithoutPrompt(launch)) {
|
||||
launchWithParams({});
|
||||
} else {
|
||||
@ -184,7 +172,6 @@ function LaunchButton({ resource, children, history }) {
|
||||
resource={resource}
|
||||
onLaunch={launchWithParams}
|
||||
onCancel={() => setShowLaunchPrompt(false)}
|
||||
resourceDefaultCredentials={resourceCredentials}
|
||||
/>
|
||||
)}
|
||||
</Fragment>
|
||||
|
||||
@ -16,7 +16,6 @@ function PromptModalForm({
|
||||
onSubmit,
|
||||
resource,
|
||||
surveyConfig,
|
||||
resourceDefaultCredentials,
|
||||
}) {
|
||||
const { setFieldTouched, values } = useFormikContext();
|
||||
const [showDescription, setShowDescription] = useState(false);
|
||||
@ -28,12 +27,7 @@ function PromptModalForm({
|
||||
visitStep,
|
||||
visitAllSteps,
|
||||
contentError,
|
||||
} = useLaunchSteps(
|
||||
launchConfig,
|
||||
surveyConfig,
|
||||
resource,
|
||||
resourceDefaultCredentials
|
||||
);
|
||||
} = useLaunchSteps(launchConfig, surveyConfig, resource);
|
||||
|
||||
const handleSubmit = () => {
|
||||
const postValues = {};
|
||||
@ -58,6 +52,7 @@ function PromptModalForm({
|
||||
: resource.extra_vars;
|
||||
setValue('extra_vars', mergeExtraVars(extraVars, surveyValues));
|
||||
setValue('scm_branch', values.scm_branch);
|
||||
setValue('verbosity', values.verbosity);
|
||||
|
||||
onSubmit(postValues);
|
||||
};
|
||||
|
||||
@ -7,6 +7,7 @@ import {
|
||||
import LaunchPrompt from './LaunchPrompt';
|
||||
import InventoryStep from './steps/InventoryStep';
|
||||
import CredentialsStep from './steps/CredentialsStep';
|
||||
import CredentialPasswordsStep from './steps/CredentialPasswordsStep';
|
||||
import OtherPromptsStep from './steps/OtherPromptsStep';
|
||||
import PreviewStep from './steps/PreviewStep';
|
||||
import {
|
||||
@ -27,6 +28,18 @@ const resource = {
|
||||
description: 'Foo Description',
|
||||
name: 'Foobar',
|
||||
type: 'job_template',
|
||||
summary_fields: {
|
||||
credentials: [
|
||||
{
|
||||
id: 5,
|
||||
name: 'cred that prompts',
|
||||
credential_type: 1,
|
||||
inputs: {
|
||||
password: 'ASK',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
const noop = () => {};
|
||||
|
||||
@ -101,7 +114,12 @@ describe('LaunchPrompt', () => {
|
||||
summary_fields: {
|
||||
credentials: [
|
||||
{
|
||||
id: 1,
|
||||
id: 5,
|
||||
name: 'cred that prompts',
|
||||
credential_type: 1,
|
||||
inputs: {
|
||||
password: 'ASK',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
@ -126,16 +144,6 @@ describe('LaunchPrompt', () => {
|
||||
},
|
||||
],
|
||||
}}
|
||||
resourceDefaultCredentials={[
|
||||
{
|
||||
id: 5,
|
||||
name: 'cred that prompts',
|
||||
credential_type: 1,
|
||||
inputs: {
|
||||
password: 'ASK',
|
||||
},
|
||||
},
|
||||
]}
|
||||
/>
|
||||
);
|
||||
});
|
||||
@ -197,10 +205,13 @@ describe('LaunchPrompt', () => {
|
||||
const wizard = await waitForElement(wrapper, 'Wizard');
|
||||
const steps = wizard.prop('steps');
|
||||
|
||||
expect(steps).toHaveLength(2);
|
||||
expect(steps).toHaveLength(3);
|
||||
expect(steps[0].name.props.children).toEqual('Credentials');
|
||||
expect(isElementOfType(steps[0].component, CredentialsStep)).toEqual(true);
|
||||
expect(isElementOfType(steps[1].component, PreviewStep)).toEqual(true);
|
||||
expect(
|
||||
isElementOfType(steps[1].component, CredentialPasswordsStep)
|
||||
).toEqual(true);
|
||||
expect(isElementOfType(steps[2].component, PreviewStep)).toEqual(true);
|
||||
});
|
||||
|
||||
test('should add other prompts step', async () => {
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
import React, { useState } from 'react';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { useField } from 'formik';
|
||||
import {
|
||||
@ -10,7 +9,6 @@ import {
|
||||
SelectVariant,
|
||||
} from '@patternfly/react-core';
|
||||
import FormField from '../../FormField';
|
||||
import AnsibleSelect from '../../AnsibleSelect';
|
||||
import Popover from '../../Popover';
|
||||
import {
|
||||
required,
|
||||
@ -92,12 +90,22 @@ function NumberField({ question }) {
|
||||
}
|
||||
|
||||
function MultipleChoiceField({ question }) {
|
||||
const [field, meta] = useField({
|
||||
const [field, meta, helpers] = useField({
|
||||
name: `survey_${question.variable}`,
|
||||
validate: question.required ? required(null) : null,
|
||||
});
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const id = `survey-question-${question.variable}`;
|
||||
const isValid = !(meta.touched && meta.error);
|
||||
|
||||
let options = [];
|
||||
|
||||
if (typeof question.choices === 'string') {
|
||||
options = question.choices.split('\n');
|
||||
} else if (Array.isArray(question.choices)) {
|
||||
options = [...question.choices];
|
||||
}
|
||||
|
||||
return (
|
||||
<FormGroup
|
||||
fieldId={id}
|
||||
@ -107,16 +115,26 @@ function MultipleChoiceField({ question }) {
|
||||
label={question.question_name}
|
||||
labelIcon={<Popover content={question.question_description} />}
|
||||
>
|
||||
<AnsibleSelect
|
||||
<Select
|
||||
onToggle={setIsOpen}
|
||||
onSelect={(event, option) => {
|
||||
helpers.setValue(option);
|
||||
setIsOpen(false);
|
||||
}}
|
||||
selections={field.value}
|
||||
variant={SelectVariant.single}
|
||||
id={id}
|
||||
isValid={isValid}
|
||||
{...field}
|
||||
data={question.choices.split('\n').map(opt => ({
|
||||
key: opt,
|
||||
value: opt,
|
||||
label: opt,
|
||||
}))}
|
||||
/>
|
||||
isOpen={isOpen}
|
||||
placeholderText={t`Select an option`}
|
||||
onClear={() => {
|
||||
helpers.setTouched(true);
|
||||
helpers.setValue('');
|
||||
}}
|
||||
>
|
||||
{options.map(opt => (
|
||||
<SelectOption key={opt} value={opt} />
|
||||
))}
|
||||
</Select>
|
||||
</FormGroup>
|
||||
);
|
||||
}
|
||||
@ -131,6 +149,14 @@ function MultiSelectField({ question }) {
|
||||
const hasActualValue = !question.required || meta.value?.length > 0;
|
||||
const isValid = !meta.touched || (!meta.error && hasActualValue);
|
||||
|
||||
let options = [];
|
||||
|
||||
if (typeof question.choices === 'string') {
|
||||
options = question.choices.split('\n');
|
||||
} else if (Array.isArray(question.choices)) {
|
||||
options = [...question.choices];
|
||||
}
|
||||
|
||||
return (
|
||||
<FormGroup
|
||||
fieldId={id}
|
||||
@ -145,6 +171,7 @@ function MultiSelectField({ question }) {
|
||||
<Select
|
||||
variant={SelectVariant.typeaheadMulti}
|
||||
id={id}
|
||||
placeholderText={!field.value.length && t`Select option(s)`}
|
||||
onToggle={setIsOpen}
|
||||
onSelect={(event, option) => {
|
||||
if (field?.value?.includes(option)) {
|
||||
@ -161,7 +188,7 @@ function MultiSelectField({ question }) {
|
||||
helpers.setValue([]);
|
||||
}}
|
||||
>
|
||||
{question.choices.split('\n').map(opt => (
|
||||
{options.map(opt => (
|
||||
<SelectOption key={opt} value={opt} />
|
||||
))}
|
||||
</Select>
|
||||
|
||||
@ -0,0 +1,88 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { Formik } from 'formik';
|
||||
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
|
||||
import SurveyStep from './SurveyStep';
|
||||
|
||||
describe('SurveyStep', () => {
|
||||
test('should handle choices as a string', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{ job_type: 'run' }}>
|
||||
<SurveyStep
|
||||
surveyConfig={{
|
||||
name: 'survey',
|
||||
description: '',
|
||||
spec: [
|
||||
{
|
||||
question_name: 'q1',
|
||||
question_description: '',
|
||||
required: true,
|
||||
type: 'multiplechoice',
|
||||
variable: 'q1',
|
||||
min: null,
|
||||
max: null,
|
||||
default: '',
|
||||
choices: '1\n2\n3\n4\n5\n6',
|
||||
},
|
||||
],
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
wrapper.find('SelectToggle').simulate('click');
|
||||
});
|
||||
wrapper.update();
|
||||
const selectOptions = wrapper.find('SelectOption');
|
||||
expect(selectOptions.at(0).prop('value')).toEqual('1');
|
||||
expect(selectOptions.at(1).prop('value')).toEqual('2');
|
||||
expect(selectOptions.at(2).prop('value')).toEqual('3');
|
||||
expect(selectOptions.at(3).prop('value')).toEqual('4');
|
||||
expect(selectOptions.at(4).prop('value')).toEqual('5');
|
||||
expect(selectOptions.at(5).prop('value')).toEqual('6');
|
||||
});
|
||||
test('should handle choices as an array', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Formik initialValues={{ job_type: 'run' }}>
|
||||
<SurveyStep
|
||||
surveyConfig={{
|
||||
name: 'survey',
|
||||
description: '',
|
||||
spec: [
|
||||
{
|
||||
question_name: 'q1',
|
||||
question_description: '',
|
||||
required: true,
|
||||
type: 'multiplechoice',
|
||||
variable: 'q1',
|
||||
min: null,
|
||||
max: null,
|
||||
default: '',
|
||||
choices: ['1', '2', '3', '4', '5', '6'],
|
||||
},
|
||||
],
|
||||
}}
|
||||
/>
|
||||
</Formik>
|
||||
);
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
wrapper.find('SelectToggle').simulate('click');
|
||||
});
|
||||
wrapper.update();
|
||||
const selectOptions = wrapper.find('SelectOption');
|
||||
expect(selectOptions.at(0).prop('value')).toEqual('1');
|
||||
expect(selectOptions.at(1).prop('value')).toEqual('2');
|
||||
expect(selectOptions.at(2).prop('value')).toEqual('3');
|
||||
expect(selectOptions.at(3).prop('value')).toEqual('4');
|
||||
expect(selectOptions.at(4).prop('value')).toEqual('5');
|
||||
expect(selectOptions.at(5).prop('value')).toEqual('6');
|
||||
});
|
||||
});
|
||||
@ -17,19 +17,18 @@ export default function credentialsValidator(
|
||||
if (
|
||||
!selectedCredentials.find(selectedCredential => {
|
||||
return (
|
||||
(selectedCredential.credential_type ===
|
||||
defaultCredential.credential_type &&
|
||||
!selectedCredential.inputs.vault_id &&
|
||||
!defaultCredential.inputs.vault_id) ||
|
||||
(selectedCredential.inputs.vault_id &&
|
||||
defaultCredential.inputs.vault_id &&
|
||||
selectedCredential.inputs.vault_id ===
|
||||
defaultCredential.inputs.vault_id)
|
||||
(selectedCredential?.credential_type ===
|
||||
defaultCredential?.credential_type &&
|
||||
!selectedCredential.inputs?.vault_id &&
|
||||
!defaultCredential.inputs?.vault_id) ||
|
||||
(defaultCredential.inputs?.vault_id &&
|
||||
selectedCredential.inputs?.vault_id ===
|
||||
defaultCredential.inputs?.vault_id)
|
||||
);
|
||||
})
|
||||
) {
|
||||
missingCredentialTypes.push(
|
||||
defaultCredential.inputs.vault_id
|
||||
defaultCredential.inputs?.vault_id
|
||||
? `${defaultCredential.summary_fields.credential_type.name} | ${defaultCredential.inputs.vault_id}`
|
||||
: defaultCredential.summary_fields.credential_type.name
|
||||
);
|
||||
|
||||
@ -11,7 +11,6 @@ export default function useCredentialsStep(
|
||||
launchConfig,
|
||||
resource,
|
||||
resourceDefaultCredentials,
|
||||
|
||||
allowCredentialsWithPasswords = false
|
||||
) {
|
||||
const [field, meta, helpers] = useField('credentials');
|
||||
@ -22,7 +21,6 @@ export default function useCredentialsStep(
|
||||
return {
|
||||
step: getStep(
|
||||
launchConfig,
|
||||
|
||||
allowCredentialsWithPasswords,
|
||||
formError,
|
||||
resourceDefaultCredentials
|
||||
|
||||
@ -71,9 +71,6 @@ function getInitialValues(launchConfig, surveyConfig, resource) {
|
||||
values[`survey_${question.variable}`] = question.default
|
||||
? question.default.split('\n')
|
||||
: [];
|
||||
} else if (question.type === 'multiplechoice') {
|
||||
values[`survey_${question.variable}`] =
|
||||
question.default || question.choices.split('\n')[0];
|
||||
} else {
|
||||
values[`survey_${question.variable}`] = question.default || '';
|
||||
}
|
||||
|
||||
@ -39,12 +39,7 @@ function showCredentialPasswordsStep(credentials = [], launchConfig) {
|
||||
return credentialPasswordStepRequired;
|
||||
}
|
||||
|
||||
export default function useLaunchSteps(
|
||||
launchConfig,
|
||||
surveyConfig,
|
||||
resource,
|
||||
resourceDefaultCredentials
|
||||
) {
|
||||
export default function useLaunchSteps(launchConfig, surveyConfig, resource) {
|
||||
const [visited, setVisited] = useState({});
|
||||
const [isReady, setIsReady] = useState(false);
|
||||
const { touched, values: formikValues } = useFormikContext();
|
||||
@ -53,7 +48,7 @@ export default function useLaunchSteps(
|
||||
useCredentialsStep(
|
||||
launchConfig,
|
||||
resource,
|
||||
resourceDefaultCredentials,
|
||||
resource.summary_fields.credentials || [],
|
||||
true
|
||||
),
|
||||
useCredentialPasswordsStep(
|
||||
|
||||
@ -18,7 +18,7 @@ import ChipGroup from '../ChipGroup';
|
||||
import Popover from '../Popover';
|
||||
import DataListToolbar from '../DataListToolbar';
|
||||
import LookupErrorMessage from './shared/LookupErrorMessage';
|
||||
import PaginatedDataList from '../PaginatedDataList';
|
||||
import PaginatedTable, { HeaderCell, HeaderRow } from '../PaginatedTable';
|
||||
import HostListItem from './HostListItem';
|
||||
import { HostsAPI } from '../../api';
|
||||
import { getQSConfig, mergeParams, parseQueryString } from '../../util/qs';
|
||||
@ -352,20 +352,20 @@ function HostFilterLookup({
|
||||
]}
|
||||
>
|
||||
<ModalList>
|
||||
<PaginatedDataList
|
||||
<PaginatedTable
|
||||
contentError={error}
|
||||
hasContentLoading={isLoading}
|
||||
itemCount={count}
|
||||
items={hosts}
|
||||
onRowClick={() => {}}
|
||||
pluralizedItemName={t`hosts`}
|
||||
qsConfig={QS_CONFIG}
|
||||
renderItem={item => (
|
||||
<HostListItem
|
||||
key={item.id}
|
||||
item={{ ...item, url: `/hosts/${item.id}/details` }}
|
||||
/>
|
||||
)}
|
||||
headerRow={
|
||||
<HeaderRow qsConfig={QS_CONFIG} isSelectable={false}>
|
||||
<HeaderCell sortKey="name">{t`Name`}</HeaderCell>
|
||||
<HeaderCell>{t`Inventory`}</HeaderCell>
|
||||
</HeaderRow>
|
||||
}
|
||||
renderRow={item => <HostListItem key={item.id} item={item} />}
|
||||
renderToolbar={props => (
|
||||
<DataListToolbar
|
||||
{...props}
|
||||
@ -375,20 +375,6 @@ function HostFilterLookup({
|
||||
/>
|
||||
)}
|
||||
toolbarSearchColumns={searchColumns}
|
||||
toolbarSortColumns={[
|
||||
{
|
||||
name: t`Name`,
|
||||
key: 'name',
|
||||
},
|
||||
{
|
||||
name: t`Created`,
|
||||
key: 'created',
|
||||
},
|
||||
{
|
||||
name: t`Modified`,
|
||||
key: 'modified',
|
||||
},
|
||||
]}
|
||||
toolbarSearchableKeys={searchableKeys}
|
||||
toolbarRelatedSearchableKeys={relatedSearchableKeys}
|
||||
/>
|
||||
|
||||
@ -1,39 +1,13 @@
|
||||
import React from 'react';
|
||||
import { Link } from 'react-router-dom';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import {
|
||||
DataListItem,
|
||||
DataListItemRow,
|
||||
DataListItemCells,
|
||||
TextContent,
|
||||
} from '@patternfly/react-core';
|
||||
import DataListCell from '../DataListCell';
|
||||
import { Td, Tr } from '@patternfly/react-table';
|
||||
|
||||
function HostListItem({ item }) {
|
||||
return (
|
||||
<DataListItem
|
||||
aria-labelledby={`items-list-item-${item.id}`}
|
||||
key={item.id}
|
||||
id={`${item.id}`}
|
||||
>
|
||||
<DataListItemRow>
|
||||
<DataListItemCells
|
||||
dataListCells={[
|
||||
<DataListCell key="name" aria-label={t`name`}>
|
||||
<TextContent>
|
||||
<Link to={{ pathname: item.url }}>
|
||||
<b id={`items-list-item-${item.id}`}>{item.name}</b>
|
||||
</Link>
|
||||
</TextContent>
|
||||
</DataListCell>,
|
||||
<DataListCell key="inventory" aria-label={t`inventory`}>
|
||||
{item.summary_fields.inventory.name}
|
||||
</DataListCell>,
|
||||
]}
|
||||
/>
|
||||
</DataListItemRow>
|
||||
</DataListItem>
|
||||
<Tr ouiaId={`host-list-item-${item.id}`}>
|
||||
<Td dataLabel={t`Name`}>{item.name}</Td>
|
||||
<Td dataLabel={t`Inventory`}>{item.summary_fields.inventory.name}</Td>
|
||||
</Tr>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@ -15,11 +15,25 @@ describe('HostListItem', () => {
|
||||
},
|
||||
};
|
||||
test('initially renders successfully', () => {
|
||||
wrapper = mountWithContexts(<HostListItem item={mockInventory} />);
|
||||
expect(wrapper.find('HostListItem').length).toBe(1);
|
||||
expect(wrapper.find('DataListCell[aria-label="name"]').text()).toBe('Foo');
|
||||
expect(wrapper.find('DataListCell[aria-label="inventory"]').text()).toBe(
|
||||
'Bar'
|
||||
wrapper = mountWithContexts(
|
||||
<table>
|
||||
<tbody>
|
||||
<HostListItem item={mockInventory} />
|
||||
</tbody>
|
||||
</table>
|
||||
);
|
||||
expect(wrapper.find('HostListItem').length).toBe(1);
|
||||
expect(
|
||||
wrapper
|
||||
.find('Td')
|
||||
.at(0)
|
||||
.text()
|
||||
).toBe('Foo');
|
||||
expect(
|
||||
wrapper
|
||||
.find('Td')
|
||||
.at(1)
|
||||
.text()
|
||||
).toBe('Bar');
|
||||
});
|
||||
});
|
||||
|
||||
@ -17,13 +17,19 @@ export function toSearchParams(string = '') {
|
||||
});
|
||||
}
|
||||
|
||||
const unescapeString = v => {
|
||||
// This is necessary when editing a string that was initially
|
||||
// escaped to allow white space
|
||||
return v.replace(/"/g, '');
|
||||
};
|
||||
|
||||
return orArr
|
||||
.join(' and ')
|
||||
.split(/ and | or /)
|
||||
.map(s => s.split('='))
|
||||
.reduce((searchParams, [k, v]) => {
|
||||
const key = decodeURIComponent(k);
|
||||
const value = decodeURIComponent(v);
|
||||
const value = decodeURIComponent(unescapeString(v));
|
||||
if (searchParams[key] === undefined) {
|
||||
searchParams[key] = value;
|
||||
} else if (Array.isArray(searchParams[key])) {
|
||||
@ -61,6 +67,27 @@ export function toQueryString(config, searchParams = {}) {
|
||||
.join('&');
|
||||
}
|
||||
|
||||
/**
|
||||
* Escape a string with double quote in case there was a white space
|
||||
* @param {string} value A string to be parsed
|
||||
* @return {string} string
|
||||
*/
|
||||
const escapeString = value => {
|
||||
if (verifySpace(value)) {
|
||||
return `"${value}"`;
|
||||
}
|
||||
return value;
|
||||
};
|
||||
|
||||
/**
|
||||
* Verify whether a string has white spaces
|
||||
* @param {string} value A string to be parsed
|
||||
* @return {bool} true if a string has white spaces
|
||||
*/
|
||||
const verifySpace = value => {
|
||||
return value.trim().indexOf(' ') >= 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert params object to host filter string
|
||||
* @param {object} searchParams A string or array of strings keyed by query param key
|
||||
@ -71,9 +98,9 @@ export function toHostFilter(searchParams = {}) {
|
||||
.sort()
|
||||
.flatMap(key => {
|
||||
if (Array.isArray(searchParams[key])) {
|
||||
return searchParams[key].map(val => `${key}=${val}`);
|
||||
return searchParams[key].map(val => `${key}=${escapeString(val)}`);
|
||||
}
|
||||
return `${key}=${searchParams[key]}`;
|
||||
return `${key}=${escapeString(searchParams[key])}`;
|
||||
});
|
||||
|
||||
const filteredSearchParams = flattenSearchParams.filter(
|
||||
|
||||
@ -104,6 +104,18 @@ describe('toHostFilter', () => {
|
||||
);
|
||||
});
|
||||
|
||||
test('should return a host filter with escaped string', () => {
|
||||
const object = {
|
||||
or__description__contains: 'bar biz',
|
||||
enabled: 'true',
|
||||
name__contains: 'x',
|
||||
or__name: 'foo',
|
||||
};
|
||||
expect(toHostFilter(object)).toEqual(
|
||||
'enabled=true and name__contains=x or description__contains="bar biz" or name=foo'
|
||||
);
|
||||
});
|
||||
|
||||
test('should return a host filter with and conditional', () => {
|
||||
const object = {
|
||||
enabled: 'true',
|
||||
|
||||
@ -1,213 +0,0 @@
|
||||
import React, { Fragment } from 'react';
|
||||
|
||||
import PropTypes from 'prop-types';
|
||||
import { DataList } from '@patternfly/react-core';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { withRouter, useHistory, useLocation } from 'react-router-dom';
|
||||
|
||||
import ListHeader from '../ListHeader';
|
||||
import ContentEmpty from '../ContentEmpty';
|
||||
import ContentError from '../ContentError';
|
||||
import ContentLoading from '../ContentLoading';
|
||||
import Pagination from '../Pagination';
|
||||
import DataListToolbar from '../DataListToolbar';
|
||||
|
||||
import { parseQueryString, updateQueryString } from '../../util/qs';
|
||||
|
||||
import { QSConfig, SearchColumns, SortColumns } from '../../types';
|
||||
|
||||
import PaginatedDataListItem from './PaginatedDataListItem';
|
||||
import LoadingSpinner from '../LoadingSpinner';
|
||||
|
||||
function PaginatedDataList({
|
||||
items,
|
||||
onRowClick,
|
||||
contentError,
|
||||
hasContentLoading,
|
||||
emptyStateControls,
|
||||
itemCount,
|
||||
qsConfig,
|
||||
renderItem,
|
||||
toolbarSearchColumns,
|
||||
toolbarSearchableKeys,
|
||||
toolbarRelatedSearchableKeys,
|
||||
toolbarSortColumns,
|
||||
pluralizedItemName,
|
||||
showPageSizeOptions,
|
||||
location,
|
||||
renderToolbar,
|
||||
}) {
|
||||
const { search, pathname } = useLocation();
|
||||
const history = useHistory();
|
||||
const handleListItemSelect = (id = 0) => {
|
||||
const match = items.find(item => item.id === Number(id));
|
||||
onRowClick(match);
|
||||
};
|
||||
|
||||
const handleSetPage = (event, pageNumber) => {
|
||||
const qs = updateQueryString(qsConfig, search, {
|
||||
page: pageNumber,
|
||||
});
|
||||
pushHistoryState(qs);
|
||||
};
|
||||
|
||||
const handleSetPageSize = (event, pageSize, page) => {
|
||||
const qs = updateQueryString(qsConfig, search, {
|
||||
page_size: pageSize,
|
||||
page,
|
||||
});
|
||||
pushHistoryState(qs);
|
||||
};
|
||||
|
||||
const pushHistoryState = qs => {
|
||||
history.push(qs ? `${pathname}?${qs}` : pathname);
|
||||
};
|
||||
|
||||
const searchColumns = toolbarSearchColumns.length
|
||||
? toolbarSearchColumns
|
||||
: [
|
||||
{
|
||||
name: t`Name`,
|
||||
key: 'name',
|
||||
isDefault: true,
|
||||
},
|
||||
];
|
||||
const sortColumns = toolbarSortColumns.length
|
||||
? toolbarSortColumns
|
||||
: [
|
||||
{
|
||||
name: t`Name`,
|
||||
key: 'name',
|
||||
},
|
||||
];
|
||||
const queryParams = parseQueryString(qsConfig, location.search);
|
||||
|
||||
const dataListLabel = t`${pluralizedItemName} List`;
|
||||
const emptyContentMessage = t`Please add ${pluralizedItemName} to populate this list `;
|
||||
const emptyContentTitle = t`No ${pluralizedItemName} Found `;
|
||||
|
||||
let Content;
|
||||
if (hasContentLoading && items.length <= 0) {
|
||||
Content = <ContentLoading />;
|
||||
} else if (contentError) {
|
||||
Content = <ContentError error={contentError} />;
|
||||
} else if (items.length <= 0) {
|
||||
Content = (
|
||||
<ContentEmpty title={emptyContentTitle} message={emptyContentMessage} />
|
||||
);
|
||||
} else {
|
||||
Content = (
|
||||
<>
|
||||
{hasContentLoading && <LoadingSpinner />}
|
||||
<DataList
|
||||
aria-label={dataListLabel}
|
||||
onSelectDataListItem={id => handleListItemSelect(id)}
|
||||
>
|
||||
{items.map(renderItem)}
|
||||
</DataList>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
const ToolbarPagination = (
|
||||
<Pagination
|
||||
isCompact
|
||||
dropDirection="down"
|
||||
itemCount={itemCount}
|
||||
page={queryParams.page || 1}
|
||||
perPage={queryParams.page_size}
|
||||
perPageOptions={
|
||||
showPageSizeOptions
|
||||
? [
|
||||
{ title: '5', value: 5 },
|
||||
{ title: '10', value: 10 },
|
||||
{ title: '20', value: 20 },
|
||||
{ title: '50', value: 50 },
|
||||
]
|
||||
: []
|
||||
}
|
||||
onSetPage={handleSetPage}
|
||||
onPerPageSelect={handleSetPageSize}
|
||||
/>
|
||||
);
|
||||
|
||||
return (
|
||||
<Fragment>
|
||||
<ListHeader
|
||||
itemCount={itemCount}
|
||||
renderToolbar={renderToolbar}
|
||||
emptyStateControls={emptyStateControls}
|
||||
searchColumns={searchColumns}
|
||||
sortColumns={sortColumns}
|
||||
searchableKeys={toolbarSearchableKeys}
|
||||
relatedSearchableKeys={toolbarRelatedSearchableKeys}
|
||||
qsConfig={qsConfig}
|
||||
pagination={ToolbarPagination}
|
||||
/>
|
||||
{Content}
|
||||
{items.length ? (
|
||||
<Pagination
|
||||
variant="bottom"
|
||||
itemCount={itemCount}
|
||||
page={queryParams.page || 1}
|
||||
perPage={queryParams.page_size}
|
||||
perPageOptions={
|
||||
showPageSizeOptions
|
||||
? [
|
||||
{ title: '5', value: 5 },
|
||||
{ title: '10', value: 10 },
|
||||
{ title: '20', value: 20 },
|
||||
{ title: '50', value: 50 },
|
||||
]
|
||||
: []
|
||||
}
|
||||
onSetPage={handleSetPage}
|
||||
onPerPageSelect={handleSetPageSize}
|
||||
/>
|
||||
) : null}
|
||||
</Fragment>
|
||||
);
|
||||
}
|
||||
|
||||
const Item = PropTypes.shape({
|
||||
id: PropTypes.number.isRequired,
|
||||
url: PropTypes.string.isRequired,
|
||||
name: PropTypes.string,
|
||||
});
|
||||
|
||||
PaginatedDataList.propTypes = {
|
||||
items: PropTypes.arrayOf(Item).isRequired,
|
||||
itemCount: PropTypes.number.isRequired,
|
||||
pluralizedItemName: PropTypes.string,
|
||||
qsConfig: QSConfig.isRequired,
|
||||
renderItem: PropTypes.func,
|
||||
toolbarSearchColumns: SearchColumns,
|
||||
toolbarSearchableKeys: PropTypes.arrayOf(PropTypes.string),
|
||||
toolbarRelatedSearchableKeys: PropTypes.arrayOf(PropTypes.string),
|
||||
toolbarSortColumns: SortColumns,
|
||||
showPageSizeOptions: PropTypes.bool,
|
||||
renderToolbar: PropTypes.func,
|
||||
hasContentLoading: PropTypes.bool,
|
||||
contentError: PropTypes.shape(),
|
||||
onRowClick: PropTypes.func,
|
||||
};
|
||||
|
||||
PaginatedDataList.defaultProps = {
|
||||
hasContentLoading: false,
|
||||
contentError: null,
|
||||
toolbarSearchColumns: [],
|
||||
toolbarSearchableKeys: [],
|
||||
toolbarRelatedSearchableKeys: [],
|
||||
toolbarSortColumns: [],
|
||||
pluralizedItemName: 'Items',
|
||||
showPageSizeOptions: true,
|
||||
renderItem: ({ id, ...rest }) => (
|
||||
<PaginatedDataListItem key={id} item={{ id, ...rest }} />
|
||||
),
|
||||
renderToolbar: props => <DataListToolbar {...props} />,
|
||||
onRowClick: () => null,
|
||||
};
|
||||
|
||||
export { PaginatedDataList as _PaginatedDataList };
|
||||
export default withRouter(PaginatedDataList);
|
||||
@ -1,93 +0,0 @@
import React from 'react';
import { createMemoryHistory } from 'history';
import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
import PaginatedDataList from './PaginatedDataList';

const mockData = [
  { id: 1, name: 'one', url: '/org/team/1' },
  { id: 2, name: 'two', url: '/org/team/2' },
  { id: 3, name: 'three', url: '/org/team/3' },
  { id: 4, name: 'four', url: '/org/team/4' },
  { id: 5, name: 'five', url: '/org/team/5' },
];

const qsConfig = {
  namespace: 'item',
  defaultParams: { page: 1, page_size: 5, order_by: 'name' },
  integerFields: ['page', 'page_size'],
};

describe('<PaginatedDataList />', () => {
  afterEach(() => {
    jest.restoreAllMocks();
  });

  test('initially renders successfully', () => {
    mountWithContexts(
      <PaginatedDataList
        items={mockData}
        itemCount={7}
        queryParams={{
          page: 1,
          page_size: 5,
          order_by: 'name',
        }}
        qsConfig={qsConfig}
      />
    );
  });

  test('should navigate to page when Pagination calls onSetPage prop', () => {
    const history = createMemoryHistory({
      initialEntries: ['/organizations/1/teams'],
    });
    const wrapper = mountWithContexts(
      <PaginatedDataList
        items={mockData}
        itemCount={7}
        queryParams={{
          page: 1,
          page_size: 5,
          order_by: 'name',
        }}
        qsConfig={qsConfig}
      />,
      { context: { router: { history } } }
    );

    const pagination = wrapper.find('Pagination').at(1);
    pagination.prop('onSetPage')(null, 2);
    expect(history.location.search).toEqual('?item.page=2');
    wrapper.update();
    pagination.prop('onSetPage')(null, 1);
    // since page = 1 is the default, that should be stripped out of the search
    expect(history.location.search).toEqual('');
  });

  test('should navigate to page when Pagination calls onPerPageSelect prop', () => {
    const history = createMemoryHistory({
      initialEntries: ['/organizations/1/teams'],
    });
    const wrapper = mountWithContexts(
      <PaginatedDataList
        items={mockData}
        itemCount={7}
        queryParams={{
          page: 1,
          page_size: 5,
          order_by: 'name',
        }}
        qsConfig={qsConfig}
      />,
      { context: { router: { history } } }
    );

    const pagination = wrapper.find('Pagination').at(1);
    pagination.prop('onPerPageSelect')(null, 25, 2);
    expect(history.location.search).toEqual('?item.page=2&item.page_size=25');
    wrapper.update();
    // since page_size = 5 is the default, that should be stripped out of the search
    pagination.prop('onPerPageSelect')(null, 5, 2);
    expect(history.location.search).toEqual('?item.page=2');
  });
});
@ -1,42 +0,0 @@
import React from 'react';
import { Link } from 'react-router-dom';
import {
  DataListItem,
  DataListItemRow,
  DataListItemCells,
  TextContent,
} from '@patternfly/react-core';
import styled from 'styled-components';
import DataListCell from '../DataListCell';

const DetailWrapper = styled(TextContent)`
  display: grid;
  grid-template-columns:
    minmax(70px, max-content)
    repeat(auto-fit, minmax(60px, max-content));
  grid-gap: 10px;
`;

export default function PaginatedDataListItem({ item }) {
  return (
    <DataListItem
      aria-labelledby={`items-list-item-${item.id}`}
      key={item.id}
      id={`${item.id}`}
    >
      <DataListItemRow>
        <DataListItemCells
          dataListCells={[
            <DataListCell key="name">
              <DetailWrapper>
                <Link to={{ pathname: item.url }}>
                  <b id={`items-list-item-${item.id}`}>{item.name}</b>
                </Link>
              </DetailWrapper>
            </DataListCell>,
          ]}
        />
      </DataListItemRow>
    </DataListItem>
  );
}
@ -1,4 +0,0 @@
export { default } from './PaginatedDataList';
export { default as PaginatedDataListItem } from './PaginatedDataListItem';
export { default as ToolbarDeleteButton } from './ToolbarDeleteButton';
export { default as ToolbarAddButton } from './ToolbarAddButton';
@ -2,3 +2,5 @@ export { default } from './PaginatedTable';
export { default as ActionsTd } from './ActionsTd';
export { default as HeaderRow, HeaderCell } from './HeaderRow';
export { default as ActionItem } from './ActionItem';
export { default as ToolbarDeleteButton } from './ToolbarDeleteButton';
export { default as ToolbarAddButton } from './ToolbarAddButton';
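Taken together, the two index.js hunks above move the toolbar button re-exports into the PaginatedTable entry point, which is what lets the consumers in the following hunks collapse their imports; a condensed before/after sketch (paths are illustrative):

// Before: toolbar buttons came from the PaginatedDataList barrel file.
import PaginatedTable, { HeaderRow, HeaderCell } from '../PaginatedTable';
import { ToolbarAddButton, ToolbarDeleteButton } from '../PaginatedDataList';

// After: a single import from the PaginatedTable barrel file.
import PaginatedTable, {
  HeaderRow,
  HeaderCell,
  ToolbarAddButton,
  ToolbarDeleteButton,
} from '../PaginatedTable';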
@ -5,8 +5,11 @@ import { RolesAPI, TeamsAPI, UsersAPI } from '../../api';
import AddResourceRole from '../AddRole/AddResourceRole';
import AlertModal from '../AlertModal';
import DataListToolbar from '../DataListToolbar';
import PaginatedTable, { HeaderRow, HeaderCell } from '../PaginatedTable';
import { ToolbarAddButton } from '../PaginatedDataList';
import PaginatedTable, {
  HeaderRow,
  HeaderCell,
  ToolbarAddButton,
} from '../PaginatedTable';
import { getQSConfig, parseQueryString } from '../../util/qs';
import useRequest, { useDeleteItems } from '../../util/useRequest';
import DeleteRoleConfirmationModal from './DeleteRoleConfirmationModal';
@ -6,9 +6,13 @@ import { t } from '@lingui/macro';
import { SchedulesAPI } from '../../../api';
import AlertModal from '../../AlertModal';
import ErrorDetail from '../../ErrorDetail';
import PaginatedTable, { HeaderRow, HeaderCell } from '../../PaginatedTable';
import PaginatedTable, {
  HeaderRow,
  HeaderCell,
  ToolbarAddButton,
  ToolbarDeleteButton,
} from '../../PaginatedTable';
import DataListToolbar from '../../DataListToolbar';
import { ToolbarAddButton, ToolbarDeleteButton } from '../../PaginatedDataList';
import useRequest, { useDeleteItems } from '../../../util/useRequest';
import useSelected from '../../../util/useSelected';
import { getQSConfig, parseQueryString } from '../../../util/qs';
@ -10,8 +10,11 @@ import {
import AlertModal from '../AlertModal';
import DatalistToolbar from '../DataListToolbar';
import ErrorDetail from '../ErrorDetail';
import { ToolbarDeleteButton } from '../PaginatedDataList';
import PaginatedTable, { HeaderRow, HeaderCell } from '../PaginatedTable';
import PaginatedTable, {
  HeaderRow,
  HeaderCell,
  ToolbarDeleteButton,
} from '../PaginatedTable';
import useRequest, { useDeleteItems } from '../../util/useRequest';
import useSelected from '../../util/useSelected';
import { getQSConfig, parseQueryString } from '../../util/qs';
@ -1,10 +1,9 @@
import 'styled-components/macro';
import React, { useState, useCallback } from 'react';
import { Link } from 'react-router-dom';
import { Button, Tooltip, Chip } from '@patternfly/react-core';
import { Button, Popover, Tooltip, Chip } from '@patternfly/react-core';
import { Tr, Td, ExpandableRowContent } from '@patternfly/react-table';
import { t } from '@lingui/macro';

import { t, Trans } from '@lingui/macro';
import {
  ExclamationTriangleIcon,
  PencilAltIcon,
@ -12,25 +11,28 @@ import {
  RocketIcon,
} from '@patternfly/react-icons';
import styled from 'styled-components';

import { ActionsTd, ActionItem } from '../PaginatedTable';
import { DetailList, Detail, DeletedDetail } from '../DetailList';
import ChipGroup from '../ChipGroup';
import CredentialChip from '../CredentialChip';
import ExecutionEnvironmentDetail from '../ExecutionEnvironmentDetail';
import { timeOfDay, formatDateString } from '../../util/dates';

import { JobTemplatesAPI, WorkflowJobTemplatesAPI } from '../../api';
import { LaunchButton } from '../LaunchButton';
import Sparkline from '../Sparkline';
import { toTitleCase } from '../../util/strings';
import CopyButton from '../CopyButton';
import getDocsBaseUrl from '../../util/getDocsBaseUrl';
import { useConfig } from '../../contexts/Config';

const ExclamationTriangleIconWarning = styled(ExclamationTriangleIcon)`
  color: var(--pf-global--warning-color--100);
  margin-left: 18px;
  cursor: pointer;
`;

ExclamationTriangleIconWarning.displayName = 'ExclamationTriangleIconWarning';

function TemplateListItem({
  template,
  isSelected,
@ -39,10 +41,15 @@ function TemplateListItem({
  fetchTemplates,
  rowIndex,
}) {
  const config = useConfig();
  const [isExpanded, setIsExpanded] = useState(false);
  const [isDisabled, setIsDisabled] = useState(false);
  const labelId = `check-action-${template.id}`;

  const docsLink = `${getDocsBaseUrl(
    config
  )}/html/upgrade-migration-guide/upgrade_to_ees.html`;

  const copyTemplate = useCallback(async () => {
    if (template.type === 'job_template') {
      await JobTemplatesAPI.copy(template.id, {
@ -139,13 +146,29 @@ function TemplateListItem({
        )}
        {missingExecutionEnvironment && (
          <span>
            <Tooltip
            <Popover
              className="missing-execution-environment"
              content={t`Custom virtual environment ${template.custom_virtualenv} must be replaced by an execution environment.`}
              headerContent={<div>{t`Execution Environment Missing`}</div>}
              bodyContent={
                <div>
                  <Trans>
                    Custom virtual environment {template.custom_virtualenv}{' '}
                    must be replaced by an execution environment. For more
                    information about migrating to execution environments see{' '}
                    <a
                      href={docsLink}
                      target="_blank"
                      rel="noopener noreferrer"
                    >
                      the documentation.
                    </a>
                  </Trans>
                </div>
              }
              position="right"
            >
              <ExclamationTriangleIconWarning />
            </Tooltip>
            </Popover>
          </span>
        )}
      </Td>
@ -274,10 +297,12 @@ function TemplateListItem({
              dataCy={`template-${template.id}-project`}
            />
          )}
          <ExecutionEnvironmentDetail
            virtualEnvironment={template.custom_virtualenv}
            executionEnvironment={summaryFields?.execution_environment}
          />
          {template.type === 'job_template' && (
            <ExecutionEnvironmentDetail
              virtualEnvironment={template.custom_virtualenv}
              executionEnvironment={summaryFields?.execution_environment}
            />
          )}
          <Detail
            label={t`Last Modified`}
            value={formatDateString(template.modified)}
@ -371,11 +371,8 @@ describe('<TemplateListItem />', () => {
        </tbody>
      </table>
    );
    expect(
      wrapper.find('.missing-execution-environment').prop('content')
    ).toEqual(
      'Custom virtual environment /var/lib/awx/env must be replaced by an execution environment.'
    );

    expect(wrapper.find('ExclamationTriangleIconWarning').length).toBe(1);
  });

  test('should render expected details in expanded section', async () => {
7 file diffs suppressed because they are too large.
@ -2,9 +2,11 @@ import React, { useCallback, useEffect } from 'react';
import { useParams, useLocation } from 'react-router-dom';
import { t } from '@lingui/macro';

import PaginatedDataList, {
import PaginatedTable, {
  HeaderCell,
  HeaderRow,
  ToolbarDeleteButton,
} from '../../../components/PaginatedDataList';
} from '../../../components/PaginatedTable';
import { getQSConfig, parseQueryString } from '../../../util/qs';
import { TokensAPI, ApplicationsAPI } from '../../../api';
import ErrorDetail from '../../../components/ErrorDetail';
@ -67,9 +69,13 @@ function ApplicationTokenList() {
    fetchTokens();
  }, [fetchTokens]);

  const { selected, isAllSelected, handleSelect, setSelected } = useSelected(
    tokens
  );
  const {
    selected,
    isAllSelected,
    handleSelect,
    selectAll,
    clearSelected,
  } = useSelected(tokens);
  const {
    isLoading: deleteLoading,
    deletionError,
@ -90,19 +96,18 @@ function ApplicationTokenList() {

  const handleDelete = async () => {
    await handleDeleteApplications();
    setSelected([]);
    clearSelected();
  };

  return (
    <>
      <PaginatedDataList
      <PaginatedTable
        contentError={error}
        hasContentLoading={isLoading || deleteLoading}
        items={tokens}
        itemCount={itemCount}
        pluralizedItemName={t`Tokens`}
        qsConfig={QS_CONFIG}
        onRowClick={handleSelect}
        toolbarSearchColumns={[
          {
            name: t`Name`,
@ -110,28 +115,7 @@ function ApplicationTokenList() {
            isDefault: true,
          },
        ]}
        toolbarSortColumns={[
          {
            name: t`Name`,
            key: 'user__username',
          },
          {
            name: t`Scope`,
            key: 'scope',
          },
          {
            name: t`Expiration`,
            key: 'expires',
          },
          {
            name: t`Created`,
            key: 'created',
          },
          {
            name: t`Modified`,
            key: 'modified',
          },
        ]}
        clearSelected={clearSelected}
        toolbarSearchableKeys={searchableKeys}
        toolbarRelatedSearchableKeys={relatedSearchableKeys}
        renderToolbar={props => (
@ -139,9 +123,7 @@ function ApplicationTokenList() {
            {...props}
            showSelectAll
            isAllSelected={isAllSelected}
            onSelectAll={isSelected =>
              setSelected(isSelected ? [...tokens] : [])
            }
            onSelectAll={selectAll}
            qsConfig={QS_CONFIG}
            additionalControls={[
              <ToolbarDeleteButton
@ -153,7 +135,14 @@ function ApplicationTokenList() {
            ]}
          />
        )}
        renderItem={token => (
        headerRow={
          <HeaderRow qsConfig={QS_CONFIG}>
            <HeaderCell sortKey="user__username">{t`Name`}</HeaderCell>
            <HeaderCell sortKey="scope">{t`Scope`}</HeaderCell>
            <HeaderCell sortKey="expires">{t`Expires`}</HeaderCell>
          </HeaderRow>
        }
        renderRow={(token, index) => (
          <ApplicationTokenListItem
            key={token.id}
            value={token.name}
@ -161,6 +150,7 @@ function ApplicationTokenList() {
            detailUrl={`/users/${token.summary_fields.user.id}/details`}
            onSelect={() => handleSelect(token)}
            isSelected={selected.some(row => row.id === token.id)}
            rowIndex={index}
          />
        )}
      />
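The hunks above convert ApplicationTokenList from PaginatedDataList's renderItem callback to PaginatedTable's headerRow/renderRow pair; a stripped-down sketch of that pattern with a hypothetical single-column ItemRow (not code from this commit):

// Minimal PaginatedTable usage mirroring the conversion above; names are illustrative.
import React from 'react';
import { Tr, Td } from '@patternfly/react-table';
import PaginatedTable, {
  HeaderRow,
  HeaderCell,
} from '../../../components/PaginatedTable';

function ItemRow({ item, rowIndex, isSelected, onSelect }) {
  return (
    <Tr id={`item-row-${item.id}`}>
      {/* The select cell takes the row index plus selection state and handler. */}
      <Td select={{ rowIndex, isSelected, onSelect }} dataLabel="Selected" />
      <Td dataLabel="Name">{item.name}</Td>
    </Tr>
  );
}

export default function ItemList({ items, itemCount, qsConfig, selected, onSelect }) {
  return (
    <PaginatedTable
      items={items}
      itemCount={itemCount}
      qsConfig={qsConfig}
      headerRow={
        <HeaderRow qsConfig={qsConfig}>
          <HeaderCell sortKey="name">Name</HeaderCell>
        </HeaderRow>
      }
      renderRow={(item, index) => (
        <ItemRow
          key={item.id}
          item={item}
          rowIndex={index}
          isSelected={selected.some(row => row.id === item.id)}
          onSelect={() => onSelect(item)}
        />
      )}
    />
  );
}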
@ -1,10 +1,7 @@
import React from 'react';
import { act } from 'react-dom/test-utils';

import {
  mountWithContexts,
  waitForElement,
} from '../../../../testUtils/enzymeHelpers';
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
import { ApplicationsAPI, TokensAPI } from '../../../api';
import ApplicationTokenList from './ApplicationTokenList';

@ -100,14 +97,16 @@ describe('<ApplicationTokenList/>', () => {
    await act(async () => {
      wrapper = mountWithContexts(<ApplicationTokenList />);
    });
    await waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0);
    wrapper.update();
    expect(wrapper.find('ApplicationTokenList')).toHaveLength(1);
  });

  test('should have data fetched and render 2 rows', async () => {
    ApplicationsAPI.readTokens.mockResolvedValue(tokens);
    await act(async () => {
      wrapper = mountWithContexts(<ApplicationTokenList />);
    });
    await waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0);
    wrapper.update();
    expect(wrapper.find('ApplicationTokenListItem').length).toBe(2);
    expect(ApplicationsAPI.readTokens).toBeCalled();
  });
@ -117,15 +116,22 @@ describe('<ApplicationTokenList/>', () => {
    await act(async () => {
      wrapper = mountWithContexts(<ApplicationTokenList />);
    });
    waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0);

    wrapper
      .find('input#select-token-2')
      .simulate('change', tokens.data.results[0]);

    wrapper.update();

    expect(wrapper.find('input#select-token-2').prop('checked')).toBe(true);
    wrapper
      .find('.pf-c-table__check')
      .at(0)
      .find('input')
      .simulate('change', tokens.data.results[0]);
    wrapper.update();

    expect(
      wrapper
        .find('.pf-c-table__check')
        .at(0)
        .find('input')
        .prop('checked')
    ).toBe(true);
    await act(async () =>
      wrapper.find('Button[aria-label="Delete"]').prop('onClick')()
    );
@ -153,8 +159,8 @@ describe('<ApplicationTokenList/>', () => {
    await act(async () => {
      wrapper = mountWithContexts(<ApplicationTokenList />);
    });
    wrapper.update();

    await waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0);
    expect(wrapper.find('ContentError').length).toBe(1);
  });

@ -174,13 +180,23 @@ describe('<ApplicationTokenList/>', () => {
    await act(async () => {
      wrapper = mountWithContexts(<ApplicationTokenList />);
    });
    waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0);
    wrapper.update();

    wrapper.find('input#select-token-2').simulate('change', 'a');
    wrapper
      .find('.pf-c-table__check')
      .at(0)
      .find('input')
      .simulate('change', 'a');

    wrapper.update();

    expect(wrapper.find('input#select-token-2').prop('checked')).toBe(true);
    expect(
      wrapper
        .find('.pf-c-table__check')
        .at(0)
        .find('input')
        .prop('checked')
    ).toBe(true);
    await act(async () =>
      wrapper.find('Button[aria-label="Delete"]').prop('onClick')()
    );
@ -191,7 +207,9 @@ describe('<ApplicationTokenList/>', () => {
      wrapper.find('Button[aria-label="confirm delete"]').prop('onClick')()
    );
    wrapper.update();
    expect(wrapper.find('ErrorDetail').length).toBe(1);

    expect(!!wrapper.find('AlertModal').prop('isOpen')).toEqual(true);
    expect(wrapper.find('ErrorDetail')).toHaveLength(1);
  });

  test('should not render add button', async () => {
@ -200,7 +218,7 @@ describe('<ApplicationTokenList/>', () => {
    await act(async () => {
      wrapper = mountWithContexts(<ApplicationTokenList />);
    });
    waitForElement(wrapper, 'ApplicationTokenList', el => el.length > 0);
    wrapper.update();
    expect(wrapper.find('ToolbarAddButton').length).toBe(0);
  });
});
@ -1,55 +1,36 @@
import React from 'react';
import { string, bool, func } from 'prop-types';

import { string, bool, func, number } from 'prop-types';
import { t } from '@lingui/macro';
import { Link } from 'react-router-dom';
import {
  DataListCheck,
  DataListItem,
  DataListItemCells,
  DataListItemRow,
} from '@patternfly/react-core';
import styled from 'styled-components';
import { Tr, Td } from '@patternfly/react-table';

import { Token } from '../../../types';
import { formatDateString } from '../../../util/dates';
import { toTitleCase } from '../../../util/strings';
import DataListCell from '../../../components/DataListCell';

const Label = styled.b`
  margin-right: 20px;
`;

function ApplicationTokenListItem({ token, isSelected, onSelect, detailUrl }) {
  const labelId = `check-action-${token.id}`;
function ApplicationTokenListItem({
  token,
  isSelected,
  onSelect,
  detailUrl,
  rowIndex,
}) {
  return (
    <DataListItem key={token.id} aria-labelledby={labelId} id={`${token.id}`}>
      <DataListItemRow>
        <DataListCheck
          id={`select-token-${token.id}`}
          checked={isSelected}
          onChange={onSelect}
          aria-labelledby={labelId}
        />
        <DataListItemCells
          dataListCells={[
            <DataListCell key="divider" aria-label={t`token name`}>
              <Link to={`${detailUrl}`}>
                <b>{token.summary_fields.user.username}</b>
              </Link>
            </DataListCell>,
            <DataListCell key="scope" aria-label={t`scope`}>
              <Label>{t`Scope`}</Label>
              <span>{toTitleCase(token.scope)}</span>
            </DataListCell>,
            <DataListCell key="expiration" aria-label={t`expiration`}>
              <Label>{t`Expiration`}</Label>
              <span>{formatDateString(token.expires)}</span>
            </DataListCell>,
          ]}
        />
      </DataListItemRow>
    </DataListItem>
    <Tr id={`token-row-${token.id}`}>
      <Td
        select={{
          rowIndex,
          isSelected,
          onSelect,
        }}
        dataLabel={t`Selected`}
      />
      <Td dataLabel={t`Name`}>
        <Link to={detailUrl}>{token.summary_fields.user.username}</Link>
      </Td>
      <Td dataLabel={t`Scope`}>{toTitleCase(token.scope)}</Td>
      <Td dataLabel={t`Expires`}>{formatDateString(token.expires)}</Td>
    </Tr>
  );
}

@ -58,6 +39,7 @@ ApplicationTokenListItem.propTypes = {
  detailUrl: string.isRequired,
  isSelected: bool.isRequired,
  onSelect: func.isRequired,
  rowIndex: number.isRequired,
};

export default ApplicationTokenListItem;
@ -42,49 +42,79 @@ describe('<ApplicationTokenListItem/>', () => {
  test('should mount successfully', async () => {
    await act(async () => {
      wrapper = mountWithContexts(
        <ApplicationTokenListItem
          token={token}
          detailUrl="/users/2/details"
          isSelected={false}
          onSelect={() => {}}
        />
        <table>
          <tbody>
            <ApplicationTokenListItem
              token={token}
              detailUrl="/users/2/details"
              isSelected={false}
              onSelect={() => {}}
              rowIndex={1}
            />
          </tbody>
        </table>
      );
    });
    expect(wrapper.find('ApplicationTokenListItem').length).toBe(1);
  });

  test('should render the proper data', async () => {
    await act(async () => {
      wrapper = mountWithContexts(
        <ApplicationTokenListItem
          token={token}
          detailUrl="/users/2/details"
          isSelected={false}
          onSelect={() => {}}
        />
        <table>
          <tbody>
            <ApplicationTokenListItem
              token={token}
              detailUrl="/users/2/details"
              isSelected={false}
              onSelect={() => {}}
              rowIndex={1}
            />
          </tbody>
        </table>
      );
    });
    expect(wrapper.find('DataListCell[aria-label="token name"]').text()).toBe(
      'admin'
    );
    expect(wrapper.find('DataListCell[aria-label="scope"]').text()).toBe(
      'ScopeRead'
    );
    expect(wrapper.find('DataListCell[aria-label="expiration"]').text()).toBe(
      'Expiration10/25/3019, 7:56:38 PM'
    );
    expect(wrapper.find('input#select-token-2').prop('checked')).toBe(false);
    expect(
      wrapper
        .find('Td')
        .at(1)
        .text()
    ).toBe('admin');
    expect(
      wrapper
        .find('Td')
        .at(2)
        .text()
    ).toBe('Read');
    expect(
      wrapper
        .find('Td')
        .at(3)
        .text()
    ).toBe('10/25/3019, 7:56:38 PM');
  });

  test('should be checked', async () => {
    await act(async () => {
      wrapper = mountWithContexts(
        <ApplicationTokenListItem
          token={token}
          detailUrl="/users/2/details"
          isSelected
          onSelect={() => {}}
        />
        <table>
          <tbody>
            <ApplicationTokenListItem
              token={token}
              detailUrl="/users/2/details"
              isSelected
              onSelect={() => {}}
              rowIndex={1}
            />
          </tbody>
        </table>
      );
    });
    expect(wrapper.find('input#select-token-2').prop('checked')).toBe(true);
    expect(
      wrapper
        .find('Td')
        .at(0)
        .prop('select').isSelected
    ).toBe(true);
  });
});
@ -14,11 +14,9 @@ import { ApplicationsAPI } from '../../../api';
import PaginatedTable, {
  HeaderRow,
  HeaderCell,
} from '../../../components/PaginatedTable';
import {
  ToolbarDeleteButton,
  ToolbarAddButton,
} from '../../../components/PaginatedDataList';
} from '../../../components/PaginatedTable';
import useSelected from '../../../util/useSelected';

import ApplicationListItem from './ApplicationListItem';
@ -38,7 +38,7 @@ const mockCredentialResults = {
      description: '',
      kind: 'ssh',
      namespace: 'ssh',
      managed_by_tower: true,
      managed: true,
      inputs: {
        fields: [
          {

@ -63,7 +63,7 @@ function CredentialDetail({ credential }) {
    useCallback(async () => {
      const [
        {
          data: { inputs: credentialTypeInputs, managed_by_tower },
          data: { inputs: credentialTypeInputs, managed },
        },
        {
          data: { results: loadedInputSources },
@ -74,7 +74,7 @@ function CredentialDetail({ credential }) {
      ]);
      return {
        fields: credentialTypeInputs.fields || [],
        managedByTower: managed_by_tower,
        managedByTower: managed,
        inputSources: loadedInputSources.reduce(
          (inputSourcesMap, inputSource) => {
            inputSourcesMap[inputSource.input_field_name] = inputSource;

@ -67,7 +67,7 @@ describe('<CredentialDetail />', () => {
  test('should have proper number of delete detail requests', () => {
    expect(
      wrapper.find('DeleteButton').prop('deleteDetailsRequests')
    ).toHaveLength(6);
    ).toHaveLength(5);
  });

  test('should render details', () => {

@ -158,7 +158,7 @@ const mockCredentialResults = {
      description: '',
      kind: 'ssh',
      namespace: 'ssh',
      managed_by_tower: true,
      managed: true,
      inputs: {
        fields: [
          {
@ -238,7 +238,7 @@ const mockCredentialResults = {
      description: '',
      kind: 'cloud',
      namespace: 'gce',
      managed_by_tower: true,
      managed: true,
      inputs: {
        fields: [
          {

@ -7,13 +7,11 @@ import useSelected from '../../../util/useSelected';
import AlertModal from '../../../components/AlertModal';
import ErrorDetail from '../../../components/ErrorDetail';
import DataListToolbar from '../../../components/DataListToolbar';
import {
  ToolbarAddButton,
  ToolbarDeleteButton,
} from '../../../components/PaginatedDataList';
import PaginatedTable, {
  HeaderRow,
  HeaderCell,
  ToolbarAddButton,
  ToolbarDeleteButton,
} from '../../../components/PaginatedTable';
import useRequest, { useDeleteItems } from '../../../util/useRequest';
import { getQSConfig, parseQueryString } from '../../../util/qs';

@ -43,7 +43,7 @@ describe('<CredentialList />', () => {
  test('should have proper number of delete detail requests', () => {
    expect(
      wrapper.find('ToolbarDeleteButton').prop('deleteDetailsRequests')
    ).toHaveLength(6);
    ).toHaveLength(5);
  });

  test('should fetch credentials from api and render them in the list', () => {
Some files were not shown because too many files have changed in this diff.