diff --git a/.dockerignore b/.dockerignore index 46c83b0467..07c13d382d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1 +1,3 @@ awx/ui/node_modules +awx/ui_next/node_modules +Dockerfile diff --git a/Makefile b/Makefile index b3d14d4ba9..858a6f67e4 100644 --- a/Makefile +++ b/Makefile @@ -383,7 +383,8 @@ test_collection: rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt if [ "$(VENV_BASE)" ]; then \ . $(VENV_BASE)/awx/bin/activate; \ - fi; \ + fi && \ + pip install ansible && \ py.test $(COLLECTION_TEST_DIRS) -v # The python path needs to be modified so that the tests can find Ansible within the container # First we will use anything expility set as PYTHONPATH diff --git a/awx/api/metadata.py b/awx/api/metadata.py index 0b60f9a1ef..dedeeba8fb 100644 --- a/awx/api/metadata.py +++ b/awx/api/metadata.py @@ -24,7 +24,7 @@ from rest_framework.request import clone_request from awx.api.fields import ChoiceNullField from awx.main.fields import JSONField, ImplicitRoleField from awx.main.models import NotificationTemplate -from awx.main.scheduler.kubernetes import PodManager +from awx.main.tasks import AWXReceptorJob class Metadata(metadata.SimpleMetadata): @@ -209,7 +209,7 @@ class Metadata(metadata.SimpleMetadata): continue if field == "pod_spec_override": - meta['default'] = PodManager().pod_definition + meta['default'] = AWXReceptorJob().pod_definition # Add type choices if available from the serializer. 
if field == 'type' and hasattr(serializer, 'get_type_choices'): diff --git a/awx/api/serializers.py b/awx/api/serializers.py index d34c0d924a..6f8dcd3fad 100644 --- a/awx/api/serializers.py +++ b/awx/api/serializers.py @@ -50,7 +50,7 @@ from awx.main.constants import ( ) from awx.main.models import ( ActivityStream, AdHocCommand, AdHocCommandEvent, Credential, CredentialInputSource, - CredentialType, CustomInventoryScript, Group, Host, Instance, + CredentialType, CustomInventoryScript, ExecutionEnvironment, Group, Host, Instance, InstanceGroup, Inventory, InventorySource, InventoryUpdate, InventoryUpdateEvent, Job, JobEvent, JobHostSummary, JobLaunchConfig, JobNotificationMixin, JobTemplate, Label, Notification, NotificationTemplate, @@ -107,6 +107,8 @@ SUMMARIZABLE_FK_FIELDS = { 'insights_credential_id',), 'host': DEFAULT_SUMMARY_FIELDS, 'group': DEFAULT_SUMMARY_FIELDS, + 'default_environment': DEFAULT_SUMMARY_FIELDS + ('image',), + 'execution_environment': DEFAULT_SUMMARY_FIELDS + ('image',), 'project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'), 'source_project': DEFAULT_SUMMARY_FIELDS + ('status', 'scm_type'), 'project_update': DEFAULT_SUMMARY_FIELDS + ('status', 'failed',), @@ -129,7 +131,7 @@ SUMMARIZABLE_FK_FIELDS = { 'source_script': DEFAULT_SUMMARY_FIELDS, 'role': ('id', 'role_field'), 'notification_template': DEFAULT_SUMMARY_FIELDS, - 'instance_group': ('id', 'name', 'controller_id', 'is_containerized'), + 'instance_group': ('id', 'name', 'controller_id', 'is_container_group'), 'insights_credential': DEFAULT_SUMMARY_FIELDS, 'source_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'), 'target_credential': DEFAULT_SUMMARY_FIELDS + ('kind', 'cloud', 'credential_type_id'), @@ -647,7 +649,7 @@ class UnifiedJobTemplateSerializer(BaseSerializer): class Meta: model = UnifiedJobTemplate fields = ('*', 'last_job_run', 'last_job_failed', - 'next_job_run', 'status') + 'next_job_run', 'status', 'execution_environment') def 
get_related(self, obj): res = super(UnifiedJobTemplateSerializer, self).get_related(obj) @@ -657,6 +659,9 @@ class UnifiedJobTemplateSerializer(BaseSerializer): res['last_job'] = obj.last_job.get_absolute_url(request=self.context.get('request')) if obj.next_schedule: res['next_schedule'] = obj.next_schedule.get_absolute_url(request=self.context.get('request')) + if obj.execution_environment_id: + res['execution_environment'] = self.reverse('api:execution_environment_detail', + kwargs={'pk': obj.execution_environment_id}) return res def get_types(self): @@ -711,6 +716,7 @@ class UnifiedJobSerializer(BaseSerializer): class Meta: model = UnifiedJob fields = ('*', 'unified_job_template', 'launch_type', 'status', + 'execution_environment', 'failed', 'started', 'finished', 'canceled_on', 'elapsed', 'job_args', 'job_cwd', 'job_env', 'job_explanation', 'execution_node', 'controller_node', @@ -748,6 +754,9 @@ class UnifiedJobSerializer(BaseSerializer): res['stdout'] = self.reverse('api:ad_hoc_command_stdout', kwargs={'pk': obj.pk}) if obj.workflow_job_id: res['source_workflow_job'] = self.reverse('api:workflow_job_detail', kwargs={'pk': obj.workflow_job_id}) + if obj.execution_environment_id: + res['execution_environment'] = self.reverse('api:execution_environment_detail', + kwargs={'pk': obj.execution_environment_id}) return res def get_summary_fields(self, obj): @@ -1243,11 +1252,12 @@ class OrganizationSerializer(BaseSerializer): class Meta: model = Organization - fields = ('*', 'max_hosts', 'custom_virtualenv',) + fields = ('*', 'max_hosts', 'custom_virtualenv', 'default_environment',) def get_related(self, obj): res = super(OrganizationSerializer, self).get_related(obj) - res.update(dict( + res.update( + execution_environments = self.reverse('api:organization_execution_environments_list', kwargs={'pk': obj.pk}), projects = self.reverse('api:organization_projects_list', kwargs={'pk': obj.pk}), inventories = self.reverse('api:organization_inventories_list', kwargs={'pk': 
obj.pk}), job_templates = self.reverse('api:organization_job_templates_list', kwargs={'pk': obj.pk}), @@ -1267,7 +1277,10 @@ class OrganizationSerializer(BaseSerializer): access_list = self.reverse('api:organization_access_list', kwargs={'pk': obj.pk}), instance_groups = self.reverse('api:organization_instance_groups_list', kwargs={'pk': obj.pk}), galaxy_credentials = self.reverse('api:organization_galaxy_credentials_list', kwargs={'pk': obj.pk}), - )) + ) + if obj.default_environment: + res['default_environment'] = self.reverse('api:execution_environment_detail', + kwargs={'pk': obj.default_environment_id}) return res def get_summary_fields(self, obj): @@ -1347,6 +1360,29 @@ class ProjectOptionsSerializer(BaseSerializer): return super(ProjectOptionsSerializer, self).validate(attrs) +class ExecutionEnvironmentSerializer(BaseSerializer): + show_capabilities = ['edit', 'delete', 'copy'] + managed_by_tower = serializers.ReadOnlyField() + + class Meta: + model = ExecutionEnvironment + fields = ('*', 'organization', 'image', 'managed_by_tower', 'credential', 'pull') + + def get_related(self, obj): + res = super(ExecutionEnvironmentSerializer, self).get_related(obj) + res.update( + activity_stream=self.reverse('api:execution_environment_activity_stream_list', kwargs={'pk': obj.pk}), + unified_job_templates=self.reverse('api:execution_environment_job_template_list', kwargs={'pk': obj.pk}), + copy=self.reverse('api:execution_environment_copy', kwargs={'pk': obj.pk}), + ) + if obj.organization: + res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk}) + if obj.credential: + res['credential'] = self.reverse('api:credential_detail', + kwargs={'pk': obj.credential.pk}) + return res + + class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): status = serializers.ChoiceField(choices=Project.PROJECT_STATUS_CHOICES, read_only=True) @@ -1360,8 +1396,8 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, 
ProjectOptionsSerializer): class Meta: model = Project - fields = ('*', 'organization', 'scm_update_on_launch', - 'scm_update_cache_timeout', 'allow_override', 'custom_virtualenv',) + \ + fields = ('*', '-execution_environment', 'organization', 'scm_update_on_launch', + 'scm_update_cache_timeout', 'allow_override', 'custom_virtualenv', 'default_environment') + \ ('last_update_failed', 'last_updated') # Backwards compatibility def get_related(self, obj): @@ -1386,6 +1422,9 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): if obj.organization: res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk}) + if obj.default_environment: + res['default_environment'] = self.reverse('api:execution_environment_detail', + kwargs={'pk': obj.default_environment_id}) # Backwards compatibility. if obj.current_update: res['current_update'] = self.reverse('api:project_update_detail', @@ -4731,7 +4770,7 @@ class InstanceGroupSerializer(BaseSerializer): 'Isolated groups have a designated controller group.'), read_only=True ) - is_containerized = serializers.BooleanField( + is_container_group = serializers.BooleanField( help_text=_('Indicates whether instances in this group are containerized.' 
'Containerized groups have a designated Openshift or Kubernetes cluster.'), read_only=True @@ -4761,7 +4800,7 @@ class InstanceGroupSerializer(BaseSerializer): fields = ("id", "type", "url", "related", "name", "created", "modified", "capacity", "committed_capacity", "consumed_capacity", "percent_capacity_remaining", "jobs_running", "jobs_total", - "instances", "controller", "is_controller", "is_isolated", "is_containerized", "credential", + "instances", "controller", "is_controller", "is_isolated", "is_container_group", "credential", "policy_instance_percentage", "policy_instance_minimum", "policy_instance_list", "pod_spec_override", "summary_fields") @@ -4786,17 +4825,17 @@ class InstanceGroupSerializer(BaseSerializer): raise serializers.ValidationError(_('Isolated instances may not be added or removed from instances groups via the API.')) if self.instance and self.instance.controller_id is not None: raise serializers.ValidationError(_('Isolated instance group membership may not be managed via the API.')) - if value and self.instance and self.instance.is_containerized: + if value and self.instance and self.instance.is_container_group: raise serializers.ValidationError(_('Containerized instances may not be managed via the API')) return value def validate_policy_instance_percentage(self, value): - if value and self.instance and self.instance.is_containerized: + if value and self.instance and self.instance.is_container_group: raise serializers.ValidationError(_('Containerized instances may not be managed via the API')) return value def validate_policy_instance_minimum(self, value): - if value and self.instance and self.instance.is_containerized: + if value and self.instance and self.instance.is_container_group: raise serializers.ValidationError(_('Containerized instances may not be managed via the API')) return value diff --git a/awx/api/urls/execution_environments.py b/awx/api/urls/execution_environments.py new file mode 100644 index 0000000000..99b9cb3ddc --- 
/dev/null +++ b/awx/api/urls/execution_environments.py @@ -0,0 +1,20 @@ +from django.conf.urls import url + +from awx.api.views import ( + ExecutionEnvironmentList, + ExecutionEnvironmentDetail, + ExecutionEnvironmentJobTemplateList, + ExecutionEnvironmentCopy, + ExecutionEnvironmentActivityStreamList, +) + + +urls = [ + url(r'^$', ExecutionEnvironmentList.as_view(), name='execution_environment_list'), + url(r'^(?P[0-9]+)/$', ExecutionEnvironmentDetail.as_view(), name='execution_environment_detail'), + url(r'^(?P[0-9]+)/unified_job_templates/$', ExecutionEnvironmentJobTemplateList.as_view(), name='execution_environment_job_template_list'), + url(r'^(?P[0-9]+)/copy/$', ExecutionEnvironmentCopy.as_view(), name='execution_environment_copy'), + url(r'^(?P[0-9]+)/activity_stream/$', ExecutionEnvironmentActivityStreamList.as_view(), name='execution_environment_activity_stream_list'), +] + +__all__ = ['urls'] diff --git a/awx/api/urls/organization.py b/awx/api/urls/organization.py index 12b2807905..9d8fecf4bc 100644 --- a/awx/api/urls/organization.py +++ b/awx/api/urls/organization.py @@ -9,6 +9,7 @@ from awx.api.views import ( OrganizationUsersList, OrganizationAdminsList, OrganizationInventoriesList, + OrganizationExecutionEnvironmentsList, OrganizationProjectsList, OrganizationJobTemplatesList, OrganizationWorkflowJobTemplatesList, @@ -34,6 +35,7 @@ urls = [ url(r'^(?P[0-9]+)/users/$', OrganizationUsersList.as_view(), name='organization_users_list'), url(r'^(?P[0-9]+)/admins/$', OrganizationAdminsList.as_view(), name='organization_admins_list'), url(r'^(?P[0-9]+)/inventories/$', OrganizationInventoriesList.as_view(), name='organization_inventories_list'), + url(r'^(?P[0-9]+)/execution_environments/$', OrganizationExecutionEnvironmentsList.as_view(), name='organization_execution_environments_list'), url(r'^(?P[0-9]+)/projects/$', OrganizationProjectsList.as_view(), name='organization_projects_list'), url(r'^(?P[0-9]+)/job_templates/$', 
OrganizationJobTemplatesList.as_view(), name='organization_job_templates_list'), url(r'^(?P[0-9]+)/workflow_job_templates/$', OrganizationWorkflowJobTemplatesList.as_view(), name='organization_workflow_job_templates_list'), diff --git a/awx/api/urls/urls.py b/awx/api/urls/urls.py index 636e68e4bd..2beeb47a47 100644 --- a/awx/api/urls/urls.py +++ b/awx/api/urls/urls.py @@ -42,6 +42,7 @@ from .user import urls as user_urls from .project import urls as project_urls from .project_update import urls as project_update_urls from .inventory import urls as inventory_urls +from .execution_environments import urls as execution_environment_urls from .team import urls as team_urls from .host import urls as host_urls from .group import urls as group_urls @@ -106,6 +107,7 @@ v2_urls = [ url(r'^schedules/', include(schedule_urls)), url(r'^organizations/', include(organization_urls)), url(r'^users/', include(user_urls)), + url(r'^execution_environments/', include(execution_environment_urls)), url(r'^projects/', include(project_urls)), url(r'^project_updates/', include(project_update_urls)), url(r'^teams/', include(team_urls)), diff --git a/awx/api/views/__init__.py b/awx/api/views/__init__.py index 43e845af0c..2d29519de8 100644 --- a/awx/api/views/__init__.py +++ b/awx/api/views/__init__.py @@ -112,6 +112,7 @@ from awx.api.views.organization import ( # noqa OrganizationInventoriesList, OrganizationUsersList, OrganizationAdminsList, + OrganizationExecutionEnvironmentsList, OrganizationProjectsList, OrganizationJobTemplatesList, OrganizationWorkflowJobTemplatesList, @@ -396,7 +397,7 @@ class InstanceGroupDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAP permission_classes = (InstanceGroupTowerPermission,) def update_raw_data(self, data): - if self.get_object().is_containerized: + if self.get_object().is_container_group: data.pop('policy_instance_percentage', None) data.pop('policy_instance_minimum', None) data.pop('policy_instance_list', None) @@ -685,6 +686,52 @@ class 
TeamAccessList(ResourceAccessList): parent_model = models.Team +class ExecutionEnvironmentList(ListCreateAPIView): + + always_allow_superuser = False + model = models.ExecutionEnvironment + serializer_class = serializers.ExecutionEnvironmentSerializer + swagger_topic = "Execution Environments" + + +class ExecutionEnvironmentDetail(RetrieveUpdateDestroyAPIView): + + always_allow_superuser = False + model = models.ExecutionEnvironment + serializer_class = serializers.ExecutionEnvironmentSerializer + swagger_topic = "Execution Environments" + + +class ExecutionEnvironmentJobTemplateList(SubListAPIView): + + model = models.UnifiedJobTemplate + serializer_class = serializers.UnifiedJobTemplateSerializer + parent_model = models.ExecutionEnvironment + relationship = 'unifiedjobtemplates' + + +class ExecutionEnvironmentCopy(CopyAPIView): + + model = models.ExecutionEnvironment + copy_return_serializer_class = serializers.ExecutionEnvironmentSerializer + + +class ExecutionEnvironmentActivityStreamList(SubListAPIView): + + model = models.ActivityStream + serializer_class = serializers.ActivityStreamSerializer + parent_model = models.ExecutionEnvironment + relationship = 'activitystream_set' + search_fields = ('changes',) + + def get_queryset(self): + parent = self.get_parent_object() + self.check_parent_access(parent) + + qs = self.request.user.get_queryset(self.model) + return qs.filter(execution_environment=parent) + + class ProjectList(ListCreateAPIView): model = models.Project diff --git a/awx/api/views/organization.py b/awx/api/views/organization.py index d03dfcc86f..b33259a8ad 100644 --- a/awx/api/views/organization.py +++ b/awx/api/views/organization.py @@ -15,6 +15,7 @@ from awx.main.models import ( Inventory, Host, Project, + ExecutionEnvironment, JobTemplate, WorkflowJobTemplate, Organization, @@ -45,6 +46,7 @@ from awx.api.serializers import ( RoleSerializer, NotificationTemplateSerializer, InstanceGroupSerializer, + ExecutionEnvironmentSerializer, 
ProjectSerializer, JobTemplateSerializer, WorkflowJobTemplateSerializer, CredentialSerializer ) @@ -141,6 +143,16 @@ class OrganizationProjectsList(SubListCreateAPIView): parent_key = 'organization' +class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView): + + model = ExecutionEnvironment + serializer_class = ExecutionEnvironmentSerializer + parent_model = Organization + relationship = 'executionenvironments' + parent_key = 'organization' + swagger_topic = "Execution Environments" + + class OrganizationJobTemplatesList(SubListCreateAPIView): model = JobTemplate diff --git a/awx/api/views/root.py b/awx/api/views/root.py index 0f5e7e6cdd..d6fc20d105 100644 --- a/awx/api/views/root.py +++ b/awx/api/views/root.py @@ -100,6 +100,7 @@ class ApiVersionRootView(APIView): data['dashboard'] = reverse('api:dashboard_view', request=request) data['organizations'] = reverse('api:organization_list', request=request) data['users'] = reverse('api:user_list', request=request) + data['execution_environments'] = reverse('api:execution_environment_list', request=request) data['projects'] = reverse('api:project_list', request=request) data['project_updates'] = reverse('api:project_update_list', request=request) data['teams'] = reverse('api:team_list', request=request) diff --git a/awx/conf/fields.py b/awx/conf/fields.py index 7c9a94969d..e28a44aa32 100644 --- a/awx/conf/fields.py +++ b/awx/conf/fields.py @@ -14,6 +14,7 @@ from rest_framework.fields import ( # noqa BooleanField, CharField, ChoiceField, DictField, DateTimeField, EmailField, IntegerField, ListField, NullBooleanField ) +from rest_framework.serializers import PrimaryKeyRelatedField # noqa logger = logging.getLogger('awx.conf.fields') diff --git a/awx/main/access.py b/awx/main/access.py index 89a6c0607d..d2a2aa9f3b 100644 --- a/awx/main/access.py +++ b/awx/main/access.py @@ -29,9 +29,9 @@ from awx.main.utils import ( ) from awx.main.models import ( ActivityStream, AdHocCommand, AdHocCommandEvent, 
Credential, CredentialType, - CredentialInputSource, CustomInventoryScript, Group, Host, Instance, InstanceGroup, - Inventory, InventorySource, InventoryUpdate, InventoryUpdateEvent, Job, JobEvent, - JobHostSummary, JobLaunchConfig, JobTemplate, Label, Notification, + CredentialInputSource, CustomInventoryScript, ExecutionEnvironment, Group, Host, Instance, + InstanceGroup, Inventory, InventorySource, InventoryUpdate, InventoryUpdateEvent, Job, + JobEvent, JobHostSummary, JobLaunchConfig, JobTemplate, Label, Notification, NotificationTemplate, Organization, Project, ProjectUpdate, ProjectUpdateEvent, Role, Schedule, SystemJob, SystemJobEvent, SystemJobTemplate, Team, UnifiedJob, UnifiedJobTemplate, WorkflowJob, @@ -1308,6 +1308,54 @@ class TeamAccess(BaseAccess): *args, **kwargs) +class ExecutionEnvironmentAccess(BaseAccess): + """ + I can see an execution environment when: + - I'm a superuser + - I'm a member of the same organization + - it is a global ExecutionEnvironment + I can create/change an execution environment when: + - I'm a superuser + - I'm an admin for the organization(s) + """ + + model = ExecutionEnvironment + select_related = ('organization',) + prefetch_related = ('organization__admin_role', 'organization__execution_environment_admin_role') + + def filtered_queryset(self): + return ExecutionEnvironment.objects.filter( + Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | + Q(organization__isnull=True) + ).distinct() + + @check_superuser + def can_add(self, data): + if not data: # So the browseable API will work + return Organization.accessible_objects(self.user, 'execution_environment_admin_role').exists() + return self.check_related('organization', Organization, data, mandatory=True, + role_field='execution_environment_admin_role') + + def can_change(self, obj, data): + if obj.managed_by_tower: + raise PermissionDenied + if self.user.is_superuser: + return True + if obj and obj.organization_id is None: + raise 
PermissionDenied + if self.user not in obj.organization.execution_environment_admin_role: + raise PermissionDenied + if data and 'organization' in data: + new_org = get_object_from_data('organization', Organization, data, obj=obj) + if not new_org or self.user not in new_org.execution_environment_admin_role: + return False + return self.check_related('organization', Organization, data, obj=obj, mandatory=True, + role_field='execution_environment_admin_role') + + def can_delete(self, obj): + return self.can_change(obj, None) + + class ProjectAccess(NotificationAttachMixin, BaseAccess): ''' I can see projects when: diff --git a/awx/main/analytics/collectors.py b/awx/main/analytics/collectors.py index b0ac43cc65..89bc28ea56 100644 --- a/awx/main/analytics/collectors.py +++ b/awx/main/analytics/collectors.py @@ -311,7 +311,7 @@ def events_table(since, full_path, until, **kwargs): return _copy_table(table='events', query=events_query, path=full_path) -@register('unified_jobs_table', '1.1', format='csv', description=_('Data on jobs run'), expensive=True) +@register('unified_jobs_table', '1.2', format='csv', description=_('Data on jobs run'), expensive=True) def unified_jobs_table(since, full_path, until, **kwargs): unified_job_query = '''COPY (SELECT main_unifiedjob.id, main_unifiedjob.polymorphic_ctype_id, @@ -334,7 +334,8 @@ def unified_jobs_table(since, full_path, until, **kwargs): main_unifiedjob.finished, main_unifiedjob.elapsed, main_unifiedjob.job_explanation, - main_unifiedjob.instance_group_id + main_unifiedjob.instance_group_id, + main_unifiedjob.installed_collections FROM main_unifiedjob JOIN django_content_type ON main_unifiedjob.polymorphic_ctype_id = django_content_type.id LEFT JOIN main_job ON main_unifiedjob.id = main_job.unifiedjob_ptr_id diff --git a/awx/main/conf.py b/awx/main/conf.py index 6bf86db214..f46371e22b 100644 --- a/awx/main/conf.py +++ b/awx/main/conf.py @@ -10,6 +10,7 @@ from rest_framework.fields import FloatField # Tower from awx.conf 
import fields, register, register_validate +from awx.main.models import ExecutionEnvironment logger = logging.getLogger('awx.main.conf') @@ -176,6 +177,18 @@ register( read_only=True, ) +register( + 'DEFAULT_EXECUTION_ENVIRONMENT', + field_class=fields.PrimaryKeyRelatedField, + allow_null=True, + default=None, + queryset=ExecutionEnvironment.objects.all(), + label=_('Global default execution environment'), + help_text=_('.'), + category=_('System'), + category_slug='system', +) + register( 'CUSTOM_VENV_PATHS', field_class=fields.StringListPathField, diff --git a/awx/main/isolated/manager.py b/awx/main/isolated/manager.py index de4783e277..abcd41c5c1 100644 --- a/awx/main/isolated/manager.py +++ b/awx/main/isolated/manager.py @@ -6,7 +6,6 @@ import stat import tempfile import time import logging -import yaml import datetime from django.conf import settings @@ -32,7 +31,7 @@ def set_pythonpath(venv_libdir, env): class IsolatedManager(object): - def __init__(self, event_handler, canceled_callback=None, check_callback=None, pod_manager=None): + def __init__(self, event_handler, canceled_callback=None, check_callback=None): """ :param event_handler: a callable used to persist event data from isolated nodes :param canceled_callback: a callable - which returns `True` or `False` @@ -45,28 +44,12 @@ class IsolatedManager(object): self.started_at = None self.captured_command_artifact = False self.instance = None - self.pod_manager = pod_manager def build_inventory(self, hosts): - if self.instance and self.instance.is_containerized: - inventory = {'all': {'hosts': {}}} - fd, path = tempfile.mkstemp( - prefix='.kubeconfig', dir=self.private_data_dir - ) - with open(path, 'wb') as temp: - temp.write(yaml.dump(self.pod_manager.kube_config).encode()) - temp.flush() - os.chmod(temp.name, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - for host in hosts: - inventory['all']['hosts'][host] = { - "ansible_connection": "kubectl", - "ansible_kubectl_config": path, - } - else: - inventory 
= '\n'.join([ - '{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME) - for host in hosts - ]) + inventory = '\n'.join([ + '{} ansible_ssh_user={}'.format(host, settings.AWX_ISOLATED_USERNAME) + for host in hosts + ]) return inventory diff --git a/awx/main/management/commands/create_preload_data.py b/awx/main/management/commands/create_preload_data.py index 9b1d131735..05ed18b96c 100644 --- a/awx/main/management/commands/create_preload_data.py +++ b/awx/main/management/commands/create_preload_data.py @@ -2,22 +2,22 @@ # All Rights Reserved from django.core.management.base import BaseCommand +from django.conf import settings from crum import impersonate -from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate +from awx.main.models import ( + User, Organization, Project, Inventory, CredentialType, + Credential, Host, JobTemplate, ExecutionEnvironment +) from awx.main.signals import disable_computed_fields class Command(BaseCommand): """Create preloaded data, intended for new installs """ - help = 'Creates a preload tower data iff there is none.' + help = 'Creates a preload tower data if there is none.' def handle(self, *args, **kwargs): - # Sanity check: Is there already an organization in the system? - if Organization.objects.count(): - print('An organization is already in the system, exiting.') - print('(changed: False)') - return + changed = False # Create a default organization as the first superuser found. 
try: @@ -26,44 +26,62 @@ class Command(BaseCommand): superuser = None with impersonate(superuser): with disable_computed_fields(): - o = Organization.objects.create(name='Default') - p = Project(name='Demo Project', - scm_type='git', - scm_url='https://github.com/ansible/ansible-tower-samples', - scm_update_on_launch=True, - scm_update_cache_timeout=0, - organization=o) - p.save(skip_update=True) - ssh_type = CredentialType.objects.filter(namespace='ssh').first() - c = Credential.objects.create(credential_type=ssh_type, - name='Demo Credential', - inputs={ - 'username': superuser.username - }, - created_by=superuser) - c.admin_role.members.add(superuser) - public_galaxy_credential = Credential( - name='Ansible Galaxy', - managed_by_tower=True, - credential_type=CredentialType.objects.get(kind='galaxy'), - inputs = { - 'url': 'https://galaxy.ansible.com/' - } - ) - public_galaxy_credential.save() - o.galaxy_credentials.add(public_galaxy_credential) - i = Inventory.objects.create(name='Demo Inventory', - organization=o, - created_by=superuser) - Host.objects.create(name='localhost', - inventory=i, - variables="ansible_connection: local\nansible_python_interpreter: '{{ ansible_playbook_python }}'", - created_by=superuser) - jt = JobTemplate.objects.create(name='Demo Job Template', - playbook='hello_world.yml', - project=p, - inventory=i) - jt.credentials.add(c) - print('Default organization added.') - print('Demo Credential, Inventory, and Job Template added.') - print('(changed: True)') + if not Organization.objects.exists(): + o = Organization.objects.create(name='Default') + + p = Project(name='Demo Project', + scm_type='git', + scm_url='https://github.com/ansible/ansible-tower-samples', + scm_update_on_launch=True, + scm_update_cache_timeout=0, + organization=o) + p.save(skip_update=True) + + ssh_type = CredentialType.objects.filter(namespace='ssh').first() + c = Credential.objects.create(credential_type=ssh_type, + name='Demo Credential', + inputs={ + 'username': 
superuser.username + }, + created_by=superuser) + + c.admin_role.members.add(superuser) + + public_galaxy_credential = Credential(name='Ansible Galaxy', + managed_by_tower=True, + credential_type=CredentialType.objects.get(kind='galaxy'), + inputs={'url': 'https://galaxy.ansible.com/'}) + public_galaxy_credential.save() + o.galaxy_credentials.add(public_galaxy_credential) + + i = Inventory.objects.create(name='Demo Inventory', + organization=o, + created_by=superuser) + + Host.objects.create(name='localhost', + inventory=i, + variables="ansible_connection: local\nansible_python_interpreter: '{{ ansible_playbook_python }}'", + created_by=superuser) + + jt = JobTemplate.objects.create(name='Demo Job Template', + playbook='hello_world.yml', + project=p, + inventory=i) + jt.credentials.add(c) + + print('Default organization added.') + print('Demo Credential, Inventory, and Job Template added.') + changed = True + + default_ee = settings.AWX_EXECUTION_ENVIRONMENT_DEFAULT_IMAGE + ee, created = ExecutionEnvironment.objects.get_or_create(name='Default EE', defaults={'image': default_ee, + 'managed_by_tower': True}) + + if created: + changed = True + print('Default Execution Environment registered.') + + if changed: + print('(changed: True)') + else: + print('(changed: False)') diff --git a/awx/main/managers.py b/awx/main/managers.py index ae93a552a0..1af57a9423 100644 --- a/awx/main/managers.py +++ b/awx/main/managers.py @@ -237,7 +237,7 @@ class InstanceGroupManager(models.Manager): elif t.status == 'running': # Subtract capacity from all groups that contain the instance if t.execution_node not in instance_ig_mapping: - if not t.is_containerized: + if not t.is_container_group_task: logger.warning('Detected %s running inside lost instance, ' 'may still be waiting for reaper.', t.log_format) if t.instance_group: diff --git a/awx/main/migrations/0124_execution_environments.py b/awx/main/migrations/0124_execution_environments.py new file mode 100644 index 
0000000000..18aad9a174 --- /dev/null +++ b/awx/main/migrations/0124_execution_environments.py @@ -0,0 +1,59 @@ +# Generated by Django 2.2.11 on 2020-07-08 18:42 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import django.db.models.expressions +import taggit.managers + + +class Migration(migrations.Migration): + + dependencies = [ + ('taggit', '0003_taggeditem_add_unique_index'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('main', '0123_drop_hg_support'), + ] + + operations = [ + migrations.CreateModel( + name='ExecutionEnvironment', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created', models.DateTimeField(default=None, editable=False)), + ('modified', models.DateTimeField(default=None, editable=False)), + ('description', models.TextField(blank=True, default='')), + ('image', models.CharField(help_text='The registry location where the container is stored.', max_length=1024, verbose_name='image location')), + ('managed_by_tower', models.BooleanField(default=False, editable=False)), + ('created_by', models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'executionenvironment', 'model_name': 'executionenvironment', 'app_label': 'main'}(class)s_created+", to=settings.AUTH_USER_MODEL)), + ('credential', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='executionenvironments', to='main.Credential')), + ('modified_by', models.ForeignKey(default=None, editable=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="{'class': 'executionenvironment', 'model_name': 'executionenvironment', 'app_label': 'main'}(class)s_modified+", to=settings.AUTH_USER_MODEL)), + ('organization', models.ForeignKey(blank=True, default=None, help_text='The organization used to 
determine access to this execution environment.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executionenvironments', to='main.Organization')), + ('tags', taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags')), + ], + options={ + 'ordering': (django.db.models.expressions.OrderBy(django.db.models.expressions.F('organization_id'), nulls_first=True), 'image'), + 'unique_together': {('organization', 'image')}, + }, + ), + migrations.AddField( + model_name='activitystream', + name='execution_environment', + field=models.ManyToManyField(blank=True, to='main.ExecutionEnvironment'), + ), + migrations.AddField( + model_name='organization', + name='default_environment', + field=models.ForeignKey(blank=True, default=None, help_text='The default execution environment for jobs run by this organization.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='main.ExecutionEnvironment'), + ), + migrations.AddField( + model_name='unifiedjob', + name='execution_environment', + field=models.ForeignKey(blank=True, default=None, help_text='The container image to be used for execution.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='unifiedjobs', to='main.ExecutionEnvironment'), + ), + migrations.AddField( + model_name='unifiedjobtemplate', + name='execution_environment', + field=models.ForeignKey(blank=True, default=None, help_text='The container image to be used for execution.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='unifiedjobtemplates', to='main.ExecutionEnvironment'), + ), + ] diff --git a/awx/main/migrations/0125_more_ee_modeling_changes.py b/awx/main/migrations/0125_more_ee_modeling_changes.py new file mode 100644 index 0000000000..be999cbb79 --- /dev/null +++ b/awx/main/migrations/0125_more_ee_modeling_changes.py @@ -0,0 +1,46 @@ +# Generated by Django 2.2.16 on 
2020-11-19 16:20 +import uuid + +import awx.main.fields +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0124_execution_environments'), + ] + + operations = [ + migrations.AlterModelOptions( + name='executionenvironment', + options={'ordering': ('-created',)}, + ), + migrations.AddField( + model_name='executionenvironment', + name='name', + field=models.CharField(default=uuid.uuid4, max_length=512, unique=True), + preserve_default=False, + ), + migrations.AddField( + model_name='organization', + name='execution_environment_admin_role', + field=awx.main.fields.ImplicitRoleField(editable=False, null='True', on_delete=django.db.models.deletion.CASCADE, parent_role='admin_role', related_name='+', to='main.Role'), + preserve_default='True', + ), + migrations.AddField( + model_name='project', + name='default_environment', + field=models.ForeignKey(blank=True, default=None, help_text='The default execution environment for jobs run using this project.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='main.ExecutionEnvironment'), + ), + migrations.AlterField( + model_name='credentialtype', + name='kind', + field=models.CharField(choices=[('ssh', 'Machine'), ('vault', 'Vault'), ('net', 'Network'), ('scm', 'Source Control'), ('cloud', 'Cloud'), ('registry', 'Container Registry'), ('token', 'Personal Access Token'), ('insights', 'Insights'), ('external', 'External'), ('kubernetes', 'Kubernetes'), ('galaxy', 'Galaxy/Automation Hub')], max_length=32), + ), + migrations.AlterUniqueTogether( + name='executionenvironment', + unique_together=set(), + ), + ] diff --git a/awx/main/migrations/0126_executionenvironment_container_options.py b/awx/main/migrations/0126_executionenvironment_container_options.py new file mode 100644 index 0000000000..d26fcb9298 --- /dev/null +++ b/awx/main/migrations/0126_executionenvironment_container_options.py @@ -0,0 
+1,18 @@ +# Generated by Django 2.2.16 on 2021-01-27 22:31 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0125_more_ee_modeling_changes'), + ] + + operations = [ + migrations.AddField( + model_name='executionenvironment', + name='pull', + field=models.CharField(choices=[('always', 'Always pull container before running.'), ('missing', 'No pull option has been selected.'), ('never', 'Never pull container before running.')], blank=True, default='', help_text='Pull image before running?', max_length=16), + ), + ] diff --git a/awx/main/migrations/0127_reset_pod_spec_override.py b/awx/main/migrations/0127_reset_pod_spec_override.py new file mode 100644 index 0000000000..c3ebe0b504 --- /dev/null +++ b/awx/main/migrations/0127_reset_pod_spec_override.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.16 on 2021-02-15 22:02 + +from django.db import migrations + +def reset_pod_specs(apps, schema_editor): + InstanceGroup = apps.get_model('main', 'InstanceGroup') + InstanceGroup.objects.update(pod_spec_override="") + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0126_executionenvironment_container_options'), + ] + + operations = [ + migrations.RunPython(reset_pod_specs) + ] diff --git a/awx/main/migrations/0128_organiaztion_read_roles_ee_admin.py b/awx/main/migrations/0128_organiaztion_read_roles_ee_admin.py new file mode 100644 index 0000000000..f03a4e0ba2 --- /dev/null +++ b/awx/main/migrations/0128_organiaztion_read_roles_ee_admin.py @@ -0,0 +1,20 @@ +# Generated by Django 2.2.16 on 2021-02-18 22:57 + +import awx.main.fields +from django.db import migrations +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0127_reset_pod_spec_override'), + ] + + operations = [ + migrations.AlterField( + model_name='organization', + name='read_role', + field=awx.main.fields.ImplicitRoleField(editable=False, null='True', 
on_delete=django.db.models.deletion.CASCADE, parent_role=['member_role', 'auditor_role', 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', 'job_template_admin_role', 'approval_role', 'execution_environment_admin_role'], related_name='+', to='main.Role'), + ), + ] diff --git a/awx/main/migrations/0129_unifiedjob_installed_collections.py b/awx/main/migrations/0129_unifiedjob_installed_collections.py new file mode 100644 index 0000000000..897708a631 --- /dev/null +++ b/awx/main/migrations/0129_unifiedjob_installed_collections.py @@ -0,0 +1,19 @@ +# Generated by Django 2.2.16 on 2021-02-16 20:27 + +import awx.main.fields +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('main', '0128_organiaztion_read_roles_ee_admin'), + ] + + operations = [ + migrations.AddField( + model_name='unifiedjob', + name='installed_collections', + field=awx.main.fields.JSONBField(blank=True, default=dict, editable=False, help_text='The Collections names and versions installed in the execution environment.'), + ), + ] diff --git a/awx/main/models/__init__.py b/awx/main/models/__init__.py index 87fa5d791f..52cabf3774 100644 --- a/awx/main/models/__init__.py +++ b/awx/main/models/__init__.py @@ -35,6 +35,7 @@ from awx.main.models.events import ( # noqa ) from awx.main.models.ad_hoc_commands import AdHocCommand # noqa from awx.main.models.schedules import Schedule # noqa +from awx.main.models.execution_environments import ExecutionEnvironment # noqa from awx.main.models.activity_stream import ActivityStream # noqa from awx.main.models.ha import ( # noqa Instance, InstanceGroup, TowerScheduleState, @@ -45,7 +46,7 @@ from awx.main.models.rbac import ( # noqa ROLE_SINGLETON_SYSTEM_AUDITOR, ) from awx.main.models.mixins import ( # noqa - CustomVirtualEnvMixin, ResourceMixin, SurveyJobMixin, + CustomVirtualEnvMixin, ExecutionEnvironmentMixin, ResourceMixin, 
SurveyJobMixin, SurveyJobTemplateMixin, TaskManagerInventoryUpdateMixin, TaskManagerJobMixin, TaskManagerProjectUpdateMixin, TaskManagerUnifiedJobMixin, @@ -221,6 +222,7 @@ activity_stream_registrar.connect(CredentialType) activity_stream_registrar.connect(Team) activity_stream_registrar.connect(Project) #activity_stream_registrar.connect(ProjectUpdate) +activity_stream_registrar.connect(ExecutionEnvironment) activity_stream_registrar.connect(JobTemplate) activity_stream_registrar.connect(Job) activity_stream_registrar.connect(AdHocCommand) diff --git a/awx/main/models/activity_stream.py b/awx/main/models/activity_stream.py index 85666e49d2..1c344692d6 100644 --- a/awx/main/models/activity_stream.py +++ b/awx/main/models/activity_stream.py @@ -61,6 +61,7 @@ class ActivityStream(models.Model): team = models.ManyToManyField("Team", blank=True) project = models.ManyToManyField("Project", blank=True) project_update = models.ManyToManyField("ProjectUpdate", blank=True) + execution_environment = models.ManyToManyField("ExecutionEnvironment", blank=True) job_template = models.ManyToManyField("JobTemplate", blank=True) job = models.ManyToManyField("Job", blank=True) workflow_job_template_node = models.ManyToManyField("WorkflowJobTemplateNode", blank=True) @@ -74,6 +75,7 @@ class ActivityStream(models.Model): ad_hoc_command = models.ManyToManyField("AdHocCommand", blank=True) schedule = models.ManyToManyField("Schedule", blank=True) custom_inventory_script = models.ManyToManyField("CustomInventoryScript", blank=True) + execution_environment = models.ManyToManyField("ExecutionEnvironment", blank=True) notification_template = models.ManyToManyField("NotificationTemplate", blank=True) notification = models.ManyToManyField("Notification", blank=True) label = models.ManyToManyField("Label", blank=True) diff --git a/awx/main/models/ad_hoc_commands.py b/awx/main/models/ad_hoc_commands.py index 9787f01423..f327e2a7e6 100644 --- a/awx/main/models/ad_hoc_commands.py +++ 
b/awx/main/models/ad_hoc_commands.py @@ -151,8 +151,8 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): return True @property - def is_containerized(self): - return bool(self.instance_group and self.instance_group.is_containerized) + def is_container_group_task(self): + return bool(self.instance_group and self.instance_group.is_container_group) @property def can_run_containerized(self): @@ -198,8 +198,8 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): def copy(self): data = {} for field in ('job_type', 'inventory_id', 'limit', 'credential_id', - 'module_name', 'module_args', 'forks', 'verbosity', - 'extra_vars', 'become_enabled', 'diff_mode'): + 'execution_environment_id', 'module_name', 'module_args', + 'forks', 'verbosity', 'extra_vars', 'become_enabled', 'diff_mode'): data[field] = getattr(self, field) return AdHocCommand.objects.create(**data) @@ -209,6 +209,9 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin): self.name = Truncator(u': '.join(filter(None, (self.module_name, self.module_args)))).chars(512) if 'name' not in update_fields: update_fields.append('name') + if not self.execution_environment_id: + self.execution_environment = self.resolve_execution_environment() + update_fields.append('execution_environment') super(AdHocCommand, self).save(*args, **kwargs) @property diff --git a/awx/main/models/credential/__init__.py b/awx/main/models/credential/__init__.py index e8a2884083..7cdd9898d3 100644 --- a/awx/main/models/credential/__init__.py +++ b/awx/main/models/credential/__init__.py @@ -331,6 +331,7 @@ class CredentialType(CommonModelNameNotUnique): ('net', _('Network')), ('scm', _('Source Control')), ('cloud', _('Cloud')), + ('registry', _('Container Registry')), ('token', _('Personal Access Token')), ('insights', _('Insights')), ('external', _('External')), @@ -528,15 +529,20 @@ class CredentialType(CommonModelNameNotUnique): with open(path, 'w') as f: f.write(data) os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) + # FIXME: develop 
some better means of referencing paths inside containers + container_path = os.path.join( + '/runner', + os.path.basename(path) + ) # determine if filename indicates single file or many if file_label.find('.') == -1: - tower_namespace.filename = path + tower_namespace.filename = container_path else: if not hasattr(tower_namespace, 'filename'): tower_namespace.filename = TowerNamespace() file_label = file_label.split('.')[1] - setattr(tower_namespace.filename, file_label, path) + setattr(tower_namespace.filename, file_label, container_path) injector_field = self._meta.get_field('injectors') for env_var, tmpl in self.injectors.get('env', {}).items(): @@ -564,7 +570,12 @@ class CredentialType(CommonModelNameNotUnique): if extra_vars: path = build_extra_vars_file(extra_vars, private_data_dir) - args.extend(['-e', '@%s' % path]) + # FIXME: develop some better means of referencing paths inside containers + container_path = os.path.join( + '/runner', + os.path.basename(path) + ) + args.extend(['-e', '@%s' % container_path]) class ManagedCredentialType(SimpleNamespace): @@ -1123,7 +1134,6 @@ ManagedCredentialType( }, ) - ManagedCredentialType( namespace='kubernetes_bearer_token', kind='kubernetes', @@ -1155,6 +1165,37 @@ ManagedCredentialType( } ) +ManagedCredentialType( + namespace='registry', + kind='registry', + name=ugettext_noop('Container Registry'), + inputs={ + 'fields': [{ + 'id': 'host', + 'label': ugettext_noop('Authentication URL'), + 'type': 'string', + 'help_text': ugettext_noop('Authentication endpoint for the container registry.'), + }, { + 'id': 'username', + 'label': ugettext_noop('Username'), + 'type': 'string', + }, { + 'id': 'password', + 'label': ugettext_noop('Password'), + 'type': 'string', + 'secret': True, + }, { + 'id': 'token', + 'label': ugettext_noop('Access Token'), + 'type': 'string', + 'secret': True, + 'help_text': ugettext_noop('A token to use to authenticate with. 
' + 'This should not be set if username/password are being used.'), + }], + 'required': ['host'], + } +) + ManagedCredentialType( namespace='galaxy_api_token', diff --git a/awx/main/models/credential/injectors.py b/awx/main/models/credential/injectors.py index 90615f2d66..75a08482cc 100644 --- a/awx/main/models/credential/injectors.py +++ b/awx/main/models/credential/injectors.py @@ -35,8 +35,8 @@ def gce(cred, env, private_data_dir): json.dump(json_cred, f, indent=2) f.close() os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) - env['GCE_CREDENTIALS_FILE_PATH'] = path - env['GCP_SERVICE_ACCOUNT_FILE'] = path + env['GCE_CREDENTIALS_FILE_PATH'] = os.path.join('/runner', os.path.basename(path)) + env['GCP_SERVICE_ACCOUNT_FILE'] = os.path.join('/runner', os.path.basename(path)) # Handle env variables for new module types. # This includes gcp_compute inventory plugin and @@ -105,7 +105,8 @@ def openstack(cred, env, private_data_dir): yaml.safe_dump(openstack_data, f, default_flow_style=False, allow_unicode=True) f.close() os.chmod(path, stat.S_IRUSR | stat.S_IWUSR) - env['OS_CLIENT_CONFIG_FILE'] = path + # TODO: constant for container base path + env['OS_CLIENT_CONFIG_FILE'] = os.path.join('/runner', os.path.basename(path)) def kubernetes_bearer_token(cred, env, private_data_dir): diff --git a/awx/main/models/execution_environments.py b/awx/main/models/execution_environments.py new file mode 100644 index 0000000000..eabd0cce7c --- /dev/null +++ b/awx/main/models/execution_environments.py @@ -0,0 +1,53 @@ +from django.db import models +from django.utils.translation import ugettext_lazy as _ + +from awx.api.versioning import reverse +from awx.main.models.base import CommonModel + + +__all__ = ['ExecutionEnvironment'] + + +class ExecutionEnvironment(CommonModel): + class Meta: + ordering = ('-created',) + + PULL_CHOICES = [ + ('always', _("Always pull container before running.")), + ('missing', _("No pull option has been selected.")), + ('never', _("Never pull container before 
running.")) + ] + + organization = models.ForeignKey( + 'Organization', + null=True, + default=None, + blank=True, + on_delete=models.CASCADE, + related_name='%(class)ss', + help_text=_('The organization used to determine access to this execution environment.'), + ) + image = models.CharField( + max_length=1024, + verbose_name=_('image location'), + help_text=_("The registry location where the container is stored."), + ) + managed_by_tower = models.BooleanField(default=False, editable=False) + credential = models.ForeignKey( + 'Credential', + related_name='%(class)ss', + blank=True, + null=True, + default=None, + on_delete=models.SET_NULL, + ) + pull = models.CharField( + max_length=16, + choices=PULL_CHOICES, + blank=True, + default='', + help_text=_('Pull image before running?'), + ) + + def get_absolute_url(self, request=None): + return reverse('api:execution_environment_detail', kwargs={'pk': self.pk}, request=request) diff --git a/awx/main/models/ha.py b/awx/main/models/ha.py index 5071786653..94d4b8d462 100644 --- a/awx/main/models/ha.py +++ b/awx/main/models/ha.py @@ -147,6 +147,13 @@ class Instance(HasPolicyEditsMixin, BaseModel): return self.rampart_groups.filter(controller__isnull=False).exists() def refresh_capacity(self): + if settings.IS_K8S: + self.capacity = self.cpu = self.memory = self.cpu_capacity = self.mem_capacity = 0 # noqa + self.version = awx_application_version + self.save(update_fields=['capacity', 'version', 'modified', 'cpu', + 'memory', 'cpu_capacity', 'mem_capacity']) + return + cpu = get_cpu_capacity() mem = get_mem_capacity() if self.enabled: @@ -247,7 +254,10 @@ class InstanceGroup(HasPolicyEditsMixin, BaseModel, RelatedJobsMixin): return bool(self.controller) @property - def is_containerized(self): + def is_container_group(self): + if settings.IS_K8S: + return True + return bool(self.credential and self.credential.kubernetes) ''' @@ -306,9 +316,9 @@ def schedule_policy_task(): @receiver(post_save, sender=InstanceGroup) def 
on_instance_group_saved(sender, instance, created=False, raw=False, **kwargs): if created or instance.has_policy_changes(): - if not instance.is_containerized: + if not instance.is_container_group: schedule_policy_task() - elif created or instance.is_containerized: + elif created or instance.is_container_group: instance.set_default_policy_fields() @@ -320,7 +330,7 @@ def on_instance_saved(sender, instance, created=False, raw=False, **kwargs): @receiver(post_delete, sender=InstanceGroup) def on_instance_group_deleted(sender, instance, using, **kwargs): - if not instance.is_containerized: + if not instance.is_container_group: schedule_policy_task() diff --git a/awx/main/models/inventory.py b/awx/main/models/inventory.py index 5305e6e532..9000fe41c4 100644 --- a/awx/main/models/inventory.py +++ b/awx/main/models/inventory.py @@ -1373,6 +1373,7 @@ class PluginFileInjector(object): collection = None collection_migration = '2.9' # Starting with this version, we use collections + # TODO: delete this method and update unit tests @classmethod def get_proper_name(cls): if cls.plugin_name is None: @@ -1397,13 +1398,12 @@ class PluginFileInjector(object): def inventory_as_dict(self, inventory_update, private_data_dir): source_vars = dict(inventory_update.source_vars_dict) # make a copy - proper_name = self.get_proper_name() ''' None conveys that we should use the user-provided plugin. Note that a plugin value of '' should still be overridden. 
''' - if proper_name is not None: - source_vars['plugin'] = proper_name + if self.plugin_name is not None: + source_vars['plugin'] = self.plugin_name return source_vars def build_env(self, inventory_update, env, private_data_dir, private_data_files): @@ -1441,7 +1441,6 @@ class PluginFileInjector(object): def get_plugin_env(self, inventory_update, private_data_dir, private_data_files): env = self._get_shared_env(inventory_update, private_data_dir, private_data_files) - env['ANSIBLE_COLLECTIONS_PATHS'] = settings.AWX_ANSIBLE_COLLECTIONS_PATHS return env def build_private_data(self, inventory_update, private_data_dir): @@ -1544,7 +1543,7 @@ class openstack(PluginFileInjector): env = super(openstack, self).get_plugin_env(inventory_update, private_data_dir, private_data_files) credential = inventory_update.get_cloud_credential() cred_data = private_data_files['credentials'] - env['OS_CLIENT_CONFIG_FILE'] = cred_data[credential] + env['OS_CLIENT_CONFIG_FILE'] = os.path.join('/runner', os.path.basename(cred_data[credential])) return env @@ -1574,6 +1573,12 @@ class satellite6(PluginFileInjector): ret['FOREMAN_PASSWORD'] = credential.get_input('password', default='') return ret + def inventory_as_dict(self, inventory_update, private_data_dir): + ret = super(satellite6, self).inventory_as_dict(inventory_update, private_data_dir) + # this inventory plugin requires the fully qualified inventory plugin name + ret['plugin'] = f'{self.namespace}.{self.collection}.{self.plugin_name}' + return ret + class tower(PluginFileInjector): plugin_name = 'tower' diff --git a/awx/main/models/jobs.py b/awx/main/models/jobs.py index 638954e53c..70cdfa363a 100644 --- a/awx/main/models/jobs.py +++ b/awx/main/models/jobs.py @@ -284,7 +284,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour def _get_unified_job_field_names(cls): return set(f.name for f in JobOptions._meta.fields) | set( ['name', 'description', 'organization', 'survey_passwords', 'labels', 
'credentials', - 'job_slice_number', 'job_slice_count'] + 'job_slice_number', 'job_slice_count', 'execution_environment'] ) @property @@ -768,11 +768,11 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana @property def can_run_containerized(self): - return any([ig for ig in self.preferred_instance_groups if ig.is_containerized]) + return any([ig for ig in self.preferred_instance_groups if ig.is_container_group]) @property - def is_containerized(self): - return bool(self.instance_group and self.instance_group.is_containerized) + def is_container_group_task(self): + return bool(self.instance_group and self.instance_group.is_container_group) @property def preferred_instance_groups(self): @@ -1286,6 +1286,8 @@ class SystemJob(UnifiedJob, SystemJobOptions, JobNotificationMixin): @property def task_impact(self): + if settings.IS_K8S: + return 0 return 5 @property diff --git a/awx/main/models/mixins.py b/awx/main/models/mixins.py index ce6d3717a7..549c93607d 100644 --- a/awx/main/models/mixins.py +++ b/awx/main/models/mixins.py @@ -34,7 +34,7 @@ logger = logging.getLogger('awx.main.models.mixins') __all__ = ['ResourceMixin', 'SurveyJobTemplateMixin', 'SurveyJobMixin', 'TaskManagerUnifiedJobMixin', 'TaskManagerJobMixin', 'TaskManagerProjectUpdateMixin', - 'TaskManagerInventoryUpdateMixin', 'CustomVirtualEnvMixin'] + 'TaskManagerInventoryUpdateMixin', 'ExecutionEnvironmentMixin', 'CustomVirtualEnvMixin'] class ResourceMixin(models.Model): @@ -441,6 +441,44 @@ class TaskManagerInventoryUpdateMixin(TaskManagerUpdateOnLaunchMixin): abstract = True +class ExecutionEnvironmentMixin(models.Model): + class Meta: + abstract = True + + execution_environment = models.ForeignKey( + 'ExecutionEnvironment', + null=True, + blank=True, + default=None, + on_delete=models.SET_NULL, + related_name='%(class)ss', + help_text=_('The container image to be used for execution.'), + ) + + def get_execution_environment_default(self): + from 
awx.main.models.execution_environments import ExecutionEnvironment + + if settings.DEFAULT_EXECUTION_ENVIRONMENT is not None: + return settings.DEFAULT_EXECUTION_ENVIRONMENT + return ExecutionEnvironment.objects.filter(organization=None, managed_by_tower=True).first() + + def resolve_execution_environment(self): + """ + Return the execution environment that should be used when creating a new job. + """ + if self.execution_environment is not None: + return self.execution_environment + if getattr(self, 'project_id', None) and self.project.default_environment is not None: + return self.project.default_environment + if getattr(self, 'organization', None) and self.organization.default_environment is not None: + return self.organization.default_environment + if getattr(self, 'inventory', None) and self.inventory.organization is not None: + if self.inventory.organization.default_environment is not None: + return self.inventory.organization.default_environment + + return self.get_execution_environment_default() + + class CustomVirtualEnvMixin(models.Model): class Meta: abstract = True diff --git a/awx/main/models/organization.py b/awx/main/models/organization.py index bf2e07d255..f0ecfea5c7 100644 --- a/awx/main/models/organization.py +++ b/awx/main/models/organization.py @@ -61,6 +61,15 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi blank=True, related_name='%(class)s_notification_templates_for_approvals' ) + default_environment = models.ForeignKey( + 'ExecutionEnvironment', + null=True, + blank=True, + default=None, + on_delete=models.SET_NULL, + related_name='+', + help_text=_('The default execution environment for jobs run by this organization.'), + ) admin_role = ImplicitRoleField( parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, @@ -86,6 +95,9 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi job_template_admin_role = ImplicitRoleField( parent_role='admin_role', ) + 
execution_environment_admin_role = ImplicitRoleField( + parent_role='admin_role', + ) auditor_role = ImplicitRoleField( parent_role='singleton:' + ROLE_SINGLETON_SYSTEM_AUDITOR, ) @@ -97,7 +109,8 @@ class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVi 'execute_role', 'project_admin_role', 'inventory_admin_role', 'workflow_admin_role', 'notification_admin_role', 'credential_admin_role', - 'job_template_admin_role', 'approval_role',], + 'job_template_admin_role', 'approval_role', + 'execution_environment_admin_role',], ) approval_role = ImplicitRoleField( parent_role='admin_role', diff --git a/awx/main/models/projects.py b/awx/main/models/projects.py index 65fb8304ce..c9bf87f408 100644 --- a/awx/main/models/projects.py +++ b/awx/main/models/projects.py @@ -187,6 +187,14 @@ class ProjectOptions(models.Model): pass return cred + def resolve_execution_environment(self): + """ + Project updates, themselves, will use the default execution environment. + Jobs using the project can use the default_environment, but the project updates + are not flexible enough to allow customizing the image they use. 
+ """ + return self.get_execution_environment_default() + def get_project_path(self, check_if_exists=True): local_path = os.path.basename(self.local_path) if local_path and not local_path.startswith('.'): @@ -259,6 +267,15 @@ class Project(UnifiedJobTemplate, ProjectOptions, ResourceMixin, CustomVirtualEn app_label = 'main' ordering = ('id',) + default_environment = models.ForeignKey( + 'ExecutionEnvironment', + null=True, + blank=True, + default=None, + on_delete=models.SET_NULL, + related_name='+', + help_text=_('The default execution environment for jobs run using this project.'), + ) scm_update_on_launch = models.BooleanField( default=False, help_text=_('Update the project when a job is launched that uses the project.'), @@ -554,6 +571,8 @@ class ProjectUpdate(UnifiedJob, ProjectOptions, JobNotificationMixin, TaskManage @property def task_impact(self): + if settings.IS_K8S: + return 0 return 0 if self.job_type == 'run' else 1 @property diff --git a/awx/main/models/rbac.py b/awx/main/models/rbac.py index 67d21e873d..fe8d622ac6 100644 --- a/awx/main/models/rbac.py +++ b/awx/main/models/rbac.py @@ -40,6 +40,7 @@ role_names = { 'inventory_admin_role': _('Inventory Admin'), 'credential_admin_role': _('Credential Admin'), 'job_template_admin_role': _('Job Template Admin'), + 'execution_environment_admin_role': _('Execution Environment Admin'), 'workflow_admin_role': _('Workflow Admin'), 'notification_admin_role': _('Notification Admin'), 'auditor_role': _('Auditor'), @@ -60,6 +61,7 @@ role_descriptions = { 'inventory_admin_role': _('Can manage all inventories of the %s'), 'credential_admin_role': _('Can manage all credentials of the %s'), 'job_template_admin_role': _('Can manage all job templates of the %s'), + 'execution_environment_admin_role': _('Can manage all execution environments of the %s'), 'workflow_admin_role': _('Can manage all workflows of the %s'), 'notification_admin_role': _('Can manage all notifications of the %s'), 'auditor_role': _('Can view all 
aspects of the %s'), diff --git a/awx/main/models/unified_jobs.py b/awx/main/models/unified_jobs.py index 064585c6c1..45d7739ee3 100644 --- a/awx/main/models/unified_jobs.py +++ b/awx/main/models/unified_jobs.py @@ -39,7 +39,7 @@ from awx.main.models.base import ( from awx.main.dispatch import get_local_queuename from awx.main.dispatch.control import Control as ControlDispatcher from awx.main.registrar import activity_stream_registrar -from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin +from awx.main.models.mixins import ResourceMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin from awx.main.utils import ( camelcase_to_underscore, get_model_for_type, encrypt_dict, decrypt_field, _inventory_updates, @@ -50,7 +50,7 @@ from awx.main.utils import ( from awx.main.constants import ACTIVE_STATES, CAN_CANCEL from awx.main.redact import UriCleaner, REPLACE_STR from awx.main.consumers import emit_channel_notification -from awx.main.fields import JSONField, AskForField, OrderedManyToManyField +from awx.main.fields import JSONField, JSONBField, AskForField, OrderedManyToManyField __all__ = ['UnifiedJobTemplate', 'UnifiedJob', 'StdoutMaxBytesExceeded'] @@ -59,7 +59,7 @@ logger_job_lifecycle = logging.getLogger('awx.analytics.job_lifecycle') # NOTE: ACTIVE_STATES moved to constants because it is used by parent modules -class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, NotificationFieldsModel): +class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, ExecutionEnvironmentMixin, NotificationFieldsModel): ''' Concrete base class for unified job templates. 
''' @@ -376,6 +376,8 @@ class UnifiedJobTemplate(PolymorphicModel, CommonModelNameNotUnique, Notificatio for fd, val in eager_fields.items(): setattr(unified_job, fd, val) + unified_job.execution_environment = self.resolve_execution_environment() + # NOTE: slice workflow jobs _get_parent_field_name method # is not correct until this is set if not parent_field_name: @@ -527,7 +529,7 @@ class StdoutMaxBytesExceeded(Exception): class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique, - UnifiedJobTypeStringMixin, TaskManagerUnifiedJobMixin): + UnifiedJobTypeStringMixin, TaskManagerUnifiedJobMixin, ExecutionEnvironmentMixin): ''' Concrete base class for unified job run by the task engine. ''' @@ -720,6 +722,12 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique 'Credential', related_name='%(class)ss', ) + installed_collections = JSONBField( + blank=True, + default=dict, + editable=False, + help_text=_("The Collections names and versions installed in the execution environment."), + ) def get_absolute_url(self, request=None): RealClass = self.get_real_instance_class() @@ -1488,7 +1496,7 @@ class UnifiedJob(PolymorphicModel, PasswordFieldsModel, CommonModelNameNotUnique return bool(self.controller_node) @property - def is_containerized(self): + def is_container_group_task(self): return False def log_lifecycle(self, state, blocked_by=None): diff --git a/awx/main/scheduler/task_manager.py b/awx/main/scheduler/task_manager.py index f06f93834a..8d8df5eee2 100644 --- a/awx/main/scheduler/task_manager.py +++ b/awx/main/scheduler/task_manager.py @@ -70,7 +70,7 @@ class TaskManager(): ''' Init AFTER we know this instance of the task manager will run because the lock is acquired. 
''' - instances = Instance.objects.filter(~Q(hostname=None), capacity__gt=0, enabled=True) + instances = Instance.objects.filter(~Q(hostname=None), enabled=True) self.real_instances = {i.hostname: i for i in instances} instances_partial = [SimpleNamespace(obj=instance, @@ -86,7 +86,7 @@ class TaskManager(): capacity_total=rampart_group.capacity, consumed_capacity=0, instances=[]) - for instance in rampart_group.instances.filter(capacity__gt=0, enabled=True).order_by('hostname'): + for instance in rampart_group.instances.filter(enabled=True).order_by('hostname'): if instance.hostname in instances_by_hostname: self.graph[rampart_group.name]['instances'].append(instances_by_hostname[instance.hostname]) @@ -283,12 +283,12 @@ class TaskManager(): task.controller_node = controller_node logger.debug('Submitting isolated {} to queue {} controlled by {}.'.format( task.log_format, task.execution_node, controller_node)) - elif rampart_group.is_containerized: + elif rampart_group.is_container_group: # find one real, non-containerized instance with capacity to # act as the controller for k8s API interaction match = None for group in InstanceGroup.objects.all(): - if group.is_containerized or group.controller_id: + if group.is_container_group or group.controller_id: continue match = group.fit_task_to_most_remaining_capacity_instance(task, group.instances.all()) if match: @@ -521,14 +521,17 @@ class TaskManager(): self.start_task(task, None, task.get_jobs_fail_chain(), None) continue for rampart_group in preferred_instance_groups: - if task.can_run_containerized and rampart_group.is_containerized: + if task.can_run_containerized and rampart_group.is_container_group: self.graph[rampart_group.name]['graph'].add_job(task) self.start_task(task, rampart_group, task.get_jobs_fail_chain(), None) found_acceptable_queue = True break remaining_capacity = self.get_remaining_capacity(rampart_group.name) - if not rampart_group.is_containerized and 
self.get_remaining_capacity(rampart_group.name) <= 0: + if ( + task.task_impact > 0 and # project updates have a cost of zero + not rampart_group.is_container_group and + self.get_remaining_capacity(rampart_group.name) <= 0): logger.debug("Skipping group {}, remaining_capacity {} <= 0".format( rampart_group.name, remaining_capacity)) continue @@ -536,8 +539,8 @@ class TaskManager(): execution_instance = InstanceGroup.fit_task_to_most_remaining_capacity_instance(task, self.graph[rampart_group.name]['instances']) or \ InstanceGroup.find_largest_idle_instance(self.graph[rampart_group.name]['instances']) - if execution_instance or rampart_group.is_containerized: - if not rampart_group.is_containerized: + if execution_instance or rampart_group.is_container_group: + if not rampart_group.is_container_group: execution_instance.remaining_capacity = max(0, execution_instance.remaining_capacity - task.task_impact) execution_instance.jobs_running += 1 logger.debug("Starting {} in group {} instance {} (remaining_capacity={})".format( @@ -594,7 +597,7 @@ class TaskManager(): ).exclude( execution_node__in=Instance.objects.values_list('hostname', flat=True) ): - if j.execution_node and not j.is_containerized: + if j.execution_node and not j.is_container_group_task: logger.error(f'{j.execution_node} is not a registered instance; reaping {j.log_format}') reap_job(j, 'failed') diff --git a/awx/main/signals.py b/awx/main/signals.py index 0a29fa9d6c..ac7a3d2301 100644 --- a/awx/main/signals.py +++ b/awx/main/signals.py @@ -368,6 +368,7 @@ def model_serializer_mapping(): models.Credential: serializers.CredentialSerializer, models.Team: serializers.TeamSerializer, models.Project: serializers.ProjectSerializer, + models.ExecutionEnvironment: serializers.ExecutionEnvironmentSerializer, models.JobTemplate: serializers.JobTemplateWithSpecSerializer, models.Job: serializers.JobSerializer, models.AdHocCommand: serializers.AdHocCommandSerializer, diff --git a/awx/main/tasks.py 
b/awx/main/tasks.py index b6ab905837..0f02f3a507 100644 --- a/awx/main/tasks.py +++ b/awx/main/tasks.py @@ -23,6 +23,10 @@ import fcntl from pathlib import Path from uuid import uuid4 import urllib.parse as urlparse +import socket +import threading +import concurrent.futures +from base64 import b64encode # Django from django.conf import settings @@ -36,9 +40,6 @@ from django.core.cache import cache from django.core.exceptions import ObjectDoesNotExist from django_guid.middleware import GuidMiddleware -# Kubernetes -from kubernetes.client.rest import ApiException - # Django-CRUM from crum import impersonate @@ -49,6 +50,9 @@ from gitdb.exc import BadName as BadGitName # Runner import ansible_runner +# Receptor +from receptorctl.socket_interface import ReceptorControl + # AWX from awx import __version__ as awx_application_version from awx.main.constants import PRIVILEGE_ESCALATION_METHODS, STANDARD_INVENTORY_UPDATE_ENV @@ -72,9 +76,10 @@ from awx.main.dispatch import get_local_queuename, reaper from awx.main.utils import (update_scm_url, ignore_inventory_computed_fields, ignore_inventory_group_removal, extract_ansible_vars, schedule_task_manager, - get_awx_version) + get_awx_version, + deepmerge, + parse_yaml_or_json) from awx.main.utils.ansible import read_ansible_config -from awx.main.utils.common import get_custom_venv_choices from awx.main.utils.external_logging import reconfigure_rsyslog from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja from awx.main.utils.reload import stop_local_services @@ -257,7 +262,7 @@ def apply_cluster_membership_policies(): # On a differential basis, apply instances to non-isolated groups with transaction.atomic(): for g in actual_groups: - if g.obj.is_containerized: + if g.obj.is_container_group: logger.debug('Skipping containerized group {} for policy calculation'.format(g.obj.name)) continue instances_to_add = set(g.instances) - set(g.prior_instances) @@ -502,7 +507,7 @@ def cluster_node_heartbeat(): def 
awx_k8s_reaper(): from awx.main.scheduler.kubernetes import PodManager # prevent circular import for group in InstanceGroup.objects.filter(credential__isnull=False).iterator(): - if group.is_containerized: + if group.is_container_group: logger.debug("Checking for orphaned k8s pods for {}.".format(group)) for job in UnifiedJob.objects.filter( pk__in=list(PodManager.list_active_jobs(group)) @@ -887,6 +892,34 @@ class BaseTask(object): ''' return os.path.abspath(os.path.join(os.path.dirname(__file__), *args)) + def build_execution_environment_params(self, instance): + if settings.IS_K8S: + return {} + + if instance.execution_environment_id is None: + from awx.main.signals import disable_activity_stream + + with disable_activity_stream(): + self.instance = instance = self.update_model( + instance.pk, execution_environment=instance.resolve_execution_environment()) + + image = instance.execution_environment.image + params = { + "container_image": image, + "process_isolation": True, + "container_options": ['--user=root'], + } + + pull = instance.execution_environment.pull + if pull: + params['container_options'].append(f'--pull={pull}') + + if settings.AWX_PROOT_SHOW_PATHS: + params['container_volume_mounts'] = [] + for this_path in settings.AWX_PROOT_SHOW_PATHS: + params['container_volume_mounts'].append(f'{this_path}:{this_path}:Z') + return params + def build_private_data(self, instance, private_data_dir): ''' Return SSH private key data (only if stored in DB as ssh_key_data). @@ -981,46 +1014,6 @@ class BaseTask(object): Build ansible yaml file filled with extra vars to be passed via -e@file.yml ''' - def build_params_process_isolation(self, instance, private_data_dir, cwd): - ''' - Build ansible runner .run() parameters for process isolation. 
- ''' - process_isolation_params = dict() - if self.should_use_proot(instance): - local_paths = [private_data_dir] - if cwd != private_data_dir and Path(private_data_dir) not in Path(cwd).parents: - local_paths.append(cwd) - show_paths = self.proot_show_paths + local_paths + \ - settings.AWX_PROOT_SHOW_PATHS - - pi_path = settings.AWX_PROOT_BASE_PATH - if not self.instance.is_isolated() and not self.instance.is_containerized: - pi_path = tempfile.mkdtemp( - prefix='ansible_runner_pi_', - dir=settings.AWX_PROOT_BASE_PATH - ) - os.chmod(pi_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - self.cleanup_paths.append(pi_path) - - process_isolation_params = { - 'process_isolation': True, - 'process_isolation_path': pi_path, - 'process_isolation_show_paths': show_paths, - 'process_isolation_hide_paths': [ - settings.AWX_PROOT_BASE_PATH, - '/etc/tower', - '/etc/ssh', - '/var/lib/awx', - '/var/log', - settings.PROJECTS_ROOT, - settings.JOBOUTPUT_ROOT, - ] + getattr(settings, 'AWX_PROOT_HIDE_PATHS', None) or [], - 'process_isolation_ro_paths': [settings.ANSIBLE_VENV_PATH, settings.AWX_VENV_PATH], - } - if getattr(instance, 'ansible_virtualenv_path', settings.ANSIBLE_VENV_PATH) != settings.ANSIBLE_VENV_PATH: - process_isolation_params['process_isolation_ro_paths'].append(instance.ansible_virtualenv_path) - return process_isolation_params - def build_params_resource_profiling(self, instance, private_data_dir): resource_profiling_params = {} if self.should_use_resource_profiling(instance): @@ -1031,6 +1024,8 @@ class BaseTask(object): results_dir = os.path.join(private_data_dir, 'artifacts/playbook_profiling') if not os.path.isdir(results_dir): os.makedirs(results_dir, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) + # FIXME: develop some better means of referencing paths inside containers + container_results_dir = os.path.join('/runner', 'artifacts/playbook_profiling') logger.debug('Collected the following resource profiling intervals: cpu: {} mem: {} pid: {}' 
.format(cpu_poll_interval, mem_poll_interval, pid_poll_interval)) @@ -1040,7 +1035,7 @@ class BaseTask(object): 'resource_profiling_cpu_poll_interval': cpu_poll_interval, 'resource_profiling_memory_poll_interval': mem_poll_interval, 'resource_profiling_pid_poll_interval': pid_poll_interval, - 'resource_profiling_results_dir': results_dir}) + 'resource_profiling_results_dir': container_results_dir}) return resource_profiling_params @@ -1063,30 +1058,18 @@ class BaseTask(object): os.chmod(path, stat.S_IRUSR) return path - def add_ansible_venv(self, venv_path, env, isolated=False): - env['VIRTUAL_ENV'] = venv_path - env['PATH'] = os.path.join(venv_path, "bin") + ":" + env['PATH'] - venv_libdir = os.path.join(venv_path, "lib") - - if not isolated and ( - not os.path.exists(venv_libdir) or - os.path.join(venv_path, '') not in get_custom_venv_choices() - ): - raise InvalidVirtualenvError(_( - 'Invalid virtual environment selected: {}'.format(venv_path) - )) - - isolated_manager.set_pythonpath(venv_libdir, env) - def add_awx_venv(self, env): env['VIRTUAL_ENV'] = settings.AWX_VENV_PATH - env['PATH'] = os.path.join(settings.AWX_VENV_PATH, "bin") + ":" + env['PATH'] + if 'PATH' in env: + env['PATH'] = os.path.join(settings.AWX_VENV_PATH, "bin") + ":" + env['PATH'] + else: + env['PATH'] = os.path.join(settings.AWX_VENV_PATH, "bin") def build_env(self, instance, private_data_dir, isolated, private_data_files=None): ''' Build environment dictionary for ansible-playbook. ''' - env = dict(os.environ.items()) + env = {} # Add ANSIBLE_* settings to the subprocess environment. for attr in dir(settings): if attr == attr.upper() and attr.startswith('ANSIBLE_'): @@ -1094,14 +1077,9 @@ class BaseTask(object): # Also set environment variables configured in AWX_TASK_ENV setting. for key, value in settings.AWX_TASK_ENV.items(): env[key] = str(value) - # Set environment variables needed for inventory and job event - # callbacks to work. - # Update PYTHONPATH to use local site-packages. 
- # NOTE: - # Derived class should call add_ansible_venv() or add_awx_venv() - if self.should_use_proot(instance): - env['PROOT_TMP_DIR'] = settings.AWX_PROOT_BASE_PATH + env['AWX_PRIVATE_DATA_DIR'] = private_data_dir + return env def should_use_resource_profiling(self, job): @@ -1129,12 +1107,13 @@ class BaseTask(object): for hostname, hv in script_data.get('_meta', {}).get('hostvars', {}).items() } json_data = json.dumps(script_data) - handle, path = tempfile.mkstemp(dir=private_data_dir) - f = os.fdopen(handle, 'w') - f.write('#! /usr/bin/env python\n# -*- coding: utf-8 -*-\nprint(%r)\n' % json_data) - f.close() - os.chmod(path, stat.S_IRUSR | stat.S_IXUSR | stat.S_IWUSR) - return path + path = os.path.join(private_data_dir, 'inventory') + os.makedirs(path, mode=0o700) + fn = os.path.join(path, 'hosts') + with open(fn, 'w') as f: + os.chmod(fn, stat.S_IRUSR | stat.S_IXUSR | stat.S_IWUSR) + f.write('#! /usr/bin/env python3\n# -*- coding: utf-8 -*-\nprint(%r)\n' % json_data) + return fn def build_args(self, instance, private_data_dir, passwords): raise NotImplementedError @@ -1205,17 +1184,17 @@ class BaseTask(object): instance.log_lifecycle("finalize_run") job_profiling_dir = os.path.join(private_data_dir, 'artifacts/playbook_profiling') awx_profiling_dir = '/var/log/tower/playbook_profiling/' + collections_info = os.path.join(private_data_dir, 'artifacts/', 'collections.json') + if not os.path.exists(awx_profiling_dir): os.mkdir(awx_profiling_dir) if os.path.isdir(job_profiling_dir): shutil.copytree(job_profiling_dir, os.path.join(awx_profiling_dir, str(instance.pk))) - - if instance.is_containerized: - from awx.main.scheduler.kubernetes import PodManager # prevent circular import - pm = PodManager(instance) - logger.debug(f"Deleting pod {pm.pod_name}") - pm.delete() - + if os.path.exists(collections_info): + with open(collections_info) as ee_json_info: + ee_collections_info = json.loads(ee_json_info.read()) + instance.installed_collections = ee_collections_info 
+ instance.save(update_fields=['installed_collections']) def event_handler(self, event_data): # @@ -1355,16 +1334,6 @@ class BaseTask(object): Run the job/task and capture its output. ''' self.instance = self.model.objects.get(pk=pk) - containerized = self.instance.is_containerized - pod_manager = None - if containerized: - # Here we are trying to launch a pod before transitioning the job into a running - # state. For some scenarios (like waiting for resources to become available) we do this - # rather than marking the job as error or failed. This is not always desirable. Cases - # such as invalid authentication should surface as an error. - pod_manager = self.deploy_container_group_pod(self.instance) - if not pod_manager: - return # self.instance because of the update_model pattern and when it's used in callback handlers self.instance = self.update_model(pk, status='running', @@ -1423,12 +1392,8 @@ class BaseTask(object): passwords = self.build_passwords(self.instance, kwargs) self.build_extra_vars_file(self.instance, private_data_dir) args = self.build_args(self.instance, private_data_dir, passwords) - cwd = self.build_cwd(self.instance, private_data_dir) resource_profiling_params = self.build_params_resource_profiling(self.instance, private_data_dir) - process_isolation_params = self.build_params_process_isolation(self.instance, - private_data_dir, - cwd) env = self.build_env(self.instance, private_data_dir, isolated, private_data_files=private_data_files) self.safe_env = build_safe_env(env) @@ -1451,27 +1416,17 @@ class BaseTask(object): params = { 'ident': self.instance.id, 'private_data_dir': private_data_dir, - 'project_dir': cwd, 'playbook': self.build_playbook_path_relative_to_cwd(self.instance, private_data_dir), 'inventory': self.build_inventory(self.instance, private_data_dir), 'passwords': expect_passwords, 'envvars': env, - 'event_handler': self.event_handler, - 'cancel_callback': self.cancel_callback, - 'finished_callback': self.finished_callback, - 
'status_handler': self.status_handler, 'settings': { 'job_timeout': self.get_instance_timeout(self.instance), 'suppress_ansible_output': True, - **process_isolation_params, **resource_profiling_params, }, } - if containerized: - # We don't want HOME passed through to container groups. - params['envvars'].pop('HOME') - if isinstance(self.instance, AdHocCommand): params['module'] = self.build_module_name(self.instance) params['module_args'] = self.build_module_args(self.instance) @@ -1483,6 +1438,9 @@ class BaseTask(object): # Disable Ansible fact cache. params['fact_cache_type'] = '' + if self.instance.is_container_group_task or settings.IS_K8S: + params['envvars'].pop('HOME', None) + ''' Delete parameters if the values are None or empty array ''' @@ -1491,37 +1449,24 @@ class BaseTask(object): del params[v] self.dispatcher = CallbackQueueDispatcher() - if self.instance.is_isolated() or containerized: - module_args = None - if 'module_args' in params: - # if it's adhoc, copy the module args - module_args = ansible_runner.utils.args2cmdline( - params.get('module_args'), - ) - shutil.move( - params.pop('inventory'), - os.path.join(private_data_dir, 'inventory') - ) - ansible_runner.utils.dump_artifacts(params) - isolated_manager_instance = isolated_manager.IsolatedManager( - self.event_handler, - canceled_callback=lambda: self.update_model(self.instance.pk).cancel_flag, - check_callback=self.check_handler, - pod_manager=pod_manager - ) - status, rc = isolated_manager_instance.run(self.instance, - private_data_dir, - params.get('playbook'), - params.get('module'), - module_args, - ident=str(self.instance.pk)) - self.finished_callback(None) - else: - res = ansible_runner.interface.run(**params) - status = res.status - rc = res.rc self.instance.log_lifecycle("running_playbook") + if isinstance(self.instance, SystemJob): + cwd = self.build_cwd(self.instance, private_data_dir) + res = ansible_runner.interface.run(project_dir=cwd, + event_handler=self.event_handler, + 
finished_callback=self.finished_callback, + status_handler=self.status_handler, + **params) + else: + receptor_job = AWXReceptorJob(self, params) + res = receptor_job.run() + + if not res: + return + + status = res.status + rc = res.rc if status == 'timeout': self.instance.job_explanation = "Job terminated due to timeout" @@ -1569,37 +1514,6 @@ class BaseTask(object): raise AwxTaskError.TaskError(self.instance, rc) - def deploy_container_group_pod(self, task): - from awx.main.scheduler.kubernetes import PodManager # Avoid circular import - pod_manager = PodManager(self.instance) - try: - log_name = task.log_format - logger.debug(f"Launching pod for {log_name}.") - pod_manager.deploy() - except (ApiException, Exception) as exc: - if isinstance(exc, ApiException) and exc.status == 403: - try: - if 'exceeded quota' in json.loads(exc.body)['message']: - # If the k8s cluster does not have capacity, we move the - # job back into pending and wait until the next run of - # the task manager. This does not exactly play well with - # our current instance group precendence logic, since it - # will just sit here forever if kubernetes returns this - # error. - logger.warn(exc.body) - logger.warn(f"Could not launch pod for {log_name}. Exceeded quota.") - self.update_model(task.pk, status='pending') - return - except Exception: - logger.exception(f"Unable to handle response from Kubernetes API for {log_name}.") - - logger.exception(f"Error when launching pod for {log_name}") - self.update_model(task.pk, status='error', result_traceback=traceback.format_exc()) - return - - self.update_model(task.pk, execution_node=pod_manager.pod_name) - return pod_manager - @@ -1690,7 +1604,6 @@ class RunJob(BaseTask): private_data_files=private_data_files) if private_data_files is None: private_data_files = {} - self.add_ansible_venv(job.ansible_virtualenv_path, env, isolated=isolated) # Set environment variables needed for inventory and job event # callbacks to work. 
env['JOB_ID'] = str(job.pk) @@ -1709,13 +1622,17 @@ class RunJob(BaseTask): cp_dir = os.path.join(private_data_dir, 'cp') if not os.path.exists(cp_dir): os.mkdir(cp_dir, 0o700) - env['ANSIBLE_SSH_CONTROL_PATH_DIR'] = cp_dir + # FIXME: more elegant way to manage this path in container + env['ANSIBLE_SSH_CONTROL_PATH_DIR'] = '/runner/cp' # Set environment variables for cloud credentials. cred_files = private_data_files.get('credentials', {}) for cloud_cred in job.cloud_credentials: if cloud_cred and cloud_cred.credential_type.namespace == 'openstack': - env['OS_CLIENT_CONFIG_FILE'] = cred_files.get(cloud_cred, '') + env['OS_CLIENT_CONFIG_FILE'] = os.path.join( + '/runner', + os.path.basename(cred_files.get(cloud_cred, '')) + ) for network_cred in job.network_credentials: env['ANSIBLE_NET_USERNAME'] = network_cred.get_input('username', default='') @@ -1746,7 +1663,8 @@ class RunJob(BaseTask): for path in config_values[config_setting].split(':'): if path not in paths: paths = [config_values[config_setting]] + paths - paths = [os.path.join(private_data_dir, folder)] + paths + # FIXME: again, figure out more elegant way for inside container + paths = [os.path.join('/runner', folder)] + paths env[env_key] = os.pathsep.join(paths) return env @@ -1875,10 +1793,26 @@ class RunJob(BaseTask): ''' Return whether this task should use proot. 
''' - if job.is_containerized: + if job.is_container_group_task: return False return getattr(settings, 'AWX_PROOT_ENABLED', False) + def build_execution_environment_params(self, instance): + if settings.IS_K8S: + return {} + + params = super(RunJob, self).build_execution_environment_params(instance) + # If this has an insights agent and it is not already mounted then show it + insights_dir = os.path.dirname(settings.INSIGHTS_SYSTEM_ID_FILE) + if instance.use_fact_cache and os.path.exists(insights_dir): + logger.info('not parent of others') + params.setdefault('container_volume_mounts', []) + params['container_volume_mounts'].extend([ + f"{insights_dir}:{insights_dir}:Z", + ]) + + return params + def pre_run_hook(self, job, private_data_dir): super(RunJob, self).pre_run_hook(job, private_data_dir) if job.inventory is None: @@ -1989,10 +1923,10 @@ class RunJob(BaseTask): return if job.use_fact_cache: job.finish_job_fact_cache( - os.path.join(private_data_dir, 'artifacts', str(job.id), 'fact_cache'), + os.path.join(private_data_dir, 'artifacts', 'fact_cache'), fact_modification_times, ) - if isolated_manager_instance and not job.is_containerized: + if isolated_manager_instance and not job.is_container_group_task: isolated_manager_instance.cleanup() try: @@ -2068,7 +2002,6 @@ class RunProjectUpdate(BaseTask): env = super(RunProjectUpdate, self).build_env(project_update, private_data_dir, isolated=isolated, private_data_files=private_data_files) - self.add_ansible_venv(settings.ANSIBLE_VENV_PATH, env) env['ANSIBLE_RETRY_FILES_ENABLED'] = str(False) env['ANSIBLE_ASK_PASS'] = str(False) env['ANSIBLE_BECOME_ASK_PASS'] = str(False) @@ -2202,6 +2135,14 @@ class RunProjectUpdate(BaseTask): elif project_update.project.allow_override: # If branch is override-able, do extra fetch for all branches extra_vars['scm_refspec'] = 'refs/heads/*:refs/remotes/origin/*' + + if project_update.scm_type == 'archive': + # for raw archive, prevent error moving files between volumes + 
extra_vars['ansible_remote_tmp'] = os.path.join( + project_update.get_project_path(check_if_exists=False), + '.ansible_awx', 'tmp' + ) + self._write_extra_vars_file(private_data_dir, extra_vars) def build_cwd(self, project_update, private_data_dir): @@ -2330,10 +2271,14 @@ class RunProjectUpdate(BaseTask): # re-create root project folder if a natural disaster has destroyed it if not os.path.exists(settings.PROJECTS_ROOT): os.mkdir(settings.PROJECTS_ROOT) + project_path = instance.project.get_project_path(check_if_exists=False) + if not os.path.exists(project_path): + os.makedirs(project_path) # used as container mount + self.acquire_lock(instance) + self.original_branch = None if instance.scm_type == 'git' and instance.branch_override: - project_path = instance.project.get_project_path(check_if_exists=False) if os.path.exists(project_path): git_repo = git.Repo(project_path) if git_repo.head.is_detached: @@ -2349,7 +2294,7 @@ class RunProjectUpdate(BaseTask): # the project update playbook is not in a git repo, but uses a vendoring directory # to be consistent with the ansible-runner model, - # that is moved into the runner projecct folder here + # that is moved into the runner project folder here awx_playbooks = self.get_path_to('..', 'playbooks') copy_tree(awx_playbooks, os.path.join(private_data_dir, 'project')) @@ -2484,6 +2429,20 @@ class RunProjectUpdate(BaseTask): ''' return getattr(settings, 'AWX_PROOT_ENABLED', False) + def build_execution_environment_params(self, instance): + if settings.IS_K8S: + return {} + + params = super(RunProjectUpdate, self).build_execution_environment_params(instance) + project_path = instance.get_project_path(check_if_exists=False) + cache_path = instance.get_cache_path() + params.setdefault('container_volume_mounts', []) + params['container_volume_mounts'].extend([ + f"{project_path}:{project_path}:Z", + f"{cache_path}:{cache_path}:Z", + ]) + return params + @task(queue=get_local_queuename) class RunInventoryUpdate(BaseTask): @@ 
-2492,18 +2451,6 @@ class RunInventoryUpdate(BaseTask): event_model = InventoryUpdateEvent event_data_key = 'inventory_update_id' - # TODO: remove once inv updates run in containers - def should_use_proot(self, inventory_update): - ''' - Return whether this task should use proot. - ''' - return getattr(settings, 'AWX_PROOT_ENABLED', False) - - # TODO: remove once inv updates run in containers - @property - def proot_show_paths(self): - return [settings.AWX_ANSIBLE_COLLECTIONS_PATHS] - def build_private_data(self, inventory_update, private_data_dir): """ Return private data needed for inventory update. @@ -2530,17 +2477,13 @@ class RunInventoryUpdate(BaseTask): are accomplished by the inventory source injectors (in this method) or custom credential type injectors (in main run method). """ - env = super(RunInventoryUpdate, self).build_env(inventory_update, - private_data_dir, - isolated, - private_data_files=private_data_files) + env = super(RunInventoryUpdate, self).build_env( + inventory_update, private_data_dir, isolated, + private_data_files=private_data_files) + if private_data_files is None: private_data_files = {} - # TODO: remove once containers replace custom venvs - self.add_ansible_venv(inventory_update.ansible_virtualenv_path, env, isolated=isolated) - - # Legacy environment variables, were used as signal to awx-manage command - # now they are provided in case some scripts may be relying on them + # Pass inventory source ID to inventory script. 
env['INVENTORY_SOURCE_ID'] = str(inventory_update.inventory_source_id) env['INVENTORY_UPDATE_ID'] = str(inventory_update.pk) env.update(STANDARD_INVENTORY_UPDATE_ENV) @@ -2578,7 +2521,8 @@ class RunInventoryUpdate(BaseTask): for path in config_values[config_setting].split(':'): if path not in paths: paths = [config_values[config_setting]] + paths - paths = [os.path.join(private_data_dir, folder)] + paths + # FIXME: containers + paths = [os.path.join('/runner', folder)] + paths env[env_key] = os.pathsep.join(paths) return env @@ -2606,17 +2550,20 @@ class RunInventoryUpdate(BaseTask): args = ['ansible-inventory', '--list', '--export'] # Add arguments for the source inventory file/script/thing - source_location = self.pseudo_build_inventory(inventory_update, private_data_dir) + rel_path = self.pseudo_build_inventory(inventory_update, private_data_dir) + container_location = os.path.join('/runner', rel_path) # TODO: make container paths elegant + source_location = os.path.join(private_data_dir, rel_path) + args.append('-i') - args.append(source_location) + args.append(container_location) args.append('--output') - args.append(os.path.join(private_data_dir, 'artifacts', 'output.json')) + args.append(os.path.join('/runner', 'artifacts', 'output.json')) if os.path.isdir(source_location): - playbook_dir = source_location + playbook_dir = container_location else: - playbook_dir = os.path.dirname(source_location) + playbook_dir = os.path.dirname(container_location) args.extend(['--playbook-dir', playbook_dir]) if inventory_update.verbosity: @@ -2647,8 +2594,10 @@ class RunInventoryUpdate(BaseTask): with open(inventory_path, 'w') as f: f.write(content) os.chmod(inventory_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) + + rel_path = injector.filename elif src == 'scm': - inventory_path = os.path.join(private_data_dir, 'project', inventory_update.source_path) + rel_path = os.path.join('project', inventory_update.source_path) elif src == 'custom': handle, inventory_path = 
tempfile.mkstemp(dir=private_data_dir) f = os.fdopen(handle, 'w') @@ -2657,7 +2606,9 @@ class RunInventoryUpdate(BaseTask): f.write(inventory_update.source_script.script) f.close() os.chmod(inventory_path, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) - return inventory_path + + rel_path = os.path.split(inventory_path)[-1] + return rel_path def build_cwd(self, inventory_update, private_data_dir): ''' @@ -2666,9 +2617,10 @@ class RunInventoryUpdate(BaseTask): - SCM, where source needs to live in the project folder ''' src = inventory_update.source + container_dir = '/runner' # TODO: make container paths elegant if src == 'scm' and inventory_update.source_project_update: - return os.path.join(private_data_dir, 'project') - return private_data_dir + return os.path.join(container_dir, 'project') + return container_dir def build_playbook_path_relative_to_cwd(self, inventory_update, private_data_dir): return None @@ -2853,7 +2805,6 @@ class RunAdHocCommand(BaseTask): env = super(RunAdHocCommand, self).build_env(ad_hoc_command, private_data_dir, isolated=isolated, private_data_files=private_data_files) - self.add_ansible_venv(settings.ANSIBLE_VENV_PATH, env) # Set environment variables needed for inventory and ad hoc event # callbacks to work. env['AD_HOC_COMMAND_ID'] = str(ad_hoc_command.pk) @@ -2867,7 +2818,8 @@ class RunAdHocCommand(BaseTask): cp_dir = os.path.join(private_data_dir, 'cp') if not os.path.exists(cp_dir): os.mkdir(cp_dir, 0o700) - env['ANSIBLE_SSH_CONTROL_PATH'] = cp_dir + # FIXME: more elegant way to manage this path in container + env['ANSIBLE_SSH_CONTROL_PATH'] = '/runner/cp' return env @@ -2974,7 +2926,7 @@ class RunAdHocCommand(BaseTask): ''' Return whether this task should use proot. 
''' - if ad_hoc_command.is_containerized: + if ad_hoc_command.is_container_group_task: return False return getattr(settings, 'AWX_PROOT_ENABLED', False) @@ -2991,6 +2943,9 @@ class RunSystemJob(BaseTask): event_model = SystemJobEvent event_data_key = 'system_job_id' + def build_execution_environment_params(self, system_job): + return {} + def build_args(self, system_job, private_data_dir, passwords): args = ['awx-manage', system_job.job_type] try: @@ -3022,10 +2977,13 @@ class RunSystemJob(BaseTask): return path def build_env(self, instance, private_data_dir, isolated=False, private_data_files=None): - env = super(RunSystemJob, self).build_env(instance, private_data_dir, - isolated=isolated, - private_data_files=private_data_files) - self.add_awx_venv(env) + base_env = super(RunSystemJob, self).build_env( + instance, private_data_dir, isolated=isolated, + private_data_files=private_data_files) + # TODO: this is able to run by turning off isolation + # the goal is to run it a container instead + env = dict(os.environ.items()) + env.update(base_env) return env def build_cwd(self, instance, private_data_dir): @@ -3103,3 +3061,235 @@ def deep_copy_model_obj( permission_check_func(creater, copy_mapping.values()) if isinstance(new_obj, Inventory): update_inventory_computed_fields.delay(new_obj.id) + + +class AWXReceptorJob: + def __init__(self, task=None, runner_params=None): + self.task = task + self.runner_params = runner_params + self.unit_id = None + + if self.task and not self.task.instance.is_container_group_task: + execution_environment_params = self.task.build_execution_environment_params(self.task.instance) + self.runner_params['settings'].update(execution_environment_params) + + def run(self): + # We establish a connection to the Receptor socket + receptor_ctl = ReceptorControl('/var/run/receptor/receptor.sock') + + try: + return self._run_internal(receptor_ctl) + finally: + # Make sure to always release the work unit if we established it + if self.unit_id is 
not None: + receptor_ctl.simple_command(f"work release {self.unit_id}") + + def _run_internal(self, receptor_ctl): + # Create a socketpair. Where the left side will be used for writing our payload + # (private data dir, kwargs). The right side will be passed to Receptor for + # reading. + sockin, sockout = socket.socketpair() + + threading.Thread(target=self.transmit, args=[sockin]).start() + + # submit our work, passing + # in the right side of our socketpair for reading. + result = receptor_ctl.submit_work(worktype=self.work_type, + payload=sockout.makefile('rb'), + params=self.receptor_params) + self.unit_id = result['unitid'] + + sockin.close() + sockout.close() + + resultsock, resultfile = receptor_ctl.get_work_results(self.unit_id, + return_socket=True, + return_sockfile=True) + # Both "processor" and "cancel_watcher" are spawned in separate threads. + # We wait for the first one to return. If cancel_watcher returns first, + # we yank the socket out from underneath the processor, which will cause it + # to exit. A reference to the processor_future is passed into the cancel_watcher_future, + # Which exits if the job has finished normally. The context manager ensures we do not + # leave any threads laying around. 
+ with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor: + processor_future = executor.submit(self.processor, resultfile) + cancel_watcher_future = executor.submit(self.cancel_watcher, processor_future) + futures = [processor_future, cancel_watcher_future] + first_future = concurrent.futures.wait(futures, + return_when=concurrent.futures.FIRST_COMPLETED) + + res = list(first_future.done)[0].result() + if res.status == 'canceled': + receptor_ctl.simple_command(f"work cancel {self.unit_id}") + resultsock.shutdown(socket.SHUT_RDWR) + resultfile.close() + elif res.status == 'error': + # TODO: There should be a more efficient way of getting this information + receptor_work_list = receptor_ctl.simple_command("work list") + detail = receptor_work_list[self.unit_id]['Detail'] + if 'exceeded quota' in detail: + logger.warn(detail) + log_name = self.task.instance.log_format + logger.warn(f"Could not launch pod for {log_name}. Exceeded quota.") + self.task.update_model(self.task.instance.pk, status='pending') + return + + raise RuntimeError(detail) + + return res + + # Spawned in a thread so Receptor can start reading before we finish writing, we + # write our payload to the left side of our socketpair. + def transmit(self, _socket): + if not settings.IS_K8S and self.work_type == 'local': + self.runner_params['only_transmit_kwargs'] = True + + ansible_runner.interface.run(streamer='transmit', + _output=_socket.makefile('wb'), + **self.runner_params) + + # Socket must be shutdown here, or the reader will hang forever. 
+ _socket.shutdown(socket.SHUT_WR) + + def processor(self, resultfile): + return ansible_runner.interface.run(streamer='process', + quiet=True, + _input=resultfile, + event_handler=self.task.event_handler, + finished_callback=self.task.finished_callback, + status_handler=self.task.status_handler, + **self.runner_params) + + @property + def receptor_params(self): + if self.task.instance.is_container_group_task: + spec_yaml = yaml.dump(self.pod_definition, explicit_start=True) + + receptor_params = { + "secret_kube_pod": spec_yaml, + } + + if self.credential: + kubeconfig_yaml = yaml.dump(self.kube_config, explicit_start=True) + receptor_params["secret_kube_config"] = kubeconfig_yaml + else: + private_data_dir = self.runner_params['private_data_dir'] + receptor_params = { + "params": f"--private-data-dir={private_data_dir}" + } + + return receptor_params + + + + @property + def work_type(self): + if self.task.instance.is_container_group_task: + if self.credential: + work_type = 'kubernetes-runtime-auth' + else: + work_type = 'kubernetes-incluster-auth' + else: + work_type = 'local' + + return work_type + + def cancel_watcher(self, processor_future): + while True: + if processor_future.done(): + return processor_future.result() + + if self.task.cancel_callback(): + result = namedtuple('result', ['status', 'rc']) + return result('canceled', 1) + time.sleep(1) + + @property + def pod_definition(self): + default_pod_spec = { + "apiVersion": "v1", + "kind": "Pod", + "metadata": { + "namespace": settings.AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE + }, + "spec": { + "containers": [{ + "image": settings.AWX_CONTAINER_GROUP_DEFAULT_IMAGE, + "name": 'worker', + "args": ['ansible-runner', 'worker'] + }] + } + } + + pod_spec_override = {} + if self.task and self.task.instance.instance_group.pod_spec_override: + pod_spec_override = parse_yaml_or_json( + self.task.instance.instance_group.pod_spec_override) + pod_spec = {**default_pod_spec, **pod_spec_override} + + if self.task: + 
pod_spec['metadata'] = deepmerge( + pod_spec.get('metadata', {}), + dict(name=self.pod_name, + labels={ + 'ansible-awx': settings.INSTALL_UUID, + 'ansible-awx-job-id': str(self.task.instance.id) + })) + + return pod_spec + + @property + def pod_name(self): + return f"awx-job-{self.task.instance.id}" + + @property + def credential(self): + return self.task.instance.instance_group.credential + + @property + def namespace(self): + return self.pod_definition['metadata']['namespace'] + + @property + def kube_config(self): + host_input = self.credential.get_input('host') + config = { + "apiVersion": "v1", + "kind": "Config", + "preferences": {}, + "clusters": [ + { + "name": host_input, + "cluster": { + "server": host_input + } + } + ], + "users": [ + { + "name": host_input, + "user": { + "token": self.credential.get_input('bearer_token') + } + } + ], + "contexts": [ + { + "name": host_input, + "context": { + "cluster": host_input, + "user": host_input, + "namespace": self.namespace + } + } + ], + "current-context": host_input + } + + if self.credential.get_input('verify_ssl') and 'ssl_ca_cert' in self.credential.inputs: + config["clusters"][0]["cluster"]["certificate-authority-data"] = b64encode( + self.credential.get_input('ssl_ca_cert').encode() # encode to bytes + ).decode() # decode the base64 data into a str + else: + config["clusters"][0]["cluster"]["insecure-skip-tls-verify"] = True + return config diff --git a/awx/main/tests/functional/api/test_instance_group.py b/awx/main/tests/functional/api/test_instance_group.py index 43c7d51960..61c1054912 100644 --- a/awx/main/tests/functional/api/test_instance_group.py +++ b/awx/main/tests/functional/api/test_instance_group.py @@ -255,7 +255,7 @@ def test_instance_group_update_fields(patch, instance, instance_group, admin, co # policy_instance_ variables can only be updated in instance groups that are NOT containerized # instance group (not containerized) ig_url = reverse("api:instance_group_detail", kwargs={'pk': 
instance_group.pk}) - assert not instance_group.is_containerized + assert not instance_group.is_container_group assert not containerized_instance_group.is_isolated resp = patch(ig_url, {'policy_instance_percentage':15}, admin, expect=200) assert 15 == resp.data['policy_instance_percentage'] @@ -266,7 +266,7 @@ def test_instance_group_update_fields(patch, instance, instance_group, admin, co # containerized instance group cg_url = reverse("api:instance_group_detail", kwargs={'pk': containerized_instance_group.pk}) - assert containerized_instance_group.is_containerized + assert containerized_instance_group.is_container_group assert not containerized_instance_group.is_isolated resp = patch(cg_url, {'policy_instance_percentage':15}, admin, expect=400) assert ["Containerized instances may not be managed via the API"] == resp.data['policy_instance_percentage'] @@ -291,4 +291,3 @@ def test_containerized_group_default_fields(instance_group, kube_credential): assert ig.policy_instance_list == [] assert ig.policy_instance_minimum == 0 assert ig.policy_instance_percentage == 0 - \ No newline at end of file diff --git a/awx/main/tests/functional/conftest.py b/awx/main/tests/functional/conftest.py index 7111950003..4cbd5a40d3 100644 --- a/awx/main/tests/functional/conftest.py +++ b/awx/main/tests/functional/conftest.py @@ -52,6 +52,7 @@ from awx.main.models.events import ( from awx.main.models.workflow import WorkflowJobTemplate from awx.main.models.ad_hoc_commands import AdHocCommand from awx.main.models.oauth import OAuth2Application as Application +from awx.main.models.execution_environments import ExecutionEnvironment __SWAGGER_REQUESTS__ = {} @@ -850,3 +851,8 @@ def slice_job_factory(slice_jt_factory): node.save() return slice_job return r + + +@pytest.fixture +def execution_environment(organization): + return ExecutionEnvironment.objects.create(name="test-ee", description="test-ee", organization=organization) diff --git 
a/awx/main/tests/functional/task_management/test_container_groups.py b/awx/main/tests/functional/task_management/test_container_groups.py index 47d982a725..84dcaf12d7 100644 --- a/awx/main/tests/functional/task_management/test_container_groups.py +++ b/awx/main/tests/functional/task_management/test_container_groups.py @@ -29,8 +29,8 @@ def containerized_job(default_instance_group, kube_credential, job_template_fact @pytest.mark.django_db def test_containerized_job(containerized_job): - assert containerized_job.is_containerized - assert containerized_job.instance_group.is_containerized + assert containerized_job.is_container_group_task + assert containerized_job.instance_group.is_container_group assert containerized_job.instance_group.credential.kubernetes diff --git a/awx/main/tests/functional/test_credential.py b/awx/main/tests/functional/test_credential.py index 27f67b96f4..4f87c249be 100644 --- a/awx/main/tests/functional/test_credential.py +++ b/awx/main/tests/functional/test_credential.py @@ -90,6 +90,7 @@ def test_default_cred_types(): 'kubernetes_bearer_token', 'net', 'openstack', + 'registry', 'rhv', 'satellite6', 'scm', diff --git a/awx/main/tests/functional/test_execution_environments.py b/awx/main/tests/functional/test_execution_environments.py new file mode 100644 index 0000000000..5f1e430fe8 --- /dev/null +++ b/awx/main/tests/functional/test_execution_environments.py @@ -0,0 +1,19 @@ +import pytest + +from awx.main.models import (ExecutionEnvironment) + + +@pytest.mark.django_db +def test_execution_environment_creation(execution_environment, organization): + execution_env = ExecutionEnvironment.objects.create( + name='Hello Environment', + image='', + organization=organization, + managed_by_tower=False, + credential=None, + pull='missing' + ) + assert type(execution_env) is type(execution_environment) + assert execution_env.organization == organization + assert execution_env.name == 'Hello Environment' + assert execution_env.pull == 'missing' diff 
--git a/awx/main/tests/functional/test_inventory_source_injectors.py b/awx/main/tests/functional/test_inventory_source_injectors.py index fc28c92294..f9edfdcd22 100644 --- a/awx/main/tests/functional/test_inventory_source_injectors.py +++ b/awx/main/tests/functional/test_inventory_source_injectors.py @@ -6,7 +6,7 @@ import re from collections import namedtuple from awx.main.tasks import RunInventoryUpdate -from awx.main.models import InventorySource, Credential, CredentialType, UnifiedJob +from awx.main.models import InventorySource, Credential, CredentialType, UnifiedJob, ExecutionEnvironment from awx.main.constants import CLOUD_PROVIDERS, STANDARD_INVENTORY_UPDATE_ENV from awx.main.tests import data @@ -110,7 +110,8 @@ def read_content(private_data_dir, raw_env, inventory_update): continue # Ansible runner abs_file_path = os.path.join(private_data_dir, filename) file_aliases[abs_file_path] = filename - if abs_file_path in inverse_env: + runner_path = os.path.join('/runner', os.path.basename(abs_file_path)) + if runner_path in inverse_env: referenced_paths.add(abs_file_path) alias = 'file_reference' for i in range(10): @@ -121,7 +122,7 @@ def read_content(private_data_dir, raw_env, inventory_update): raise RuntimeError('Test not able to cope with >10 references by env vars. 
' 'Something probably went very wrong.') file_aliases[abs_file_path] = alias - for env_key in inverse_env[abs_file_path]: + for env_key in inverse_env[runner_path]: env[env_key] = '{{{{ {} }}}}'.format(alias) try: with open(abs_file_path, 'r') as f: @@ -182,6 +183,8 @@ def create_reference_data(source_dir, env, content): @pytest.mark.django_db @pytest.mark.parametrize('this_kind', CLOUD_PROVIDERS) def test_inventory_update_injected_content(this_kind, inventory, fake_credential_factory): + ExecutionEnvironment.objects.create(name='test EE', managed_by_tower=True) + injector = InventorySource.injectors[this_kind] if injector.plugin_name is None: pytest.skip('Use of inventory plugin is not enabled for this source') @@ -197,12 +200,14 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential inventory_update = inventory_source.create_unified_job() task = RunInventoryUpdate() - def substitute_run(envvars=None, **_kw): + def substitute_run(awx_receptor_job): """This method will replace run_pexpect instead of running, it will read the private data directory contents It will make assertions that the contents are correct If MAKE_INVENTORY_REFERENCE_FILES is set, it will produce reference files """ + envvars = awx_receptor_job.runner_params['envvars'] + private_data_dir = envvars.pop('AWX_PRIVATE_DATA_DIR') assert envvars.pop('ANSIBLE_INVENTORY_ENABLED') == 'auto' set_files = bool(os.getenv("MAKE_INVENTORY_REFERENCE_FILES", 'false').lower()[0] not in ['f', '0']) @@ -214,9 +219,6 @@ def test_inventory_update_injected_content(this_kind, inventory, fake_credential f"'{inventory_filename}' file not found in inventory update runtime files {content.keys()}" env.pop('ANSIBLE_COLLECTIONS_PATHS', None) # collection paths not relevant to this test - env.pop('PYTHONPATH') - env.pop('VIRTUAL_ENV') - env.pop('PROOT_TMP_DIR') base_dir = os.path.join(DATA, 'plugins') if not os.path.exists(base_dir): os.mkdir(base_dir) @@ -256,6 +258,6 @@ def 
test_inventory_update_injected_content(this_kind, inventory, fake_credential # Also do not send websocket status updates with mock.patch.object(UnifiedJob, 'websocket_emit_status', mock.Mock()): # The point of this test is that we replace run with assertions - with mock.patch('awx.main.tasks.ansible_runner.interface.run', substitute_run): + with mock.patch('awx.main.tasks.AWXReceptorJob.run', substitute_run): # so this sets up everything for a run and then yields control over to substitute_run task.run(inventory_update.pk) diff --git a/awx/main/tests/unit/api/serializers/test_job_serializers.py b/awx/main/tests/unit/api/serializers/test_job_serializers.py index e7b0ee7792..53cc07676d 100644 --- a/awx/main/tests/unit/api/serializers/test_job_serializers.py +++ b/awx/main/tests/unit/api/serializers/test_job_serializers.py @@ -40,7 +40,7 @@ def project_update(mocker): @pytest.fixture def job(mocker, job_template, project_update): return mocker.MagicMock(pk=5, job_template=job_template, project_update=project_update, - workflow_job_id=None) + workflow_job_id=None, execution_environment_id=None) @pytest.fixture diff --git a/awx/main/tests/unit/test_capacity.py b/awx/main/tests/unit/test_capacity.py index 16fe81053c..1da05ec1f3 100644 --- a/awx/main/tests/unit/test_capacity.py +++ b/awx/main/tests/unit/test_capacity.py @@ -11,7 +11,7 @@ class FakeObject(object): class Job(FakeObject): task_impact = 43 - is_containerized = False + is_container_group_task = False def log_format(self): return 'job 382 (fake)' diff --git a/awx/main/tests/unit/test_tasks.py b/awx/main/tests/unit/test_tasks.py index 053745cc64..3acdd7ead9 100644 --- a/awx/main/tests/unit/test_tasks.py +++ b/awx/main/tests/unit/test_tasks.py @@ -6,7 +6,6 @@ import os import shutil import tempfile -from backports.tempfile import TemporaryDirectory import fcntl from unittest import mock import pytest @@ -19,6 +18,7 @@ from awx.main.models import ( AdHocCommand, Credential, CredentialType, + ExecutionEnvironment, 
Inventory, InventorySource, InventoryUpdate, @@ -347,11 +347,12 @@ def pytest_generate_tests(metafunc): ) -def parse_extra_vars(args): +def parse_extra_vars(args, private_data_dir): extra_vars = {} for chunk in args: - if chunk.startswith('@/tmp/'): - with open(chunk.strip('@'), 'r') as f: + if chunk.startswith('@/runner/'): + local_path = os.path.join(private_data_dir, os.path.basename(chunk.strip('@'))) + with open(local_path, 'r') as f: extra_vars.update(yaml.load(f, Loader=SafeLoader)) return extra_vars @@ -546,44 +547,6 @@ class TestGenericRun(): job_cwd='/foobar', job_env={'switch': 'blade', 'foot': 'ball', 'secret_key': 'redacted_value'}) - def test_uses_process_isolation(self, settings): - job = Job(project=Project(), inventory=Inventory()) - task = tasks.RunJob() - task.should_use_proot = lambda instance: True - task.instance = job - - private_data_dir = '/foo' - cwd = '/bar' - - settings.AWX_PROOT_HIDE_PATHS = ['/AWX_PROOT_HIDE_PATHS1', '/AWX_PROOT_HIDE_PATHS2'] - settings.ANSIBLE_VENV_PATH = '/ANSIBLE_VENV_PATH' - settings.AWX_VENV_PATH = '/AWX_VENV_PATH' - - process_isolation_params = task.build_params_process_isolation(job, private_data_dir, cwd) - assert True is process_isolation_params['process_isolation'] - assert process_isolation_params['process_isolation_path'].startswith(settings.AWX_PROOT_BASE_PATH), \ - "Directory where a temp directory will be created for the remapping to take place" - assert private_data_dir in process_isolation_params['process_isolation_show_paths'], \ - "The per-job private data dir should be in the list of directories the user can see." - assert cwd in process_isolation_params['process_isolation_show_paths'], \ - "The current working directory should be in the list of directories the user can see." 
- - for p in [settings.AWX_PROOT_BASE_PATH, - '/etc/tower', - '/etc/ssh', - '/var/lib/awx', - '/var/log', - settings.PROJECTS_ROOT, - settings.JOBOUTPUT_ROOT, - '/AWX_PROOT_HIDE_PATHS1', - '/AWX_PROOT_HIDE_PATHS2']: - assert p in process_isolation_params['process_isolation_hide_paths'] - assert 9 == len(process_isolation_params['process_isolation_hide_paths']) - assert '/ANSIBLE_VENV_PATH' in process_isolation_params['process_isolation_ro_paths'] - assert '/AWX_VENV_PATH' in process_isolation_params['process_isolation_ro_paths'] - assert 2 == len(process_isolation_params['process_isolation_ro_paths']) - - @mock.patch('os.makedirs') def test_build_params_resource_profiling(self, os_makedirs): job = Job(project=Project(), inventory=Inventory()) @@ -597,7 +560,7 @@ class TestGenericRun(): assert resource_profiling_params['resource_profiling_cpu_poll_interval'] == '0.25' assert resource_profiling_params['resource_profiling_memory_poll_interval'] == '0.25' assert resource_profiling_params['resource_profiling_pid_poll_interval'] == '0.25' - assert resource_profiling_params['resource_profiling_results_dir'] == '/fake_private_data_dir/artifacts/playbook_profiling' + assert resource_profiling_params['resource_profiling_results_dir'] == '/runner/artifacts/playbook_profiling' @pytest.mark.parametrize("scenario, profiling_enabled", [ @@ -656,34 +619,13 @@ class TestGenericRun(): env = task.build_env(job, private_data_dir) assert env['FOO'] == 'BAR' - def test_valid_custom_virtualenv(self, patch_Job, private_data_dir): - job = Job(project=Project(), inventory=Inventory()) - - with TemporaryDirectory(dir=settings.BASE_VENV_PATH) as tempdir: - job.project.custom_virtualenv = tempdir - os.makedirs(os.path.join(tempdir, 'lib')) - os.makedirs(os.path.join(tempdir, 'bin', 'activate')) - - task = tasks.RunJob() - env = task.build_env(job, private_data_dir) - - assert env['PATH'].startswith(os.path.join(tempdir, 'bin')) - assert env['VIRTUAL_ENV'] == tempdir - - def 
test_invalid_custom_virtualenv(self, patch_Job, private_data_dir): - job = Job(project=Project(), inventory=Inventory()) - job.project.custom_virtualenv = '/var/lib/awx/venv/missing' - task = tasks.RunJob() - - with pytest.raises(tasks.InvalidVirtualenvError) as e: - task.build_env(job, private_data_dir) - - assert 'Invalid virtual environment selected: /var/lib/awx/venv/missing' == str(e.value) - +@pytest.mark.django_db class TestAdhocRun(TestJobExecution): def test_options_jinja_usage(self, adhoc_job, adhoc_update_model_wrapper): + ExecutionEnvironment.objects.create(name='test EE', managed_by_tower=True) + adhoc_job.module_args = '{{ ansible_ssh_pass }}' adhoc_job.websocket_emit_status = mock.Mock() adhoc_job.send_notification_templates = mock.Mock() @@ -1203,7 +1145,9 @@ class TestJobCredentials(TestJobExecution): credential.credential_type.inject_credential( credential, env, safe_env, [], private_data_dir ) - json_data = json.load(open(env['GCE_CREDENTIALS_FILE_PATH'], 'rb')) + runner_path = env['GCE_CREDENTIALS_FILE_PATH'] + local_path = os.path.join(private_data_dir, os.path.basename(runner_path)) + json_data = json.load(open(local_path, 'rb')) assert json_data['type'] == 'service_account' assert json_data['private_key'] == self.EXAMPLE_PRIVATE_KEY assert json_data['client_email'] == 'bob' @@ -1306,7 +1250,11 @@ class TestJobCredentials(TestJobExecution): credential, env, {}, [], private_data_dir ) - shade_config = open(env['OS_CLIENT_CONFIG_FILE'], 'r').read() + # convert container path to host machine path + config_loc = os.path.join( + private_data_dir, os.path.basename(env['OS_CLIENT_CONFIG_FILE']) + ) + shade_config = open(config_loc, 'r').read() assert shade_config == '\n'.join([ 'clouds:', ' devstack:', @@ -1344,7 +1292,7 @@ class TestJobCredentials(TestJobExecution): ) config = configparser.ConfigParser() - config.read(env['OVIRT_INI_PATH']) + config.read(os.path.join(private_data_dir, os.path.basename(env['OVIRT_INI_PATH']))) assert 
config.get('ovirt', 'ovirt_url') == 'some-ovirt-host.example.org' assert config.get('ovirt', 'ovirt_username') == 'bob' assert config.get('ovirt', 'ovirt_password') == 'some-pass' @@ -1577,7 +1525,7 @@ class TestJobCredentials(TestJobExecution): credential.credential_type.inject_credential( credential, {}, {}, args, private_data_dir ) - extra_vars = parse_extra_vars(args) + extra_vars = parse_extra_vars(args, private_data_dir) assert extra_vars["api_token"] == "ABC123" assert hasattr(extra_vars["api_token"], '__UNSAFE__') @@ -1612,7 +1560,7 @@ class TestJobCredentials(TestJobExecution): credential.credential_type.inject_credential( credential, {}, {}, args, private_data_dir ) - extra_vars = parse_extra_vars(args) + extra_vars = parse_extra_vars(args, private_data_dir) assert extra_vars["turbo_button"] == "True" return ['successful', 0] @@ -1647,7 +1595,7 @@ class TestJobCredentials(TestJobExecution): credential.credential_type.inject_credential( credential, {}, {}, args, private_data_dir ) - extra_vars = parse_extra_vars(args) + extra_vars = parse_extra_vars(args, private_data_dir) assert extra_vars["turbo_button"] == "FAST!" 
@@ -1687,7 +1635,7 @@ class TestJobCredentials(TestJobExecution): credential, {}, {}, args, private_data_dir ) - extra_vars = parse_extra_vars(args) + extra_vars = parse_extra_vars(args, private_data_dir) assert extra_vars["password"] == "SUPER-SECRET-123" def test_custom_environment_injectors_with_file(self, private_data_dir): @@ -1722,7 +1670,8 @@ class TestJobCredentials(TestJobExecution): credential, env, {}, [], private_data_dir ) - assert open(env['MY_CLOUD_INI_FILE'], 'r').read() == '[mycloud]\nABC123' + path = os.path.join(private_data_dir, os.path.basename(env['MY_CLOUD_INI_FILE'])) + assert open(path, 'r').read() == '[mycloud]\nABC123' def test_custom_environment_injectors_with_unicode_content(self, private_data_dir): value = 'Iñtërnâtiônàlizætiøn' @@ -1746,7 +1695,8 @@ class TestJobCredentials(TestJobExecution): credential, env, {}, [], private_data_dir ) - assert open(env['MY_CLOUD_INI_FILE'], 'r').read() == value + path = os.path.join(private_data_dir, os.path.basename(env['MY_CLOUD_INI_FILE'])) + assert open(path, 'r').read() == value def test_custom_environment_injectors_with_files(self, private_data_dir): some_cloud = CredentialType( @@ -1786,8 +1736,10 @@ class TestJobCredentials(TestJobExecution): credential, env, {}, [], private_data_dir ) - assert open(env['MY_CERT_INI_FILE'], 'r').read() == '[mycert]\nCERT123' - assert open(env['MY_KEY_INI_FILE'], 'r').read() == '[mykey]\nKEY123' + cert_path = os.path.join(private_data_dir, os.path.basename(env['MY_CERT_INI_FILE'])) + key_path = os.path.join(private_data_dir, os.path.basename(env['MY_KEY_INI_FILE'])) + assert open(cert_path, 'r').read() == '[mycert]\nCERT123' + assert open(key_path, 'r').read() == '[mykey]\nKEY123' def test_multi_cloud(self, private_data_dir): gce = CredentialType.defaults['gce']() @@ -1826,7 +1778,8 @@ class TestJobCredentials(TestJobExecution): assert env['AZURE_AD_USER'] == 'bob' assert env['AZURE_PASSWORD'] == 'secret' - json_data = 
json.load(open(env['GCE_CREDENTIALS_FILE_PATH'], 'rb')) + path = os.path.join(private_data_dir, os.path.basename(env['GCE_CREDENTIALS_FILE_PATH'])) + json_data = json.load(open(path, 'rb')) assert json_data['type'] == 'service_account' assert json_data['private_key'] == self.EXAMPLE_PRIVATE_KEY assert json_data['client_email'] == 'bob' @@ -1971,29 +1924,6 @@ class TestProjectUpdateCredentials(TestJobExecution): ] } - def test_process_isolation_exposes_projects_root(self, private_data_dir, project_update): - task = tasks.RunProjectUpdate() - task.revision_path = 'foobar' - task.instance = project_update - ssh = CredentialType.defaults['ssh']() - project_update.scm_type = 'git' - project_update.credential = Credential( - pk=1, - credential_type=ssh, - ) - process_isolation = task.build_params_process_isolation(job, private_data_dir, 'cwd') - - assert process_isolation['process_isolation'] is True - assert settings.PROJECTS_ROOT in process_isolation['process_isolation_show_paths'] - - task._write_extra_vars_file = mock.Mock() - - with mock.patch.object(Licenser, 'validate', lambda *args, **kw: {}): - task.build_extra_vars_file(project_update, private_data_dir) - - call_args, _ = task._write_extra_vars_file.call_args_list[0] - _, extra_vars = call_args - def test_username_and_password_auth(self, project_update, scm_type): task = tasks.RunProjectUpdate() ssh = CredentialType.defaults['ssh']() @@ -2107,7 +2037,8 @@ class TestInventoryUpdateCredentials(TestJobExecution): assert '-i' in ' '.join(args) script = args[args.index('-i') + 1] - with open(script, 'r') as f: + host_script = script.replace('/runner', private_data_dir) + with open(host_script, 'r') as f: assert f.read() == inventory_update.source_script.script assert env['FOO'] == 'BAR' if with_credential: @@ -2307,7 +2238,8 @@ class TestInventoryUpdateCredentials(TestJobExecution): private_data_files = task.build_private_data_files(inventory_update, private_data_dir) env = task.build_env(inventory_update, 
private_data_dir, False, private_data_files) - shade_config = open(env['OS_CLIENT_CONFIG_FILE'], 'r').read() + path = os.path.join(private_data_dir, os.path.basename(env['OS_CLIENT_CONFIG_FILE'])) + shade_config = open(path, 'r').read() assert '\n'.join([ 'clouds:', ' devstack:', diff --git a/awx/main/utils/common.py b/awx/main/utils/common.py index 283a028f3f..ad90d5e4ec 100644 --- a/awx/main/utils/common.py +++ b/awx/main/utils/common.py @@ -55,7 +55,8 @@ __all__ = [ 'model_instance_diff', 'parse_yaml_or_json', 'RequireDebugTrueOrTest', 'has_model_field_prefetched', 'set_environ', 'IllegalArgumentError', 'get_custom_venv_choices', 'get_external_account', 'task_manager_bulk_reschedule', - 'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout' + 'schedule_task_manager', 'classproperty', 'create_temporary_fifo', 'truncate_stdout', + 'deepmerge' ] @@ -1079,3 +1080,21 @@ def truncate_stdout(stdout, size): set_count += 1 return stdout + u'\u001b[0m' * (set_count - reset_count) + + +def deepmerge(a, b): + """ + Merge dict structures and return the result. 
+ + >>> a = {'first': {'all_rows': {'pass': 'dog', 'number': '1'}}} + >>> b = {'first': {'all_rows': {'fail': 'cat', 'number': '5'}}} + >>> import pprint; pprint.pprint(deepmerge(a, b)) + {'first': {'all_rows': {'fail': 'cat', 'number': '5', 'pass': 'dog'}}} + """ + if isinstance(a, dict) and isinstance(b, dict): + return dict([(k, deepmerge(a.get(k), b.get(k))) + for k in set(a.keys()).union(b.keys())]) + elif b is None: + return a + else: + return b diff --git a/awx/playbooks/project_update.yml b/awx/playbooks/project_update.yml index a7b7007d56..e00bed4249 100644 --- a/awx/playbooks/project_update.yml +++ b/awx/playbooks/project_update.yml @@ -24,9 +24,7 @@ tasks: - name: delete project directory before update - file: - path: "{{project_path|quote}}" - state: absent + command: "rm -rf {{project_path}}/*" # volume mounted, cannot delete folder itself tags: - delete @@ -57,6 +55,8 @@ force: "{{scm_clean}}" username: "{{scm_username|default(omit)}}" password: "{{scm_password|default(omit)}}" + # must be in_place because folder pre-existing, because it is mounted + in_place: true environment: LC_ALL: 'en_US.UTF-8' register: svn_result @@ -206,6 +206,9 @@ ANSIBLE_FORCE_COLOR: false ANSIBLE_COLLECTIONS_PATHS: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections" GIT_SSH_COMMAND: "ssh -o StrictHostKeyChecking=no" + # Put the local tmp directory in same volume as collection destination + # otherwise, files cannot be moved accross volumes and will cause error + ANSIBLE_LOCAL_TEMP: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/tmp" when: - "ansible_version.full is version_compare('2.9', '>=')" diff --git a/awx/settings/defaults.py b/awx/settings/defaults.py index e0c1db197b..ba8b8c69cf 100644 --- a/awx/settings/defaults.py +++ b/awx/settings/defaults.py @@ -59,11 +59,23 @@ DATABASES = { } } +# Whether or not the deployment is a K8S-based deployment +# In K8S-based deployments, instances have zero capacity - all playbook +# automation is 
intended to flow through defined Container Groups that +# interface with some (or some set of) K8S api (which may or may not include +# the K8S cluster where awx itself is running) +IS_K8S = False + +# TODO: remove this setting in favor of a default execution environment +AWX_EXECUTION_ENVIRONMENT_DEFAULT_IMAGE = 'quay.io/ansible/awx-ee' + AWX_CONTAINER_GROUP_K8S_API_TIMEOUT = 10 AWX_CONTAINER_GROUP_POD_LAUNCH_RETRIES = 100 AWX_CONTAINER_GROUP_POD_LAUNCH_RETRY_DELAY = 5 -AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE = 'default' -AWX_CONTAINER_GROUP_DEFAULT_IMAGE = 'ansible/ansible-runner' +AWX_CONTAINER_GROUP_DEFAULT_NAMESPACE = os.getenv('MY_POD_NAMESPACE', 'default') + +# TODO: remove this setting in favor of a default execution environment +AWX_CONTAINER_GROUP_DEFAULT_IMAGE = AWX_EXECUTION_ENVIRONMENT_DEFAULT_IMAGE # Internationalization # https://docs.djangoproject.com/en/dev/topics/i18n/ @@ -173,6 +185,7 @@ REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST'] PROXY_IP_ALLOWED_LIST = [] CUSTOM_VENV_PATHS = [] +DEFAULT_EXECUTION_ENVIRONMENT = None # Note: This setting may be overridden by database settings. 
STDOUT_MAX_BYTES_DISPLAY = 1048576 @@ -679,7 +692,7 @@ AD_HOC_COMMANDS = [ 'win_user', ] -INV_ENV_VARIABLE_BLOCKED = ("HOME", "USER", "_", "TERM") +INV_ENV_VARIABLE_BLOCKED = ("HOME", "USER", "_", "TERM", "PATH") # ---------------- # -- Amazon EC2 -- @@ -783,6 +796,8 @@ TOWER_URL_BASE = "https://towerhost" INSIGHTS_URL_BASE = "https://example.org" INSIGHTS_AGENT_MIME = 'application/example' +# See https://github.com/ansible/awx-facts-playbooks +INSIGHTS_SYSTEM_ID_FILE='/etc/redhat-access-insights/machine-id' TOWER_SETTINGS_MANIFEST = {} diff --git a/awx/settings/development.py b/awx/settings/development.py index 6181d16ec6..d181ca10fc 100644 --- a/awx/settings/development.py +++ b/awx/settings/development.py @@ -177,15 +177,6 @@ CELERYBEAT_SCHEDULE.update({ # noqa CLUSTER_HOST_ID = socket.gethostname() - -if 'Docker Desktop' in os.getenv('OS', ''): - os.environ['SDB_NOTIFY_HOST'] = 'docker.for.mac.host.internal' -else: - try: - os.environ['SDB_NOTIFY_HOST'] = os.popen('ip route').read().split(' ')[2] - except Exception: - pass - AWX_CALLBACK_PROFILE = True if 'sqlite3' not in DATABASES['default']['ENGINE']: # noqa diff --git a/awx/ui_next/SEARCH.md b/awx/ui_next/SEARCH.md index 111dfb2f56..131e4fe277 100644 --- a/awx/ui_next/SEARCH.md +++ b/awx/ui_next/SEARCH.md @@ -86,7 +86,7 @@ Instances of orgs list include: **Instance Groups list** - Name - search is ?name=ig - - ? is_containerized boolean choice (doesn't work right now in API but will soon) - search is ?is_containerized=true + - ? is_container_group boolean choice (doesn't work right now in API but will soon) - search is ?is_container_group=true - ? credential name - search is ?credentials__name=kubey Instance of instance groups list include: @@ -136,7 +136,7 @@ Instance of team lists include: **Credentials list** - Name - - ? Type (dropdown on right with different types) + - ? Type (dropdown on right with different types) - ? Created by (username) - ? 
Modified by (username) @@ -273,7 +273,7 @@ For the UI url params, we want to only encode those params that aren't defaults, #### mergeParams vs. replaceParams -**mergeParams** is used to suppport putting values with the same key +**mergeParams** is used to suppport putting values with the same key From a UX perspective, we wanted to be able to support searching on the same key multiple times (i.e. searching for things like `?foo=bar&foo=baz`). We do this by creating an array of all values. i.e.: @@ -361,7 +361,7 @@ Smart search will be able to craft the tag through various states. Note that th "instance_groups__search" ], ``` - + PHASE 3: keys, give by object key names for data.actions.GET - type is given for each key which we could use to help craft the value diff --git a/awx/ui_next/src/api/index.js b/awx/ui_next/src/api/index.js index 3160ebd907..d048237b74 100644 --- a/awx/ui_next/src/api/index.js +++ b/awx/ui_next/src/api/index.js @@ -7,6 +7,7 @@ import CredentialInputSources from './models/CredentialInputSources'; import CredentialTypes from './models/CredentialTypes'; import Credentials from './models/Credentials'; import Dashboard from './models/Dashboard'; +import ExecutionEnvironments from './models/ExecutionEnvironments'; import Groups from './models/Groups'; import Hosts from './models/Hosts'; import InstanceGroups from './models/InstanceGroups'; @@ -50,6 +51,7 @@ const CredentialInputSourcesAPI = new CredentialInputSources(); const CredentialTypesAPI = new CredentialTypes(); const CredentialsAPI = new Credentials(); const DashboardAPI = new Dashboard(); +const ExecutionEnvironmentsAPI = new ExecutionEnvironments(); const GroupsAPI = new Groups(); const HostsAPI = new Hosts(); const InstanceGroupsAPI = new InstanceGroups(); @@ -94,6 +96,7 @@ export { CredentialTypesAPI, CredentialsAPI, DashboardAPI, + ExecutionEnvironmentsAPI, GroupsAPI, HostsAPI, InstanceGroupsAPI, diff --git a/awx/ui_next/src/api/models/ExecutionEnvironments.js 
b/awx/ui_next/src/api/models/ExecutionEnvironments.js new file mode 100644 index 0000000000..2df933d53a --- /dev/null +++ b/awx/ui_next/src/api/models/ExecutionEnvironments.js @@ -0,0 +1,10 @@ +import Base from '../Base'; + +class ExecutionEnvironments extends Base { + constructor(http) { + super(http); + this.baseUrl = '/api/v2/execution_environments/'; + } +} + +export default ExecutionEnvironments; diff --git a/awx/ui_next/src/api/models/Organizations.js b/awx/ui_next/src/api/models/Organizations.js index ce236067b4..fd980fece8 100644 --- a/awx/ui_next/src/api/models/Organizations.js +++ b/awx/ui_next/src/api/models/Organizations.js @@ -30,6 +30,18 @@ class Organizations extends InstanceGroupsMixin(NotificationsMixin(Base)) { }); } + readExecutionEnvironments(id, params) { + return this.http.get(`${this.baseUrl}${id}/execution_environments/`, { + params, + }); + } + + readExecutionEnvironmentsOptions(id, params) { + return this.http.options(`${this.baseUrl}${id}/execution_environments/`, { + params, + }); + } + createUser(id, data) { return this.http.post(`${this.baseUrl}${id}/users/`, data); } diff --git a/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.jsx b/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.jsx new file mode 100644 index 0000000000..86f659c430 --- /dev/null +++ b/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.jsx @@ -0,0 +1,167 @@ +import React, { useCallback, useEffect } from 'react'; +import { string, func, bool } from 'prop-types'; +import { useLocation } from 'react-router-dom'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { FormGroup, Tooltip } from '@patternfly/react-core'; + +import { ExecutionEnvironmentsAPI } from '../../api'; +import { ExecutionEnvironment } from '../../types'; +import { getQSConfig, parseQueryString, mergeParams } from '../../util/qs'; +import Popover from '../Popover'; +import OptionsList from '../OptionsList'; +import useRequest from 
'../../util/useRequest'; + +import Lookup from './Lookup'; +import LookupErrorMessage from './shared/LookupErrorMessage'; + +const QS_CONFIG = getQSConfig('execution_environments', { + page: 1, + page_size: 5, + order_by: 'name', +}); + +function ExecutionEnvironmentLookup({ + globallyAvailable, + i18n, + isDefaultEnvironment, + isDisabled, + onChange, + organizationId, + popoverContent, + tooltip, + value, + onBlur, +}) { + const location = useLocation(); + + const { + result: { + executionEnvironments, + count, + relatedSearchableKeys, + searchableKeys, + }, + request: fetchExecutionEnvironments, + error, + isLoading, + } = useRequest( + useCallback(async () => { + const params = parseQueryString(QS_CONFIG, location.search); + const globallyAvailableParams = globallyAvailable + ? { or__organization__isnull: 'True' } + : {}; + const organizationIdParams = organizationId + ? { or__organization__id: organizationId } + : {}; + const [{ data }, actionsResponse] = await Promise.all([ + ExecutionEnvironmentsAPI.read( + mergeParams(params, { + ...globallyAvailableParams, + ...organizationIdParams, + }) + ), + ExecutionEnvironmentsAPI.readOptions(), + ]); + return { + executionEnvironments: data.results, + count: data.count, + relatedSearchableKeys: ( + actionsResponse?.data?.related_search_fields || [] + ).map(val => val.slice(0, -8)), + searchableKeys: Object.keys( + actionsResponse.data.actions?.GET || {} + ).filter(key => actionsResponse.data.actions?.GET[key].filterable), + }; + }, [location, globallyAvailable, organizationId]), + { + executionEnvironments: [], + count: 0, + relatedSearchableKeys: [], + searchableKeys: [], + } + ); + + useEffect(() => { + fetchExecutionEnvironments(); + }, [fetchExecutionEnvironments]); + + const renderLookup = () => ( + <> + ( + dispatch({ type: 'SELECT_ITEM', item })} + deselectItem={item => dispatch({ type: 'DESELECT_ITEM', item })} + /> + )} + /> + + ); + + return ( + } + > + {isDisabled ? 
( + {renderLookup()} + ) : ( + renderLookup() + )} + + + + ); +} + +ExecutionEnvironmentLookup.propTypes = { + value: ExecutionEnvironment, + popoverContent: string, + onChange: func.isRequired, + isDefaultEnvironment: bool, +}; + +ExecutionEnvironmentLookup.defaultProps = { + popoverContent: '', + isDefaultEnvironment: false, + value: null, +}; + +export default withI18n()(ExecutionEnvironmentLookup); diff --git a/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.test.jsx b/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.test.jsx new file mode 100644 index 0000000000..783d43707b --- /dev/null +++ b/awx/ui_next/src/components/Lookup/ExecutionEnvironmentLookup.test.jsx @@ -0,0 +1,76 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { mountWithContexts } from '../../../testUtils/enzymeHelpers'; +import ExecutionEnvironmentLookup from './ExecutionEnvironmentLookup'; +import { ExecutionEnvironmentsAPI } from '../../api'; + +jest.mock('../../api'); + +const mockedExecutionEnvironments = { + count: 1, + results: [ + { + id: 2, + name: 'Foo', + image: 'quay.io/ansible/awx-ee', + pull: 'missing', + }, + ], +}; + +const executionEnvironment = { + id: 42, + name: 'Bar', + image: 'quay.io/ansible/bar', + pull: 'missing', +}; + +describe('ExecutionEnvironmentLookup', () => { + let wrapper; + + beforeEach(() => { + ExecutionEnvironmentsAPI.read.mockResolvedValue( + mockedExecutionEnvironments + ); + }); + + afterEach(() => { + jest.clearAllMocks(); + wrapper.unmount(); + }); + + test('should render successfully', async () => { + ExecutionEnvironmentsAPI.readOptions.mockReturnValue({ + data: { + actions: { + GET: {}, + POST: {}, + }, + related_search_fields: [], + }, + }); + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + wrapper.update(); + expect(ExecutionEnvironmentsAPI.read).toHaveBeenCalledTimes(1); + expect(wrapper.find('ExecutionEnvironmentLookup')).toHaveLength(1); + }); + + 
test('should fetch execution environments', async () => { + await act(async () => { + wrapper = mountWithContexts( + {}} + /> + ); + }); + expect(ExecutionEnvironmentsAPI.read).toHaveBeenCalledTimes(1); + }); +}); diff --git a/awx/ui_next/src/components/Lookup/OrganizationLookup.jsx b/awx/ui_next/src/components/Lookup/OrganizationLookup.jsx index 3fb443426e..8252c9035c 100644 --- a/awx/ui_next/src/components/Lookup/OrganizationLookup.jsx +++ b/awx/ui_next/src/components/Lookup/OrganizationLookup.jsx @@ -30,6 +30,7 @@ function OrganizationLookup({ history, autoPopulate, isDisabled, + helperText, }) { const autoPopulateLookup = useAutoPopulateLookup(onChange); @@ -79,6 +80,7 @@ function OrganizationLookup({ isRequired={required} validated={isValid ? 'default' : 'error'} label={i18n._(t`Organization`)} + helperText={helperText} > { - const { name, username } = props; - if (!name && !username) { +const requiredField = props => { + const { name, username, image } = props; + if (!name && !username && !image) { return new Error( - `One of 'name' or 'username' is required by ItemToDelete component.` + `One of 'name', 'username' or 'image' is required by ItemToDelete component.` ); } if (name) { @@ -47,13 +47,24 @@ const requireNameOrUsername = props => { 'ItemToDelete' ); } + if (image) { + checkPropTypes( + { + image: string, + }, + { image: props.image }, + 'prop', + 'ItemToDelete' + ); + } return null; }; const ItemToDelete = shape({ id: number.isRequired, - name: requireNameOrUsername, - username: requireNameOrUsername, + name: requiredField, + username: requiredField, + image: requiredField, summary_fields: shape({ user_capabilities: shape({ delete: bool.isRequired, @@ -171,7 +182,7 @@ function ToolbarDeleteButton({
{i18n._(t`This action will delete the following:`)}
{itemsToDelete.map(item => ( - {item.name || item.username} + {item.name || item.username || item.image}
))} diff --git a/awx/ui_next/src/routeConfig.js b/awx/ui_next/src/routeConfig.js index a343a7d1e0..507bc4e6d7 100644 --- a/awx/ui_next/src/routeConfig.js +++ b/awx/ui_next/src/routeConfig.js @@ -2,13 +2,13 @@ import { t } from '@lingui/macro'; import ActivityStream from './screens/ActivityStream'; import Applications from './screens/Application'; -import Credentials from './screens/Credential'; import CredentialTypes from './screens/CredentialType'; +import Credentials from './screens/Credential'; import Dashboard from './screens/Dashboard'; +import ExecutionEnvironments from './screens/ExecutionEnvironment'; import Hosts from './screens/Host'; import InstanceGroups from './screens/InstanceGroup'; import Inventory from './screens/Inventory'; -import { Jobs } from './screens/Job'; import ManagementJobs from './screens/ManagementJob'; import NotificationTemplates from './screens/NotificationTemplate'; import Organizations from './screens/Organization'; @@ -19,6 +19,7 @@ import Teams from './screens/Team'; import Templates from './screens/Template'; import Users from './screens/User'; import WorkflowApprovals from './screens/WorkflowApproval'; +import { Jobs } from './screens/Job'; // Ideally, this should just be a regular object that we export, but we // need the i18n. 
When lingui3 arrives, we will be able to import i18n @@ -138,6 +139,11 @@ function getRouteConfig(i18n) { path: '/applications', screen: Applications, }, + { + title: i18n._(t`Execution Environments`), + path: '/execution_environments', + screen: ExecutionEnvironments, + }, ], }, { diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironment.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironment.jsx new file mode 100644 index 0000000000..55a3228e13 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironment.jsx @@ -0,0 +1,126 @@ +import React, { useEffect, useCallback } from 'react'; +import { + Link, + Redirect, + Route, + Switch, + useLocation, + useParams, +} from 'react-router-dom'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Card, PageSection } from '@patternfly/react-core'; +import { CaretLeftIcon } from '@patternfly/react-icons'; + +import useRequest from '../../util/useRequest'; +import { ExecutionEnvironmentsAPI } from '../../api'; +import RoutedTabs from '../../components/RoutedTabs'; +import ContentError from '../../components/ContentError'; +import ContentLoading from '../../components/ContentLoading'; + +import ExecutionEnvironmentDetails from './ExecutionEnvironmentDetails'; +import ExecutionEnvironmentEdit from './ExecutionEnvironmentEdit'; + +function ExecutionEnvironment({ i18n, setBreadcrumb }) { + const { id } = useParams(); + const { pathname } = useLocation(); + + const { + isLoading, + error: contentError, + request: fetchExecutionEnvironments, + result: executionEnvironment, + } = useRequest( + useCallback(async () => { + const { data } = await ExecutionEnvironmentsAPI.readDetail(id); + return data; + }, [id]), + null + ); + + useEffect(() => { + fetchExecutionEnvironments(); + }, [fetchExecutionEnvironments, pathname]); + + useEffect(() => { + if (executionEnvironment) { + setBreadcrumb(executionEnvironment); + } + }, 
[executionEnvironment, setBreadcrumb]); + + const tabsArray = [ + { + name: ( + <> + + {i18n._(t`Back to execution environments`)} + + ), + link: '/execution_environments', + id: 99, + }, + { + name: i18n._(t`Details`), + link: `/execution_environments/${id}/details`, + id: 0, + }, + ]; + + if (!isLoading && contentError) { + return ( + + + + {contentError.response?.status === 404 && ( + + {i18n._(t`Execution environment not found.`)}{' '} + + {i18n._(t`View all execution environments`)} + + + )} + + + + ); + } + + let cardHeader = ; + if (pathname.endsWith('edit')) { + cardHeader = null; + } + + return ( + + + {cardHeader} + {isLoading && } + {!isLoading && executionEnvironment && ( + + + {executionEnvironment && ( + <> + + + + + + + + )} + + )} + + + ); +} + +export default withI18n()(ExecutionEnvironment); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/ExecutionEnvironmentAdd.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/ExecutionEnvironmentAdd.jsx new file mode 100644 index 0000000000..2c94b46602 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/ExecutionEnvironmentAdd.jsx @@ -0,0 +1,50 @@ +import React, { useState } from 'react'; +import { Card, PageSection } from '@patternfly/react-core'; +import { useHistory } from 'react-router-dom'; + +import { ExecutionEnvironmentsAPI } from '../../../api'; +import { Config } from '../../../contexts/Config'; +import { CardBody } from '../../../components/Card'; +import ExecutionEnvironmentForm from '../shared/ExecutionEnvironmentForm'; + +function ExecutionEnvironmentAdd() { + const history = useHistory(); + const [submitError, setSubmitError] = useState(null); + + const handleSubmit = async values => { + try { + const { data: response } = await ExecutionEnvironmentsAPI.create({ + ...values, + credential: values.credential?.id, + organization: values.organization?.id, + }); + 
history.push(`/execution_environments/${response.id}/details`); + } catch (error) { + setSubmitError(error); + } + }; + + const handleCancel = () => { + history.push(`/execution_environments`); + }; + return ( + + + + + {({ me }) => ( + + )} + + + + + ); +} + +export default ExecutionEnvironmentAdd; diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/ExecutionEnvironmentAdd.test.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/ExecutionEnvironmentAdd.test.jsx new file mode 100644 index 0000000000..92f18c7d33 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/ExecutionEnvironmentAdd.test.jsx @@ -0,0 +1,109 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { createMemoryHistory } from 'history'; + +import { + mountWithContexts, + waitForElement, +} from '../../../../testUtils/enzymeHelpers'; +import { ExecutionEnvironmentsAPI } from '../../../api'; +import ExecutionEnvironmentAdd from './ExecutionEnvironmentAdd'; + +jest.mock('../../../api'); + +const mockMe = { + is_superuser: true, + is_system_auditor: false, +}; + +const executionEnvironmentData = { + name: 'Test EE', + credential: 4, + description: 'A simple EE', + image: 'https://registry.com/image/container', + pull: 'one', +}; + +const mockOptions = { + data: { + actions: { + POST: { + pull: { + choices: [ + ['one', 'One'], + ['two', 'Two'], + ['three', 'Three'], + ], + }, + }, + }, + }, +}; + +ExecutionEnvironmentsAPI.readOptions.mockResolvedValue(mockOptions); +ExecutionEnvironmentsAPI.create.mockResolvedValue({ + data: { + id: 42, + }, +}); + +describe('', () => { + let wrapper; + let history; + + beforeEach(async () => { + history = createMemoryHistory({ + initialEntries: ['/execution_environments'], + }); + await act(async () => { + wrapper = mountWithContexts(, { + context: { router: { history } }, + }); + }); + }); + + afterEach(() => { + jest.clearAllMocks(); + 
wrapper.unmount(); + }); + + test('handleSubmit should call the api and redirect to details page', async () => { + await act(async () => { + wrapper.find('ExecutionEnvironmentForm').prop('onSubmit')({ + executionEnvironmentData, + }); + }); + wrapper.update(); + expect(ExecutionEnvironmentsAPI.create).toHaveBeenCalledWith({ + executionEnvironmentData, + }); + expect(history.location.pathname).toBe( + '/execution_environments/42/details' + ); + }); + + test('handleCancel should return the user back to the execution environments list', async () => { + await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); + + wrapper.find('Button[aria-label="Cancel"]').simulate('click'); + expect(history.location.pathname).toEqual('/execution_environments'); + }); + + test('failed form submission should show an error message', async () => { + const error = { + response: { + data: { detail: 'An error occurred' }, + }, + }; + ExecutionEnvironmentsAPI.create.mockImplementationOnce(() => + Promise.reject(error) + ); + await act(async () => { + wrapper.find('ExecutionEnvironmentForm').invoke('onSubmit')( + executionEnvironmentData + ); + }); + wrapper.update(); + expect(wrapper.find('FormSubmitError').length).toBe(1); + }); +}); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/index.js b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/index.js new file mode 100644 index 0000000000..69765fcf3b --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentAdd/index.js @@ -0,0 +1 @@ +export { default } from './ExecutionEnvironmentAdd'; diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/ExecutionEnvironmentDetails.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/ExecutionEnvironmentDetails.jsx new file mode 100644 index 0000000000..68b9f9879d --- /dev/null +++ 
b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/ExecutionEnvironmentDetails.jsx @@ -0,0 +1,138 @@ +import React, { useCallback } from 'react'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Link, useHistory } from 'react-router-dom'; +import { Button, Label } from '@patternfly/react-core'; + +import AlertModal from '../../../components/AlertModal'; +import { CardBody, CardActionsRow } from '../../../components/Card'; +import DeleteButton from '../../../components/DeleteButton'; +import { + Detail, + DetailList, + UserDateDetail, +} from '../../../components/DetailList'; +import useRequest, { useDismissableError } from '../../../util/useRequest'; +import { toTitleCase } from '../../../util/strings'; +import { ExecutionEnvironmentsAPI } from '../../../api'; + +function ExecutionEnvironmentDetails({ executionEnvironment, i18n }) { + const history = useHistory(); + const { + id, + name, + image, + description, + pull, + organization, + summary_fields, + } = executionEnvironment; + + const { + request: deleteExecutionEnvironment, + isLoading, + error: deleteError, + } = useRequest( + useCallback(async () => { + await ExecutionEnvironmentsAPI.destroy(id); + history.push(`/execution_environments`); + }, [id, history]) + ); + + const { error, dismissError } = useDismissableError(deleteError); + + return ( + + + + + + + {summary_fields.organization.name} + + ) : ( + i18n._(t`Globally Available`) + ) + } + dataCy="execution-environment-detail-organization" + /> + + {executionEnvironment.summary_fields.credential && ( + + {executionEnvironment.summary_fields.credential.name} + + } + dataCy="execution-environment-credential" + /> + )} + + + + + + + {i18n._(t`Delete`)} + + + + {error && ( + + )} + + ); +} + +export default withI18n()(ExecutionEnvironmentDetails); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/ExecutionEnvironmentDetails.test.jsx 
b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/ExecutionEnvironmentDetails.test.jsx new file mode 100644 index 0000000000..d258ebe65b --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/ExecutionEnvironmentDetails.test.jsx @@ -0,0 +1,138 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { createMemoryHistory } from 'history'; + +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; +import { ExecutionEnvironmentsAPI } from '../../../api'; + +import ExecutionEnvironmentDetails from './ExecutionEnvironmentDetails'; + +jest.mock('../../../api'); + +const executionEnvironment = { + id: 17, + type: 'execution_environment', + url: '/api/v2/execution_environments/17/', + related: { + created_by: '/api/v2/users/1/', + modified_by: '/api/v2/users/1/', + activity_stream: '/api/v2/execution_environments/17/activity_stream/', + unified_job_templates: + '/api/v2/execution_environments/17/unified_job_templates/', + credential: '/api/v2/credentials/4/', + }, + summary_fields: { + credential: { + id: 4, + name: 'Container Registry', + }, + created_by: { + id: 1, + username: 'admin', + first_name: '', + last_name: '', + }, + modified_by: { + id: 1, + username: 'admin', + first_name: '', + last_name: '', + }, + }, + name: 'Default EE', + created: '2020-09-17T20:14:15.408782Z', + modified: '2020-09-17T20:14:15.408802Z', + description: 'Foo', + organization: null, + image: 'https://localhost:90/12345/ma', + managed_by_tower: false, + credential: 4, +}; + +describe('', () => { + let wrapper; + test('should render details properly', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + wrapper.update(); + + expect(wrapper.find('Detail[label="Image"]').prop('value')).toEqual( + executionEnvironment.image + ); + expect(wrapper.find('Detail[label="Description"]').prop('value')).toEqual( + 'Foo' + ); + 
expect(wrapper.find('Detail[label="Organization"]').prop('value')).toEqual( + 'Globally Available' + ); + expect( + wrapper.find('Detail[label="Credential"]').prop('value').props.children + ).toEqual(executionEnvironment.summary_fields.credential.name); + const dates = wrapper.find('UserDateDetail'); + expect(dates).toHaveLength(2); + expect(dates.at(0).prop('date')).toEqual(executionEnvironment.created); + expect(dates.at(1).prop('date')).toEqual(executionEnvironment.modified); + }); + + test('should render organization detail', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + wrapper.update(); + + expect(wrapper.find('Detail[label="Image"]').prop('value')).toEqual( + executionEnvironment.image + ); + expect(wrapper.find('Detail[label="Description"]').prop('value')).toEqual( + 'Foo' + ); + expect(wrapper.find(`Detail[label="Organization"] dd`).text()).toBe('Bar'); + expect( + wrapper.find('Detail[label="Credential"]').prop('value').props.children + ).toEqual(executionEnvironment.summary_fields.credential.name); + const dates = wrapper.find('UserDateDetail'); + expect(dates).toHaveLength(2); + expect(dates.at(0).prop('date')).toEqual(executionEnvironment.created); + expect(dates.at(1).prop('date')).toEqual(executionEnvironment.modified); + }); + + test('expected api call is made for delete', async () => { + const history = createMemoryHistory({ + initialEntries: ['/execution_environments/42/details'], + }); + await act(async () => { + wrapper = mountWithContexts( + , + { + context: { router: { history } }, + } + ); + }); + await act(async () => { + wrapper.find('DeleteButton').invoke('onConfirm')(); + }); + expect(ExecutionEnvironmentsAPI.destroy).toHaveBeenCalledTimes(1); + expect(history.location.pathname).toBe('/execution_environments'); + }); +}); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/index.js b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/index.js 
new file mode 100644 index 0000000000..36121ea0d9 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentDetails/index.js @@ -0,0 +1 @@ +export { default } from './ExecutionEnvironmentDetails'; diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/ExecutionEnvironmentEdit.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/ExecutionEnvironmentEdit.jsx new file mode 100644 index 0000000000..ea4943b2da --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/ExecutionEnvironmentEdit.jsx @@ -0,0 +1,47 @@ +import React, { useState } from 'react'; +import { useHistory } from 'react-router-dom'; + +import { CardBody } from '../../../components/Card'; +import { ExecutionEnvironmentsAPI } from '../../../api'; +import ExecutionEnvironmentForm from '../shared/ExecutionEnvironmentForm'; +import { Config } from '../../../contexts/Config'; + +function ExecutionEnvironmentEdit({ executionEnvironment }) { + const history = useHistory(); + const [submitError, setSubmitError] = useState(null); + const detailsUrl = `/execution_environments/${executionEnvironment.id}/details`; + + const handleSubmit = async values => { + try { + await ExecutionEnvironmentsAPI.update(executionEnvironment.id, { + ...values, + credential: values.credential ? values.credential.id : null, + organization: values.organization ? 
values.organization.id : null, + }); + history.push(detailsUrl); + } catch (error) { + setSubmitError(error); + } + }; + + const handleCancel = () => { + history.push(detailsUrl); + }; + return ( + + + {({ me }) => ( + + )} + + + ); +} + +export default ExecutionEnvironmentEdit; diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/ExecutionEnvironmentEdit.test.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/ExecutionEnvironmentEdit.test.jsx new file mode 100644 index 0000000000..374a0c5dba --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/ExecutionEnvironmentEdit.test.jsx @@ -0,0 +1,130 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { createMemoryHistory } from 'history'; + +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; +import { ExecutionEnvironmentsAPI } from '../../../api'; + +import ExecutionEnvironmentEdit from './ExecutionEnvironmentEdit'; + +jest.mock('../../../api'); + +const mockMe = { + is_superuser: true, + is_system_auditor: false, +}; + +const executionEnvironmentData = { + id: 42, + credential: { id: 4 }, + description: 'A simple EE', + image: 'https://registry.com/image/container', + pull: 'one', + name: 'Test EE', +}; + +const updateExecutionEnvironmentData = { + image: 'https://registry.com/image/container2', + description: 'Updated new description', +}; + +const mockOptions = { + data: { + actions: { + POST: { + pull: { + choices: [ + ['one', 'One'], + ['two', 'Two'], + ['three', 'Three'], + ], + }, + }, + }, + }, +}; + +ExecutionEnvironmentsAPI.readOptions.mockResolvedValue(mockOptions); + +describe('', () => { + let wrapper; + let history; + + beforeAll(async () => { + history = createMemoryHistory(); + await act(async () => { + wrapper = mountWithContexts( + , + { + context: { router: { history } }, + } + ); + }); + }); + + afterAll(() => { + jest.clearAllMocks(); + wrapper.unmount(); 
+ }); + + test('handleSubmit should call the api and redirect to details page', async () => { + await act(async () => { + wrapper.find('ExecutionEnvironmentForm').invoke('onSubmit')( + updateExecutionEnvironmentData + ); + wrapper.update(); + expect(ExecutionEnvironmentsAPI.update).toHaveBeenCalledWith(42, { + ...updateExecutionEnvironmentData, + credential: null, + organization: null, + }); + }); + + expect(history.location.pathname).toEqual( + '/execution_environments/42/details' + ); + }); + + test('should navigate to execution environments details when cancel is clicked', async () => { + await act(async () => { + wrapper.find('button[aria-label="Cancel"]').prop('onClick')(); + }); + expect(history.location.pathname).toEqual( + '/execution_environments/42/details' + ); + }); + + test('should navigate to execution environments detail after successful submission', async () => { + await act(async () => { + wrapper.find('ExecutionEnvironmentForm').invoke('onSubmit')({ + updateExecutionEnvironmentData, + }); + }); + wrapper.update(); + expect(wrapper.find('FormSubmitError').length).toBe(0); + expect(history.location.pathname).toEqual( + '/execution_environments/42/details' + ); + }); + + test('failed form submission should show an error message', async () => { + const error = { + response: { + data: { detail: 'An error occurred' }, + }, + }; + ExecutionEnvironmentsAPI.update.mockImplementationOnce(() => + Promise.reject(error) + ); + await act(async () => { + wrapper.find('ExecutionEnvironmentForm').invoke('onSubmit')( + updateExecutionEnvironmentData + ); + }); + wrapper.update(); + expect(wrapper.find('FormSubmitError').length).toBe(1); + }); +}); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/index.js b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/index.js new file mode 100644 index 0000000000..6ab135ca05 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentEdit/index.js 
@@ -0,0 +1 @@ +export { default } from './ExecutionEnvironmentEdit'; diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnviromentList.test.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnviromentList.test.jsx new file mode 100644 index 0000000000..1490ff49e3 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnviromentList.test.jsx @@ -0,0 +1,188 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; + +import { + mountWithContexts, + waitForElement, +} from '../../../../testUtils/enzymeHelpers'; + +import { ExecutionEnvironmentsAPI } from '../../../api'; +import ExecutionEnvironmentList from './ExecutionEnvironmentList'; + +jest.mock('../../../api/models/ExecutionEnvironments'); + +const executionEnvironments = { + data: { + results: [ + { + name: 'Foo', + id: 1, + image: 'https://registry.com/r/image/manifest', + organization: null, + credential: null, + url: '/api/v2/execution_environments/1/', + summary_fields: { user_capabilities: { edit: true, delete: true } }, + }, + { + name: 'Bar', + id: 2, + image: 'https://registry.com/r/image2/manifest', + organization: null, + credential: null, + url: '/api/v2/execution_environments/2/', + summary_fields: { user_capabilities: { edit: false, delete: true } }, + }, + ], + count: 2, + }, +}; + +const options = { data: { actions: { POST: true } } }; + +describe('', () => { + beforeEach(() => { + ExecutionEnvironmentsAPI.read.mockResolvedValue(executionEnvironments); + ExecutionEnvironmentsAPI.readOptions.mockResolvedValue(options); + }); + + afterEach(() => { + jest.clearAllMocks(); + }); + let wrapper; + + test('should mount successfully', async () => { + await act(async () => { + wrapper = mountWithContexts(); + }); + await waitForElement( + wrapper, + 'ExecutionEnvironmentList', + el => el.length > 0 + ); + }); + + test('should have data fetched and render 2 rows', async 
() => { + await act(async () => { + wrapper = mountWithContexts(); + }); + await waitForElement( + wrapper, + 'ExecutionEnvironmentList', + el => el.length > 0 + ); + + expect(wrapper.find('ExecutionEnvironmentListItem').length).toBe(2); + expect(ExecutionEnvironmentsAPI.read).toBeCalled(); + expect(ExecutionEnvironmentsAPI.readOptions).toBeCalled(); + }); + + test('should delete items successfully', async () => { + await act(async () => { + wrapper = mountWithContexts(); + }); + await waitForElement( + wrapper, + 'ExecutionEnvironmentList', + el => el.length > 0 + ); + + await act(async () => { + wrapper + .find('ExecutionEnvironmentListItem') + .at(0) + .invoke('onSelect')(); + }); + wrapper.update(); + await act(async () => { + wrapper + .find('ExecutionEnvironmentListItem') + .at(1) + .invoke('onSelect')(); + }); + wrapper.update(); + await act(async () => { + wrapper.find('ToolbarDeleteButton').invoke('onDelete')(); + }); + + expect(ExecutionEnvironmentsAPI.destroy).toHaveBeenCalledTimes(2); + }); + + test('should render deletion error modal', async () => { + ExecutionEnvironmentsAPI.destroy.mockRejectedValue( + new Error({ + response: { + config: { + method: 'DELETE', + url: '/api/v2/execution_environments', + }, + data: 'An error occurred', + }, + }) + ); + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ExecutionEnvironmentList', el => el.length > 0); + + wrapper + .find('ExecutionEnvironmentListItem') + .at(0) + .find('input') + .simulate('change', 'a'); + wrapper.update(); + + expect( + wrapper + .find('ExecutionEnvironmentListItem') + .at(0) + .find('input') + .prop('checked') + ).toBe(true); + + await act(async () => + wrapper.find('Button[aria-label="Delete"]').prop('onClick')() + ); + wrapper.update(); + + await act(async () => + wrapper.find('Button[aria-label="confirm delete"]').prop('onClick')() + ); + wrapper.update(); + expect(wrapper.find('ErrorDetail').length).toBe(1); + }); + + test('should thrown 
content error', async () => { + ExecutionEnvironmentsAPI.read.mockRejectedValue( + new Error({ + response: { + config: { + method: 'GET', + url: '/api/v2/execution_environments', + }, + data: 'An error occurred', + }, + }) + ); + await act(async () => { + wrapper = mountWithContexts(); + }); + await waitForElement( + wrapper, + 'ExecutionEnvironmentList', + el => el.length > 0 + ); + expect(wrapper.find('ContentError').length).toBe(1); + }); + + test('should not render add button', async () => { + ExecutionEnvironmentsAPI.read.mockResolvedValue(executionEnvironments); + ExecutionEnvironmentsAPI.readOptions.mockResolvedValue({ + data: { actions: { POST: false } }, + }); + await act(async () => { + wrapper = mountWithContexts(); + }); + waitForElement(wrapper, 'ExecutionEnvironmentList', el => el.length > 0); + expect(wrapper.find('ToolbarAddButton').length).toBe(0); + }); +}); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.jsx new file mode 100644 index 0000000000..312b18f2cf --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentList.jsx @@ -0,0 +1,221 @@ +import React, { useEffect, useCallback } from 'react'; +import { useLocation, useRouteMatch } from 'react-router-dom'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Card, PageSection } from '@patternfly/react-core'; + +import { ExecutionEnvironmentsAPI } from '../../../api'; +import { getQSConfig, parseQueryString } from '../../../util/qs'; +import useRequest, { useDeleteItems } from '../../../util/useRequest'; +import useSelected from '../../../util/useSelected'; +import { + ToolbarDeleteButton, + ToolbarAddButton, +} from '../../../components/PaginatedDataList'; +import PaginatedTable, { + HeaderRow, + HeaderCell, +} from 
'../../../components/PaginatedTable'; +import ErrorDetail from '../../../components/ErrorDetail'; +import AlertModal from '../../../components/AlertModal'; +import DatalistToolbar from '../../../components/DataListToolbar'; + +import ExecutionEnvironmentsListItem from './ExecutionEnvironmentListItem'; + +const QS_CONFIG = getQSConfig('execution_environments', { + page: 1, + page_size: 20, + order_by: 'name', +}); + +function ExecutionEnvironmentList({ i18n }) { + const location = useLocation(); + const match = useRouteMatch(); + + const { + error: contentError, + isLoading, + request: fetchExecutionEnvironments, + result: { + executionEnvironments, + executionEnvironmentsCount, + actions, + relatedSearchableKeys, + searchableKeys, + }, + } = useRequest( + useCallback(async () => { + const params = parseQueryString(QS_CONFIG, location.search); + + const [response, responseActions] = await Promise.all([ + ExecutionEnvironmentsAPI.read(params), + ExecutionEnvironmentsAPI.readOptions(), + ]); + + return { + executionEnvironments: response.data.results, + executionEnvironmentsCount: response.data.count, + actions: responseActions.data.actions, + relatedSearchableKeys: ( + responseActions?.data?.related_search_fields || [] + ).map(val => val.slice(0, -8)), + searchableKeys: Object.keys( + responseActions.data.actions?.GET || {} + ).filter(key => responseActions.data.actions?.GET[key].filterable), + }; + }, [location]), + { + executionEnvironments: [], + executionEnvironmentsCount: 0, + actions: {}, + relatedSearchableKeys: [], + searchableKeys: [], + } + ); + + useEffect(() => { + fetchExecutionEnvironments(); + }, [fetchExecutionEnvironments]); + + const { selected, isAllSelected, handleSelect, setSelected } = useSelected( + executionEnvironments + ); + + const { + isLoading: deleteLoading, + deletionError, + deleteItems: deleteExecutionEnvironments, + clearDeletionError, + } = useDeleteItems( + useCallback(async () => { + await Promise.all( + selected.map(({ id }) => 
ExecutionEnvironmentsAPI.destroy(id)) + ); + }, [selected]), + { + qsConfig: QS_CONFIG, + allItemsSelected: isAllSelected, + fetchItems: fetchExecutionEnvironments, + } + ); + + const handleDelete = async () => { + await deleteExecutionEnvironments(); + setSelected([]); + }; + + const canAdd = actions && actions.POST; + + return ( + <> + + + + {i18n._(t`Name`)} + {i18n._(t`Image`)} + {i18n._(t`Organization`)} + {i18n._(t`Actions`)} + + } + renderToolbar={props => ( + + setSelected(isSelected ? [...executionEnvironments] : []) + } + qsConfig={QS_CONFIG} + additionalControls={[ + ...(canAdd + ? [ + , + ] + : []), + , + ]} + /> + )} + renderRow={(executionEnvironment, index) => ( + handleSelect(executionEnvironment)} + isSelected={selected.some( + row => row.id === executionEnvironment.id + )} + /> + )} + emptyStateControls={ + canAdd && ( + + ) + } + /> + + + + {i18n._(t`Failed to delete one or more execution environments`)} + + + + ); +} + +export default withI18n()(ExecutionEnvironmentList); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.jsx new file mode 100644 index 0000000000..bb814a1921 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.jsx @@ -0,0 +1,79 @@ +import React from 'react'; +import { string, bool, func } from 'prop-types'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Link } from 'react-router-dom'; +import { Button } from '@patternfly/react-core'; +import { Tr, Td } from '@patternfly/react-table'; +import { PencilAltIcon } from '@patternfly/react-icons'; + +import { ActionsTd, ActionItem } from '../../../components/PaginatedTable'; +import { ExecutionEnvironment } from '../../../types'; + +function ExecutionEnvironmentListItem({ + executionEnvironment, + detailUrl, + 
isSelected, + onSelect, + i18n, + rowIndex, +}) { + const labelId = `check-action-${executionEnvironment.id}`; + + return ( + + + + + {executionEnvironment.name} + + + + {executionEnvironment.image} + + + {executionEnvironment.organization ? ( + + {executionEnvironment?.summary_fields?.organization?.name} + + ) : ( + i18n._(t`Globally Available`) + )} + + + + + + + + ); +} + +ExecutionEnvironmentListItem.prototype = { + executionEnvironment: ExecutionEnvironment.isRequired, + detailUrl: string.isRequired, + isSelected: bool.isRequired, + onSelect: func.isRequired, +}; + +export default withI18n()(ExecutionEnvironmentListItem); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.test.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.test.jsx new file mode 100644 index 0000000000..0e7c037aed --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/ExecutionEnvironmentListItem.test.jsx @@ -0,0 +1,74 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; + +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; + +import ExecutionEnvironmentListItem from './ExecutionEnvironmentListItem'; + +describe('', () => { + let wrapper; + const executionEnvironment = { + name: 'Foo', + id: 1, + image: 'https://registry.com/r/image/manifest', + organization: null, + credential: null, + summary_fields: { user_capabilities: { edit: true } }, + }; + + test('should mount successfully', async () => { + await act(async () => { + wrapper = mountWithContexts( + + + {}} + /> + +
+ ); + }); + expect(wrapper.find('ExecutionEnvironmentListItem').length).toBe(1); + }); + + test('should render the proper data', async () => { + await act(async () => { + wrapper = mountWithContexts( + + + {}} + /> + +
+ ); + }); + expect( + wrapper + .find('Td') + .at(1) + .text() + ).toBe(executionEnvironment.name); + expect( + wrapper + .find('Td') + .at(2) + .text() + ).toBe(executionEnvironment.image); + + expect( + wrapper + .find('Td') + .at(3) + .text() + ).toBe('Globally Available'); + + expect(wrapper.find('PencilAltIcon').exists()).toBeTruthy(); + }); +}); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/index.js b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/index.js new file mode 100644 index 0000000000..a8aa4263d7 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironmentList/index.js @@ -0,0 +1 @@ +export { default } from './ExecutionEnvironmentList'; diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironments.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironments.jsx new file mode 100644 index 0000000000..802e78a679 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironments.jsx @@ -0,0 +1,56 @@ +import React, { useState, useCallback } from 'react'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Route, Switch } from 'react-router-dom'; + +import ExecutionEnvironment from './ExecutionEnvironment'; +import ExecutionEnvironmentAdd from './ExecutionEnvironmentAdd'; +import ExecutionEnvironmentList from './ExecutionEnvironmentList'; +import ScreenHeader from '../../components/ScreenHeader/ScreenHeader'; + +function ExecutionEnvironments({ i18n }) { + const [breadcrumbConfig, setBreadcrumbConfig] = useState({ + '/execution_environments': i18n._(t`Execution environments`), + '/execution_environments/add': i18n._(t`Create Execution environments`), + }); + + const buildBreadcrumbConfig = useCallback( + executionEnvironments => { + if (!executionEnvironments) { + return; + } + setBreadcrumbConfig({ + '/execution_environments': i18n._(t`Execution environments`), + 
'/execution_environments/add': i18n._(t`Create Execution environments`), + [`/execution_environments/${executionEnvironments.id}`]: `${executionEnvironments.name}`, + [`/execution_environments/${executionEnvironments.id}/edit`]: i18n._( + t`Edit details` + ), + [`/execution_environments/${executionEnvironments.id}/details`]: i18n._( + t`Details` + ), + }); + }, + [i18n] + ); + return ( + <> + + + + + + + + + + + + + + ); +} +export default withI18n()(ExecutionEnvironments); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironments.test.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironments.test.jsx new file mode 100644 index 0000000000..5ceb36ac93 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/ExecutionEnvironments.test.jsx @@ -0,0 +1,25 @@ +import React from 'react'; + +import { mountWithContexts } from '../../../testUtils/enzymeHelpers'; + +import ExecutionEnvironments from './ExecutionEnvironments'; + +describe('', () => { + let pageWrapper; + let pageSections; + + beforeEach(() => { + pageWrapper = mountWithContexts(); + pageSections = pageWrapper.find('PageSection'); + }); + + afterEach(() => { + pageWrapper.unmount(); + }); + + test('initially renders without crashing', () => { + expect(pageWrapper.length).toBe(1); + expect(pageSections.length).toBe(1); + expect(pageSections.first().props().variant).toBe('light'); + }); +}); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/index.js b/awx/ui_next/src/screens/ExecutionEnvironment/index.js new file mode 100644 index 0000000000..f66a2b3cf3 --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/index.js @@ -0,0 +1 @@ +export { default } from './ExecutionEnvironments'; diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/shared/ExecutionEnvironmentForm.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/shared/ExecutionEnvironmentForm.jsx new file mode 100644 index 0000000000..a8e0e33a0f --- /dev/null +++ 
b/awx/ui_next/src/screens/ExecutionEnvironment/shared/ExecutionEnvironmentForm.jsx @@ -0,0 +1,211 @@ +import React, { useCallback, useEffect } from 'react'; +import { func, shape } from 'prop-types'; +import { Formik, useField, useFormikContext } from 'formik'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Form, FormGroup } from '@patternfly/react-core'; + +import { ExecutionEnvironmentsAPI } from '../../../api'; +import CredentialLookup from '../../../components/Lookup/CredentialLookup'; +import FormActionGroup from '../../../components/FormActionGroup'; +import FormField, { FormSubmitError } from '../../../components/FormField'; +import AnsibleSelect from '../../../components/AnsibleSelect'; +import { FormColumnLayout } from '../../../components/FormLayout'; +import { OrganizationLookup } from '../../../components/Lookup'; +import ContentError from '../../../components/ContentError'; +import ContentLoading from '../../../components/ContentLoading'; +import { required } from '../../../util/validators'; +import useRequest from '../../../util/useRequest'; + +function ExecutionEnvironmentFormFields({ + i18n, + me, + options, + executionEnvironment, +}) { + const [credentialField] = useField('credential'); + const [organizationField, organizationMeta, organizationHelpers] = useField({ + name: 'organization', + validate: + !me?.is_superuser && + required(i18n._(t`Select a value for this field`), i18n), + }); + + const { setFieldValue } = useFormikContext(); + + const onCredentialChange = useCallback( + value => { + setFieldValue('credential', value); + }, + [setFieldValue] + ); + + const onOrganizationChange = useCallback( + value => { + setFieldValue('organization', value); + }, + [setFieldValue] + ); + + const [ + containerOptionsField, + containerOptionsMeta, + containerOptionsHelpers, + ] = useField({ + name: 'pull', + }); + + const containerPullChoices = options?.actions?.POST?.pull?.choices.map( + ([value, label]) => ({ 
value, label, key: value }) + ); + + return ( + <> + + + + { + containerOptionsHelpers.setValue(value); + }} + /> + + + organizationHelpers.setTouched()} + onChange={onOrganizationChange} + value={organizationField.value} + required={!me.is_superuser} + helperText={ + me?.is_superuser + ? i18n._( + t`Leave this field blank to make the execution environment globally available.` + ) + : null + } + autoPopulate={!me?.is_superuser ? !executionEnvironment?.id : null} + /> + + + + ); +} + +function ExecutionEnvironmentForm({ + executionEnvironment = {}, + onSubmit, + onCancel, + submitError, + me, + ...rest +}) { + const { + isLoading, + error, + request: fetchOptions, + result: options, + } = useRequest( + useCallback(async () => { + const res = await ExecutionEnvironmentsAPI.readOptions(); + const { data } = res; + return data; + }, []), + null + ); + + useEffect(() => { + fetchOptions(); + }, [fetchOptions]); + + if (isLoading || !options) { + return ; + } + + if (error) { + return ; + } + + const initialValues = { + name: executionEnvironment.name || '', + image: executionEnvironment.image || '', + pull: executionEnvironment?.pull || '', + description: executionEnvironment.description || '', + credential: executionEnvironment.summary_fields?.credential || null, + organization: executionEnvironment.summary_fields?.organization || null, + }; + return ( + onSubmit(values)}> + {formik => ( +
+ + + {submitError && } + + +
+ )} +
+ ); +} + +ExecutionEnvironmentForm.propTypes = { + executionEnvironment: shape({}), + onCancel: func.isRequired, + onSubmit: func.isRequired, + submitError: shape({}), +}; + +ExecutionEnvironmentForm.defaultProps = { + executionEnvironment: {}, + submitError: null, +}; + +export default withI18n()(ExecutionEnvironmentForm); diff --git a/awx/ui_next/src/screens/ExecutionEnvironment/shared/ExecutionEnvironmentForm.test.jsx b/awx/ui_next/src/screens/ExecutionEnvironment/shared/ExecutionEnvironmentForm.test.jsx new file mode 100644 index 0000000000..cddef9ffce --- /dev/null +++ b/awx/ui_next/src/screens/ExecutionEnvironment/shared/ExecutionEnvironmentForm.test.jsx @@ -0,0 +1,163 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; +import { + mountWithContexts, + waitForElement, +} from '../../../../testUtils/enzymeHelpers'; +import { ExecutionEnvironmentsAPI } from '../../../api'; + +import ExecutionEnvironmentForm from './ExecutionEnvironmentForm'; + +jest.mock('../../../api'); + +const mockMe = { + is_superuser: true, + is_super_auditor: false, +}; + +const executionEnvironment = { + id: 16, + name: 'Test EE', + type: 'execution_environment', + pull: 'one', + url: '/api/v2/execution_environments/16/', + related: { + created_by: '/api/v2/users/1/', + modified_by: '/api/v2/users/1/', + activity_stream: '/api/v2/execution_environments/16/activity_stream/', + unified_job_templates: + '/api/v2/execution_environments/16/unified_job_templates/', + credential: '/api/v2/credentials/4/', + }, + summary_fields: { + credential: { + id: 4, + name: 'Container Registry', + }, + }, + created: '2020-09-17T16:06:57.346128Z', + modified: '2020-09-17T16:06:57.346147Z', + description: 'A simple EE', + organization: null, + image: 'https://registry.com/image/container', + managed_by_tower: false, + credential: 4, +}; + +const mockOptions = { + data: { + actions: { + POST: { + pull: { + choices: [ + ['one', 'One'], + ['two', 'Two'], + ['three', 'Three'], + ], + }, 
+ }, + }, + }, +}; + +describe('', () => { + let wrapper; + let onCancel; + let onSubmit; + + beforeEach(async () => { + onCancel = jest.fn(); + onSubmit = jest.fn(); + ExecutionEnvironmentsAPI.readOptions.mockResolvedValue(mockOptions); + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); + }); + + afterEach(() => { + jest.clearAllMocks(); + wrapper.unmount(); + }); + + test('Initially renders successfully', () => { + expect(wrapper.length).toBe(1); + }); + + test('should display form fields properly', () => { + expect(wrapper.find('FormGroup[label="Image name"]').length).toBe(1); + expect(wrapper.find('FormGroup[label="Description"]').length).toBe(1); + expect(wrapper.find('CredentialLookup').length).toBe(1); + }); + + test('should call onSubmit when form submitted', async () => { + expect(onSubmit).not.toHaveBeenCalled(); + await act(async () => { + wrapper.find('button[aria-label="Save"]').simulate('click'); + }); + expect(onSubmit).toHaveBeenCalledTimes(1); + }); + + test('should update form values', async () => { + await act(async () => { + wrapper.find('input#execution-environment-image').simulate('change', { + target: { + value: 'Updated EE Name', + name: 'name', + }, + }); + wrapper.find('input#execution-environment-image').simulate('change', { + target: { + value: 'https://registry.com/image/container2', + name: 'image', + }, + }); + wrapper + .find('input#execution-environment-description') + .simulate('change', { + target: { value: 'New description', name: 'description' }, + }); + wrapper.find('CredentialLookup').invoke('onBlur')(); + wrapper.find('CredentialLookup').invoke('onChange')({ + id: 99, + name: 'credential', + }); + + wrapper.find('OrganizationLookup').invoke('onBlur')(); + wrapper.find('OrganizationLookup').invoke('onChange')({ + id: 3, + name: 'organization', + }); + }); + + wrapper.update(); + 
expect(wrapper.find('OrganizationLookup').prop('value')).toEqual({ + id: 3, + name: 'organization', + }); + expect( + wrapper.find('input#execution-environment-image').prop('value') + ).toEqual('https://registry.com/image/container2'); + expect( + wrapper.find('input#execution-environment-description').prop('value') + ).toEqual('New description'); + expect(wrapper.find('CredentialLookup').prop('value')).toEqual({ + id: 99, + name: 'credential', + }); + }); + + test('should call handleCancel when Cancel button is clicked', async () => { + expect(onCancel).not.toHaveBeenCalled(); + wrapper.find('button[aria-label="Cancel"]').invoke('onClick')(); + expect(onCancel).toBeCalled(); + }); +}); diff --git a/awx/ui_next/src/screens/InstanceGroup/ContainerGroupDetails/ContainerGroupDetails.test.jsx b/awx/ui_next/src/screens/InstanceGroup/ContainerGroupDetails/ContainerGroupDetails.test.jsx index aa979ef730..68ca23f80a 100644 --- a/awx/ui_next/src/screens/InstanceGroup/ContainerGroupDetails/ContainerGroupDetails.test.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/ContainerGroupDetails/ContainerGroupDetails.test.jsx @@ -32,7 +32,7 @@ const instanceGroup = { controller: null, is_controller: false, is_isolated: false, - is_containerized: true, + is_container_group: true, credential: 71, policy_instance_percentage: 0, policy_instance_minimum: 0, diff --git a/awx/ui_next/src/screens/InstanceGroup/ContainerGroupEdit/ContainerGroupEdit.jsx b/awx/ui_next/src/screens/InstanceGroup/ContainerGroupEdit/ContainerGroupEdit.jsx index 3bf66a9d4d..9f4454c0a8 100644 --- a/awx/ui_next/src/screens/InstanceGroup/ContainerGroupEdit/ContainerGroupEdit.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/ContainerGroupEdit/ContainerGroupEdit.jsx @@ -37,7 +37,7 @@ function ContainerGroupEdit({ instanceGroup }) { try { await InstanceGroupsAPI.update(instanceGroup.id, { name: values.name, - credential: values.credential.id, + credential: values.credential ? 
values.credential.id : null, pod_spec_override: values.override ? values.pod_spec_override : null, }); history.push(detailsIUrl); diff --git a/awx/ui_next/src/screens/InstanceGroup/ContainerGroupEdit/ContainerGroupEdit.test.jsx b/awx/ui_next/src/screens/InstanceGroup/ContainerGroupEdit/ContainerGroupEdit.test.jsx index 937aa15adb..860c6363c5 100644 --- a/awx/ui_next/src/screens/InstanceGroup/ContainerGroupEdit/ContainerGroupEdit.test.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/ContainerGroupEdit/ContainerGroupEdit.test.jsx @@ -31,7 +31,7 @@ const instanceGroup = { controller: null, is_controller: false, is_isolated: false, - is_containerized: true, + is_container_group: true, credential: 71, policy_instance_percentage: 0, policy_instance_minimum: 0, diff --git a/awx/ui_next/src/screens/InstanceGroup/InstanceGroupAdd/InstanceGroupAdd.test.jsx b/awx/ui_next/src/screens/InstanceGroup/InstanceGroupAdd/InstanceGroupAdd.test.jsx index 4b2d879398..a4ae1e74fc 100644 --- a/awx/ui_next/src/screens/InstanceGroup/InstanceGroupAdd/InstanceGroupAdd.test.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/InstanceGroupAdd/InstanceGroupAdd.test.jsx @@ -29,7 +29,7 @@ const instanceGroupData = { controller: null, is_controller: false, is_isolated: false, - is_containerized: false, + is_container_group: false, credential: null, policy_instance_percentage: 46, policy_instance_minimum: 12, diff --git a/awx/ui_next/src/screens/InstanceGroup/InstanceGroupDetails/InstanceGroupDetails.jsx b/awx/ui_next/src/screens/InstanceGroup/InstanceGroupDetails/InstanceGroupDetails.jsx index 748e92702d..05ad3277dc 100644 --- a/awx/ui_next/src/screens/InstanceGroup/InstanceGroupDetails/InstanceGroupDetails.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/InstanceGroupDetails/InstanceGroupDetails.jsx @@ -78,7 +78,7 @@ function InstanceGroupDetails({ instanceGroup, i18n }) { { - return item.is_containerized + return item.is_container_group ? 
`${match.url}/container_group/${item.id}/details` : `${match.url}/${item.id}/details`; }; diff --git a/awx/ui_next/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupListItem.jsx b/awx/ui_next/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupListItem.jsx index 8bfcf05325..4c47269074 100644 --- a/awx/ui_next/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupListItem.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupListItem.jsx @@ -32,7 +32,7 @@ function InstanceGroupListItem({ const labelId = `check-action-${instanceGroup.id}`; const isContainerGroup = item => { - return item.is_containerized; + return item.is_container_group; }; function usedCapacity(item) { diff --git a/awx/ui_next/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupListItem.test.jsx b/awx/ui_next/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupListItem.test.jsx index 0f22a4b6d7..579aa36dcc 100644 --- a/awx/ui_next/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupListItem.test.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/InstanceGroupList/InstanceGroupListItem.test.jsx @@ -17,7 +17,7 @@ describe('', () => { policy_instance_minimum: 10, policy_instance_percentage: 50, percent_capacity_remaining: 60, - is_containerized: false, + is_container_group: false, summary_fields: { user_capabilities: { edit: true, @@ -34,7 +34,7 @@ describe('', () => { policy_instance_minimum: 0, policy_instance_percentage: 0, percent_capacity_remaining: 0, - is_containerized: true, + is_container_group: true, summary_fields: { user_capabilities: { edit: false, diff --git a/awx/ui_next/src/screens/InstanceGroup/shared/ContainerGroupForm.jsx b/awx/ui_next/src/screens/InstanceGroup/shared/ContainerGroupForm.jsx index fda18b73bc..e41ab0a1b0 100644 --- a/awx/ui_next/src/screens/InstanceGroup/shared/ContainerGroupForm.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/shared/ContainerGroupForm.jsx @@ -25,7 +25,6 @@ function ContainerGroupFormFields({ i18n, 
instanceGroup }) { const { setFieldValue } = useFormikContext(); const [credentialField, credentialMeta, credentialHelpers] = useField({ name: 'credential', - validate: required(i18n._(t`Select a value for this field`), i18n), }); const [overrideField] = useField('override'); @@ -55,9 +54,8 @@ function ContainerGroupFormFields({ i18n, instanceGroup }) { onBlur={() => credentialHelpers.setTouched()} onChange={onCredentialChange} value={credentialField.value} - required tooltip={i18n._( - t`Credential to authenticate with Kubernetes or OpenShift. Must be of type "Kubernetes/OpenShift API Bearer Token”.` + t`Credential to authenticate with Kubernetes or OpenShift. Must be of type "Kubernetes/OpenShift API Bearer Token". If left blank, the underlying Pod's service account will be used.` )} autoPopulate={!instanceGroup?.id} /> diff --git a/awx/ui_next/src/screens/InstanceGroup/shared/ContainerGroupForm.test.jsx b/awx/ui_next/src/screens/InstanceGroup/shared/ContainerGroupForm.test.jsx index 62709df53e..3e48389195 100644 --- a/awx/ui_next/src/screens/InstanceGroup/shared/ContainerGroupForm.test.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/shared/ContainerGroupForm.test.jsx @@ -27,7 +27,7 @@ const instanceGroup = { controller: null, is_controller: false, is_isolated: false, - is_containerized: false, + is_container_group: false, credential: 3, policy_instance_percentage: 46, policy_instance_minimum: 12, diff --git a/awx/ui_next/src/screens/InstanceGroup/shared/InstanceGroupForm.test.jsx b/awx/ui_next/src/screens/InstanceGroup/shared/InstanceGroupForm.test.jsx index 233ce7f849..0dad4fe6d1 100644 --- a/awx/ui_next/src/screens/InstanceGroup/shared/InstanceGroupForm.test.jsx +++ b/awx/ui_next/src/screens/InstanceGroup/shared/InstanceGroupForm.test.jsx @@ -27,7 +27,7 @@ const instanceGroup = { controller: null, is_controller: false, is_isolated: false, - is_containerized: false, + is_container_group: false, credential: null, policy_instance_percentage: 46, 
policy_instance_minimum: 12, diff --git a/awx/ui_next/src/screens/Inventory/InventorySourceAdd/InventorySourceAdd.jsx b/awx/ui_next/src/screens/Inventory/InventorySourceAdd/InventorySourceAdd.jsx index 0db70f2dbf..67ea90f4ea 100644 --- a/awx/ui_next/src/screens/Inventory/InventorySourceAdd/InventorySourceAdd.jsx +++ b/awx/ui_next/src/screens/Inventory/InventorySourceAdd/InventorySourceAdd.jsx @@ -1,14 +1,14 @@ import React, { useCallback, useEffect } from 'react'; -import { useHistory, useParams } from 'react-router-dom'; +import { useHistory } from 'react-router-dom'; import { Card } from '@patternfly/react-core'; import { InventorySourcesAPI } from '../../../api'; import useRequest from '../../../util/useRequest'; import { CardBody } from '../../../components/Card'; import InventorySourceForm from '../shared/InventorySourceForm'; -function InventorySourceAdd() { +function InventorySourceAdd({ inventory }) { const history = useHistory(); - const { id } = useParams(); + const { id, organization } = inventory; const { error, request, result } = useRequest( useCallback(async values => { @@ -31,6 +31,7 @@ function InventorySourceAdd() { source_path, source_project, source_script, + execution_environment, ...remainingForm } = form; @@ -46,6 +47,7 @@ function InventorySourceAdd() { credential: credential?.id || null, inventory: id, source_script: source_script?.id || null, + execution_environment: execution_environment?.id || null, ...sourcePath, ...sourceProject, ...remainingForm, @@ -63,6 +65,7 @@ function InventorySourceAdd() { onCancel={handleCancel} onSubmit={handleSubmit} submitError={error} + organizationId={organization} /> diff --git a/awx/ui_next/src/screens/Inventory/InventorySourceAdd/InventorySourceAdd.test.jsx b/awx/ui_next/src/screens/Inventory/InventorySourceAdd/InventorySourceAdd.test.jsx index afc4d69d0e..c186d4dcb7 100644 --- a/awx/ui_next/src/screens/Inventory/InventorySourceAdd/InventorySourceAdd.test.jsx +++ 
b/awx/ui_next/src/screens/Inventory/InventorySourceAdd/InventorySourceAdd.test.jsx @@ -35,6 +35,12 @@ describe('', () => { verbosity: 1, }; + const mockInventory = { + id: 111, + name: 'Foo', + organization: 2, + }; + InventorySourcesAPI.readOptions.mockResolvedValue({ data: { actions: { @@ -72,9 +78,12 @@ describe('', () => { custom_virtualenvs: ['venv/foo', 'venv/bar'], }; await act(async () => { - wrapper = mountWithContexts(, { - context: { config }, - }); + wrapper = mountWithContexts( + , + { + context: { config }, + } + ); }); await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); expect(wrapper.find('FormGroup[label="Name"]')).toHaveLength(1); @@ -88,9 +97,12 @@ describe('', () => { test('should navigate to inventory sources list when cancel is clicked', async () => { const history = createMemoryHistory({}); await act(async () => { - wrapper = mountWithContexts(, { - context: { router: { history } }, - }); + wrapper = mountWithContexts( + , + { + context: { router: { history } }, + } + ); }); await act(async () => { wrapper.find('InventorySourceForm').invoke('onCancel')(); @@ -103,7 +115,9 @@ describe('', () => { test('should post to the api when submit is clicked', async () => { InventorySourcesAPI.create.mockResolvedValueOnce({ data: {} }); await act(async () => { - wrapper = mountWithContexts(); + wrapper = mountWithContexts( + + ); }); await act(async () => { wrapper.find('InventorySourceForm').invoke('onSubmit')(invSourceData); @@ -114,6 +128,7 @@ describe('', () => { credential: 222, source_project: 999, source_script: null, + execution_environment: null, }); }); @@ -123,9 +138,12 @@ describe('', () => { data: { id: 123, inventory: 111 }, }); await act(async () => { - wrapper = mountWithContexts(, { - context: { router: { history } }, - }); + wrapper = mountWithContexts( + , + { + context: { router: { history } }, + } + ); }); await act(async () => { wrapper.find('InventorySourceForm').invoke('onSubmit')(invSourceData); @@ -143,7 
+161,9 @@ describe('', () => { }; InventorySourcesAPI.create.mockImplementation(() => Promise.reject(error)); await act(async () => { - wrapper = mountWithContexts(); + wrapper = mountWithContexts( + + ); }); expect(wrapper.find('FormSubmitError').length).toBe(0); await act(async () => { diff --git a/awx/ui_next/src/screens/Inventory/InventorySourceDetail/InventorySourceDetail.jsx b/awx/ui_next/src/screens/Inventory/InventorySourceDetail/InventorySourceDetail.jsx index f3fcdebfca..4383998994 100644 --- a/awx/ui_next/src/screens/Inventory/InventorySourceDetail/InventorySourceDetail.jsx +++ b/awx/ui_next/src/screens/Inventory/InventorySourceDetail/InventorySourceDetail.jsx @@ -50,6 +50,7 @@ function InventorySourceDetail({ inventorySource, i18n }) { organization, source_project, user_capabilities, + execution_environment, }, } = inventorySource; const [deletionError, setDeletionError] = useState(false); @@ -214,6 +215,18 @@ function InventorySourceDetail({ inventorySource, i18n }) { } /> )} + {execution_environment?.name && ( + + {execution_environment.name} + + } + /> + )} {source === 'scm' ? 
( diff --git a/awx/ui_next/src/screens/Inventory/InventorySourceEdit/InventorySourceEdit.test.jsx b/awx/ui_next/src/screens/Inventory/InventorySourceEdit/InventorySourceEdit.test.jsx index 87ec0288c1..cc8ad163b2 100644 --- a/awx/ui_next/src/screens/Inventory/InventorySourceEdit/InventorySourceEdit.test.jsx +++ b/awx/ui_next/src/screens/Inventory/InventorySourceEdit/InventorySourceEdit.test.jsx @@ -18,7 +18,7 @@ jest.mock('react-router-dom', () => ({ }), })); -describe('', () => { +describe('', () => { let wrapper; let history; const mockInvSrc = { @@ -37,6 +37,11 @@ describe('', () => { update_on_project_update: false, verbosity: 1, }; + const mockInventory = { + id: 1, + name: 'Foo', + organization: 1, + }; InventorySourcesAPI.readOptions.mockResolvedValue({ data: { actions: { @@ -89,9 +94,12 @@ describe('', () => { beforeAll(async () => { history = createMemoryHistory(); await act(async () => { - wrapper = mountWithContexts(, { - context: { router: { history } }, - }); + wrapper = mountWithContexts( + , + { + context: { router: { history } }, + } + ); }); await waitForElement(wrapper, 'ContentLoading', el => el.length === 0); }); @@ -133,7 +141,9 @@ describe('', () => { }; InventorySourcesAPI.replace.mockImplementation(() => Promise.reject(error)); await act(async () => { - wrapper = mountWithContexts(); + wrapper = mountWithContexts( + + ); }); expect(wrapper.find('FormSubmitError').length).toBe(0); await act(async () => { diff --git a/awx/ui_next/src/screens/Inventory/InventorySources/InventorySources.jsx b/awx/ui_next/src/screens/Inventory/InventorySources/InventorySources.jsx index 2e8f1a3785..e55125187d 100644 --- a/awx/ui_next/src/screens/Inventory/InventorySources/InventorySources.jsx +++ b/awx/ui_next/src/screens/Inventory/InventorySources/InventorySources.jsx @@ -9,7 +9,7 @@ function InventorySources({ inventory, setBreadcrumb }) { return ( - + diff --git a/awx/ui_next/src/screens/Inventory/shared/InventorySourceForm.jsx 
b/awx/ui_next/src/screens/Inventory/shared/InventorySourceForm.jsx index 3bc824fca8..05bbd4e370 100644 --- a/awx/ui_next/src/screens/Inventory/shared/InventorySourceForm.jsx +++ b/awx/ui_next/src/screens/Inventory/shared/InventorySourceForm.jsx @@ -31,6 +31,7 @@ import { VMwareSubForm, VirtualizationSubForm, } from './InventorySourceSubForms'; +import { ExecutionEnvironmentLookup } from '../../../components/Lookup'; const buildSourceChoiceOptions = options => { const sourceChoices = options.actions.GET.source.choices.map( @@ -39,7 +40,12 @@ const buildSourceChoiceOptions = options => { return sourceChoices.filter(({ key }) => key !== 'file'); }; -const InventorySourceFormFields = ({ source, sourceOptions, i18n }) => { +const InventorySourceFormFields = ({ + source, + sourceOptions, + organizationId, + i18n, +}) => { const { values, initialValues, @@ -51,6 +57,13 @@ const InventorySourceFormFields = ({ source, sourceOptions, i18n }) => { name: 'source', validate: required(i18n._(t`Set a value for this field`), i18n), }); + const [ + executionEnvironmentField, + executionEnvironmentMeta, + executionEnvironmentHelpers, + ] = useField({ + name: 'execution_environment', + }); const { custom_virtualenvs } = useContext(ConfigContext); const [venvField] = useField('custom_virtualenv'); const defaultVenv = { @@ -111,6 +124,17 @@ const InventorySourceFormFields = ({ source, sourceOptions, i18n }) => { name="description" type="text" /> + executionEnvironmentHelpers.setTouched()} + value={executionEnvironmentField.value} + onChange={value => executionEnvironmentHelpers.setValue(value)} + globallyAvailable + organizationId={organizationId} + /> { const initialValues = { credential: source?.summary_fields?.credential || null, @@ -264,6 +289,8 @@ const InventorySourceForm = ({ enabled_var: source?.enabled_var || '', enabled_value: source?.enabled_value || '', host_filter: source?.host_filter || '', + execution_environment: + source?.summary_fields?.execution_environment || null, 
}; const { @@ -306,6 +333,7 @@ const InventorySourceForm = ({ i18n={i18n} source={source} sourceOptions={sourceOptions} + organizationId={organizationId} /> {submitError && } ', () => { expect( wrapper.find('FormGroup[label="Ansible Environment"]') ).toHaveLength(1); + expect(wrapper.find('ExecutionEnvironmentLookup')).toHaveLength(1); }); test('should display subform when source dropdown has a value', async () => { diff --git a/awx/ui_next/src/screens/Job/JobDetail/JobDetail.jsx b/awx/ui_next/src/screens/Job/JobDetail/JobDetail.jsx index 35b48f17fb..ad0be4c9e6 100644 --- a/awx/ui_next/src/screens/Job/JobDetail/JobDetail.jsx +++ b/awx/ui_next/src/screens/Job/JobDetail/JobDetail.jsx @@ -252,13 +252,13 @@ function JobDetail({ job, i18n }) { - {instanceGroup && !instanceGroup?.is_containerized && ( + {instanceGroup && !instanceGroup?.is_container_group && ( )} - {instanceGroup && instanceGroup?.is_containerized && ( + {instanceGroup && instanceGroup?.is_container_group && ( )} + {organization && ( + + + + )} {!organizationLoading && !rolesLoading && ( diff --git a/awx/ui_next/src/screens/Organization/Organization.test.jsx b/awx/ui_next/src/screens/Organization/Organization.test.jsx index 10982505d5..487ebff36b 100644 --- a/awx/ui_next/src/screens/Organization/Organization.test.jsx +++ b/awx/ui_next/src/screens/Organization/Organization.test.jsx @@ -68,7 +68,7 @@ describe('', () => { const tabs = await waitForElement( wrapper, '.pf-c-tabs__item', - el => el.length === 5 + el => el.length === 6 ); expect(tabs.last().text()).toEqual('Notifications'); wrapper.unmount(); @@ -92,7 +92,7 @@ describe('', () => { const tabs = await waitForElement( wrapper, '.pf-c-tabs__item', - el => el.length === 4 + el => el.length === 5 ); tabs.forEach(tab => expect(tab.text()).not.toEqual('Notifications')); wrapper.unmount(); diff --git a/awx/ui_next/src/screens/Organization/OrganizationAdd/OrganizationAdd.jsx b/awx/ui_next/src/screens/Organization/OrganizationAdd/OrganizationAdd.jsx index 
d9c14765ac..adbe04820d 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationAdd/OrganizationAdd.jsx +++ b/awx/ui_next/src/screens/Organization/OrganizationAdd/OrganizationAdd.jsx @@ -13,7 +13,10 @@ function OrganizationAdd() { const handleSubmit = async (values, groupsToAssociate) => { try { - const { data: response } = await OrganizationsAPI.create(values); + const { data: response } = await OrganizationsAPI.create({ + ...values, + default_environment: values.default_environment?.id, + }); await Promise.all( groupsToAssociate .map(id => OrganizationsAPI.associateInstanceGroup(response.id, id)) diff --git a/awx/ui_next/src/screens/Organization/OrganizationAdd/OrganizationAdd.test.jsx b/awx/ui_next/src/screens/Organization/OrganizationAdd/OrganizationAdd.test.jsx index 8fa4e2cbc2..d99634ea09 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationAdd/OrganizationAdd.test.jsx +++ b/awx/ui_next/src/screens/Organization/OrganizationAdd/OrganizationAdd.test.jsx @@ -17,13 +17,18 @@ describe('', () => { description: 'new description', custom_virtualenv: 'Buzz', galaxy_credentials: [], + default_environment: { id: 1, name: 'Foo' }, }; OrganizationsAPI.create.mockResolvedValueOnce({ data: {} }); await act(async () => { const wrapper = mountWithContexts(); wrapper.find('OrganizationForm').prop('onSubmit')(updatedOrgData, []); }); - expect(OrganizationsAPI.create).toHaveBeenCalledWith(updatedOrgData); + expect(OrganizationsAPI.create).toHaveBeenCalledWith({ + ...updatedOrgData, + default_environment: 1, + }); + expect(OrganizationsAPI.create).toHaveBeenCalledTimes(1); }); test('should navigate to organizations list when cancel is clicked', async () => { diff --git a/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.jsx b/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.jsx index 6b55780333..e3b544c091 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.jsx +++ 
b/awx/ui_next/src/screens/Organization/OrganizationDetail/OrganizationDetail.jsx @@ -94,6 +94,18 @@ function OrganizationDetail({ i18n, organization }) { label={i18n._(t`Ansible Environment`)} value={custom_virtualenv} /> + {summary_fields?.default_environment?.name && ( + + {summary_fields.default_environment.name} + + } + /> + )} ', () => { const mockOrganization = { + id: 12, name: 'Foo', description: 'Bar', custom_virtualenv: 'Fizz', @@ -24,7 +25,14 @@ describe('', () => { edit: true, delete: true, }, + default_environment: { + id: 1, + name: 'Default EE', + description: '', + image: 'quay.io/ansible/awx-ee', + }, }, + default_environment: 1, }; const mockInstanceGroups = { data: { @@ -43,7 +51,7 @@ describe('', () => { jest.clearAllMocks(); }); - test('initially renders succesfully', async () => { + test('initially renders successfully', async () => { await act(async () => { mountWithContexts(); }); @@ -86,6 +94,7 @@ describe('', () => { { label: 'Created', value: '7/7/2015, 5:21:26 PM' }, { label: 'Last Modified', value: '8/11/2019, 7:47:37 PM' }, { label: 'Max Hosts', value: '0' }, + { label: 'Default Execution Environment', value: 'Default EE' }, ]; for (let i = 0; i < testParams.length; i++) { const { label, value } = testParams[i]; diff --git a/awx/ui_next/src/screens/Organization/OrganizationEdit/OrganizationEdit.jsx b/awx/ui_next/src/screens/Organization/OrganizationEdit/OrganizationEdit.jsx index 849a273ef5..3297d2fd6f 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationEdit/OrganizationEdit.jsx +++ b/awx/ui_next/src/screens/Organization/OrganizationEdit/OrganizationEdit.jsx @@ -28,7 +28,10 @@ function OrganizationEdit({ organization }) { const addedCredentialIds = addedCredentials.map(({ id }) => id); const removedCredentialIds = removedCredentials.map(({ id }) => id); - await OrganizationsAPI.update(organization.id, values); + await OrganizationsAPI.update(organization.id, { + ...values, + default_environment: values.default_environment?.id 
|| null, + }); await Promise.all( groupsToAssociate .map(id => diff --git a/awx/ui_next/src/screens/Organization/OrganizationEdit/OrganizationEdit.test.jsx b/awx/ui_next/src/screens/Organization/OrganizationEdit/OrganizationEdit.test.jsx index ea62e38c9e..5556ee05d5 100644 --- a/awx/ui_next/src/screens/Organization/OrganizationEdit/OrganizationEdit.test.jsx +++ b/awx/ui_next/src/screens/Organization/OrganizationEdit/OrganizationEdit.test.jsx @@ -19,6 +19,13 @@ describe('', () => { related: { instance_groups: '/api/v2/organizations/1/instance_groups', }, + default_environment: 1, + summary_fields: { + default_environment: { + id: 1, + name: 'Baz', + }, + }, }; test('onSubmit should call api update', async () => { @@ -31,6 +38,7 @@ describe('', () => { name: 'new name', description: 'new description', custom_virtualenv: 'Buzz', + default_environment: null, }; wrapper.find('OrganizationForm').prop('onSubmit')(updatedOrgData, [], []); diff --git a/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvList.jsx b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvList.jsx new file mode 100644 index 0000000000..9f2c4ae817 --- /dev/null +++ b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvList.jsx @@ -0,0 +1,135 @@ +import React, { useEffect, useCallback } from 'react'; +import { useLocation } from 'react-router-dom'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Card } from '@patternfly/react-core'; + +import { OrganizationsAPI } from '../../../api'; +import { getQSConfig, parseQueryString } from '../../../util/qs'; +import useRequest from '../../../util/useRequest'; +import PaginatedDataList from '../../../components/PaginatedDataList'; +import DatalistToolbar from '../../../components/DataListToolbar'; + +import OrganizationExecEnvListItem from './OrganizationExecEnvListItem'; + +const QS_CONFIG = getQSConfig('organizations', { + page: 1, + 
page_size: 20, + order_by: 'name', +}); + +function OrganizationExecEnvList({ i18n, organization }) { + const { id } = organization; + const location = useLocation(); + + const { + error: contentError, + isLoading, + request: fetchExecutionEnvironments, + result: { + executionEnvironments, + executionEnvironmentsCount, + relatedSearchableKeys, + searchableKeys, + }, + } = useRequest( + useCallback(async () => { + const params = parseQueryString(QS_CONFIG, location.search); + + const [response, responseActions] = await Promise.all([ + OrganizationsAPI.readExecutionEnvironments(id, params), + OrganizationsAPI.readExecutionEnvironmentsOptions(id, params), + ]); + + return { + executionEnvironments: response.data.results, + executionEnvironmentsCount: response.data.count, + actions: responseActions.data.actions, + relatedSearchableKeys: ( + responseActions?.data?.related_search_fields || [] + ).map(val => val.slice(0, -8)), + searchableKeys: Object.keys( + responseActions.data.actions?.GET || {} + ).filter(key => responseActions.data.actions?.GET[key].filterable), + }; + }, [location, id]), + { + executionEnvironments: [], + executionEnvironmentsCount: 0, + actions: {}, + relatedSearchableKeys: [], + searchableKeys: [], + } + ); + + useEffect(() => { + fetchExecutionEnvironments(); + }, [fetchExecutionEnvironments]); + + return ( + <> + + ( + + )} + renderItem={executionEnvironment => ( + + )} + /> + + + ); +} + +export default withI18n()(OrganizationExecEnvList); diff --git a/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvList.test.jsx b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvList.test.jsx new file mode 100644 index 0000000000..07e8a53ea5 --- /dev/null +++ b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvList.test.jsx @@ -0,0 +1,116 @@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; + +import { + mountWithContexts, + waitForElement, +} 
from '../../../../testUtils/enzymeHelpers'; + +import { OrganizationsAPI } from '../../../api'; +import OrganizationExecEnvList from './OrganizationExecEnvList'; + +jest.mock('../../../api/'); + +const executionEnvironments = { + data: { + count: 3, + results: [ + { + id: 1, + type: 'execution_environment', + url: '/api/v2/execution_environments/1/', + related: { + organization: '/api/v2/organizations/1/', + }, + organization: 1, + image: 'https://localhost.com/image/disk', + managed_by_tower: false, + credential: null, + }, + { + id: 2, + type: 'execution_environment', + url: '/api/v2/execution_environments/2/', + related: { + organization: '/api/v2/organizations/1/', + }, + organization: 1, + image: 'test/image123', + managed_by_tower: false, + credential: null, + }, + { + id: 3, + type: 'execution_environment', + url: '/api/v2/execution_environments/3/', + related: { + organization: '/api/v2/organizations/1/', + }, + organization: 1, + image: 'test/test', + managed_by_tower: false, + credential: null, + }, + ], + }, +}; + +const mockOrganization = { + id: 1, + type: 'organization', + name: 'Default', +}; + +const options = { data: { actions: { POST: {}, GET: {} } } }; + +describe('', () => { + let wrapper; + + test('should mount successfully', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + await waitForElement( + wrapper, + 'OrganizationExecEnvList', + el => el.length > 0 + ); + }); + + test('should have data fetched and render 3 rows', async () => { + OrganizationsAPI.readExecutionEnvironments.mockResolvedValue( + executionEnvironments + ); + + OrganizationsAPI.readExecutionEnvironmentsOptions.mockResolvedValue( + options + ); + + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + await waitForElement( + wrapper, + 'OrganizationExecEnvList', + el => el.length > 0 + ); + + expect(wrapper.find('OrganizationExecEnvListItem').length).toBe(3); + expect(OrganizationsAPI.readExecutionEnvironments).toBeCalled(); 
+ expect(OrganizationsAPI.readExecutionEnvironmentsOptions).toBeCalled(); + }); + + test('should not render add button', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + waitForElement(wrapper, 'OrganizationExecEnvList', el => el.length > 0); + expect(wrapper.find('ToolbarAddButton').length).toBe(0); + }); +}); diff --git a/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvListItem.jsx b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvListItem.jsx new file mode 100644 index 0000000000..0d2715d7a6 --- /dev/null +++ b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvListItem.jsx @@ -0,0 +1,57 @@ +import React from 'react'; +import { string } from 'prop-types'; +import { withI18n } from '@lingui/react'; +import { t } from '@lingui/macro'; +import { Link } from 'react-router-dom'; +import { + DataListItem, + DataListItemRow, + DataListItemCells, +} from '@patternfly/react-core'; + +import DataListCell from '../../../components/DataListCell'; +import { ExecutionEnvironment } from '../../../types'; + +function OrganizationExecEnvListItem({ + executionEnvironment, + detailUrl, + i18n, +}) { + const labelId = `check-action-${executionEnvironment.id}`; + + return ( + + + + + {executionEnvironment.name} + + , + + {executionEnvironment.image} + , + ]} + /> + + + ); +} + +OrganizationExecEnvListItem.prototype = { + executionEnvironment: ExecutionEnvironment.isRequired, + detailUrl: string.isRequired, +}; + +export default withI18n()(OrganizationExecEnvListItem); diff --git a/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvListItem.test.jsx b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvListItem.test.jsx new file mode 100644 index 0000000000..29181f4ec3 --- /dev/null +++ b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/OrganizationExecEnvListItem.test.jsx @@ -0,0 +1,46 
@@ +import React from 'react'; +import { act } from 'react-dom/test-utils'; + +import { mountWithContexts } from '../../../../testUtils/enzymeHelpers'; + +import OrganizationExecEnvListItem from './OrganizationExecEnvListItem'; + +describe('', () => { + let wrapper; + const executionEnvironment = { + id: 1, + image: 'https://registry.com/r/image/manifest', + name: 'foo', + organization: 1, + credential: null, + pull: 'always', + }; + + test('should mount successfully', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + expect(wrapper.find('OrganizationExecEnvListItem').length).toBe(1); + }); + + test('should render the proper data', async () => { + await act(async () => { + wrapper = mountWithContexts( + + ); + }); + expect( + wrapper + .find('DataListCell[aria-label="Execution environment image"]') + .text() + ).toBe(executionEnvironment.image); + }); +}); diff --git a/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/index.js b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/index.js new file mode 100644 index 0000000000..668a3beb61 --- /dev/null +++ b/awx/ui_next/src/screens/Organization/OrganizationExecEnvList/index.js @@ -0,0 +1 @@ +export { default } from './OrganizationExecEnvList'; diff --git a/awx/ui_next/src/screens/Organization/Organizations.jsx b/awx/ui_next/src/screens/Organization/Organizations.jsx index 6c7b17dc69..fcf1b8398b 100644 --- a/awx/ui_next/src/screens/Organization/Organizations.jsx +++ b/awx/ui_next/src/screens/Organization/Organizations.jsx @@ -34,6 +34,9 @@ function Organizations({ i18n }) { [`/organizations/${organization.id}/notifications`]: i18n._( t`Notifications` ), + [`/organizations/${organization.id}/execution_environments`]: i18n._( + t`Execution Environments` + ), }; setBreadcrumbConfig(breadcrumb); }, diff --git a/awx/ui_next/src/screens/Organization/shared/OrganizationForm.jsx b/awx/ui_next/src/screens/Organization/shared/OrganizationForm.jsx index 
094e6ac5b6..eb46f8c5cc 100644 --- a/awx/ui_next/src/screens/Organization/shared/OrganizationForm.jsx +++ b/awx/ui_next/src/screens/Organization/shared/OrganizationForm.jsx @@ -12,16 +12,21 @@ import ContentError from '../../../components/ContentError'; import ContentLoading from '../../../components/ContentLoading'; import FormField, { FormSubmitError } from '../../../components/FormField'; import FormActionGroup from '../../../components/FormActionGroup/FormActionGroup'; -import { InstanceGroupsLookup } from '../../../components/Lookup'; +import { + InstanceGroupsLookup, + ExecutionEnvironmentLookup, +} from '../../../components/Lookup'; import { getAddedAndRemoved } from '../../../util/lists'; import { required, minMaxValue } from '../../../util/validators'; import { FormColumnLayout } from '../../../components/FormLayout'; import CredentialLookup from '../../../components/Lookup/CredentialLookup'; function OrganizationFormFields({ i18n, instanceGroups, setInstanceGroups }) { + const { license_info = {}, me = {} } = useConfig(); + const { custom_virtualenvs } = useContext(ConfigContext); + const { setFieldValue } = useFormikContext(); const [venvField] = useField('custom_virtualenv'); - const { license_info = {}, me = {} } = useConfig(); const [ galaxyCredentialsField, @@ -29,12 +34,19 @@ function OrganizationFormFields({ i18n, instanceGroups, setInstanceGroups }) { galaxyCredentialsHelpers, ] = useField('galaxy_credentials'); + const [ + executionEnvironmentField, + executionEnvironmentMeta, + executionEnvironmentHelpers, + ] = useField({ + name: 'default_environment', + }); + const defaultVenv = { label: i18n._(t`Use Default Ansible Environment`), value: '/var/lib/awx/venv/ansible/', key: 'default', }; - const { custom_virtualenvs } = useContext(ConfigContext); const handleCredentialUpdate = useCallback( value => { @@ -100,6 +112,20 @@ function OrganizationFormFields({ i18n, instanceGroups, setInstanceGroups }) { t`Select the Instance Groups for this 
Organization to run on.` )} /> + executionEnvironmentHelpers.setTouched()} + value={executionEnvironmentField.value} + onChange={value => executionEnvironmentHelpers.setValue(value)} + popoverContent={i18n._( + t`Select the default execution environment for this organization.` + )} + globallyAvailable + isDefaultEnvironment + /> @@ -221,6 +249,7 @@ OrganizationForm.defaultProps = { description: '', max_hosts: '0', custom_virtualenv: '', + default_environment: '', }, submitError: null, }; diff --git a/awx/ui_next/src/screens/Organization/shared/OrganizationForm.test.jsx b/awx/ui_next/src/screens/Organization/shared/OrganizationForm.test.jsx index 67cf0a60d6..7dfbca620c 100644 --- a/awx/ui_next/src/screens/Organization/shared/OrganizationForm.test.jsx +++ b/awx/ui_next/src/screens/Organization/shared/OrganizationForm.test.jsx @@ -4,7 +4,7 @@ import { mountWithContexts, waitForElement, } from '../../../../testUtils/enzymeHelpers'; -import { OrganizationsAPI } from '../../../api'; +import { OrganizationsAPI, ExecutionEnvironmentsAPI } from '../../../api'; import OrganizationForm from './OrganizationForm'; @@ -32,6 +32,8 @@ describe('', () => { { name: 'Two', id: 2 }, ]; + const mockExecutionEnvironment = [{ name: 'EE' }]; + afterEach(() => { jest.clearAllMocks(); }); @@ -132,6 +134,11 @@ describe('', () => { results: mockInstanceGroups, }, }); + ExecutionEnvironmentsAPI.read.mockReturnValue({ + data: { + results: mockExecutionEnvironment, + }, + }); let wrapper; const onSubmit = jest.fn(); await act(async () => { @@ -155,10 +162,15 @@ describe('', () => { wrapper.find('input#org-max_hosts').simulate('change', { target: { value: 134, name: 'max_hosts' }, }); + wrapper.find('ExecutionEnvironmentLookup').invoke('onChange')({ + id: 1, + name: 'Test EE', + }); }); await act(async () => { wrapper.find('button[aria-label="Save"]').simulate('click'); }); + wrapper.update(); expect(onSubmit).toHaveBeenCalledTimes(1); expect(onSubmit.mock.calls[0][0]).toEqual({ name: 'new foo', 
@@ -166,6 +178,7 @@ describe('', () => { galaxy_credentials: [], custom_virtualenv: 'Fizz', max_hosts: 134, + default_environment: { id: 1, name: 'Test EE' }, }); }); @@ -209,12 +222,16 @@ describe('', () => { results: mockInstanceGroups, }, }); + ExecutionEnvironmentsAPI.read.mockReturnValue({ + data: { results: mockExecutionEnvironment }, + }); const mockDataForm = { name: 'Foo', description: 'Bar', galaxy_credentials: [], max_hosts: 1, custom_virtualenv: 'Fizz', + default_environment: '', }; const onSubmit = jest.fn(); OrganizationsAPI.update.mockResolvedValue(1, mockDataForm); @@ -320,6 +337,7 @@ describe('', () => { galaxy_credentials: [], max_hosts: 0, custom_virtualenv: 'Fizz', + default_environment: '', }, [], [] diff --git a/awx/ui_next/src/screens/Project/ProjectAdd/ProjectAdd.jsx b/awx/ui_next/src/screens/Project/ProjectAdd/ProjectAdd.jsx index d0190830e6..eaaa4274f3 100644 --- a/awx/ui_next/src/screens/Project/ProjectAdd/ProjectAdd.jsx +++ b/awx/ui_next/src/screens/Project/ProjectAdd/ProjectAdd.jsx @@ -27,6 +27,7 @@ function ProjectAdd() { } = await ProjectsAPI.create({ ...values, organization: values.organization.id, + default_environment: values.default_environment?.id, }); history.push(`/projects/${id}/details`); } catch (error) { diff --git a/awx/ui_next/src/screens/Project/ProjectAdd/ProjectAdd.test.jsx b/awx/ui_next/src/screens/Project/ProjectAdd/ProjectAdd.test.jsx index 8bc136b889..76bfd49256 100644 --- a/awx/ui_next/src/screens/Project/ProjectAdd/ProjectAdd.test.jsx +++ b/awx/ui_next/src/screens/Project/ProjectAdd/ProjectAdd.test.jsx @@ -20,11 +20,12 @@ describe('', () => { scm_clean: true, credential: 100, local_path: '', - organization: 2, + organization: { id: 2, name: 'Bar' }, scm_update_on_launch: true, scm_update_cache_timeout: 3, allow_override: false, custom_virtualenv: '/var/lib/awx/venv/custom-env', + default_environment: { id: 1, name: 'Foo' }, }; const projectOptionsResolve = { @@ -102,6 +103,11 @@ describe('', () => { await 
waitForElement(wrapper, 'ContentLoading', el => el.length === 0); wrapper.find('ProjectForm').invoke('handleSubmit')(projectData); expect(ProjectsAPI.create).toHaveBeenCalledTimes(1); + expect(ProjectsAPI.create).toHaveBeenCalledWith({ + ...projectData, + organization: 2, + default_environment: 1, + }); }); test('handleSubmit should throw an error', async () => { diff --git a/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.jsx b/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.jsx index 4c92c9695e..984aaab6fb 100644 --- a/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.jsx +++ b/awx/ui_next/src/screens/Project/ProjectDetail/ProjectDetail.jsx @@ -124,10 +124,23 @@ function ProjectDetail({ project, i18n }) { label={i18n._(t`Cache Timeout`)} value={`${scm_update_cache_timeout} ${i18n._(t`Seconds`)}`} /> + + {summary_fields?.default_environment?.name && ( + + {summary_fields.default_environment.name} + + } + /> + )} {({ project_base_dir }) => ( + ', () => { id: 10, name: 'Foo', }, + default_environment: { + id: 12, + name: 'Bar', + image: 'quay.io/ansible/awx-ee', + }, credential: { id: 1000, name: 'qux', @@ -67,9 +72,10 @@ describe('', () => { scm_update_cache_timeout: 5, allow_override: true, custom_virtualenv: '/custom-venv', + default_environment: 1, }; - test('initially renders succesfully', () => { + test('initially renders successfully', () => { mountWithContexts(); }); @@ -95,6 +101,10 @@ describe('', () => { `${mockProject.scm_update_cache_timeout} Seconds` ); assertDetail('Ansible Environment', mockProject.custom_virtualenv); + assertDetail( + 'Execution Environment', + mockProject.summary_fields.default_environment.name + ); const dateDetails = wrapper.find('UserDateDetail'); expect(dateDetails).toHaveLength(2); expect(dateDetails.at(0).prop('label')).toEqual('Created'); diff --git a/awx/ui_next/src/screens/Project/ProjectEdit/ProjectEdit.jsx b/awx/ui_next/src/screens/Project/ProjectEdit/ProjectEdit.jsx index 
6642e03503..3682a01cdc 100644 --- a/awx/ui_next/src/screens/Project/ProjectEdit/ProjectEdit.jsx +++ b/awx/ui_next/src/screens/Project/ProjectEdit/ProjectEdit.jsx @@ -26,6 +26,7 @@ function ProjectEdit({ project }) { } = await ProjectsAPI.update(project.id, { ...values, organization: values.organization.id, + default_environment: values.default_environment?.id || null, }); history.push(`/projects/${id}/details`); } catch (error) { diff --git a/awx/ui_next/src/screens/Project/shared/ProjectForm.jsx b/awx/ui_next/src/screens/Project/shared/ProjectForm.jsx index 8c52218c1e..b2b5b80486 100644 --- a/awx/ui_next/src/screens/Project/shared/ProjectForm.jsx +++ b/awx/ui_next/src/screens/Project/shared/ProjectForm.jsx @@ -12,6 +12,7 @@ import ContentLoading from '../../../components/ContentLoading'; import FormActionGroup from '../../../components/FormActionGroup/FormActionGroup'; import FormField, { FormSubmitError } from '../../../components/FormField'; import OrganizationLookup from '../../../components/Lookup/OrganizationLookup'; +import ExecutionEnvironmentLookup from '../../../components/Lookup/ExecutionEnvironmentLookup'; import { CredentialTypesAPI, ProjectsAPI } from '../../../api'; import { required } from '../../../util/validators'; import { @@ -101,6 +102,14 @@ function ProjectFormFields({ validate: required(i18n._(t`Select a value for this field`), i18n), }); + const [ + executionEnvironmentField, + executionEnvironmentMeta, + executionEnvironmentHelpers, + ] = useField({ + name: 'default_environment', + }); + /* Save current scm subform field values to state */ const saveSubFormState = form => { const currentScmFormFields = { ...scmFormFields }; @@ -178,6 +187,25 @@ function ProjectFormFields({ required autoPopulate={!project?.id} /> + executionEnvironmentHelpers.setTouched()} + value={executionEnvironmentField.value} + onChange={value => executionEnvironmentHelpers.setValue(value)} + popoverContent={i18n._( + t`Select the default execution environment for this 
project.` + )} + tooltip={i18n._( + t`Select an organization before editing the default execution environment.` + )} + globallyAvailable + isDisabled={!organizationField.value} + organizationId={organizationField.value?.id} + isDefaultEnvironment + /> diff --git a/awx/ui_next/src/screens/Template/JobTemplateAdd/JobTemplateAdd.jsx b/awx/ui_next/src/screens/Template/JobTemplateAdd/JobTemplateAdd.jsx index 562987ba4b..bbb5e47b84 100644 --- a/awx/ui_next/src/screens/Template/JobTemplateAdd/JobTemplateAdd.jsx +++ b/awx/ui_next/src/screens/Template/JobTemplateAdd/JobTemplateAdd.jsx @@ -27,7 +27,10 @@ function JobTemplateAdd() { try { const { data: { id, type }, - } = await JobTemplatesAPI.create(remainingValues); + } = await JobTemplatesAPI.create({ + ...remainingValues, + execution_environment: values.execution_environment?.id, + }); await Promise.all([ submitLabels(id, labels, values.project.summary_fields.organization.id), submitInstanceGroups(id, instanceGroups), diff --git a/awx/ui_next/src/screens/Template/JobTemplateAdd/JobTemplateAdd.test.jsx b/awx/ui_next/src/screens/Template/JobTemplateAdd/JobTemplateAdd.test.jsx index 7b8675cc65..9860d968c5 100644 --- a/awx/ui_next/src/screens/Template/JobTemplateAdd/JobTemplateAdd.test.jsx +++ b/awx/ui_next/src/screens/Template/JobTemplateAdd/JobTemplateAdd.test.jsx @@ -58,6 +58,7 @@ const jobTemplateData = { timeout: 0, use_fact_cache: false, verbosity: '0', + execution_environment: { id: 1, name: 'Foo' }, }; describe('', () => { @@ -77,6 +78,12 @@ describe('', () => { beforeEach(() => { LabelsAPI.read.mockResolvedValue({ data: { results: [] } }); + ProjectsAPI.readDetail.mockReturnValue({ + name: 'foo', + id: 1, + allow_override: true, + organization: 1, + }); }); afterEach(() => { @@ -126,12 +133,13 @@ describe('', () => { ...jobTemplateData, }, }); + let wrapper; await act(async () => { wrapper = mountWithContexts(); }); await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0); - act(() => { + await act(() 
=> { wrapper.find('input#template-name').simulate('change', { target: { value: 'Bar', name: 'name' }, }); @@ -144,6 +152,10 @@ describe('', () => { name: 'project', summary_fields: { organization: { id: 1, name: 'Org Foo' } }, }); + wrapper.find('ExecutionEnvironmentLookup').invoke('onChange')({ + id: 1, + name: 'Foo', + }); wrapper.update(); wrapper.find('Select#template-playbook').prop('onToggle')(); wrapper.update(); @@ -170,6 +182,7 @@ describe('', () => { inventory: 2, webhook_credential: undefined, webhook_service: '', + execution_environment: 1, }); }); @@ -190,7 +203,7 @@ describe('', () => { }); }); await waitForElement(wrapper, 'EmptyStateBody', el => el.length === 0); - act(() => { + await act(async () => { wrapper.find('input#template-name').simulate('change', { target: { value: 'Foo', name: 'name' }, }); diff --git a/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.jsx b/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.jsx index 3feed6bb7d..4a2b47605f 100644 --- a/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.jsx +++ b/awx/ui_next/src/screens/Template/JobTemplateDetail/JobTemplateDetail.jsx @@ -206,6 +206,18 @@ function JobTemplateDetail({ i18n, template }) { ) : ( )} + {summary_fields?.execution_environment && ( + + {summary_fields.execution_environment.name} + + } + /> + )} ', () => { el => el.length === 0 ); }); + test('webhook fields should render properly', () => { expect(wrapper.find('Detail[label="Webhook Service"]').length).toBe(1); expect(wrapper.find('Detail[label="Webhook Service"]').prop('value')).toBe( @@ -154,4 +155,13 @@ describe('', () => { expect(wrapper.find('Detail[label="Webhook Key"]').length).toBe(1); expect(wrapper.find('Detail[label="Webhook Credential"]').length).toBe(1); }); + + test('execution environment field should render properly', () => { + expect(wrapper.find('Detail[label="Execution Environment"]').length).toBe( + 1 + ); + expect( + 
wrapper.find(`Detail[label="Execution Environment"] dd`).text() + ).toBe('Default EE'); + }); }); diff --git a/awx/ui_next/src/screens/Template/JobTemplateEdit/JobTemplateEdit.jsx b/awx/ui_next/src/screens/Template/JobTemplateEdit/JobTemplateEdit.jsx index 213900d40d..6f109604b5 100644 --- a/awx/ui_next/src/screens/Template/JobTemplateEdit/JobTemplateEdit.jsx +++ b/awx/ui_next/src/screens/Template/JobTemplateEdit/JobTemplateEdit.jsx @@ -57,7 +57,10 @@ function JobTemplateEdit({ template }) { remainingValues.project = values.project.id; remainingValues.webhook_credential = webhook_credential?.id || null; try { - await JobTemplatesAPI.update(template.id, remainingValues); + await JobTemplatesAPI.update(template.id, { + ...remainingValues, + execution_environment: values.execution_environment?.id, + }); await Promise.all([ submitLabels(labels, template?.organization), submitInstanceGroups(instanceGroups, initialInstanceGroups), diff --git a/awx/ui_next/src/screens/Template/WorkflowJobTemplateAdd/WorkflowJobTemplateAdd.jsx b/awx/ui_next/src/screens/Template/WorkflowJobTemplateAdd/WorkflowJobTemplateAdd.jsx index d9edd77030..3656c73c02 100644 --- a/awx/ui_next/src/screens/Template/WorkflowJobTemplateAdd/WorkflowJobTemplateAdd.jsx +++ b/awx/ui_next/src/screens/Template/WorkflowJobTemplateAdd/WorkflowJobTemplateAdd.jsx @@ -28,7 +28,10 @@ function WorkflowJobTemplateAdd() { try { const { data: { id }, - } = await WorkflowJobTemplatesAPI.create(templatePayload); + } = await WorkflowJobTemplatesAPI.create({ + ...templatePayload, + execution_environment: values.execution_environment?.id, + }); await Promise.all(await submitLabels(id, labels, organizationId)); history.push(`/templates/workflow_job_template/${id}/visualizer`); } catch (err) { diff --git a/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.jsx b/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.jsx index 0ce65ca092..4f8237e8c2 100644 --- 
a/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.jsx +++ b/awx/ui_next/src/screens/Template/WorkflowJobTemplateDetail/WorkflowJobTemplateDetail.jsx @@ -125,6 +125,18 @@ function WorkflowJobTemplateDetail({ template, i18n }) { } /> )} + {summary_fields?.execution_environment && ( + + {summary_fields.execution_environment.name} + + } + /> + )} {summary_fields.inventory && ( ', () => { created_by: { id: 1, username: 'Athena' }, modified_by: { id: 1, username: 'Apollo' }, organization: { id: 1, name: 'Org' }, + execution_environment: { + id: 4, + name: 'Demo EE', + description: '', + image: 'quay.io/ansible/awx-ee', + }, inventory: { kind: 'Foo', id: 1, name: 'Bar' }, labels: { results: [ @@ -40,6 +46,7 @@ describe('', () => { }, webhook_service: 'Github', webhook_key: 'Foo webhook key', + execution_environment: 4, }; beforeEach(async () => { @@ -150,6 +157,10 @@ describe('', () => { }; renderedValues.map(value => assertValue(value)); + + expect( + wrapper.find(`Detail[label="Execution Environment"] dd`).text() + ).toBe('Demo EE'); }); test('link out resource have the correct url', () => { diff --git a/awx/ui_next/src/screens/Template/WorkflowJobTemplateEdit/WorkflowJobTemplateEdit.jsx b/awx/ui_next/src/screens/Template/WorkflowJobTemplateEdit/WorkflowJobTemplateEdit.jsx index be61a0ee43..cb963e634f 100644 --- a/awx/ui_next/src/screens/Template/WorkflowJobTemplateEdit/WorkflowJobTemplateEdit.jsx +++ b/awx/ui_next/src/screens/Template/WorkflowJobTemplateEdit/WorkflowJobTemplateEdit.jsx @@ -20,7 +20,7 @@ function WorkflowJobTemplateEdit({ template }) { ...templatePayload } = values; templatePayload.inventory = inventory?.id || null; - templatePayload.organization = organization?.id; + templatePayload.organization = organization?.id || null; templatePayload.webhook_credential = webhook_credential?.id || null; const formOrgId = @@ -29,7 +29,10 @@ function WorkflowJobTemplateEdit({ template }) { await Promise.all( await 
submitLabels(labels, formOrgId, template.organization) ); - await WorkflowJobTemplatesAPI.update(template.id, templatePayload); + await WorkflowJobTemplatesAPI.update(template.id, { + ...templatePayload, + execution_environment: values.execution_environment?.id, + }); history.push(`/templates/workflow_job_template/${template.id}/details`); } catch (err) { setFormSubmitError(err); diff --git a/awx/ui_next/src/screens/Template/shared/JobTemplateForm.jsx b/awx/ui_next/src/screens/Template/shared/JobTemplateForm.jsx index f504224507..c267e157bb 100644 --- a/awx/ui_next/src/screens/Template/shared/JobTemplateForm.jsx +++ b/awx/ui_next/src/screens/Template/shared/JobTemplateForm.jsx @@ -37,9 +37,10 @@ import { InstanceGroupsLookup, ProjectLookup, MultiCredentialsLookup, + ExecutionEnvironmentLookup, } from '../../../components/Lookup'; import Popover from '../../../components/Popover'; -import { JobTemplatesAPI } from '../../../api'; +import { JobTemplatesAPI, ProjectsAPI } from '../../../api'; import LabelSelect from './LabelSelect'; import PlaybookSelect from './PlaybookSelect'; import WebhookSubForm from './WebhookSubForm'; @@ -101,10 +102,40 @@ function JobTemplateForm({ 'webhook_credential' ); + const [ + executionEnvironmentField, + executionEnvironmentMeta, + executionEnvironmentHelpers, + ] = useField({ name: 'execution_environment' }); + + const projectId = projectField.value?.id; + + const { + request: fetchProject, + error: fetchProjectError, + isLoading: fetchProjectLoading, + result: projectData, + } = useRequest( + useCallback(async () => { + if (!projectId) { + return {}; + } + const { data } = await ProjectsAPI.readDetail(projectId); + return data; + }, [projectId]), + { + projectData: null, + } + ); + + useEffect(() => { + fetchProject(); + }, [fetchProject]); + const { request: loadRelatedInstanceGroups, error: instanceGroupError, - contentLoading: instanceGroupLoading, + isLoading: instanceGroupLoading, } = useRequest( useCallback(async () => { if 
(!template?.id) { @@ -182,12 +213,16 @@ function JobTemplateForm({ callbackUrl = `${origin}${path}`; } - if (instanceGroupLoading) { + if (instanceGroupLoading || fetchProjectLoading) { return ; } - if (contentError || instanceGroupError) { - return ; + if (contentError || instanceGroupError || fetchProjectError) { + return ( + + ); } return ( @@ -258,6 +293,7 @@ function JobTemplateForm({ isOverrideDisabled={isOverrideDisabledLookup} /> + projectHelpers.setTouched()} @@ -270,6 +306,26 @@ function JobTemplateForm({ autoPopulate={!template?.id} isOverrideDisabled={isOverrideDisabledLookup} /> + + executionEnvironmentHelpers.setTouched()} + value={executionEnvironmentField.value} + onChange={value => executionEnvironmentHelpers.setValue(value)} + popoverContent={i18n._( + t`Select the execution environment for this job template.` + )} + tooltip={i18n._( + t`Select a project before editing the execution environment.` + )} + globallyAvailable + isDisabled={!projectField.value} + organizationId={projectData?.organization} + /> + {projectField.value?.allow_override && ( { diff --git a/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.jsx b/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.jsx index fd15c92e28..5ee34bbe12 100644 --- a/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.jsx +++ b/awx/ui_next/src/screens/Template/shared/WorkflowJobTemplateForm.jsx @@ -22,7 +22,10 @@ import { SubFormLayout, } from '../../../components/FormLayout'; import OrganizationLookup from '../../../components/Lookup/OrganizationLookup'; -import { InventoryLookup } from '../../../components/Lookup'; +import { + InventoryLookup, + ExecutionEnvironmentLookup, +} from '../../../components/Lookup'; import { VariablesField } from '../../../components/CodeMirrorInput'; import FormActionGroup from '../../../components/FormActionGroup'; import ContentError from '../../../components/ContentError'; @@ -63,6 +66,14 @@ function WorkflowJobTemplateForm({ 
'webhook_credential' ); + const [ + executionEnvironmentField, + executionEnvironmentMeta, + executionEnvironmentHelpers, + ] = useField({ + name: 'execution_environment', + }); + useEffect(() => { if (enableWebhooks) { webhookServiceHelpers.setValue(webhookServiceMeta.initialValue); @@ -178,6 +189,20 @@ function WorkflowJobTemplateForm({ }} /> + executionEnvironmentHelpers.setTouched()} + value={executionEnvironmentField.value} + onChange={value => executionEnvironmentHelpers.setValue(value)} + tooltip={i18n._( + t`Select the default execution environment for this organization to run on.` + )} + globallyAvailable + organizationId={organizationField.value?.id} + /> { diff --git a/awx/ui_next/src/screens/Template/shared/data.job_template.json b/awx/ui_next/src/screens/Template/shared/data.job_template.json index 804c3b72a2..fa516d46db 100644 --- a/awx/ui_next/src/screens/Template/shared/data.job_template.json +++ b/awx/ui_next/src/screens/Template/shared/data.job_template.json @@ -133,6 +133,12 @@ "id": "1", "name": "Webhook Credential" + }, + "execution_environment": { + "id": 1, + "name": "Default EE", + "description": "", + "image": "quay.io/ansible/awx-ee" } }, "created": "2019-09-30T16:18:34.564820Z", @@ -177,5 +183,6 @@ "job_slice_count": 1, "webhook_credential": 1, "webhook_key": "asertdyuhjkhgfd234567kjgfds", - "webhook_service": "github" + "webhook_service": "github", + "execution_environment": 1 } diff --git a/awx/ui_next/src/types.js b/awx/ui_next/src/types.js index 760ecf4ed2..744360eaea 100644 --- a/awx/ui_next/src/types.js +++ b/awx/ui_next/src/types.js @@ -407,3 +407,14 @@ export const WorkflowApproval = shape({ approval_expiration: string, timed_out: bool, }); + +export const ExecutionEnvironment = shape({ + id: number.isRequired, + organization: number, + credential: number, + image: string.isRequired, + url: string, + summary_fields: shape({}), + description: string, + pull: string, +}); diff --git 
a/awx_collection/plugins/modules/tower_ad_hoc_command.py b/awx_collection/plugins/modules/tower_ad_hoc_command.py index 00f16d9f13..2d099b2b1d 100644 --- a/awx_collection/plugins/modules/tower_ad_hoc_command.py +++ b/awx_collection/plugins/modules/tower_ad_hoc_command.py @@ -28,6 +28,11 @@ options: - Job_type to use for the ad hoc command. type: str choices: [ 'run', 'check' ] + execution_environment: + description: + - Execution Environment to use for the ad hoc command. + required: False + type: str inventory: description: - Inventory to use for the ad hoc command. diff --git a/awx_collection/plugins/modules/tower_execution_environment.py b/awx_collection/plugins/modules/tower_execution_environment.py new file mode 100644 index 0000000000..320141721d --- /dev/null +++ b/awx_collection/plugins/modules/tower_execution_environment.py @@ -0,0 +1,129 @@ +#!/usr/bin/python +# coding: utf-8 -*- + +# (c) 2020, Shane McDonald +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +ANSIBLE_METADATA = {'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community'} + + +DOCUMENTATION = ''' +--- +module: tower_execution_environment +author: "Shane McDonald" +short_description: create, update, or destroy Execution Environments in Ansible Tower. +description: + - Create, update, or destroy Execution Environments in Ansible Tower. See + U(https://www.ansible.com/tower) for an overview. +options: + name: + description: + - Name to use for the execution environment. + required: True + type: str + image: + description: + - The fully qualified url of the container image. + required: True + type: str + description: + description: + - Description to use for the execution environment. + type: str + organization: + description: + - The organization the execution environment belongs to. 
+ type: str + credential: + description: + - Name of the credential to use for the execution environment. + type: str + state: + description: + - Desired state of the resource. + choices: ["present", "absent"] + default: "present" + type: str + pull: + description: + - determine image pull behavior + choices: ["always", "missing", "never"] + default: '' + type: str +extends_documentation_fragment: awx.awx.auth +''' + + +EXAMPLES = ''' +- name: Add EE to Tower + tower_execution_environment: + name: "My EE" + image: quay.io/ansible/awx-ee +''' + + +from ..module_utils.tower_api import TowerAPIModule +import json + + +def main(): + # Any additional arguments that are not fields of the item can be added here + argument_spec = dict( + name=dict(required=True), + image=dict(required=True), + description=dict(default=''), + organization=dict(), + credential=dict(default=''), + state=dict(choices=['present', 'absent'], default='present'), + pull=dict(choices=['always', 'missing', 'never'], default='missing') + ) + + # Create a module for ourselves + module = TowerAPIModule(argument_spec=argument_spec) + + # Extract our parameters + name = module.params.get('name') + image = module.params.get('image') + description = module.params.get('description') + state = module.params.get('state') + pull = module.params.get('pull') + + existing_item = module.get_one('execution_environments', name_or_id=name) + + if state == 'absent': + module.delete_if_needed(existing_item) + + new_fields = { + 'name': name, + 'image': image, + } + if description: + new_fields['description'] = description + + if pull: + new_fields['pull'] = pull + + # Attempt to look up the related items the user specified (these will fail the module if not found) + organization = module.params.get('organization') + if organization: + new_fields['organization'] = module.resolve_name_to_id('organizations', organization) + + credential = module.params.get('credential') + if credential: + new_fields['credential'] = 
module.resolve_name_to_id('credentials', credential) + + module.create_or_update_if_needed( + existing_item, new_fields, + endpoint='execution_environments', + item_type='execution_environment' + ) + + +if __name__ == '__main__': + main() diff --git a/awx_collection/plugins/modules/tower_inventory_source.py b/awx_collection/plugins/modules/tower_inventory_source.py index 5945d411d8..9edf467617 100644 --- a/awx_collection/plugins/modules/tower_inventory_source.py +++ b/awx_collection/plugins/modules/tower_inventory_source.py @@ -73,6 +73,10 @@ options: description: - Credential to use for the source. type: str + execution_environment: + description: + - Execution Environment to use for the source. + type: str overwrite: description: - Delete child groups and hosts not found in source. @@ -173,6 +177,7 @@ def main(): enabled_value=dict(), host_filter=dict(), credential=dict(), + execution_environment=dict(), organization=dict(), overwrite=dict(type='bool'), overwrite_vars=dict(type='bool'), @@ -199,6 +204,7 @@ def main(): organization = module.params.get('organization') source_script = module.params.get('source_script') credential = module.params.get('credential') + ee = module.params.get('execution_environment') source_project = module.params.get('source_project') state = module.params.get('state') @@ -250,6 +256,8 @@ def main(): # Attempt to look up the related items the user specified (these will fail the module if not found) if credential is not None: inventory_source_fields['credential'] = module.resolve_name_to_id('credentials', credential) + if ee is not None: + inventory_source_fields['execution_environment'] = module.resolve_name_to_id('execution_environments', ee) if source_project is not None: inventory_source_fields['source_project'] = module.resolve_name_to_id('projects', source_project) if source_script is not None: diff --git a/awx_collection/plugins/modules/tower_job_template.py b/awx_collection/plugins/modules/tower_job_template.py index 
787c145a20..1ed750b86e 100644 --- a/awx_collection/plugins/modules/tower_job_template.py +++ b/awx_collection/plugins/modules/tower_job_template.py @@ -75,6 +75,10 @@ options: - Name of the vault credential to use for the job template. - Deprecated, use 'credentials'. type: str + execution_environment: + description: + - Execution Environment to use for the JT. + type: str forks: description: - The number of parallel or simultaneous processes to use while executing the playbook. @@ -350,6 +354,7 @@ def main(): vault_credential=dict(), custom_virtualenv=dict(), credentials=dict(type='list', elements='str'), + execution_environment=dict(), forks=dict(type='int'), limit=dict(), verbosity=dict(type='int', choices=[0, 1, 2, 3, 4], default=0), @@ -418,6 +423,10 @@ def main(): organization_id = module.resolve_name_to_id('organizations', organization) search_fields['organization'] = new_fields['organization'] = organization_id + ee = module.params.get('execution_environment') + if ee: + new_fields['execution_environment'] = module.resolve_name_to_id('execution_environments', ee) + # Attempt to look up an existing item based on the provided data existing_item = module.get_one('job_templates', name_or_id=name, **{'data': search_fields}) diff --git a/awx_collection/plugins/modules/tower_organization.py b/awx_collection/plugins/modules/tower_organization.py index 0402056bbf..bcf6060ea6 100644 --- a/awx_collection/plugins/modules/tower_organization.py +++ b/awx_collection/plugins/modules/tower_organization.py @@ -36,6 +36,10 @@ options: - Local absolute file path containing a custom Python virtualenv to use. type: str default: '' + default_environment: + description: + - Default Execution Environment to use for jobs owned by the Organization. 
+ type: str max_hosts: description: - The max hosts allowed in this organizations @@ -110,6 +114,7 @@ def main(): name=dict(required=True), description=dict(), custom_virtualenv=dict(), + default_environment=dict(), max_hosts=dict(type='int', default="0"), notification_templates_started=dict(type="list", elements='str'), notification_templates_success=dict(type="list", elements='str'), @@ -126,6 +131,7 @@ def main(): name = module.params.get('name') description = module.params.get('description') custom_virtualenv = module.params.get('custom_virtualenv') + default_ee = module.params.get('default_environment') max_hosts = module.params.get('max_hosts') # instance_group_names = module.params.get('instance_groups') state = module.params.get('state') @@ -175,6 +181,8 @@ def main(): org_fields['description'] = description if custom_virtualenv is not None: org_fields['custom_virtualenv'] = custom_virtualenv + if default_ee is not None: + org_fields['default_environment'] = module.resolve_name_to_id('execution_environments', default_ee) if max_hosts is not None: org_fields['max_hosts'] = max_hosts diff --git a/awx_collection/plugins/modules/tower_project.py b/awx_collection/plugins/modules/tower_project.py index 76cef63f10..acbf3833b5 100644 --- a/awx_collection/plugins/modules/tower_project.py +++ b/awx_collection/plugins/modules/tower_project.py @@ -102,6 +102,10 @@ options: - Local absolute file path containing a custom Python virtualenv to use type: str default: '' + default_environment: + description: + - Default Execution Environment to use for jobs relating to the project. + type: str organization: description: - Name of organization for project. 
@@ -239,6 +243,7 @@ def main(): allow_override=dict(type='bool', aliases=['scm_allow_override']), timeout=dict(type='int', default=0, aliases=['job_timeout']), custom_virtualenv=dict(), + default_environment=dict(), organization=dict(), notification_templates_started=dict(type="list", elements='str'), notification_templates_success=dict(type="list", elements='str'), @@ -270,6 +275,7 @@ def main(): allow_override = module.params.get('allow_override') timeout = module.params.get('timeout') custom_virtualenv = module.params.get('custom_virtualenv') + default_ee = module.params.get('default_environment') organization = module.params.get('organization') state = module.params.get('state') wait = module.params.get('wait') @@ -333,6 +339,8 @@ def main(): project_fields['description'] = description if credential is not None: project_fields['credential'] = credential + if default_ee is not None: + project_fields['default_environment'] = module.resolve_name_to_id('execution_environments', default_ee) if allow_override is not None: project_fields['allow_override'] = allow_override if scm_type == '': diff --git a/awx_collection/plugins/modules/tower_workflow_job_template.py b/awx_collection/plugins/modules/tower_workflow_job_template.py index 7836b42cc4..48759e1cc4 100644 --- a/awx_collection/plugins/modules/tower_workflow_job_template.py +++ b/awx_collection/plugins/modules/tower_workflow_job_template.py @@ -40,6 +40,10 @@ options: description: - Variables which will be made available to jobs ran inside the workflow. type: dict + execution_environment: + description: + - Execution Environment to use for the WFJT. + type: str organization: description: - Organization the workflow job template exists in. 
@@ -171,6 +175,7 @@ def main(): description=dict(), extra_vars=dict(type='dict'), organization=dict(), + execution_environment=dict(), survey_spec=dict(type='dict', aliases=['survey']), survey_enabled=dict(type='bool'), allow_simultaneous=dict(type='bool'), @@ -208,6 +213,10 @@ def main(): organization_id = module.resolve_name_to_id('organizations', organization) search_fields['organization'] = new_fields['organization'] = organization_id + ee = module.params.get('execution_environment') + if ee: + new_fields['execution_environment'] = module.resolve_name_to_id('execution_environments', ee) + # Attempt to look up an existing item based on the provided data existing_item = module.get_one('workflow_job_templates', name_or_id=name, **{'data': search_fields}) diff --git a/awx_collection/test/awx/test_completeness.py b/awx_collection/test/awx/test_completeness.py index 467ee9357b..7ff6e22a31 100644 --- a/awx_collection/test/awx/test_completeness.py +++ b/awx_collection/test/awx/test_completeness.py @@ -138,6 +138,7 @@ def determine_state(module_id, endpoint, module, parameter, api_option, module_o if not api_option and module_option and module_option.get('type', 'str') == 'list': return "OK, Field appears to be relation" # TODO, at some point try and check the object model to confirm its actually a relation + return cause_error('Failed, option mismatch') # We made it through all of the checks so we are ok diff --git a/awxkit/awxkit/api/pages/__init__.py b/awxkit/awxkit/api/pages/__init__.py index fafe5dc08f..1d78d4ba5e 100644 --- a/awxkit/awxkit/api/pages/__init__.py +++ b/awxkit/awxkit/api/pages/__init__.py @@ -14,6 +14,7 @@ from .teams import * # NOQA from .credentials import * # NOQA from .unified_jobs import * # NOQA from .unified_job_templates import * # NOQA +from .execution_environments import * # NOQA from .projects import * # NOQA from .inventory import * # NOQA from .system_job_templates import * # NOQA diff --git a/awxkit/awxkit/api/pages/api.py 
b/awxkit/awxkit/api/pages/api.py index 3209232352..4edc07857f 100644 --- a/awxkit/awxkit/api/pages/api.py +++ b/awxkit/awxkit/api/pages/api.py @@ -23,6 +23,7 @@ EXPORTABLE_RESOURCES = [ 'inventory_sources', 'job_templates', 'workflow_job_templates', + 'execution_environments', ] @@ -33,6 +34,7 @@ EXPORTABLE_RELATIONS = [ 'Credentials', 'Hosts', 'Groups', + 'ExecutionEnvironments', ] diff --git a/awxkit/awxkit/api/pages/credentials.py b/awxkit/awxkit/api/pages/credentials.py index f964ae38e4..e5b80cf7d0 100644 --- a/awxkit/awxkit/api/pages/credentials.py +++ b/awxkit/awxkit/api/pages/credentials.py @@ -82,6 +82,7 @@ def config_cred_from_kind(kind): credential_type_name_to_config_kind_map = { 'amazon web services': 'aws', + 'container registry': 'registry', 'ansible galaxy/automation hub api token': 'galaxy', 'ansible tower': 'tower', 'google compute engine': 'gce', diff --git a/awxkit/awxkit/api/pages/execution_environments.py b/awxkit/awxkit/api/pages/execution_environments.py new file mode 100644 index 0000000000..0471b1f1d3 --- /dev/null +++ b/awxkit/awxkit/api/pages/execution_environments.py @@ -0,0 +1,59 @@ +import logging + +from awxkit.api.mixins import DSAdapter, HasCreate, HasCopy +from awxkit.api.pages import ( + Credential, + Organization, +) +from awxkit.api.resources import resources +from awxkit.utils import random_title, PseudoNamespace + +from . import base +from . 
import page + + +log = logging.getLogger(__name__) + + +class ExecutionEnvironment(HasCreate, HasCopy, base.Base): + + dependencies = [Organization, Credential] + NATURAL_KEY = ('name',) + + # fields are name, image, organization, managed_by_tower, credential + def create(self, name='', image='quay.io/ansible/ansible-runner:devel', credential=None, pull='', **kwargs): + # we do not want to make a credential by default + payload = self.create_payload(name=name, image=image, credential=credential, pull=pull, **kwargs) + ret = self.update_identity(ExecutionEnvironments(self.connection).post(payload)) + return ret + + def create_payload(self, name='', organization=Organization, **kwargs): + self.create_and_update_dependencies(organization) + payload = self.payload(name=name, organization=self.ds.organization, **kwargs) + payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store) + return payload + + def payload(self, name='', image=None, organization=None, credential=None, pull='', **kwargs): + payload = PseudoNamespace( + name=name or "EE - {}".format(random_title()), + image=image or random_title(10), + organization=organization.id if organization else None, + credential=credential.id if credential else None, + pull=pull, + **kwargs + ) + + return payload + + +page.register_page([resources.execution_environment, + (resources.execution_environments, 'post'), + (resources.organization_execution_environments, 'post')], ExecutionEnvironment) + + +class ExecutionEnvironments(page.PageList, ExecutionEnvironment): + pass + + +page.register_page([resources.execution_environments, + resources.organization_execution_environments], ExecutionEnvironments) diff --git a/awxkit/awxkit/api/pages/job_templates.py b/awxkit/awxkit/api/pages/job_templates.py index cd45fc0c87..9f008078a9 100644 --- a/awxkit/awxkit/api/pages/job_templates.py +++ b/awxkit/awxkit/api/pages/job_templates.py @@ -101,18 +101,17 @@ class JobTemplate( if kwargs.get('project'): 
payload.update(project=kwargs.get('project').id, playbook=playbook) - if kwargs.get('inventory'): - payload.update(inventory=kwargs.get('inventory').id) - if kwargs.get('credential'): - payload.update(credential=kwargs.get('credential').id) - if kwargs.get('webhook_credential'): - webhook_cred = kwargs.get('webhook_credential') - if isinstance(webhook_cred, int): - payload.update(webhook_credential=int(webhook_cred)) - elif hasattr(webhook_cred, 'id'): - payload.update(webhook_credential=webhook_cred.id) + + for fk_field in ('inventory', 'credential', 'webhook_credential', 'execution_environment'): + rel_obj = kwargs.get(fk_field) + if rel_obj is None: + continue + elif isinstance(rel_obj, int): + payload.update(**{fk_field: int(rel_obj)}) + elif hasattr(rel_obj, 'id'): + payload.update(**{fk_field: rel_obj.id}) else: - raise AttributeError("Webhook credential must either be integer of pkid or Credential object") + raise AttributeError(f'Related field {fk_field} must be either integer of pkid or object') return payload diff --git a/awxkit/awxkit/api/pages/organizations.py b/awxkit/awxkit/api/pages/organizations.py index e1d13a0013..fe34eb07e7 100644 --- a/awxkit/awxkit/api/pages/organizations.py +++ b/awxkit/awxkit/api/pages/organizations.py @@ -39,10 +39,20 @@ class Organization(HasCreate, HasInstanceGroups, HasNotifications, base.Base): "disassociate": True, }) - def payload(self, **kwargs): payload = PseudoNamespace(name=kwargs.get('name') or 'Organization - {}'.format(random_title()), description=kwargs.get('description') or random_title(10)) + + for fk_field in ('default_environment',): + rel_obj = kwargs.get(fk_field) + if rel_obj is None: + continue + elif isinstance(rel_obj, int): + payload.update(**{fk_field: int(rel_obj)}) + elif hasattr(rel_obj, 'id'): + payload.update(**{fk_field: rel_obj.id}) + else: + raise AttributeError(f'Related field {fk_field} must be either integer of pkid or object') return payload def create_payload(self, name='', 
description='', **kwargs): diff --git a/awxkit/awxkit/api/pages/projects.py b/awxkit/awxkit/api/pages/projects.py index e40191260c..bc46f5edb3 100644 --- a/awxkit/awxkit/api/pages/projects.py +++ b/awxkit/awxkit/api/pages/projects.py @@ -43,6 +43,16 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate): 'allow_override') update_payload(payload, fields, kwargs) + for fk_field in ('execution_environment', 'default_environment'): + rel_obj = kwargs.get(fk_field) + if rel_obj is None: + continue + elif isinstance(rel_obj, int): + payload.update(**{fk_field: int(rel_obj)}) + elif hasattr(rel_obj, 'id'): + payload.update(**{fk_field: rel_obj.id}) + else: + raise AttributeError(f'Related field {fk_field} must be either integer of pkid or object') return payload def create_payload( diff --git a/awxkit/awxkit/api/pages/unified_jobs.py b/awxkit/awxkit/api/pages/unified_jobs.py index 8e07b71de9..20c6175ed3 100644 --- a/awxkit/awxkit/api/pages/unified_jobs.py +++ b/awxkit/awxkit/api/pages/unified_jobs.py @@ -135,6 +135,28 @@ class UnifiedJob(HasStatus, base.Base): raise return args + @property + def controller_dir(self): + """Returns the path to the private_data_dir on the controller node for the job + This can be used if trying to shell in and inspect the files used by the job + Cannot use job_cwd, because that is path inside EE container + """ + self.get() + job_args = self.job_args + expected_prefix = '/tmp/awx_{}'.format(self.id) + for arg1, arg2 in zip(job_args[:-1], job_args[1:]): + if arg1 == '-v': + if ':' in arg2: + host_loc = arg2.split(':')[0] + if host_loc.startswith(expected_prefix): + return host_loc + raise RuntimeError( + 'Could not find a controller private_data_dir for this job. 
' + 'Searched for volume mount to {} inside of args {}'.format( + expected_prefix, job_args + ) + ) + class UnifiedJobs(page.PageList, UnifiedJob): diff --git a/awxkit/awxkit/api/resources.py b/awxkit/awxkit/api/resources.py index d6340cd2d7..997ada1e70 100644 --- a/awxkit/awxkit/api/resources.py +++ b/awxkit/awxkit/api/resources.py @@ -28,6 +28,8 @@ class Resources(object): _credential_types = 'credential_types/' _credentials = 'credentials/' _dashboard = 'dashboard/' + _execution_environment = r'execution_environments/\d+/' + _execution_environments = 'execution_environments/' _fact_view = r'hosts/\d+/fact_view/' _group = r'groups/\d+/' _group_access_list = r'groups/\d+/access_list/' @@ -141,6 +143,7 @@ class Resources(object): _organization_access_list = r'organizations/\d+/access_list/' _organization_admins = r'organizations/\d+/admins/' _organization_applications = r'organizations/\d+/applications/' + _organization_execution_environments = r'organizations/\d+/execution_environments/' _organization_inventories = r'organizations/\d+/inventories/' _organization_users = r'organizations/\d+/users/' _organizations = 'organizations/' diff --git a/docs/licenses/receptor.txt b/docs/licenses/receptor.txt new file mode 100644 index 0000000000..bb0a7c7983 --- /dev/null +++ b/docs/licenses/receptor.txt @@ -0,0 +1,168 @@ +Apache License +============== + +_Version 2.0, January 2004_ +_<>_ + +### Terms and Conditions for use, reproduction, and distribution + +#### 1. Definitions + +“License” shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +“Licensor” shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +“Legal Entity” shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. 
+For the purposes of this definition, “control” means **(i)** the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the +outstanding shares, or **(iii)** beneficial ownership of such entity. + +“You” (or “Your”) shall mean an individual or Legal Entity exercising +permissions granted by this License. + +“Source” form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +“Object” form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +“Work” shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +“Derivative Works” shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +“Contribution” shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. 
For the purposes of this definition, +“submitted” means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as “Not a Contribution.” + +“Contributor” shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +#### 2. Grant of Copyright License + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +#### 3. Grant of Patent License + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. 
If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +#### 4. Redistribution + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +* **(a)** You must give any other recipients of the Work or Derivative Works a copy of +this License; and +* **(b)** You must cause any modified files to carry prominent notices stating that You +changed the files; and +* **(c)** You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +* **(d)** If the Work includes a “NOTICE” text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. 
You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. + +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +#### 5. Submission of Contributions + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +#### 6. Trademarks + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +#### 7. Disclaimer of Warranty + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an “AS IS” BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +#### 8. 
Limitation of Liability + +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +#### 9. Accepting Warranty or Additional Liability + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. 
diff --git a/requirements/collections_requirements.yml b/requirements/collections_requirements.yml index 2a8f46fa24..f750aafce4 100644 --- a/requirements/collections_requirements.yml +++ b/requirements/collections_requirements.yml @@ -1,12 +1,21 @@ --- +# from https://github.com/ansible/awx/blob/devel/ +# requirements/collections_requirements.yml collections: - name: awx.awx - - name: azure.azcollection - - name: amazon.aws - - name: theforeman.foreman - - name: google.cloud - - name: openstack.cloud - - name: community.vmware + - name: azure.azcollection # PR 220 is in 1.1.0 + - name: amazon.aws # PR 125 is in 1.1.0 + - name: theforeman.foreman # has requirements.txt (which -r to another file) + - name: google.cloud # has requirements.txt, mainly for google-auth + # forked from opendev.org + - name: https://github.com/AlanCoding/ansible-collections-openstack.git + version: ee_req_install + type: git + - name: community.vmware # has requirements.txt, but may add pyvcloud - name: ovirt.ovirt - - name: community.kubernetes # required for isolated management playbooks - - name: ansible.posix # required for isolated management playbooks + - name: https://github.com/ansible-collections/community.kubernetes.git + type: git + # adds openshift python lib + # needs kubectl for yum / dnf / apt-get + # needs to install snap, then use snap to install helm + - name: ansible.posix diff --git a/requirements/requirements.in b/requirements/requirements.in index 93b5b4f72e..f54094ae72 100644 --- a/requirements/requirements.in +++ b/requirements/requirements.in @@ -40,7 +40,7 @@ pygerduty pyparsing python3-saml python-ldap>=3.3.1 # https://github.com/python-ldap/python-ldap/issues/270 -pyyaml>=5.3.1 # minimum version to pull in new pyyaml for CVE-2017-18342 +pyyaml>=5.4.1 # minimum to fix https://github.com/yaml/pyyaml/issues/478 schedule==0.6.0 social-auth-core==3.3.1 # see UPGRADE BLOCKERs social-auth-app-django==3.1.0 # see UPGRADE BLOCKERs diff --git 
a/requirements/requirements.txt b/requirements/requirements.txt index 5829401c78..6256afd061 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -1,7 +1,7 @@ adal==1.2.2 # via msrestazure aiohttp==3.6.2 # via -r /awx_devel/requirements/requirements.in aioredis==1.3.1 # via channels-redis -ansible-runner==1.4.7 # via -r /awx_devel/requirements/requirements.in +# ansible-runner==1.4.7 # via -r /awx_devel/requirements/requirements.in ansiconv==1.0.0 # via -r /awx_devel/requirements/requirements.in asciichartpy==1.5.25 # via -r /awx_devel/requirements/requirements.in asgiref==3.2.5 # via channels, channels-redis, daphne @@ -100,7 +100,7 @@ python-string-utils==1.0.0 # via openshift python3-openid==3.1.0 # via social-auth-core python3-saml==1.9.0 # via -r /awx_devel/requirements/requirements.in pytz==2019.3 # via django, irc, tempora, twilio -pyyaml==5.3.1 # via -r /awx_devel/requirements/requirements.in, ansible-runner, djangorestframework-yaml, kubernetes +pyyaml==5.4.1 # via -r /awx_devel/requirements/requirements.in, ansible-runner, djangorestframework-yaml, kubernetes redis==3.4.1 # via -r /awx_devel/requirements/requirements.in, django-redis requests-oauthlib==1.3.0 # via kubernetes, msrest, social-auth-core requests==2.23.0 # via -r /awx_devel/requirements/requirements.in, adal, azure-keyvault, django-oauth-toolkit, kubernetes, msrest, requests-oauthlib, slackclient, social-auth-core, twilio diff --git a/requirements/requirements_dev.txt b/requirements/requirements_dev.txt index fe51fff164..4788e153a2 100644 --- a/requirements/requirements_dev.txt +++ b/requirements/requirements_dev.txt @@ -20,5 +20,6 @@ matplotlib backports.tempfile # support in unit tests for py32+ tempfile.TemporaryDirectory mockldap sdb +remote-pdb gprof2dot atomicwrites==1.4.0 diff --git a/requirements/requirements_git.txt b/requirements/requirements_git.txt index 340cbfdcc7..74f1fb4a9e 100644 --- a/requirements/requirements_git.txt +++ 
b/requirements/requirements_git.txt @@ -1 +1,3 @@ git+https://github.com/ansible/system-certifi.git@devel#egg=certifi +git+git://github.com/ansible/ansible-runner@devel#egg=ansible-runner +git+https://github.com/project-receptor/receptor.git@#egg=receptorctl&subdirectory=receptorctl diff --git a/tools/ansible/roles/dockerfile/files/launch_awx.sh b/tools/ansible/roles/dockerfile/files/launch_awx.sh old mode 100644 new mode 100755 index 839f7cf746..7b5e86f0bd --- a/tools/ansible/roles/dockerfile/files/launch_awx.sh +++ b/tools/ansible/roles/dockerfile/files/launch_awx.sh @@ -10,7 +10,7 @@ if [ -n "${AWX_KUBE_DEVEL}" ]; then make awx-link popd - export SDB_NOTIFY_HOST=$(ip route | head -n1 | awk '{print $3}') + export SDB_NOTIFY_HOST=$MY_POD_IP fi source /etc/tower/conf.d/environment.sh diff --git a/tools/ansible/roles/dockerfile/files/launch_awx_task.sh b/tools/ansible/roles/dockerfile/files/launch_awx_task.sh old mode 100644 new mode 100755 index 120cc9e3f8..8b9774a477 --- a/tools/ansible/roles/dockerfile/files/launch_awx_task.sh +++ b/tools/ansible/roles/dockerfile/files/launch_awx_task.sh @@ -10,7 +10,7 @@ if [ -n "${AWX_KUBE_DEVEL}" ]; then make awx-link popd - export SDB_NOTIFY_HOST=$(ip route | head -n1 | awk '{print $3}') + export SDB_NOTIFY_HOST=$MY_POD_IP fi source /etc/tower/conf.d/environment.sh diff --git a/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 b/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 index 7acf365c25..66a0eeebbd 100644 --- a/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 +++ b/tools/ansible/roles/dockerfile/templates/Dockerfile.j2 @@ -4,9 +4,6 @@ ### DO NOT EDIT ### -# Locations - set globally to be used across stages -ARG COLLECTION_BASE="/var/lib/awx/vendor/awx_ansible_collections" - # Build container FROM centos:8 as builder @@ -22,7 +19,7 @@ RUN dnf -y update && \ dnf -y install epel-release 'dnf-command(config-manager)' && \ dnf module -y enable 'postgresql:12' && \ dnf config-manager --set-enabled powertools 
&& \ - dnf -y install ansible \ + dnf -y install \ gcc \ gcc-c++ \ git-core \ @@ -57,11 +54,9 @@ ADD requirements/requirements_ansible.txt \ requirements/requirements.txt \ requirements/requirements_tower_uninstall.txt \ requirements/requirements_git.txt \ - requirements/collections_requirements.yml \ /tmp/requirements/ RUN cd /tmp && make requirements_awx requirements_ansible_py3 -RUN cd /tmp && make requirements_collections {% if (build_dev|bool) or (kube_dev|bool) %} ADD requirements/requirements_dev.txt /tmp/requirements @@ -80,8 +75,6 @@ RUN make sdist && \ # Final container(s) FROM centos:8 -ARG COLLECTION_BASE - ENV LANG en_US.UTF-8 ENV LANGUAGE en_US:en ENV LC_ALL en_US.UTF-8 @@ -95,8 +88,6 @@ RUN dnf -y update && \ dnf module -y enable 'postgresql:12' && \ dnf config-manager --set-enabled powertools && \ dnf -y install acl \ - ansible \ - bubblewrap \ git-core \ git-lfs \ glibc-langpack-en \ @@ -143,6 +134,7 @@ RUN cd /usr/local/bin && \ {% if (build_dev|bool) or (kube_dev|bool) %} # Install development/test requirements RUN dnf -y install \ + crun \ gdb \ gtk3 \ gettext \ @@ -169,12 +161,18 @@ RUN dnf -y install \ RUN dnf --enablerepo=debuginfo -y install python3-debuginfo || : {% endif %} +{% if build_dev|bool %} +RUN dnf install -y podman +RUN echo -e 'cgroup_manager = "cgroupfs"\nevents_logger = "file"' > /etc/containers/libpod.conf +{% endif %} + # Copy app from builder COPY --from=builder /var/lib/awx /var/lib/awx RUN ln -s /var/lib/awx/venv/awx/bin/awx-manage /usr/bin/awx-manage {%if build_dev|bool %} +COPY --from=quay.io/project-receptor/receptor:0.9.6 /usr/bin/receptor /usr/bin/receptor RUN openssl req -nodes -newkey rsa:2048 -keyout /etc/nginx/nginx.key -out /etc/nginx/nginx.csr \ -subj "/C=US/ST=North Carolina/L=Durham/O=Ansible/OU=AWX Development/CN=awx.localhost" && \ openssl x509 -req -days 365 -in /etc/nginx/nginx.csr -signkey /etc/nginx/nginx.key -out /etc/nginx/nginx.crt && \ @@ -211,27 +209,29 @@ RUN for dir in \ /var/lib/awx \ 
/var/lib/awx/rsyslog \ /var/lib/awx/rsyslog/conf.d \ + /var/lib/awx/.local/share/containers/storage \ /var/run/awx-rsyslog \ /var/log/tower \ /var/log/nginx \ /var/lib/postgresql \ /var/run/supervisor \ + /var/run/receptor \ /var/lib/nginx ; \ do mkdir -m 0775 -p $dir ; chmod g+rw $dir ; chgrp root $dir ; done && \ for file in \ + /etc/subuid \ + /etc/subgid \ + /etc/group \ /etc/passwd \ /var/lib/awx/rsyslog/rsyslog.conf ; \ do touch $file ; chmod g+rw $file ; chgrp root $file ; done -# Adjust any remaining permissions -RUN chmod u+s /usr/bin/bwrap ; \ - chgrp -R root ${COLLECTION_BASE} ; \ - chmod -R g+rw ${COLLECTION_BASE} - {% if (build_dev|bool) or (kube_dev|bool) %} RUN for dir in \ /var/lib/awx/venv \ + /var/lib/awx/venv/awx/bin \ /var/lib/awx/venv/awx/lib/python3.6 \ + /var/lib/awx/venv/awx/lib/python3.6/site-packages \ /var/lib/awx/projects \ /var/lib/awx/rsyslog \ /var/run/awx-rsyslog \ @@ -253,6 +253,7 @@ ENV HOME="/var/lib/awx" ENV PATH="/usr/pgsql-10/bin:${PATH}" {% if build_dev|bool %} +ENV PATH="/var/lib/awx/venv/awx/bin/:${PATH}" EXPOSE 8043 8013 8080 22 @@ -265,4 +266,5 @@ EXPOSE 8052 ENTRYPOINT ["/usr/bin/tini", "--"] CMD /usr/bin/launch_awx.sh VOLUME /var/lib/nginx +VOLUME /var/lib/awx/.local/share/containers/storage {% endif %} diff --git a/tools/ansible/roles/dockerfile/templates/supervisor_task.conf.j2 b/tools/ansible/roles/dockerfile/templates/supervisor_task.conf.j2 index b9fe0be41a..994323e6a8 100644 --- a/tools/ansible/roles/dockerfile/templates/supervisor_task.conf.j2 +++ b/tools/ansible/roles/dockerfile/templates/supervisor_task.conf.j2 @@ -16,6 +16,8 @@ directory = /var/lib/awx autostart = true autorestart = true stopwaitsecs = 5 +stopasgroup=true +killasgroup=true stdout_logfile=/dev/stdout stdout_logfile_maxbytes=0 stderr_logfile=/dev/stderr @@ -32,6 +34,8 @@ directory = /var/lib/awx autostart = true autorestart = true stopwaitsecs = 5 +stopasgroup=true +killasgroup=true stdout_logfile=/dev/stdout stdout_logfile_maxbytes=0 
stderr_logfile=/dev/stderr diff --git a/tools/ansible/roles/image_build/defaults/main.yml b/tools/ansible/roles/image_build/defaults/main.yml index 0d45e047d8..076a4f47b3 100644 --- a/tools/ansible/roles/image_build/defaults/main.yml +++ b/tools/ansible/roles/image_build/defaults/main.yml @@ -1,5 +1,5 @@ --- -create_preload_data: true +awx_image: quay.io/ansible/awx # Helper vars to construct the proper download URL for the current architecture tini_architecture: '{{ { "x86_64": "amd64", "aarch64": "arm64", "armv7": "arm" }[ansible_facts.architecture] }}' diff --git a/tools/ansible/roles/image_build/tasks/main.yml b/tools/ansible/roles/image_build/tasks/main.yml index ae6e30b7a3..13c5c48a3d 100644 --- a/tools/ansible/roles/image_build/tasks/main.yml +++ b/tools/ansible/roles/image_build/tasks/main.yml @@ -17,10 +17,6 @@ dest: "../awx/ui_next/public/static/media/" when: awx_official|default(false)|bool -- name: Set awx image name - set_fact: - awx_image: "{{ awx_image|default('awx') }}" - # Calling Docker directly because docker-py doesnt support BuildKit - name: Build AWX image command: docker build -t {{ awx_image }}:{{ awx_version }} -f ../../{{ dockerfile_name }} ../.. 
diff --git a/tools/docker-compose-cluster.yml b/tools/docker-compose-cluster.yml index 8532b6e942..6065069125 100644 --- a/tools/docker-compose-cluster.yml +++ b/tools/docker-compose-cluster.yml @@ -14,6 +14,7 @@ services: - "8013:8013" - "8043:8043" - "1936:1936" + awx-1: user: ${CURRENT_UID} container_name: tools_awx_1_1 @@ -31,8 +32,11 @@ services: - "../:/awx_devel" - "./redis/redis_socket_ha_1:/var/run/redis/" - "./docker-compose/supervisor.conf:/etc/supervisord.conf" + - "./docker-compose-cluster/awx-1-receptor.conf:/etc/receptor/receptor.conf" ports: + - "2222:2222" - "5899-5999:5899-5999" + awx-2: user: ${CURRENT_UID} container_name: tools_awx_2_1 @@ -50,8 +54,11 @@ services: - "../:/awx_devel" - "./redis/redis_socket_ha_2:/var/run/redis/" - "./docker-compose/supervisor.conf:/etc/supervisord.conf" + - "./docker-compose-cluster/awx-2-receptor.conf:/etc/receptor/receptor.conf" ports: + - "2223:2222" - "7899-7999:7899-7999" + awx-3: user: ${CURRENT_UID} container_name: tools_awx_3_1 @@ -69,8 +76,11 @@ services: - "../:/awx_devel" - "./redis/redis_socket_ha_3:/var/run/redis/" - "./docker-compose/supervisor.conf:/etc/supervisord.conf" + - "./docker-compose-cluster/awx-3-receptor.conf:/etc/receptor/receptor.conf" ports: + - "2224:2222" - "8899-8999:8899-8999" + redis_1: user: ${CURRENT_UID} image: redis:latest @@ -79,6 +89,7 @@ services: volumes: - "./redis/redis.conf:/usr/local/etc/redis/redis.conf" - "./redis/redis_socket_ha_1:/var/run/redis/" + redis_2: user: ${CURRENT_UID} image: redis:latest @@ -95,6 +106,14 @@ services: volumes: - "./redis/redis.conf:/usr/local/etc/redis/redis.conf" - "./redis/redis_socket_ha_3:/var/run/redis/" + postgres: image: postgres:12 container_name: tools_postgres_1 + environment: + POSTGRES_HOST_AUTH_METHOD: trust + volumes: + - "awx_db:/var/lib/postgresql/data" + +volumes: + awx_db: diff --git a/tools/docker-compose-cluster/awx-1-receptor.conf b/tools/docker-compose-cluster/awx-1-receptor.conf new file mode 100644 index 
0000000000..dcaca8263f --- /dev/null +++ b/tools/docker-compose-cluster/awx-1-receptor.conf @@ -0,0 +1,23 @@ +--- +- log-level: debug + +- control-service: + service: control + filename: /var/run/receptor/receptor.sock + +- tcp-listener: + port: 2200 + +- tcp-peer: + address: awx-2:2200 + +- work-command: + worktype: worker + command: ansible-runner + params: worker + +- work-kubernetes: + worktype: ocp + namespace: receptor + image: quay.io/shanemcd/ee + authmethod: kubeconfig diff --git a/tools/docker-compose-cluster/awx-2-receptor.conf b/tools/docker-compose-cluster/awx-2-receptor.conf new file mode 100644 index 0000000000..bf9d4889a0 --- /dev/null +++ b/tools/docker-compose-cluster/awx-2-receptor.conf @@ -0,0 +1,23 @@ +--- +- log-level: debug + +- control-service: + service: control + filename: /var/run/receptor/receptor.sock + +- tcp-listener: + port: 2200 + +- tcp-peer: + address: awx-3:2200 + +- work-command: + worktype: worker + command: ansible-runner + params: worker + +- work-kubernetes: + worktype: ocp + namespace: receptor + image: quay.io/shanemcd/ee + authmethod: kubeconfig diff --git a/tools/docker-compose-cluster/awx-3-receptor.conf b/tools/docker-compose-cluster/awx-3-receptor.conf new file mode 100644 index 0000000000..ac5db0d284 --- /dev/null +++ b/tools/docker-compose-cluster/awx-3-receptor.conf @@ -0,0 +1,23 @@ +--- +- log-level: debug + +- control-service: + service: control + filename: /var/run/receptor/receptor.sock + +- tcp-listener: + port: 2200 + +- tcp-peer: + address: awx-1:2200 + +- work-command: + worktype: worker + command: ansible-runner + params: worker + +- work-kubernetes: + worktype: ocp + namespace: receptor + image: quay.io/shanemcd/ee + authmethod: kubeconfig diff --git a/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 b/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 index 72181cfb0b..1d0799c7b9 100644 --- 
a/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 +++ b/tools/docker-compose/ansible/roles/sources/templates/docker-compose.yml.j2 @@ -13,7 +13,9 @@ services: SDB_HOST: 0.0.0.0 SDB_PORT: 7899 AWX_GROUP_QUEUES: tower + RECEPTORCTL_SOCKET: /var/run/receptor/receptor.sock ports: + - "2222:2222" - "8888:8888" - "8080:8080" - "8013:8013" @@ -31,7 +33,11 @@ services: - "../../docker-compose/_sources/websocket_secret.py:/etc/tower/conf.d/websocket_secret.py" - "../../docker-compose/_sources/local_settings.py:/etc/tower/conf.d/local_settings.py" - "../../docker-compose/_sources/SECRET_KEY:/etc/tower/SECRET_KEY" + - "../../docker-compose/receptor.conf:/etc/receptor/receptor.conf" - "redis_socket:/var/run/redis/:rw" + - "receptor:/var/run/receptor/" + - "/sys/fs/cgroup:/sys/fs/cgroup" + - "~/.kube/config:/var/lib/awx/.kube/config" privileged: true tty: true # A useful container that simply passes through log messages to the console @@ -40,8 +46,6 @@ services: # build: # context: ./docker-compose # dockerfile: Dockerfile-logstash - - # Postgres Database Container postgres: image: postgres:12 container_name: tools_postgres_1 @@ -64,3 +68,4 @@ services: volumes: awx_db: redis_socket: + receptor: diff --git a/tools/docker-compose/bootstrap_development.sh b/tools/docker-compose/bootstrap_development.sh index 1d3e399bf7..88a7e62941 100755 --- a/tools/docker-compose/bootstrap_development.sh +++ b/tools/docker-compose/bootstrap_development.sh @@ -1,14 +1,6 @@ #!/bin/bash set +x -# Wait for the databases to come up -ansible -i "127.0.0.1," -c local -v -m wait_for -a "host=postgres port=5432" all -ansible -i "127.0.0.1," -c local -v -m wait_for -a "path=/var/run/redis/redis.sock" all - -# In case AWX in the container wants to connect to itself, use "docker exec" to attach to the container otherwise -# TODO: FIX -#/etc/init.d/ssh start - # Move to the source directory so we can bootstrap if [ -f "/awx_devel/manage.py" ]; then cd /awx_devel diff --git 
a/tools/docker-compose/entrypoint.sh b/tools/docker-compose/entrypoint.sh index 8ed9bf2abd..006435000a 100755 --- a/tools/docker-compose/entrypoint.sh +++ b/tools/docker-compose/entrypoint.sh @@ -2,13 +2,33 @@ if [ `id -u` -ge 500 ] || [ -z "${CURRENT_UID}" ]; then - cat << EOF > /tmp/passwd +cat << EOF > /etc/passwd root:x:0:0:root:/root:/bin/bash -awx:x:`id -u`:`id -g`:,,,:/tmp:/bin/bash +awx:x:`id -u`:`id -g`:,,,:/var/lib/awx:/bin/bash EOF - cat /tmp/passwd > /etc/passwd - rm /tmp/passwd +cat << EOF >> /etc/group +awx:x:`id -u`:awx +EOF + +cat << EOF > /etc/subuid +awx:100000:50001 +EOF + +cat << EOF > /etc/subgid +awx:100000:50001 +EOF + +fi + +# Required to get rootless podman working after +# writing out the sub*id files above +podman system migrate + +if [[ "$OS" == *"Docker Desktop"* ]]; then + export SDB_NOTIFY_HOST='docker.for.mac.host.internal' +else + export SDB_NOTIFY_HOST=$(ip route | head -n1 | awk '{print $3}') fi exec $@ diff --git a/tools/docker-compose/receptor.conf b/tools/docker-compose/receptor.conf new file mode 100644 index 0000000000..d5ac25cf2d --- /dev/null +++ b/tools/docker-compose/receptor.conf @@ -0,0 +1,28 @@ +--- +- log-level: debug + +- control-service: + service: control + filename: /var/run/receptor/receptor.sock + +- local-only: + +- work-command: + worktype: local + command: ansible-runner + params: worker + allowruntimeparams: true + +- work-kubernetes: + worktype: kubernetes-runtime-auth + authmethod: runtime + allowruntimeauth: true + allowruntimepod: true + allowruntimeparams: true + +- work-kubernetes: + worktype: kubernetes-incluster-auth + authmethod: incluster + allowruntimeauth: true + allowruntimepod: true + allowruntimeparams: true diff --git a/tools/docker-compose/supervisor.conf b/tools/docker-compose/supervisor.conf index 82e8962a9b..fc2eb2d028 100644 --- a/tools/docker-compose/supervisor.conf +++ b/tools/docker-compose/supervisor.conf @@ -83,6 +83,17 @@ redirect_stderr=true stdout_logfile=/dev/fd/1 stdout_logfile_maxbytes=0
+[program:awx-receptor] +command = receptor --config /etc/receptor/receptor.conf +autostart = true +autorestart = true +stopsignal = KILL +stopasgroup = true +killasgroup = true +redirect_stderr=true +stdout_logfile=/dev/fd/1 +stdout_logfile_maxbytes=0 + [group:tower-processes] programs=awx-dispatcher,awx-receiver,awx-uwsgi,awx-daphne,awx-nginx,awx-wsbroadcast,awx-rsyslogd priority=5