Mirror of https://github.com/ansible/awx.git
Synced 2026-02-05 03:24:50 -03:30

Compare commits: 21.13.0 ... constructe (45 commits)
| SHA1 |
|---|
| b15c54f462 |
| 3335afcd3a |
| 1f2a5cf7e4 |
| 7f2933c43c |
| d55af032f7 |
| 3ff65db2e6 |
| e25c767a47 |
| 0866bfc549 |
| 2d9f2d36a1 |
| c60ba5cec9 |
| c98f86a355 |
| 3f0d28dd7f |
| 49e5d76062 |
| ecd788312e |
| e1e27a028c |
| 0961ca06c9 |
| a3d7c02802 |
| 280ceae267 |
| ea719e053e |
| f275c2a9c5 |
| 3242dbcbe6 |
| 341f8e385c |
| 659853dcea |
| 80c15e286f |
| c22d8f1d7e |
| 27a97017dd |
| c72dca3ea5 |
| ddb3cde872 |
| c2ec8396cd |
| de115ed1c8 |
| 87918bd275 |
| 7598e117d4 |
| 700055801a |
| 1c6a48ffb6 |
| fab83715e9 |
| 0ebe57cbf4 |
| d4840b240b |
| 8538d37702 |
| 5550086b3b |
| 980bfc4b6f |
| 6351e8bbc9 |
| 325e566a3d |
| d7f87ed27c |
| a5baee1b3a |
| dd8c9f87a9 |
.github/workflows/promote.yml (vendored): 1 line changed

@@ -10,7 +10,6 @@ on:
jobs:
promote:
if: endsWith(github.repository, '/awx')
runs-on: ubuntu-latest
steps:
- name: Checkout awx
.github/workflows/stage.yml (vendored): 1 line changed

@@ -21,7 +21,6 @@ on:
jobs:
stage:
if: endsWith(github.repository, '/awx')
runs-on: ubuntu-latest
permissions:
packages: write
Makefile: 31 lines changed

@@ -1,5 +1,4 @@
PYTHON ?= python3.9
DOCKER_COMPOSE ?= docker-compose
OFFICIAL ?= no
NODE ?= node
NPM_BIN ?= npm
@@ -204,7 +203,19 @@ uwsgi: collectstatic
@if [ "$(VENV_BASE)" ]; then \
. $(VENV_BASE)/awx/bin/activate; \
fi; \
uwsgi /etc/tower/uwsgi.ini
uwsgi -b 32768 \
--socket 127.0.0.1:8050 \
--module=awx.wsgi:application \
--home=/var/lib/awx/venv/awx \
--chdir=/awx_devel/ \
--vacuum \
--processes=5 \
--harakiri=120 --master \
--no-orphans \
--max-requests=1000 \
--stats /tmp/stats.socket \
--lazy-apps \
--logformat "%(addr) %(method) %(uri) - %(proto) %(status)"

awx-autoreload:
@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
@@ -498,20 +509,20 @@ docker-compose-sources: .git/hooks/pre-commit

docker-compose: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans

docker-compose-credential-plugins: awx/projects docker-compose-sources
echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans

docker-compose-test: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash
docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash

docker-compose-runtest: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh

docker-compose-build-swagger: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger

SCHEMA_DIFF_BASE_BRANCH ?= devel
detect-schema-change: genschema
@@ -520,7 +531,7 @@ detect-schema-change: genschema
diff -u -b reference-schema.json schema.json

docker-compose-clean: awx/projects
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf
docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf

docker-compose-container-group-clean:
@if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \
@@ -548,10 +559,10 @@ docker-refresh: docker-clean docker-compose

## Docker Development Environment with Elastic Stack Connected
docker-compose-elk: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

docker-compose-cluster-elk: awx/projects docker-compose-sources
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

docker-compose-container-group:
MINIKUBE_CONTAINER_GROUP=true make docker-compose
@@ -1,4 +1,5 @@
# Django
from django.conf import settings
from django.utils.translation import gettext_lazy as _

# Django REST Framework
@@ -8,7 +9,6 @@ from rest_framework import serializers
from awx.conf import fields, register, register_validate
from awx.api.fields import OAuth2ProviderField
from oauth2_provider.settings import oauth2_settings
from awx.sso.common import is_remote_auth_enabled


register(
@@ -108,8 +108,19 @@ register(


def authentication_validate(serializer, attrs):
if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
raise serializers.ValidationError(_("There are no remote authentication systems configured."))
remote_auth_settings = [
'AUTH_LDAP_SERVER_URI',
'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
'SOCIAL_AUTH_GITHUB_KEY',
'SOCIAL_AUTH_GITHUB_ORG_KEY',
'SOCIAL_AUTH_GITHUB_TEAM_KEY',
'SOCIAL_AUTH_SAML_ENABLED_IDPS',
'RADIUS_SERVER',
'TACACSPLUS_HOST',
]
if attrs.get('DISABLE_LOCAL_AUTH', False):
if not any(getattr(settings, s, None) for s in remote_auth_settings):
raise serializers.ValidationError(_("There are no remote authentication systems configured."))
return attrs
@@ -155,7 +155,7 @@ class FieldLookupBackend(BaseFilterBackend):
'search',
)

# A list of fields that we know can be filtered on without the possibility
# A list of fields that we know can be filtered on without the possiblity
# of introducing duplicates
NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)

@@ -268,7 +268,7 @@ class FieldLookupBackend(BaseFilterBackend):
continue

# HACK: make `created` available via API for the Django User ORM model
# so it keep compatibility with other objects which exposes the `created` attr.
# so it keep compatiblity with other objects which exposes the `created` attr.
if queryset.model._meta.object_name == 'User' and key.startswith('created'):
key = key.replace('created', 'date_joined')
@@ -28,7 +28,7 @@ from rest_framework import generics
from rest_framework.response import Response
from rest_framework import status
from rest_framework import views
from rest_framework.permissions import IsAuthenticated
from rest_framework.permissions import AllowAny
from rest_framework.renderers import StaticHTMLRenderer
from rest_framework.negotiation import DefaultContentNegotiation

@@ -674,7 +674,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):
location = None
created = True

# Retrieve the sub object (whether created or by ID).
# Retrive the sub object (whether created or by ID).
sub = get_object_or_400(self.model, pk=sub_id)

# Verify we have permission to attach.
@@ -822,7 +822,7 @@ def trigger_delayed_deep_copy(*args, **kwargs):

class CopyAPIView(GenericAPIView):
serializer_class = CopySerializer
permission_classes = (IsAuthenticated,)
permission_classes = (AllowAny,)
copy_return_serializer_class = None
new_in_330 = True
new_in_api_v2 = True
@@ -60,7 +60,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
delattr(renderer_context['view'], '_request')

def get_raw_data_form(self, data, view, method, request):
# Set a flag on the view to indicate to the view/serializer that we're
# Set a flag on the view to indiciate to the view/serializer that we're
# creating a raw data form for the browsable API. Store the original
# request method to determine how to populate the raw data form.
if request.method in {'OPTIONS', 'DELETE'}:
@@ -108,6 +108,7 @@ from awx.main.utils import (
extract_ansible_vars,
encrypt_dict,
prefetch_page_capabilities,
get_external_account,
truncate_stdout,
)
from awx.main.utils.filters import SmartFilter
@@ -123,8 +124,6 @@ from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, Ver
# AWX Utils
from awx.api.validators import HostnameRegexValidator

from awx.sso.common import get_external_account

logger = logging.getLogger('awx.api.serializers')

# Fields that should be summarized regardless of object type.
@@ -537,7 +536,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
#
# This logic is to force rendering choice's on an uneditable field.
# Note: Consider expanding this rendering for more than just choices fields
# Note: This logic works in conjunction with
# Note: This logic works in conjuction with
if hasattr(model_field, 'choices') and model_field.choices:
was_editable = model_field.editable
model_field.editable = True
@@ -988,8 +987,23 @@ class UserSerializer(BaseSerializer):
def _update_password(self, obj, new_password):
# For now we're not raising an error, just not saving password for
# users managed by LDAP who already have an unusable password set.
# Get external password will return something like ldap or enterprise or None if the user isn't external. We only want to allow a password update for a None option
if new_password and not self.get_external_account(obj):
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
try:
if obj.pk and obj.profile.ldap_dn and not obj.has_usable_password():
new_password = None
except AttributeError:
pass
if (
getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
) and obj.social_auth.all():
new_password = None
if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and obj.enterprise_auth.all():
new_password = None
if new_password:
obj.set_password(new_password)
obj.save(update_fields=['password'])
@@ -1666,13 +1680,8 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
res.update(
dict(
hosts=self.reverse('api:inventory_hosts_list', kwargs={'pk': obj.pk}),
groups=self.reverse('api:inventory_groups_list', kwargs={'pk': obj.pk}),
root_groups=self.reverse('api:inventory_root_groups_list', kwargs={'pk': obj.pk}),
variable_data=self.reverse('api:inventory_variable_data', kwargs={'pk': obj.pk}),
script=self.reverse('api:inventory_script_view', kwargs={'pk': obj.pk}),
tree=self.reverse('api:inventory_tree_view', kwargs={'pk': obj.pk}),
inventory_sources=self.reverse('api:inventory_inventory_sources_list', kwargs={'pk': obj.pk}),
update_inventory_sources=self.reverse('api:inventory_inventory_sources_update', kwargs={'pk': obj.pk}),
activity_stream=self.reverse('api:inventory_activity_stream_list', kwargs={'pk': obj.pk}),
job_templates=self.reverse('api:inventory_job_template_list', kwargs={'pk': obj.pk}),
ad_hoc_commands=self.reverse('api:inventory_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
@@ -1683,8 +1692,18 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
labels=self.reverse('api:inventory_label_list', kwargs={'pk': obj.pk}),
)
)
if obj.kind in ('', 'constructed'):
# links not relevant for the "old" smart inventory
res['groups'] = self.reverse('api:inventory_groups_list', kwargs={'pk': obj.pk})
res['root_groups'] = self.reverse('api:inventory_root_groups_list', kwargs={'pk': obj.pk})
res['update_inventory_sources'] = self.reverse('api:inventory_inventory_sources_update', kwargs={'pk': obj.pk})
res['inventory_sources'] = self.reverse('api:inventory_inventory_sources_list', kwargs={'pk': obj.pk})
res['tree'] = self.reverse('api:inventory_tree_view', kwargs={'pk': obj.pk})
if obj.organization:
res['organization'] = self.reverse('api:organization_detail', kwargs={'pk': obj.organization.pk})
if obj.kind == 'constructed':
res['input_inventories'] = self.reverse('api:inventory_input_inventories', kwargs={'pk': obj.pk})
res['constructed_url'] = self.reverse('api:constructed_inventory_detail', kwargs={'pk': obj.pk})
return res

def to_representation(self, obj):
@@ -1726,6 +1745,91 @@ class InventorySerializer(LabelsListMixin, BaseSerializerWithVariables):
return super(InventorySerializer, self).validate(attrs)


class ConstructedFieldMixin(serializers.Field):
def get_attribute(self, instance):
if not hasattr(instance, '_constructed_inv_src'):
instance._constructed_inv_src = instance.inventory_sources.first()
inv_src = instance._constructed_inv_src
return super().get_attribute(inv_src) # yoink


class ConstructedCharField(ConstructedFieldMixin, serializers.CharField):
pass


class ConstructedIntegerField(ConstructedFieldMixin, serializers.IntegerField):
pass


class ConstructedInventorySerializer(InventorySerializer):
source_vars = ConstructedCharField(
required=False,
default=None,
allow_blank=True,
help_text=_('The source_vars for the related auto-created inventory source, special to constructed inventory.'),
)
update_cache_timeout = ConstructedIntegerField(
required=False,
allow_null=True,
min_value=0,
default=None,
help_text=_('The cache timeout for the related auto-created inventory source, special to constructed inventory'),
)
limit = ConstructedCharField(
required=False,
default=None,
allow_blank=True,
help_text=_('The limit to restrict the returned hosts for the related auto-created inventory source, special to constructed inventory.'),
)
verbosity = ConstructedIntegerField(
required=False,
allow_null=True,
min_value=0,
max_value=2,
default=None,
help_text=_('The verbosity level for the related auto-created inventory source, special to constructed inventory'),
)

class Meta:
model = Inventory
fields = ('*', '-host_filter', 'source_vars', 'update_cache_timeout', 'limit', 'verbosity')
read_only_fields = ('*', 'kind')

def pop_inv_src_data(self, data):
inv_src_data = {}
for field in ('source_vars', 'update_cache_timeout', 'limit', 'verbosity'):
if field in data:
# values always need to be removed, as they are not valid for Inventory model
value = data.pop(field)
# null is not valid for any of those fields, taken as not-provided
if value is not None:
inv_src_data[field] = value
return inv_src_data

def apply_inv_src_data(self, inventory, inv_src_data):
if inv_src_data:
update_fields = []
inv_src = inventory.inventory_sources.first()
for field, value in inv_src_data.items():
setattr(inv_src, field, value)
update_fields.append(field)
if update_fields:
inv_src.save(update_fields=update_fields)

def create(self, validated_data):
validated_data['kind'] = 'constructed'
inv_src_data = self.pop_inv_src_data(validated_data)
inventory = super().create(validated_data)
self.apply_inv_src_data(inventory, inv_src_data)
return inventory

def update(self, obj, validated_data):
inv_src_data = self.pop_inv_src_data(validated_data)
obj = super().update(obj, validated_data)
self.apply_inv_src_data(obj, inv_src_data)
return obj


class InventoryScriptSerializer(InventorySerializer):
class Meta:
fields = ()
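The ConstructedInventorySerializer above is what backs the new /api/v2/constructed_inventories/ endpoint registered later in this compare (see the URL and view changes below). As a rough illustration of how a client might exercise it, here is a minimal sketch using the requests library; the controller URL, the token, the organization ID, and the source_vars content are placeholders, and the exact payload a given AWX build accepts should be confirmed against its /api/v2/ schema.

```python
import requests

AWX_URL = "https://awx.example.com"        # placeholder controller URL
HEADERS = {"Authorization": "Bearer <personal-access-token>"}  # placeholder token

# kind is forced to 'constructed' by the serializer's create(); source_vars,
# limit, verbosity, and update_cache_timeout are proxied onto the auto-created
# inventory source rather than stored on the Inventory itself.
payload = {
    "name": "demo-constructed",
    "organization": 1,                      # placeholder organization ID
    "source_vars": "plugin: constructed",   # placeholder; real configs add groups/keyed_groups here
    "limit": "",
    "verbosity": 1,
    "update_cache_timeout": 0,
}
resp = requests.post(f"{AWX_URL}/api/v2/constructed_inventories/", json=payload, headers=HEADERS)
resp.raise_for_status()
inventory = resp.json()
print(inventory["id"], inventory["kind"])   # kind should come back as 'constructed'
```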
@@ -1779,6 +1883,8 @@ class HostSerializer(BaseSerializerWithVariables):
ansible_facts=self.reverse('api:host_ansible_facts_detail', kwargs={'pk': obj.pk}),
)
)
if obj.inventory.kind == 'constructed':
res['original_host'] = self.reverse('api:host_detail', kwargs={'pk': obj.instance_id})
if obj.inventory:
res['inventory'] = self.reverse('api:inventory_detail', kwargs={'pk': obj.inventory.pk})
if obj.last_job:
@@ -1835,8 +1941,8 @@ class HostSerializer(BaseSerializerWithVariables):
return value

def validate_inventory(self, value):
if value.kind == 'smart':
raise serializers.ValidationError({"detail": _("Cannot create Host for Smart Inventory")})
if value.kind in ('constructed', 'smart'):
raise serializers.ValidationError({"detail": _("Cannot create Host for Smart or Constructed Inventories")})
return value

def validate_variables(self, value):
@@ -1934,8 +2040,8 @@ class GroupSerializer(BaseSerializerWithVariables):
return value

def validate_inventory(self, value):
if value.kind == 'smart':
raise serializers.ValidationError({"detail": _("Cannot create Group for Smart Inventory")})
if value.kind in ('constructed', 'smart'):
raise serializers.ValidationError({"detail": _("Cannot create Group for Smart or Constructed Inventories")})
return value

def to_representation(self, obj):
@@ -2009,6 +2115,7 @@ class InventorySourceOptionsSerializer(BaseSerializer):
'custom_virtualenv',
'timeout',
'verbosity',
'limit',
)
read_only_fields = ('*', 'custom_virtualenv')

@@ -2115,8 +2222,8 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
return value

def validate_inventory(self, value):
if value and value.kind == 'smart':
raise serializers.ValidationError({"detail": _("Cannot create Inventory Source for Smart Inventory")})
if value and value.kind in ('constructed', 'smart'):
raise serializers.ValidationError({"detail": _("Cannot create Inventory Source for Smart or Constructed Inventories")})
return value

# TODO: remove when old 'credential' fields are removed
@@ -2137,6 +2244,8 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
obj = super(InventorySourceSerializer, self).update(obj, validated_data)
if deprecated_fields:
self._update_deprecated_fields(deprecated_fields, obj)
if obj.source == 'constructed':
raise serializers.ValidationError({'error': _("Cannot edit source of type constructed.")})
return obj

# TODO: remove when old 'credential' fields are removed
@@ -2163,6 +2272,8 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
if get_field_from_model_or_attrs('source') == 'scm':
if ('source' in attrs or 'source_project' in attrs) and get_field_from_model_or_attrs('source_project') is None:
raise serializers.ValidationError({"source_project": _("Project required for scm type sources.")})
elif get_field_from_model_or_attrs('source') == 'constructed':
raise serializers.ValidationError({"Error": _('constructed not a valid source for inventory')})
else:
redundant_scm_fields = list(filter(lambda x: attrs.get(x, None), ['source_project', 'source_path']))
if redundant_scm_fields:
@@ -3983,7 +4094,7 @@ class JobEventSerializer(BaseSerializer):
# Show full stdout for playbook_on_* events.
if obj and obj.event.startswith('playbook_on'):
return data
# If the view logic says to not truncate (request was to the detail view or a param was used)
# If the view logic says to not trunctate (request was to the detail view or a param was used)
if self.context.get('no_truncate', False):
return data
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
@@ -4014,7 +4125,7 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
# raw SCM URLs in their stdout (which *could* contain passwords)
# attempt to detect and filter HTTP basic auth passwords in the stdout
# of these types of events
if obj.event_data.get('task_action') in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
if obj.event_data.get('task_action') in ('git', 'svn'):
try:
return json.loads(UriCleaner.remove_sensitive(json.dumps(obj.event_data)))
except Exception:
@@ -4058,7 +4169,7 @@ class AdHocCommandEventSerializer(BaseSerializer):

def to_representation(self, obj):
data = super(AdHocCommandEventSerializer, self).to_representation(obj)
# If the view logic says to not truncate (request was to the detail view or a param was used)
# If the view logic says to not trunctate (request was to the detail view or a param was used)
if self.context.get('no_truncate', False):
return data
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
@@ -4751,7 +4862,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
),
)
until = serializers.SerializerMethodField(
help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an empty string will be returned'),
help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an emptry string will be returned'),
)

class Meta:
@@ -3,7 +3,7 @@ Make a GET request to this resource to retrieve aggregate statistics about inven
Including fetching the number of total hosts tracked by Tower over an amount of time and the current success or
failed status of hosts which have run jobs within an Inventory.

## Parameters and Filtering
## Parmeters and Filtering

The `period` of the data can be adjusted with:

@@ -24,7 +24,7 @@ Data about the number of hosts will be returned in the following format:
Each element contains an epoch timestamp represented in seconds and a numerical value indicating
the number of hosts that exist at a given moment

Data about failed and successful hosts by inventory will be given as:
Data about failed and successfull hosts by inventory will be given as:

{
"sources": [
@@ -2,7 +2,7 @@

Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.

## Parameters and Filtering
## Parmeters and Filtering

The `period` of the data can be adjusted with:
@@ -18,7 +18,7 @@ inventory sources:
* `inventory_update`: ID of the inventory update job that was started.
(integer, read-only)
* `project_update`: ID of the project update job that was started if this inventory source is an SCM source.
(integer, read-only, optional)
(interger, read-only, optional)

Note: All manual inventory sources (source="") will be ignored by the update_inventory_sources endpoint. This endpoint will not update inventory sources for Smart Inventories.
@@ -6,7 +6,10 @@ from django.urls import re_path
from awx.api.views.inventory import (
InventoryList,
InventoryDetail,
ConstructedInventoryDetail,
ConstructedInventoryList,
InventoryActivityStreamList,
InventoryInputInventoriesList,
InventoryJobTemplateList,
InventoryAccessList,
InventoryObjectRolesList,
@@ -37,6 +40,7 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/script/$', InventoryScriptView.as_view(), name='inventory_script_view'),
re_path(r'^(?P<pk>[0-9]+)/tree/$', InventoryTreeView.as_view(), name='inventory_tree_view'),
re_path(r'^(?P<pk>[0-9]+)/inventory_sources/$', InventoryInventorySourcesList.as_view(), name='inventory_inventory_sources_list'),
re_path(r'^(?P<pk>[0-9]+)/input_inventories/$', InventoryInputInventoriesList.as_view(), name='inventory_input_inventories'),
re_path(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', InventoryInventorySourcesUpdate.as_view(), name='inventory_inventory_sources_update'),
re_path(r'^(?P<pk>[0-9]+)/activity_stream/$', InventoryActivityStreamList.as_view(), name='inventory_activity_stream_list'),
re_path(r'^(?P<pk>[0-9]+)/job_templates/$', InventoryJobTemplateList.as_view(), name='inventory_job_template_list'),
@@ -48,4 +52,10 @@ urls = [
re_path(r'^(?P<pk>[0-9]+)/copy/$', InventoryCopy.as_view(), name='inventory_copy'),
]

__all__ = ['urls']
# Constructed inventory special views
constructed_inventory_urls = [
re_path(r'^$', ConstructedInventoryList.as_view(), name='constructed_inventory_list'),
re_path(r'^(?P<pk>[0-9]+)/$', ConstructedInventoryDetail.as_view(), name='constructed_inventory_detail'),
]

__all__ = ['urls', 'constructed_inventory_urls']
@@ -39,7 +39,7 @@ from .organization import urls as organization_urls
from .user import urls as user_urls
from .project import urls as project_urls
from .project_update import urls as project_update_urls
from .inventory import urls as inventory_urls
from .inventory import urls as inventory_urls, constructed_inventory_urls
from .execution_environments import urls as execution_environment_urls
from .team import urls as team_urls
from .host import urls as host_urls
@@ -110,6 +110,7 @@ v2_urls = [
re_path(r'^project_updates/', include(project_update_urls)),
re_path(r'^teams/', include(team_urls)),
re_path(r'^inventories/', include(inventory_urls)),
re_path(r'^constructed_inventories/', include(constructed_inventory_urls)),
re_path(r'^hosts/', include(host_urls)),
re_path(r'^groups/', include(group_urls)),
re_path(r'^inventory_sources/', include(inventory_source_urls)),
@@ -152,7 +152,7 @@ def api_exception_handler(exc, context):
if 'awx.named_url_rewritten' in req.environ and not str(getattr(exc, 'status_code', 0)).startswith('2'):
# if the URL was rewritten, and it's not a 2xx level status code,
# revert the request.path to its original value to avoid leaking
# any context about the existence of resources
# any context about the existance of resources
req.path = req.environ['awx.named_url_rewritten']
if exc.status_code == 403:
exc = NotFound(detail=_('Not found.'))
@@ -172,7 +172,7 @@ class DashboardView(APIView):
user_inventory = get_user_queryset(request.user, models.Inventory)
inventory_with_failed_hosts = user_inventory.filter(hosts_with_active_failures__gt=0)
user_inventory_external = user_inventory.filter(has_inventory_sources=True)
# if there are *zero* inventories, this aggregate query will be None, fall back to 0
# if there are *zero* inventories, this aggregrate query will be None, fall back to 0
failed_inventory = user_inventory.aggregate(Sum('inventory_sources_with_failures'))['inventory_sources_with_failures__sum'] or 0
data['inventories'] = {
'url': reverse('api:inventory_list', request=request),
@@ -1559,6 +1559,8 @@ class HostDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
def delete(self, request, *args, **kwargs):
if self.get_object().inventory.pending_deletion:
return Response({"error": _("The inventory for this host is already being deleted.")}, status=status.HTTP_400_BAD_REQUEST)
if self.get_object().inventory.kind == 'constructed':
return Response({"error": _("Delete constructed inventory hosts from input inventory.")}, status=status.HTTP_400_BAD_REQUEST)
return super(HostDetail, self).delete(request, *args, **kwargs)
@@ -1667,7 +1669,7 @@ class GroupList(ListCreateAPIView):

class EnforceParentRelationshipMixin(object):
"""
Useful when you have a self-referring ManyToManyRelationship.
Useful when you have a self-refering ManyToManyRelationship.
* Tower uses a shallow (2-deep only) url pattern. For example:

When an object hangs off of a parent object you would have the url of the
@@ -2415,7 +2417,7 @@ class JobTemplateSurveySpec(GenericAPIView):
status=status.HTTP_400_BAD_REQUEST,
)
# if it's a multiselect or multiple choice, it must have coices listed
# choices and defaults must come in as strings separated by /n characters.
# choices and defualts must come in as strings seperated by /n characters.
if qtype == 'multiselect' or qtype == 'multiplechoice':
if 'choices' in survey_item:
if isinstance(survey_item['choices'], str):
@@ -3430,7 +3432,7 @@ class JobCreateSchedule(RetrieveAPIView):

config = obj.launch_config

# Make up a name for the schedule, guarantee that it is unique
# Make up a name for the schedule, guarentee that it is unique
name = 'Auto-generated schedule from job {}'.format(obj.id)
existing_names = models.Schedule.objects.filter(name__startswith=name).values_list('name', flat=True)
if name in existing_names:
@@ -3621,7 +3623,7 @@ class JobJobEventsChildrenSummary(APIView):
# key is counter of meta events (i.e. verbose), value is uuid of the assigned parent
map_meta_counter_nested_uuid = {}

# collapsible tree view in the UI only makes sense for tree-like
# collapsable tree view in the UI only makes sense for tree-like
# hierarchy. If ansible is ran with a strategy like free or host_pinned, then
# events can be out of sequential order, and no longer follow a tree structure
# E1
@@ -4288,7 +4290,7 @@ class WorkflowApprovalTemplateJobsList(SubListAPIView):
parent_key = 'workflow_approval_template'


class WorkflowApprovalList(ListAPIView):
class WorkflowApprovalList(ListCreateAPIView):
model = models.WorkflowApproval
serializer_class = serializers.WorkflowApprovalListSerializer
@@ -14,6 +14,7 @@ from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import PermissionDenied
from rest_framework.response import Response
from rest_framework import status
from rest_framework import serializers

# AWX
from awx.main.models import ActivityStream, Inventory, JobTemplate, Role, User, InstanceGroup, InventoryUpdateEvent, InventoryUpdate
@@ -31,6 +32,7 @@ from awx.api.views.labels import LabelSubListCreateAttachDetachView

from awx.api.serializers import (
InventorySerializer,
ConstructedInventorySerializer,
ActivityStreamSerializer,
RoleSerializer,
InstanceGroupSerializer,
@@ -79,7 +81,9 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie

# Do not allow changes to an Inventory kind.
if kind is not None and obj.kind != kind:
return Response(dict(error=_('You cannot turn a regular inventory into a "smart" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED)
return Response(
dict(error=_('You cannot turn a regular inventory into a "smart" or "constructed" inventory.')), status=status.HTTP_405_METHOD_NOT_ALLOWED
)
return super(InventoryDetail, self).update(request, *args, **kwargs)

def destroy(self, request, *args, **kwargs):
@@ -94,6 +98,29 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie
return Response(dict(error=_("{0}".format(e))), status=status.HTTP_400_BAD_REQUEST)


class ConstructedInventoryDetail(InventoryDetail):
serializer_class = ConstructedInventorySerializer


class ConstructedInventoryList(InventoryList):
serializer_class = ConstructedInventorySerializer

def get_queryset(self):
r = super().get_queryset()
return r.filter(kind='constructed')


class InventoryInputInventoriesList(SubListAttachDetachAPIView):
model = Inventory
serializer_class = InventorySerializer
parent_model = Inventory
relationship = 'input_inventories'

def is_valid_relation(self, parent, sub, created=False):
if sub.kind == 'constructed':
raise serializers.ValidationError({'error': 'You cannot add a constructed inventory to another constructed inventory.'})


class InventoryActivityStreamList(SubListAPIView):
model = ActivityStream
serializer_class = ActivityStreamSerializer
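The InventoryInputInventoriesList view above exposes the attach/detach endpoint for a constructed inventory's input inventories. A hedged sketch of how a client might wire two ordinary inventories into a constructed one, assuming the usual AWX sub-list attach convention of POSTing an object with the related primary key; the URL, token, and IDs are placeholders:

```python
import requests

AWX_URL = "https://awx.example.com"            # placeholder controller URL
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder token
constructed_id = 42                            # placeholder constructed inventory pk

# Attach two regular inventories as inputs; a constructed inventory cannot be
# attached to another constructed inventory (is_valid_relation above rejects it).
for input_id in (7, 8):                        # placeholder input inventory pks
    r = requests.post(
        f"{AWX_URL}/api/v2/inventories/{constructed_id}/input_inventories/",
        json={"id": input_id},
        headers=HEADERS,
    )
    r.raise_for_status()
```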
@@ -98,6 +98,7 @@ class ApiVersionRootView(APIView):
data['tokens'] = reverse('api:o_auth2_token_list', request=request)
data['metrics'] = reverse('api:metrics_view', request=request)
data['inventory'] = reverse('api:inventory_list', request=request)
data['constructed_inventory'] = reverse('api:constructed_inventory_list', request=request)
data['inventory_sources'] = reverse('api:inventory_source_list', request=request)
data['inventory_updates'] = reverse('api:inventory_update_list', request=request)
data['groups'] = reverse('api:group_list', request=request)
@@ -21,7 +21,7 @@ logger = logging.getLogger('awx.conf.fields')
# Use DRF fields to convert/validate settings:
# - to_representation(obj) should convert a native Python object to a primitive
# serializable type. This primitive type will be what is presented in the API
# and stored in the JSON field in the database.
# and stored in the JSON field in the datbase.
# - to_internal_value(data) should convert the primitive type back into the
# appropriate Python type to be used in settings.
@@ -1,11 +1,7 @@
import inspect

from django.conf import settings

import logging


logger = logging.getLogger('awx.conf.migrations')
from django.utils.timezone import now


def fill_ldap_group_type_params(apps, schema_editor):
@@ -19,7 +15,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
entry = qs[0]
group_type_params = entry.value
else:
return  # for new installs we prefer to use the default value
entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())

init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:])
for k in list(group_type_params.keys()):
@@ -27,5 +23,4 @@ def fill_ldap_group_type_params(apps, schema_editor):
del group_type_params[k]

entry.value = group_type_params
logger.warning(f'Migration updating AUTH_LDAP_GROUP_TYPE_PARAMS with value {entry.value}')
entry.save()
@@ -1,25 +0,0 @@
|
||||
import pytest
|
||||
|
||||
from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
|
||||
from awx.conf.models import Setting
|
||||
|
||||
from django.apps import apps
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_fill_group_type_params_no_op():
|
||||
fill_ldap_group_type_params(apps, 'dont-use-me')
|
||||
assert Setting.objects.count() == 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_keep_old_setting_with_default_value():
|
||||
Setting.objects.create(key='AUTH_LDAP_GROUP_TYPE', value={'name_attr': 'cn', 'member_attr': 'member'})
|
||||
fill_ldap_group_type_params(apps, 'dont-use-me')
|
||||
assert Setting.objects.count() == 1
|
||||
s = Setting.objects.first()
|
||||
assert s.value == {'name_attr': 'cn', 'member_attr': 'member'}
|
||||
|
||||
|
||||
# NOTE: would be good to test the removal of attributes by migration
|
||||
# but this requires fighting with the validator and is not done here
|
||||
@@ -180,7 +180,7 @@ class SettingLoggingTest(GenericAPIView):
if not port:
return Response({'error': 'Port required for ' + protocol}, status=status.HTTP_400_BAD_REQUEST)
else:
# if http/https by this point, domain is reachable
# if http/https by this point, domain is reacheable
return Response(status=status.HTTP_202_ACCEPTED)

if protocol == 'udp':
@@ -1972,7 +1972,7 @@ msgid ""
|
||||
"HTTP headers and meta keys to search to determine remote host name or IP. "
|
||||
"Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if "
|
||||
"behind a reverse proxy. See the \"Proxy Support\" section of the "
|
||||
"Administrator guide for more details."
|
||||
"Adminstrator guide for more details."
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/conf.py:85
|
||||
@@ -2457,7 +2457,7 @@ msgid ""
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/conf.py:631
|
||||
msgid "Maximum disk persistence for external log aggregation (in GB)"
|
||||
msgid "Maximum disk persistance for external log aggregation (in GB)"
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/conf.py:633
|
||||
@@ -2548,7 +2548,7 @@ msgid "Enable"
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/constants.py:27
|
||||
msgid "Does"
|
||||
msgid "Doas"
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/constants.py:28
|
||||
@@ -4801,7 +4801,7 @@ msgstr ""
|
||||
|
||||
#: awx/main/models/workflow.py:251
|
||||
msgid ""
|
||||
"An identifier corresponding to the workflow job template node that this node "
|
||||
"An identifier coresponding to the workflow job template node that this node "
|
||||
"was created from."
|
||||
msgstr ""
|
||||
|
||||
@@ -5521,7 +5521,7 @@ msgstr ""
|
||||
#: awx/sso/conf.py:606
|
||||
msgid ""
|
||||
"Extra arguments for Google OAuth2 login. You can restrict it to only allow a "
|
||||
"single domain to authenticate, even if the user is logged in with multiple "
|
||||
"single domain to authenticate, even if the user is logged in with multple "
|
||||
"Google accounts. Refer to the documentation for more detail."
|
||||
msgstr ""
|
||||
|
||||
@@ -5905,7 +5905,7 @@ msgstr ""
|
||||
|
||||
#: awx/sso/conf.py:1290
|
||||
msgid ""
|
||||
"Create a key pair to use as a service provider (SP) and include the "
|
||||
"Create a keypair to use as a service provider (SP) and include the "
|
||||
"certificate content here."
|
||||
msgstr ""
|
||||
|
||||
@@ -5915,7 +5915,7 @@ msgstr ""
|
||||
|
||||
#: awx/sso/conf.py:1302
|
||||
msgid ""
|
||||
"Create a key pair to use as a service provider (SP) and include the private "
|
||||
"Create a keypair to use as a service provider (SP) and include the private "
|
||||
"key content here."
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -1971,7 +1971,7 @@ msgid ""
|
||||
"HTTP headers and meta keys to search to determine remote host name or IP. "
|
||||
"Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if "
|
||||
"behind a reverse proxy. See the \"Proxy Support\" section of the "
|
||||
"Administrator guide for more details."
|
||||
"Adminstrator guide for more details."
|
||||
msgstr "Los encabezados HTTP y las llaves de activación para buscar y determinar el nombre de host remoto o IP. Añada elementos adicionales a esta lista, como \"HTTP_X_FORWARDED_FOR\", si está detrás de un proxy inverso. Consulte la sección \"Soporte de proxy\" de la guía del adminstrador para obtener más información."
|
||||
|
||||
#: awx/main/conf.py:85
|
||||
@@ -4804,7 +4804,7 @@ msgstr "Indica que un trabajo no se creará cuando es sea True. La semántica de
|
||||
|
||||
#: awx/main/models/workflow.py:251
|
||||
msgid ""
|
||||
"An identifier corresponding to the workflow job template node that this node "
|
||||
"An identifier coresponding to the workflow job template node that this node "
|
||||
"was created from."
|
||||
msgstr "Un identificador que corresponde al nodo de plantilla de tarea del flujo de trabajo a partir del cual se creó este nodo."
|
||||
|
||||
@@ -5526,7 +5526,7 @@ msgstr "Argumentos adicionales para Google OAuth2"
|
||||
#: awx/sso/conf.py:606
|
||||
msgid ""
|
||||
"Extra arguments for Google OAuth2 login. You can restrict it to only allow a "
|
||||
"single domain to authenticate, even if the user is logged in with multiple "
|
||||
"single domain to authenticate, even if the user is logged in with multple "
|
||||
"Google accounts. Refer to the documentation for more detail."
|
||||
msgstr "Argumentos adicionales para el inicio de sesión en Google OAuth2. Puede limitarlo para permitir la autenticación de un solo dominio, incluso si el usuario ha iniciado sesión con varias cuentas de Google. Consulte la documentación para obtener información detallada."
|
||||
|
||||
@@ -5910,7 +5910,7 @@ msgstr "Certificado público del proveedor de servicio SAML"
|
||||
|
||||
#: awx/sso/conf.py:1290
|
||||
msgid ""
|
||||
"Create a key pair to use as a service provider (SP) and include the "
|
||||
"Create a keypair to use as a service provider (SP) and include the "
|
||||
"certificate content here."
|
||||
msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido del certificado aquí."
|
||||
|
||||
@@ -5920,7 +5920,7 @@ msgstr "Clave privada del proveedor de servicio SAML"
|
||||
|
||||
#: awx/sso/conf.py:1302
|
||||
msgid ""
|
||||
"Create a key pair to use as a service provider (SP) and include the private "
|
||||
"Create a keypair to use as a service provider (SP) and include the private "
|
||||
"key content here."
|
||||
msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido de la clave privada aquí."
|
||||
|
||||
|
||||
@@ -70,7 +70,7 @@ def aim_backend(**kwargs):
client_cert = kwargs.get('client_cert', None)
client_key = kwargs.get('client_key', None)
verify = kwargs['verify']
webservice_id = kwargs.get('webservice_id', '')
webservice_id = kwargs['webservice_id']
app_id = kwargs['app_id']
object_query = kwargs['object_query']
object_query_format = kwargs['object_query_format']
@@ -1,143 +0,0 @@
|
||||
import time
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from argparse import ArgumentTypeError
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.db.models import Q
|
||||
from django.utils.timezone import now
|
||||
|
||||
from awx.main.models import Instance, UnifiedJob
|
||||
|
||||
|
||||
class AWXInstance:
|
||||
def __init__(self, **filter):
|
||||
self.filter = filter
|
||||
self.get_instance()
|
||||
|
||||
def get_instance(self):
|
||||
filter = self.filter if self.filter is not None else dict(hostname=settings.CLUSTER_HOST_ID)
|
||||
qs = Instance.objects.filter(**filter)
|
||||
if not qs.exists():
|
||||
raise ValueError(f"No AWX instance found with {filter} parameters")
|
||||
self.instance = qs.first()
|
||||
|
||||
def disable(self):
|
||||
if self.instance.enabled:
|
||||
self.instance.enabled = False
|
||||
self.instance.save()
|
||||
return True
|
||||
|
||||
def enable(self):
|
||||
if not self.instance.enabled:
|
||||
self.instance.enabled = True
|
||||
self.instance.save()
|
||||
return True
|
||||
|
||||
def jobs(self):
|
||||
return UnifiedJob.objects.filter(
|
||||
Q(controller_node=self.instance.hostname) | Q(execution_node=self.instance.hostname), status__in=("running", "waiting")
|
||||
)
|
||||
|
||||
def jobs_pretty(self):
|
||||
jobs = []
|
||||
for j in self.jobs():
|
||||
job_started = j.started if j.started else now()
|
||||
# similar calculation of `elapsed` as the corresponding serializer
|
||||
# does
|
||||
td = now() - job_started
|
||||
elapsed = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0)
|
||||
elapsed = float(elapsed)
|
||||
details = dict(
|
||||
name=j.name,
|
||||
url=j.get_ui_url(),
|
||||
elapsed=elapsed,
|
||||
)
|
||||
jobs.append(details)
|
||||
|
||||
jobs = sorted(jobs, reverse=True, key=lambda j: j["elapsed"])
|
||||
|
||||
return ", ".join([f"[\"{j['name']}\"]({j['url']})" for j in jobs])
|
||||
|
||||
def instance_pretty(self):
|
||||
instance = (
|
||||
self.instance.hostname,
|
||||
urljoin(settings.TOWER_URL_BASE, f"/#/instances/{self.instance.pk}/details"),
|
||||
)
|
||||
return f"[\"{instance[0]}\"]({instance[1]})"
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Disable instance, optionally waiting for all its managed jobs to finish."
|
||||
|
||||
@staticmethod
|
||||
def ge_1(arg):
|
||||
if arg == "inf":
|
||||
return float("inf")
|
||||
|
||||
int_arg = int(arg)
|
||||
if int_arg < 1:
|
||||
raise ArgumentTypeError(f"The value must be a positive number >= 1. Provided: \"{arg}\"")
|
||||
return int_arg
|
||||
|
||||
def add_arguments(self, parser):
|
||||
filter_group = parser.add_mutually_exclusive_group()
|
||||
|
||||
filter_group.add_argument(
|
||||
"--hostname",
|
||||
type=str,
|
||||
default=settings.CLUSTER_HOST_ID,
|
||||
help=f"{Instance.hostname.field.help_text} Defaults to the hostname of the machine where the Python interpreter is currently executing".strip(),
|
||||
)
|
||||
filter_group.add_argument("--id", type=self.ge_1, help=Instance.id.field.help_text)
|
||||
|
||||
parser.add_argument(
|
||||
"--wait",
|
||||
action="store_true",
|
||||
help="Wait for jobs managed by the instance to finish. With default retry arguments waits ~1h",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--retry",
|
||||
type=self.ge_1,
|
||||
default=120,
|
||||
help="Number of retries when waiting for jobs to finish. Default: 120. Also accepts \"inf\" to wait indefinitely",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--retry_sleep",
|
||||
type=self.ge_1,
|
||||
default=30,
|
||||
help="Number of seconds to sleep before consequtive retries when waiting. Default: 30",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
try:
|
||||
filter = dict(id=options["id"]) if options["id"] is not None else dict(hostname=options["hostname"])
|
||||
instance = AWXInstance(**filter)
|
||||
except ValueError as e:
|
||||
raise CommandError(e)
|
||||
|
||||
if instance.disable():
|
||||
self.stdout.write(self.style.SUCCESS(f"Instance {instance.instance_pretty()} has been disabled"))
|
||||
else:
|
||||
self.stdout.write(f"Instance {instance.instance_pretty()} has already been disabled")
|
||||
|
||||
if not options["wait"]:
|
||||
return
|
||||
|
||||
rc = 1
|
||||
while instance.jobs().count() > 0:
|
||||
if rc < options["retry"]:
|
||||
self.stdout.write(
|
||||
f"{rc}/{options['retry']}: Waiting {options['retry_sleep']}s before the next attempt to see if the following instance' managed jobs have finished: {instance.jobs_pretty()}"
|
||||
)
|
||||
rc += 1
|
||||
time.sleep(options["retry_sleep"])
|
||||
else:
|
||||
raise CommandError(
|
||||
f"{rc}/{options['retry']}: No more retry attempts left, but the instance still has associated managed jobs: {instance.jobs_pretty()}"
|
||||
)
|
||||
else:
|
||||
self.stdout.write(self.style.SUCCESS("Done waiting for instance' managed jobs to finish!"))
|
||||
awx/main/migrations/0175_constructed_inventory.py (new file): 109 lines

@@ -0,0 +1,109 @@
# Generated by Django 3.2.16 on 2022-12-07 14:20

from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
('main', '0174_ensure_org_ee_admin_roles'),
]

operations = [
migrations.AddField(
model_name='inventory',
name='input_inventories',
field=models.ManyToManyField(
blank=True,
help_text='Only valid for constructed inventories, this links to the inventories that will be used.',
related_name='destination_inventories',
to='main.Inventory',
),
),
migrations.AlterField(
model_name='inventory',
name='kind',
field=models.CharField(
blank=True,
choices=[
('', 'Hosts have a direct link to this inventory.'),
('smart', 'Hosts for inventory generated using the host_filter property.'),
('constructed', 'Parse list of source inventories with the constructed inventory plugin.'),
],
default='',
help_text='Kind of inventory being represented.',
max_length=32,
),
),
migrations.AlterField(
model_name='inventorysource',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
],
default=None,
max_length=32,
),
),
migrations.AlterField(
model_name='inventoryupdate',
name='source',
field=models.CharField(
choices=[
('file', 'File, Directory or Script'),
('constructed', 'Template additional groups and hostvars at runtime'),
('scm', 'Sourced from a Project'),
('ec2', 'Amazon EC2'),
('gce', 'Google Compute Engine'),
('azure_rm', 'Microsoft Azure Resource Manager'),
('vmware', 'VMware vCenter'),
('satellite6', 'Red Hat Satellite 6'),
('openstack', 'OpenStack'),
('rhv', 'Red Hat Virtualization'),
('controller', 'Red Hat Ansible Automation Platform'),
('insights', 'Red Hat Insights'),
],
default=None,
max_length=32,
),
),
migrations.AddField(
model_name='inventorysource',
name='limit',
field=models.TextField(blank=True, default='', help_text='Enter host, group or pattern match'),
),
migrations.AddField(
model_name='inventoryupdate',
name='limit',
field=models.TextField(blank=True, default='', help_text='Enter host, group or pattern match'),
),
migrations.AlterField(
model_name='inventorysource',
name='host_filter',
field=models.TextField(
blank=True,
default='',
help_text='This field is deprecated and will be removed in a future release. Regex where only matching hosts will be imported.',
),
),
migrations.AlterField(
model_name='inventoryupdate',
name='host_filter',
field=models.TextField(
blank=True,
default='',
help_text='This field is deprecated and will be removed in a future release. Regex where only matching hosts will be imported.',
),
),
]
@@ -49,7 +49,7 @@ from awx.main.models.notifications import (
from awx.main.models.credential.injectors import _openstack_data
from awx.main.utils import _inventory_updates
from awx.main.utils.safe_yaml import sanitize_jinja
from awx.main.utils.execution_environments import to_container_path
from awx.main.utils.execution_environments import to_container_path, get_control_plane_execution_environment
from awx.main.utils.licensing import server_product_name


@@ -67,6 +67,7 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
KIND_CHOICES = [
('', _('Hosts have a direct link to this inventory.')),
('smart', _('Hosts for inventory generated using the host_filter property.')),
('constructed', _('Parse list of source inventories with the constructed inventory plugin.')),
]

class Meta:
@@ -139,6 +140,12 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
default=None,
help_text=_('Filter that will be applied to the hosts of this inventory.'),
)
input_inventories = models.ManyToManyField(
'Inventory',
blank=True,
related_name='destination_inventories',
help_text=_('Only valid for constructed inventories, this links to the inventories that will be used.'),
)
instance_groups = OrderedManyToManyField(
'InstanceGroup',
blank=True,
@@ -431,12 +438,22 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):

connection.on_commit(on_commit)

def _enforce_constructed_source(self):
"""
Constructed inventory should always have exactly 1 inventory source, constructed type
this enforces that requirement
"""
if self.kind == 'constructed':
if not self.inventory_sources.exists():
self.inventory_sources.create(source='constructed', name=f'Auto-created source for: {self.name}'[:512], overwrite=True, update_on_launch=True)

def save(self, *args, **kwargs):
self._update_host_smart_inventory_memeberships()
super(Inventory, self).save(*args, **kwargs)
if self.kind == 'smart' and 'host_filter' in kwargs.get('update_fields', ['host_filter']) and connection.vendor != 'sqlite':
# Minimal update of host_count for smart inventory host filter changes
self.update_computed_fields()
self._enforce_constructed_source()

def delete(self, *args, **kwargs):
self._update_host_smart_inventory_memeberships()
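The save() override above is what guarantees a constructed inventory always carries exactly one auto-created inventory source with source='constructed'. A rough sketch of that behavior from a hypothetical Django shell session (python manage.py shell); the organization lookup and names are placeholders, and the expected values follow directly from _enforce_constructed_source() above:

```python
# Hypothetical shell session illustrating the save() hook above.
from awx.main.models import Inventory, Organization

org = Organization.objects.first()  # placeholder organization
inv = Inventory.objects.create(name='demo-constructed', organization=org, kind='constructed')

# _enforce_constructed_source() ran inside save(), so the backing source now exists:
src = inv.inventory_sources.first()
print(src.source, src.name)          # expected: 'constructed' 'Auto-created source for: demo-constructed'
print(src.update_on_launch, src.overwrite)  # expected: True True
```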
@@ -834,6 +851,7 @@ class InventorySourceOptions(BaseModel):

SOURCE_CHOICES = [
('file', _('File, Directory or Script')),
('constructed', _('Template additional groups and hostvars at runtime')),
('scm', _('Sourced from a Project')),
('ec2', _('Amazon EC2')),
('gce', _('Google Compute Engine')),
@@ -907,7 +925,7 @@ class InventorySourceOptions(BaseModel):
host_filter = models.TextField(
blank=True,
default='',
help_text=_('Regex where only matching hosts will be imported.'),
help_text=_('This field is deprecated and will be removed in a future release. Regex where only matching hosts will be imported.'),
)
overwrite = models.BooleanField(
default=False,
@@ -927,6 +945,21 @@ class InventorySourceOptions(BaseModel):
blank=True,
default=1,
)
limit = models.TextField(
blank=True,
default='',
help_text=_("Enter host, group or pattern match"),
)

def resolve_execution_environment(self):
"""
Project updates, themselves, will use the control plane execution environment.
Jobs using the project can use the default_environment, but the project updates
are not flexible enough to allow customizing the image they use.
"""
if self.inventory.kind == 'constructed':
return get_control_plane_execution_environment()
return super().resolve_execution_environment()

@staticmethod
def cloud_credential_validation(source, cred):
@@ -1363,6 +1396,8 @@ class PluginFileInjector(object):
env.update(injector_env)
# Preserves current behavior for Ansible change in default planned for 2.10
env['ANSIBLE_TRANSFORM_INVALID_GROUP_CHARS'] = 'never'
# All CLOUD_PROVIDERS sources implement as inventory plugin from collection
env['ANSIBLE_INVENTORY_ENABLED'] = 'auto'
return env

def _get_shared_env(self, inventory_update, private_data_dir, private_data_files):
@@ -1546,5 +1581,17 @@ class insights(PluginFileInjector):
use_fqcn = True


class constructed(PluginFileInjector):
plugin_name = 'constructed'
namespace = 'ansible'
collection = 'builtin'

def build_env(self, *args, **kwargs):
env = super().build_env(*args, **kwargs)
# Enable all types of inventory plugins so we pick up the script files from source inventories
del env['ANSIBLE_INVENTORY_ENABLED']
return env


for cls in PluginFileInjector.__subclasses__():
InventorySourceOptions.injectors[cls.__name__] = cls

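Because of the subclass-registration loop above, the new injector is reachable by name just like the cloud injectors. A small sketch, assuming this branch is importable (for example inside the development container); it only touches names that appear in the diff:

# Sketch only: look up the constructed injector through the same registry used at runtime.
from awx.main.models.inventory import InventorySourceOptions

print(sorted(InventorySourceOptions.injectors))                      # now includes 'constructed'
print(InventorySourceOptions.injectors['constructed'].plugin_name)   # -> 'constructed' (ansible.builtin collection)

# Its build_env() deletes ANSIBLE_INVENTORY_ENABLED so that ansible-inventory will also load
# the script-style files written for each input inventory, not just the yaml plugin config.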
@@ -14,7 +14,7 @@ from oauth2_provider.models import AbstractApplication, AbstractAccessToken
from oauth2_provider.generators import generate_client_secret
from oauthlib import oauth2

from awx.sso.common import get_external_account
from awx.main.utils import get_external_account
from awx.main.fields import OAuth2ClientSecretField


@@ -116,7 +116,7 @@ class RunnerCallback:
# so it *should* have a negligible performance impact
task = event_data.get('event_data', {}).get('task_action')
try:
if task in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
if task in ('git', 'svn'):
event_data_json = json.dumps(event_data)
event_data_json = UriCleaner.remove_sensitive(event_data_json)
event_data = json.loads(event_data_json)
@@ -219,7 +219,7 @@ class RunnerCallbackForProjectUpdate(RunnerCallback):
def event_handler(self, event_data):
super_return_value = super(RunnerCallbackForProjectUpdate, self).event_handler(event_data)
returned_data = event_data.get('event_data', {})
if returned_data.get('task_action', '') in ('set_fact', 'ansible.builtin.set_fact'):
if returned_data.get('task_action', '') == 'set_fact':
returned_facts = returned_data.get('res', {}).get('ansible_facts', {})
if 'scm_version' in returned_facts:
self.playbook_new_revision = returned_facts['scm_version']

@@ -311,21 +311,26 @@ class BaseTask(object):
env['AWX_PRIVATE_DATA_DIR'] = private_data_dir

if self.instance.execution_environment is None:
raise RuntimeError(f'The {self.model.__name__} could not run because there is no Execution Environment.')
raise RuntimeError('The project could not sync because there is no Execution Environment.')

return env

def write_inventory_file(self, inventory, private_data_dir, file_name, script_params):
script_data = inventory.get_script_data(**script_params)
for hostname, hv in script_data.get('_meta', {}).get('hostvars', {}).items():
# maintain a list of host_name --> host_id
# so we can associate emitted events to Host objects
self.runner_callback.host_map[hostname] = hv.get('remote_tower_id', '')
file_content = '#! /usr/bin/env python3\n# -*- coding: utf-8 -*-\nprint(%r)\n' % json.dumps(script_data)
return self.write_private_data_file(private_data_dir, file_name, file_content, sub_dir='inventory', file_permissions=0o700)

def build_inventory(self, instance, private_data_dir):
script_params = dict(hostvars=True, towervars=True)
if hasattr(instance, 'job_slice_number'):
script_params['slice_number'] = instance.job_slice_number
script_params['slice_count'] = instance.job_slice_count
script_data = instance.inventory.get_script_data(**script_params)
# maintain a list of host_name --> host_id
# so we can associate emitted events to Host objects
self.runner_callback.host_map = {hostname: hv.pop('remote_tower_id', '') for hostname, hv in script_data.get('_meta', {}).get('hostvars', {}).items()}
file_content = '#! /usr/bin/env python3\n# -*- coding: utf-8 -*-\nprint(%r)\n' % json.dumps(script_data)
return self.write_private_data_file(private_data_dir, 'hosts', file_content, sub_dir='inventory', file_permissions=0o700)

return self.write_inventory_file(instance.inventory, private_data_dir, 'hosts', script_params)

def build_args(self, instance, private_data_dir, passwords):
raise NotImplementedError
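The file_content built above is worth seeing on its own: the written 'hosts' file is a tiny script that just prints the inventory JSON, which is what lets ansible-inventory treat it as a script source. A standalone illustration with toy data (the hostvars are made up):

# Standalone sketch of the inventory-script format used by write_inventory_file()/build_inventory().
import json

script_data = {  # toy stand-in for inventory.get_script_data(hostvars=True, towervars=True)
    '_meta': {'hostvars': {'host1.example.org': {'ansible_host': '10.0.0.5', 'remote_tower_id': 42}}},
    'all': {'children': ['ungrouped']},
}
file_content = '#! /usr/bin/env python3\n# -*- coding: utf-8 -*-\nprint(%r)\n' % json.dumps(script_data)
print(file_content)
# Running the written file with python3 prints the JSON string back out,
# and remote_tower_id is what maps each emitted event back to a Host row.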
@@ -1464,8 +1469,6 @@ class RunInventoryUpdate(SourceControlMixin, BaseTask):

if injector is not None:
env = injector.build_env(inventory_update, env, private_data_dir, private_data_files)
# All CLOUD_PROVIDERS sources implement as inventory plugin from collection
env['ANSIBLE_INVENTORY_ENABLED'] = 'auto'

if inventory_update.source == 'scm':
for env_k in inventory_update.source_vars_dict:
@@ -1518,6 +1521,15 @@ class RunInventoryUpdate(SourceControlMixin, BaseTask):

args = ['ansible-inventory', '--list', '--export']

# special case for constructed inventories, we pass source inventories from database
# these must come in order, and in order _before_ the constructed inventory itself
if inventory_update.inventory.kind == 'constructed':
for input_inventory in inventory_update.inventory.input_inventories.all():
args.append('-i')
script_params = dict(hostvars=True, towervars=True)
source_inv_path = self.write_inventory_file(input_inventory, private_data_dir, f'hosts_{input_inventory.id}', script_params)
args.append(to_container_path(source_inv_path, private_data_dir))

# Add arguments for the source inventory file/script/thing
rel_path = self.pseudo_build_inventory(inventory_update, private_data_dir)
container_location = os.path.join(CONTAINER_ROOT, rel_path)
@@ -1525,6 +1537,11 @@ class RunInventoryUpdate(SourceControlMixin, BaseTask):

args.append('-i')
args.append(container_location)
# Added this in order to allow older versions of ansible-inventory https://github.com/ansible/ansible/pull/79596
# limit should be usable in ansible-inventory 2.15+
if inventory_update.limit:
args.append('--limit')
args.append(inventory_update.limit)

args.append('--output')
args.append(os.path.join(CONTAINER_ROOT, 'artifacts', str(inventory_update.id), 'output.json'))

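Putting the hunks above together, the command line for a constructed inventory ends up with one -i per input inventory, in database order, followed by the constructed source itself. A standalone sketch of the assembled argument list; all paths and ids are illustrative, not taken from the diff:

# Sketch of the ansible-inventory invocation built here for a constructed inventory.
args = ['ansible-inventory', '--list', '--export']
for input_inventory_id in (7, 9):                       # input inventories come first, in order
    args += ['-i', f'/runner/inventory/hosts_{input_inventory_id}']
args += ['-i', '/runner/inventory/constructed.yml']     # then the constructed plugin source itself
args += ['--limit', 'webservers']                       # only added when inventory_update.limit is set
args += ['--output', '/runner/artifacts/123/output.json']
print(' '.join(args))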
@@ -594,3 +594,45 @@ class TestControlledBySCM:
rando,
expect=403,
)


@pytest.mark.django_db
class TestConstructedInventory:
@pytest.fixture
def constructed_inventory(self, organization):
return Inventory.objects.create(name='constructed-test-inventory', kind='constructed', organization=organization)

def test_get_constructed_inventory(self, constructed_inventory, admin_user, get):
inv_src = constructed_inventory.inventory_sources.first()
inv_src.update_cache_timeout = 53
inv_src.save(update_fields=['update_cache_timeout'])
r = get(url=reverse('api:constructed_inventory_detail', kwargs={'pk': constructed_inventory.pk}), user=admin_user, expect=200)
assert r.data['update_cache_timeout'] == 53

def test_patch_constructed_inventory(self, constructed_inventory, admin_user, patch):
inv_src = constructed_inventory.inventory_sources.first()
assert inv_src.update_cache_timeout == 0
assert inv_src.limit == ''
r = patch(
url=reverse('api:constructed_inventory_detail', kwargs={'pk': constructed_inventory.pk}),
data=dict(update_cache_timeout=54, limit='foobar'),
user=admin_user,
expect=200,
)
assert r.data['update_cache_timeout'] == 54
inv_src = constructed_inventory.inventory_sources.first()
assert inv_src.update_cache_timeout == 54
assert inv_src.limit == 'foobar'

def test_create_constructed_inventory(self, constructed_inventory, admin_user, post, organization):
r = post(
url=reverse('api:constructed_inventory_list'),
data=dict(name='constructed-inventory-just-created', kind='constructed', organization=organization.id, update_cache_timeout=55, limit='foobar'),
user=admin_user,
expect=201,
)
pk = r.data['id']
constructed_inventory = Inventory.objects.get(pk=pk)
inv_src = constructed_inventory.inventory_sources.first()
assert inv_src.update_cache_timeout == 55
assert inv_src.limit == 'foobar'

@@ -511,6 +511,14 @@ def group(inventory):
return inventory.groups.create(name='single-group')


@pytest.fixture
def constructed_inventory(organization):
"""
creates a new constructed inventory source
"""
return Inventory.objects.create(name='dummy1', kind='constructed', organization=organization)


@pytest.fixture
def inventory_source(inventory):
# by making it ec2, the credential is not required

@@ -169,7 +169,8 @@ class TestInventorySourceInjectors:
CLOUD_PROVIDERS constant contains the same names as what are
defined within the injectors
"""
assert set(CLOUD_PROVIDERS) == set(InventorySource.injectors.keys())
# slight exception case for constructed, because it has a FQCN but is not a cloud source
assert set(CLOUD_PROVIDERS) | set(['constructed']) == set(InventorySource.injectors.keys())

@pytest.mark.parametrize('source,filename', [('ec2', 'aws_ec2.yml'), ('openstack', 'openstack.yml'), ('gce', 'gcp_compute.yml')])
def test_plugin_filenames(self, source, filename):

@@ -0,0 +1,61 @@
import pytest
from awx.main.models import Inventory
from awx.api.versioning import reverse


@pytest.mark.django_db
def test_constructed_inventory_post(post, admin_user, organization):
inv1 = Inventory.objects.create(name='dummy1', kind='constructed', organization=organization)
inv2 = Inventory.objects.create(name='dummy2', kind='constructed', organization=organization)
resp = post(
url=reverse('api:inventory_input_inventories', kwargs={'pk': inv1.pk}),
data={'id': inv2.pk},
user=admin_user,
expect=400,
)
assert resp.status_code == 400


@pytest.mark.django_db
def test_add_constructed_inventory_source(post, admin_user, constructed_inventory):
resp = post(
url=reverse('api:inventory_inventory_sources_list', kwargs={'pk': constructed_inventory.pk}),
data={'name': 'dummy1', 'source': 'constructed'},
user=admin_user,
expect=400,
)
assert resp.status_code == 400


@pytest.mark.django_db
def test_add_constructed_inventory_host(post, admin_user, constructed_inventory):
resp = post(
url=reverse('api:inventory_hosts_list', kwargs={'pk': constructed_inventory.pk}),
data={'name': 'dummy1'},
user=admin_user,
expect=400,
)
assert resp.status_code == 400


@pytest.mark.django_db
def test_add_constructed_inventory_group(post, admin_user, constructed_inventory):
resp = post(
reverse('api:inventory_groups_list', kwargs={'pk': constructed_inventory.pk}),
data={'name': 'group-test'},
user=admin_user,
expect=400,
)
assert resp.status_code == 400


@pytest.mark.django_db
def test_edit_constructed_inventory_source(patch, admin_user, inventory_source_factory):
inv_src = inventory_source_factory(name='dummy1', source='constructed')
resp = patch(
reverse('api:inventory_source_detail', kwargs={'pk': inv_src.pk}),
data={'description': inv_src.name},
user=admin_user,
expect=400,
)
assert resp.status_code == 400
@@ -2008,7 +2008,7 @@ def test_project_update_no_ee(mock_me):
with pytest.raises(RuntimeError) as e:
task.build_env(job, {})

assert 'The ProjectUpdate could not run because there is no Execution Environment' in str(e.value)
assert 'The project could not sync because there is no Execution Environment' in str(e.value)


@pytest.mark.parametrize(

@@ -80,6 +80,7 @@ __all__ = [
'set_environ',
'IllegalArgumentError',
'get_custom_venv_choices',
'get_external_account',
'ScheduleTaskManager',
'ScheduleDependencyManager',
'ScheduleWorkflowManager',
@@ -1088,6 +1089,29 @@ def has_model_field_prefetched(model_obj, field_name):
return getattr(getattr(model_obj, field_name, None), 'prefetch_cache_name', '') in getattr(model_obj, '_prefetched_objects_cache', {})


def get_external_account(user):
from django.conf import settings

account_type = None
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
try:
if user.pk and user.profile.ldap_dn and not user.has_usable_password():
account_type = "ldap"
except AttributeError:
pass
if (
getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
) and user.social_auth.all():
account_type = "social"
if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and user.enterprise_auth.all():
account_type = "enterprise"
return account_type


class classproperty:
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
self.fget = fget

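The relocated helper keeps the old awx.sso.common semantics but now gates each check on the matching settings. A minimal usage sketch, assuming an awx-manage shell_plus session; the username is hypothetical:

# Sketch only: resolve whether a user is backed by an external auth source.
from django.contrib.auth.models import User
from awx.main.utils import get_external_account

user = User.objects.get(username='jdoe')        # hypothetical user
print(get_external_account(user))               # None, 'ldap', 'social', or 'enterprise'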
@@ -1,5 +1,4 @@
import os
import logging
from pathlib import Path

from django.conf import settings
@@ -7,15 +6,8 @@ from django.conf import settings
from awx.main.models.execution_environments import ExecutionEnvironment


logger = logging.getLogger(__name__)


def get_control_plane_execution_environment():
ee = ExecutionEnvironment.objects.filter(organization=None, managed=True).first()
if ee == None:
logger.error('Failed to find control plane ee, there are no managed EEs without organizations')
raise RuntimeError("Failed to find default control plane EE")
return ee
return ExecutionEnvironment.objects.filter(organization=None, managed=True).first()


def get_default_execution_environment():

@@ -25,47 +25,42 @@
|
||||
connection: local
|
||||
name: Update source tree if necessary
|
||||
tasks:
|
||||
- name: Delete project directory before update
|
||||
ansible.builtin.shell: set -o pipefail && find . -delete -print | tail -2 # volume mounted, cannot delete folder itself
|
||||
register: reg
|
||||
changed_when: reg.stdout_lines | length > 1
|
||||
|
||||
- name: delete project directory before update
|
||||
command: "find -delete" # volume mounted, cannot delete folder itself
|
||||
args:
|
||||
chdir: "{{ project_path }}"
|
||||
tags:
|
||||
- delete
|
||||
|
||||
- name: Update project using git
|
||||
tags:
|
||||
- update_git
|
||||
block:
|
||||
- name: Update project using git
|
||||
ansible.builtin.git:
|
||||
dest: "{{ project_path | quote }}"
|
||||
repo: "{{ scm_url }}"
|
||||
version: "{{ scm_branch | quote }}"
|
||||
refspec: "{{ scm_refspec | default(omit) }}"
|
||||
force: "{{ scm_clean }}"
|
||||
track_submodules: "{{ scm_track_submodules | default(omit) }}"
|
||||
accept_hostkey: "{{ scm_accept_hostkey | default(omit) }}"
|
||||
- block:
|
||||
- name: update project using git
|
||||
git:
|
||||
dest: "{{project_path|quote}}"
|
||||
repo: "{{scm_url}}"
|
||||
version: "{{scm_branch|quote}}"
|
||||
refspec: "{{scm_refspec|default(omit)}}"
|
||||
force: "{{scm_clean}}"
|
||||
track_submodules: "{{scm_track_submodules|default(omit)}}"
|
||||
accept_hostkey: "{{scm_accept_hostkey|default(omit)}}"
|
||||
register: git_result
|
||||
|
||||
- name: Set the git repository version
|
||||
ansible.builtin.set_fact:
|
||||
set_fact:
|
||||
scm_version: "{{ git_result['after'] }}"
|
||||
when: "'after' in git_result"
|
||||
|
||||
- name: Update project using svn
|
||||
tags:
|
||||
- update_svn
|
||||
block:
|
||||
- name: Update project using svn
|
||||
ansible.builtin.subversion:
|
||||
dest: "{{ project_path | quote }}"
|
||||
repo: "{{ scm_url | quote }}"
|
||||
revision: "{{ scm_branch | quote }}"
|
||||
force: "{{ scm_clean }}"
|
||||
username: "{{ scm_username | default(omit) }}"
|
||||
password: "{{ scm_password | default(omit) }}"
|
||||
- update_git
|
||||
|
||||
- block:
|
||||
- name: update project using svn
|
||||
subversion:
|
||||
dest: "{{project_path|quote}}"
|
||||
repo: "{{scm_url|quote}}"
|
||||
revision: "{{scm_branch|quote}}"
|
||||
force: "{{scm_clean}}"
|
||||
username: "{{scm_username|default(omit)}}"
|
||||
password: "{{scm_password|default(omit)}}"
|
||||
# must be in_place because folder pre-existing, because it is mounted
|
||||
in_place: true
|
||||
environment:
|
||||
@@ -73,90 +68,85 @@
|
||||
register: svn_result
|
||||
|
||||
- name: Set the svn repository version
|
||||
ansible.builtin.set_fact:
|
||||
set_fact:
|
||||
scm_version: "{{ svn_result['after'] }}"
|
||||
when: "'after' in svn_result"
|
||||
|
||||
- name: Parse subversion version string properly
|
||||
ansible.builtin.set_fact:
|
||||
scm_version: "{{ scm_version | regex_replace('^.*Revision: ([0-9]+).*$', '\\1') }}"
|
||||
|
||||
|
||||
- name: Project update for Insights
|
||||
- name: parse subversion version string properly
|
||||
set_fact:
|
||||
scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}"
|
||||
tags:
|
||||
- update_insights
|
||||
block:
|
||||
- update_svn
|
||||
|
||||
- block:
|
||||
- name: Ensure the project directory is present
|
||||
ansible.builtin.file:
|
||||
dest: "{{ project_path | quote }}"
|
||||
file:
|
||||
dest: "{{project_path|quote}}"
|
||||
state: directory
|
||||
mode: '0755'
|
||||
|
||||
- name: Fetch Insights Playbook(s)
|
||||
insights:
|
||||
insights_url: "{{ insights_url }}"
|
||||
username: "{{ scm_username }}"
|
||||
password: "{{ scm_password }}"
|
||||
project_path: "{{ project_path }}"
|
||||
awx_license_type: "{{ awx_license_type }}"
|
||||
awx_version: "{{ awx_version }}"
|
||||
insights_url: "{{insights_url}}"
|
||||
username: "{{scm_username}}"
|
||||
password: "{{scm_password}}"
|
||||
project_path: "{{project_path}}"
|
||||
awx_license_type: "{{awx_license_type}}"
|
||||
awx_version: "{{awx_version}}"
|
||||
register: results
|
||||
|
||||
- name: Save Insights Version
|
||||
ansible.builtin.set_fact:
|
||||
scm_version: "{{ results.version }}"
|
||||
set_fact:
|
||||
scm_version: "{{results.version}}"
|
||||
when: results is defined
|
||||
|
||||
|
||||
- name: Update project using archive
|
||||
tags:
|
||||
- update_archive
|
||||
block:
|
||||
- update_insights
|
||||
|
||||
- block:
|
||||
- name: Ensure the project archive directory is present
|
||||
ansible.builtin.file:
|
||||
dest: "{{ project_path | quote }}/.archive"
|
||||
file:
|
||||
dest: "{{ project_path|quote }}/.archive"
|
||||
state: directory
|
||||
mode: '0755'
|
||||
|
||||
- name: Get archive from url
|
||||
ansible.builtin.get_url:
|
||||
url: "{{ scm_url | quote }}"
|
||||
dest: "{{ project_path | quote }}/.archive/"
|
||||
url_username: "{{ scm_username | default(omit) }}"
|
||||
url_password: "{{ scm_password | default(omit) }}"
|
||||
get_url:
|
||||
url: "{{ scm_url|quote }}"
|
||||
dest: "{{ project_path|quote }}/.archive/"
|
||||
url_username: "{{ scm_username|default(omit) }}"
|
||||
url_password: "{{ scm_password|default(omit) }}"
|
||||
force_basic_auth: true
|
||||
mode: '0755'
|
||||
register: get_archive
|
||||
|
||||
- name: Unpack archive
|
||||
project_archive:
|
||||
src: "{{ get_archive.dest }}"
|
||||
project_path: "{{ project_path | quote }}"
|
||||
project_path: "{{ project_path|quote }}"
|
||||
force: "{{ scm_clean }}"
|
||||
when: get_archive.changed or scm_clean
|
||||
register: unarchived
|
||||
|
||||
- name: Find previous archives
|
||||
ansible.builtin.find:
|
||||
paths: "{{ project_path | quote }}/.archive/"
|
||||
find:
|
||||
paths: "{{ project_path|quote }}/.archive/"
|
||||
excludes:
|
||||
- "{{ get_archive.dest | basename }}"
|
||||
- "{{ get_archive.dest|basename }}"
|
||||
when: unarchived.changed
|
||||
register: previous_archive
|
||||
|
||||
- name: Remove previous archives
|
||||
ansible.builtin.file:
|
||||
file:
|
||||
path: "{{ item.path }}"
|
||||
state: absent
|
||||
loop: "{{ previous_archive.files }}"
|
||||
when: previous_archive.files | default([])
|
||||
when: previous_archive.files|default([])
|
||||
|
||||
- name: Set scm_version to archive sha1 checksum
|
||||
ansible.builtin.set_fact:
|
||||
set_fact:
|
||||
scm_version: "{{ get_archive.checksum_src }}"
|
||||
tags:
|
||||
- update_archive
|
||||
|
||||
- name: Repository Version
|
||||
ansible.builtin.debug:
|
||||
debug:
|
||||
msg: "Repository Version {{ scm_version }}"
|
||||
tags:
|
||||
- update_git
|
||||
@@ -193,59 +183,60 @@
|
||||
additional_collections_env:
|
||||
# These environment variables are used for installing collections, in addition to galaxy_task_env
|
||||
# setting the collections paths silences warnings
|
||||
ANSIBLE_COLLECTIONS_PATHS: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections"
|
||||
ANSIBLE_COLLECTIONS_PATHS: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections"
|
||||
# Put the local tmp directory in same volume as collection destination
|
||||
# otherwise, files cannot be moved accross volumes and will cause error
|
||||
ANSIBLE_LOCAL_TEMP: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/tmp"
|
||||
ANSIBLE_LOCAL_TEMP: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/tmp"
|
||||
tasks:
|
||||
|
||||
- name: Check content sync settings
|
||||
when: not roles_enabled | bool and not collections_enabled | bool
|
||||
tags:
|
||||
- install_roles
|
||||
- install_collections
|
||||
block:
|
||||
- name: Warn about disabled content sync
|
||||
ansible.builtin.debug:
|
||||
- debug:
|
||||
msg: >
|
||||
Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and
|
||||
AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization.
|
||||
- name: End play due to disabled content sync
|
||||
ansible.builtin.meta: end_play
|
||||
|
||||
- name: Fetch galaxy roles from requirements.(yml/yaml)
|
||||
ansible.builtin.command: >
|
||||
- meta: end_play
|
||||
|
||||
when: not roles_enabled|bool and not collections_enabled|bool
|
||||
tags:
|
||||
- install_roles
|
||||
- install_collections
|
||||
|
||||
- name: fetch galaxy roles from requirements.(yml/yaml)
|
||||
command: >
|
||||
ansible-galaxy role install -r {{ item }}
|
||||
--roles-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles
|
||||
--roles-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_roles
|
||||
{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
|
||||
args:
|
||||
chdir: "{{ project_path | quote }}"
|
||||
chdir: "{{project_path|quote}}"
|
||||
register: galaxy_result
|
||||
with_fileglob:
|
||||
- "{{ project_path | quote }}/roles/requirements.yaml"
|
||||
- "{{ project_path | quote }}/roles/requirements.yml"
|
||||
- "{{project_path|quote}}/roles/requirements.yaml"
|
||||
- "{{project_path|quote}}/roles/requirements.yml"
|
||||
changed_when: "'was installed successfully' in galaxy_result.stdout"
|
||||
environment: "{{ galaxy_task_env }}"
|
||||
when: roles_enabled | bool
|
||||
when: roles_enabled|bool
|
||||
tags:
|
||||
- install_roles
|
||||
|
||||
- name: Fetch galaxy collections from collections/requirements.(yml/yaml)
|
||||
ansible.builtin.command: >
|
||||
- name: fetch galaxy collections from collections/requirements.(yml/yaml)
|
||||
command: >
|
||||
ansible-galaxy collection install -r {{ item }}
|
||||
--collections-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections
|
||||
--collections-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections
|
||||
{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
|
||||
args:
|
||||
chdir: "{{ project_path | quote }}"
|
||||
chdir: "{{project_path|quote}}"
|
||||
register: galaxy_collection_result
|
||||
with_fileglob:
|
||||
- "{{ project_path | quote }}/collections/requirements.yaml"
|
||||
- "{{ project_path | quote }}/collections/requirements.yml"
|
||||
- "{{ project_path | quote }}/requirements.yaml"
|
||||
- "{{ project_path | quote }}/requirements.yml"
|
||||
- "{{project_path|quote}}/collections/requirements.yaml"
|
||||
- "{{project_path|quote}}/collections/requirements.yml"
|
||||
- "{{project_path|quote}}/requirements.yaml"
|
||||
- "{{project_path|quote}}/requirements.yml"
|
||||
changed_when: "'Installing ' in galaxy_collection_result.stdout"
|
||||
environment: "{{ additional_collections_env | combine(galaxy_task_env) }}"
|
||||
when:
|
||||
- "ansible_version.full is version_compare('2.9', '>=')"
|
||||
- collections_enabled | bool
|
||||
- collections_enabled|bool
|
||||
tags:
|
||||
- install_collections
|
||||
|
||||
@@ -172,7 +172,9 @@ GLOBAL_JOB_EXECUTION_ENVIRONMENTS = [{'name': 'AWX EE (latest)', 'image': 'quay.
# This image is distinguished from others by having "managed" set to True and users have limited
# ability to modify it through the API.
# If a registry credential is needed to pull the image, that can be provided to the awx-manage command
CONTROL_PLANE_EXECUTION_ENVIRONMENT = 'quay.io/ansible/awx-ee:latest'

# HACK: this is done temporarily for feature development, remove before merge
CONTROL_PLANE_EXECUTION_ENVIRONMENT = 'quay.io/relrod/awx-ee-invlimit:latest'

# Note: This setting may be overridden by database settings.
STDOUT_MAX_BYTES_DISPLAY = 1048576
@@ -742,6 +744,13 @@ CUSTOM_EXCLUDE_EMPTY_GROUPS = False
SCM_EXCLUDE_EMPTY_GROUPS = False
# SCM_INSTANCE_ID_VAR =

# ----------------
# -- Constructed --
# ----------------
CONSTRUCTED_INSTANCE_ID_VAR = 'remote_tower_id'

CONSTRUCTED_EXCLUDE_EMPTY_GROUPS = False

# ---------------------
# -- Activity Stream --
# ---------------------

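CONSTRUCTED_INSTANCE_ID_VAR is what ties the constructed import back to real Host rows: every host in the generated input-inventory script data carries its database id as remote_tower_id. A standalone illustration; the hostnames and ids are made up:

# Illustration of the remote_tower_id round trip used by constructed inventories.
hostvars = {
    'web01.example.org': {'ansible_host': '10.0.0.5', 'remote_tower_id': 101},
    'db01.example.org': {'ansible_host': '10.0.0.6', 'remote_tower_id': 102},
}
instance_id_var = 'remote_tower_id'   # mirrors settings.CONSTRUCTED_INSTANCE_ID_VAR
host_map = {name: hv[instance_id_var] for name, hv in hostvars.items()}
print(host_map)   # {'web01.example.org': 101, 'db01.example.org': 102}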
@@ -385,10 +385,10 @@ def on_populate_user(sender, **kwargs):
logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))

org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
team_map_settings = getattr(backend.settings, 'TEAM_MAP', {})
team_map = getattr(backend.settings, 'TEAM_MAP', {})
orgs_list = list(org_map.keys())
team_map = {}
for team_name, team_opts in team_map_settings.items():
for team_name, team_opts in team_map.items():
if not team_opts.get('organization', None):
# You can't save the LDAP config in the UI w/o an org (or '' or null as the org) so if we somehow got this condition its an error
logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))
@@ -416,7 +416,7 @@ def on_populate_user(sender, **kwargs):

# Compute in memory what the state is of the different LDAP teams
desired_team_states = {}
for team_name, team_opts in team_map_settings.items():
for team_name, team_opts in team_map.items():
if 'organization' not in team_opts:
continue
users_opts = team_opts.get('users', None)

@@ -169,45 +169,3 @@ def get_or_create_org_with_default_galaxy_cred(**kwargs):
else:
logger.debug("Could not find default Ansible Galaxy credential to add to org")
return org


def get_external_account(user):
account_type = None

# Previously this method also checked for active configuration which meant that if a user logged in from LDAP
# and then LDAP was no longer configured it would "convert" the user from an LDAP account_type to none.
# This did have one benefit that if a login type was removed intentionally the user could be given a username password.
# But it had a limitation that the user would have to have an active session (or an admin would have to go set a temp password).
# It also lead to the side affect that if LDAP was ever reconfigured the user would convert back to LDAP but still have a local password.
# That local password could then be used to bypass LDAP authentication.
try:
if user.pk and user.profile.ldap_dn and not user.has_usable_password():
account_type = "ldap"
except AttributeError:
pass

if user.social_auth.all():
account_type = "social"

if user.enterprise_auth.all():
account_type = "enterprise"

return account_type


def is_remote_auth_enabled():
from django.conf import settings

# Append LDAP, Radius, TACACS+ and SAML options
settings_that_turn_on_remote_auth = [
'AUTH_LDAP_SERVER_URI',
'SOCIAL_AUTH_SAML_ENABLED_IDPS',
'RADIUS_SERVER',
'TACACSPLUS_HOST',
]
# Also include any SOCAIL_AUTH_*KEY (except SAML)
for social_auth_key in dir(settings):
if social_auth_key.startswith('SOCIAL_AUTH_') and social_auth_key.endswith('_KEY') and 'SAML' not in social_auth_key:
settings_that_turn_on_remote_auth.append(social_auth_key)

return any(getattr(settings, s, None) for s in settings_that_turn_on_remote_auth)

@@ -2,22 +2,9 @@ import pytest
|
||||
from collections import Counter
|
||||
from django.core.exceptions import FieldError
|
||||
from django.utils.timezone import now
|
||||
from django.test.utils import override_settings
|
||||
|
||||
from awx.main.models import Credential, CredentialType, Organization, Team, User
|
||||
from awx.sso.common import (
|
||||
get_orgs_by_ids,
|
||||
reconcile_users_org_team_mappings,
|
||||
create_org_and_teams,
|
||||
get_or_create_org_with_default_galaxy_cred,
|
||||
is_remote_auth_enabled,
|
||||
get_external_account,
|
||||
)
|
||||
|
||||
|
||||
class MicroMockObject(object):
|
||||
def all(self):
|
||||
return True
|
||||
from awx.sso.common import get_orgs_by_ids, reconcile_users_org_team_mappings, create_org_and_teams, get_or_create_org_with_default_galaxy_cred
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -291,87 +278,3 @@ class TestCommonFunctions:
|
||||
|
||||
for o in Organization.objects.all():
|
||||
assert o.galaxy_credentials.count() == 0
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"enable_ldap, enable_social, enable_enterprise, expected_results",
|
||||
[
|
||||
(False, False, False, None),
|
||||
(True, False, False, 'ldap'),
|
||||
(True, True, False, 'social'),
|
||||
(True, True, True, 'enterprise'),
|
||||
(False, True, True, 'enterprise'),
|
||||
(False, False, True, 'enterprise'),
|
||||
(False, True, False, 'social'),
|
||||
],
|
||||
)
|
||||
def test_get_external_account(self, enable_ldap, enable_social, enable_enterprise, expected_results):
|
||||
try:
|
||||
user = User.objects.get(username="external_tester")
|
||||
except User.DoesNotExist:
|
||||
user = User(username="external_tester")
|
||||
user.set_unusable_password()
|
||||
user.save()
|
||||
|
||||
if enable_ldap:
|
||||
user.profile.ldap_dn = 'test.dn'
|
||||
if enable_social:
|
||||
from social_django.models import UserSocialAuth
|
||||
|
||||
social_auth, _ = UserSocialAuth.objects.get_or_create(
|
||||
uid='667ec049-cdf3-45d0-a4dc-0465f7505954',
|
||||
provider='oidc',
|
||||
extra_data={},
|
||||
user_id=user.id,
|
||||
)
|
||||
user.social_auth.set([social_auth])
|
||||
if enable_enterprise:
|
||||
from awx.sso.models import UserEnterpriseAuth
|
||||
|
||||
enterprise_auth = UserEnterpriseAuth(user=user, provider='tacacs+')
|
||||
enterprise_auth.save()
|
||||
|
||||
assert get_external_account(user) == expected_results
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"setting, expected",
|
||||
[
|
||||
# Set none of the social auth settings
|
||||
('JUNK_SETTING', False),
|
||||
# Set the hard coded settings
|
||||
('AUTH_LDAP_SERVER_URI', True),
|
||||
('SOCIAL_AUTH_SAML_ENABLED_IDPS', True),
|
||||
('RADIUS_SERVER', True),
|
||||
('TACACSPLUS_HOST', True),
|
||||
# Set some SOCIAL_SOCIAL_AUTH_OIDC_KEYAUTH_*_KEY settings
|
||||
('SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_ORG_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_TEAM_KEY', True),
|
||||
('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', True),
|
||||
('SOCIAL_AUTH_OIDC_KEY', True),
|
||||
# Try a hypothetical future one
|
||||
('SOCIAL_AUTH_GIBBERISH_KEY', True),
|
||||
# Do a SAML one
|
||||
('SOCIAL_AUTH_SAML_SP_PRIVATE_KEY', False),
|
||||
],
|
||||
)
|
||||
def test_is_remote_auth_enabled(self, setting, expected):
|
||||
with override_settings(**{setting: True}):
|
||||
assert is_remote_auth_enabled() == expected
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"key_one, key_one_value, key_two, key_two_value, expected",
|
||||
[
|
||||
('JUNK_SETTING', True, 'JUNK2_SETTING', True, False),
|
||||
('AUTH_LDAP_SERVER_URI', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True),
|
||||
('JUNK_SETTING', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True),
|
||||
('AUTH_LDAP_SERVER_URI', False, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', False, False),
|
||||
],
|
||||
)
|
||||
def test_is_remote_auth_enabled_multiple_keys(self, key_one, key_one_value, key_two, key_two_value, expected):
|
||||
with override_settings(**{key_one: key_one_value}):
|
||||
with override_settings(**{key_two: key_two_value}):
|
||||
assert is_remote_auth_enabled() == expected
|
||||
|
||||
@@ -6,6 +6,7 @@ import Config from './models/Config';
import CredentialInputSources from './models/CredentialInputSources';
import CredentialTypes from './models/CredentialTypes';
import Credentials from './models/Credentials';
import ConstructedInventories from './models/ConstructedInventories';
import Dashboard from './models/Dashboard';
import ExecutionEnvironments from './models/ExecutionEnvironments';
import Groups from './models/Groups';
@@ -53,6 +54,7 @@ const ConfigAPI = new Config();
const CredentialInputSourcesAPI = new CredentialInputSources();
const CredentialTypesAPI = new CredentialTypes();
const CredentialsAPI = new Credentials();
const ConstructedInventoriesAPI = new ConstructedInventories();
const DashboardAPI = new Dashboard();
const ExecutionEnvironmentsAPI = new ExecutionEnvironments();
const GroupsAPI = new Groups();
@@ -101,6 +103,7 @@ export {
CredentialInputSourcesAPI,
CredentialTypesAPI,
CredentialsAPI,
ConstructedInventoriesAPI,
DashboardAPI,
ExecutionEnvironmentsAPI,
GroupsAPI,

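The new ConstructedInventoriesAPI model registered above fronts /api/v2/constructed_inventories/, the same endpoint the backend tests in this diff exercise. A hedged sketch of hitting it directly; host, credentials, id and payload values are examples, not taken from the diff:

# Sketch only: exercising the constructed inventories endpoint with requests.
import requests

base = 'https://awx.example.org/api/v2/constructed_inventories/'
auth = ('admin', 'password')   # example credentials

resp = requests.get(base, auth=auth)
resp.raise_for_status()
print([inv['name'] for inv in resp.json()['results']])

# PATCHing limit/update_cache_timeout mirrors what test_patch_constructed_inventory asserts.
requests.patch(f'{base}42/', json={'limit': 'webservers', 'update_cache_timeout': 60}, auth=auth)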
101
awx/ui/src/api/models/ConstructedInventories.js
Normal file
@@ -0,0 +1,101 @@
|
||||
import Base from '../Base';
|
||||
import InstanceGroupsMixin from '../mixins/InstanceGroups.mixin';
|
||||
|
||||
class ConstructedInventories extends InstanceGroupsMixin(Base) {
|
||||
constructor(http) {
|
||||
super(http);
|
||||
this.baseUrl = 'api/v2/constructed_inventories/';
|
||||
|
||||
this.readAccessList = this.readAccessList.bind(this);
|
||||
this.readAccessOptions = this.readAccessOptions.bind(this);
|
||||
this.readHosts = this.readHosts.bind(this);
|
||||
this.readHostDetail = this.readHostDetail.bind(this);
|
||||
this.readGroups = this.readGroups.bind(this);
|
||||
this.readGroupsOptions = this.readGroupsOptions.bind(this);
|
||||
this.promoteGroup = this.promoteGroup.bind(this);
|
||||
}
|
||||
|
||||
readAccessList(id, params) {
|
||||
return this.http.get(`${this.baseUrl}${id}/access_list/`, {
|
||||
params,
|
||||
});
|
||||
}
|
||||
|
||||
readAccessOptions(id) {
|
||||
return this.http.options(`${this.baseUrl}${id}/access_list/`);
|
||||
}
|
||||
|
||||
createHost(id, data) {
|
||||
return this.http.post(`${this.baseUrl}${id}/hosts/`, data);
|
||||
}
|
||||
|
||||
readHosts(id, params) {
|
||||
return this.http.get(`${this.baseUrl}${id}/hosts/`, {
|
||||
params,
|
||||
});
|
||||
}
|
||||
|
||||
async readHostDetail(inventoryId, hostId) {
|
||||
const {
|
||||
data: { results },
|
||||
} = await this.http.get(
|
||||
`${this.baseUrl}${inventoryId}/hosts/?id=${hostId}`
|
||||
);
|
||||
|
||||
if (Array.isArray(results) && results.length) {
|
||||
return results[0];
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`How did you get here? Host not found for Inventory ID: ${inventoryId}`
|
||||
);
|
||||
}
|
||||
|
||||
readGroups(id, params) {
|
||||
return this.http.get(`${this.baseUrl}${id}/groups/`, {
|
||||
params,
|
||||
});
|
||||
}
|
||||
|
||||
readGroupsOptions(id) {
|
||||
return this.http.options(`${this.baseUrl}${id}/groups/`);
|
||||
}
|
||||
|
||||
readHostsOptions(id) {
|
||||
return this.http.options(`${this.baseUrl}${id}/hosts/`);
|
||||
}
|
||||
|
||||
promoteGroup(inventoryId, groupId) {
|
||||
return this.http.post(`${this.baseUrl}${inventoryId}/groups/`, {
|
||||
id: groupId,
|
||||
disassociate: true,
|
||||
});
|
||||
}
|
||||
|
||||
readAdHocOptions(inventoryId) {
|
||||
return this.http.options(`${this.baseUrl}${inventoryId}/ad_hoc_commands/`);
|
||||
}
|
||||
|
||||
launchAdHocCommands(inventoryId, values) {
|
||||
return this.http.post(
|
||||
`${this.baseUrl}${inventoryId}/ad_hoc_commands/`,
|
||||
values
|
||||
);
|
||||
}
|
||||
|
||||
associateLabel(id, label, orgId) {
|
||||
return this.http.post(`${this.baseUrl}${id}/labels/`, {
|
||||
name: label.name,
|
||||
organization: orgId,
|
||||
});
|
||||
}
|
||||
|
||||
disassociateLabel(id, label) {
|
||||
return this.http.post(`${this.baseUrl}${id}/labels/`, {
|
||||
id: label.id,
|
||||
disassociate: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export default ConstructedInventories;
|
||||
@@ -13,6 +13,7 @@ class Inventories extends InstanceGroupsMixin(Base) {
|
||||
this.readGroups = this.readGroups.bind(this);
|
||||
this.readGroupsOptions = this.readGroupsOptions.bind(this);
|
||||
this.promoteGroup = this.promoteGroup.bind(this);
|
||||
this.readSourceInventories = this.readSourceInventories.bind(this);
|
||||
}
|
||||
|
||||
readAccessList(id, params) {
|
||||
@@ -72,6 +73,12 @@ class Inventories extends InstanceGroupsMixin(Base) {
|
||||
});
|
||||
}
|
||||
|
||||
readSourceInventories(inventoryId, params) {
|
||||
return this.http.get(`${this.baseUrl}${inventoryId}/input_inventories/`, {
|
||||
params,
|
||||
});
|
||||
}
|
||||
|
||||
readSources(inventoryId, params) {
|
||||
return this.http.get(`${this.baseUrl}${inventoryId}/inventory_sources/`, {
|
||||
params,
|
||||
|
||||
@@ -22,19 +22,14 @@ import { CredentialsAPI } from 'api';
|
||||
import CredentialDetail from './CredentialDetail';
|
||||
import CredentialEdit from './CredentialEdit';
|
||||
|
||||
const unacceptableCredentialTypes = [
|
||||
'centrify_vault_kv',
|
||||
'aim',
|
||||
'conjur',
|
||||
'hashivault_kv',
|
||||
'hashivault_ssh',
|
||||
'azure_kv',
|
||||
'thycotic_dsv',
|
||||
'thycotic_tss',
|
||||
'galaxy_api_token',
|
||||
'insights',
|
||||
'registry',
|
||||
'scm',
|
||||
const jobTemplateCredentialTypes = [
|
||||
'machine',
|
||||
'cloud',
|
||||
'net',
|
||||
'ssh',
|
||||
'vault',
|
||||
'kubernetes',
|
||||
'cryptography',
|
||||
];
|
||||
|
||||
function Credential({ setBreadcrumb }) {
|
||||
@@ -91,10 +86,7 @@ function Credential({ setBreadcrumb }) {
|
||||
id: 1,
|
||||
},
|
||||
];
|
||||
if (
|
||||
!unacceptableCredentialTypes.includes(credential?.kind) &&
|
||||
credential !== null
|
||||
) {
|
||||
if (jobTemplateCredentialTypes.includes(credential?.kind)) {
|
||||
tabsArray.push({
|
||||
name: t`Job Templates`,
|
||||
link: `/credentials/${id}/job_templates`,
|
||||
@@ -123,14 +115,12 @@ function Credential({ setBreadcrumb }) {
|
||||
</PageSection>
|
||||
);
|
||||
}
|
||||
if (hasContentLoading) {
|
||||
return <ContentLoading />;
|
||||
}
|
||||
|
||||
return (
|
||||
<PageSection>
|
||||
<Card>
|
||||
{showCardHeader && <RoutedTabs tabsArray={tabsArray} />}
|
||||
{hasContentLoading && <ContentLoading />}
|
||||
{!hasContentLoading && credential && (
|
||||
<Switch>
|
||||
<Redirect
|
||||
|
||||
@@ -8,7 +8,6 @@ import {
|
||||
} from '../../../testUtils/enzymeHelpers';
|
||||
import mockMachineCredential from './shared/data.machineCredential.json';
|
||||
import mockSCMCredential from './shared/data.scmCredential.json';
|
||||
import mockCyberArkCredential from './shared/data.cyberArkCredential.json';
|
||||
import Credential from './Credential';
|
||||
|
||||
jest.mock('../../api');
|
||||
@@ -22,11 +21,6 @@ jest.mock('react-router-dom', () => ({
|
||||
|
||||
describe('<Credential />', () => {
|
||||
let wrapper;
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
wrapper.unmount();
|
||||
});
|
||||
|
||||
test('initially renders user-based machine credential successfully', async () => {
|
||||
CredentialsAPI.readDetail.mockResolvedValueOnce({
|
||||
@@ -67,19 +61,6 @@ describe('<Credential />', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test('should not render job template tab', async () => {
|
||||
CredentialsAPI.readDetail.mockResolvedValueOnce({
|
||||
data: { ...mockCyberArkCredential, kind: 'registry' },
|
||||
});
|
||||
const expectedTabs = ['Back to Credentials', 'Details', 'Access'];
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(<Credential setBreadcrumb={() => {}} />);
|
||||
});
|
||||
wrapper.find('RoutedTabs li').forEach((tab, index) => {
|
||||
expect(tab.text()).toEqual(expectedTabs[index]);
|
||||
});
|
||||
});
|
||||
|
||||
test('should show content error when user attempts to navigate to erroneous route', async () => {
|
||||
const history = createMemoryHistory({
|
||||
initialEntries: ['/credentials/2/foobar'],
|
||||
@@ -104,4 +85,3 @@ describe('<Credential />', () => {
|
||||
await waitForElement(wrapper, 'ContentError', (el) => el.length === 1);
|
||||
});
|
||||
});
|
||||
describe('<Credential> should not show job template tab', () => {});
|
||||
|
||||
217
awx/ui/src/screens/Inventory/ConstructedInventory.js
Normal file
@@ -0,0 +1,217 @@
|
||||
import React, { useCallback, useEffect } from 'react';
|
||||
import { t } from '@lingui/macro';
|
||||
import {
|
||||
Link,
|
||||
Switch,
|
||||
Route,
|
||||
Redirect,
|
||||
useRouteMatch,
|
||||
useLocation,
|
||||
} from 'react-router-dom';
|
||||
import { CaretLeftIcon } from '@patternfly/react-icons';
|
||||
import { Card, PageSection } from '@patternfly/react-core';
|
||||
|
||||
import useRequest from 'hooks/useRequest';
|
||||
import { ConstructedInventoriesAPI, InventoriesAPI } from 'api';
|
||||
|
||||
import ContentError from 'components/ContentError';
|
||||
import ContentLoading from 'components/ContentLoading';
|
||||
import JobList from 'components/JobList';
|
||||
import RelatedTemplateList from 'components/RelatedTemplateList';
|
||||
import { ResourceAccessList } from 'components/ResourceAccessList';
|
||||
import RoutedTabs from 'components/RoutedTabs';
|
||||
import ConstructedInventoryDetail from './InventoryDetail';
|
||||
import ConstructedInventoryEdit from './InventoryEdit';
|
||||
import ConstructedInventoryGroups from './InventoryGroups';
|
||||
import ConstructedInventoryHosts from './InventoryHosts';
|
||||
import { getInventoryPath } from './shared/utils';
|
||||
|
||||
function ConstructedInventory({ setBreadcrumb }) {
|
||||
const location = useLocation();
|
||||
const match = useRouteMatch('/inventories/constructed_inventory/:id');
|
||||
|
||||
const {
|
||||
result: inventory,
|
||||
error: contentError,
|
||||
isLoading: hasContentLoading,
|
||||
request: fetchInventory,
|
||||
} = useRequest(
|
||||
useCallback(async () => {
|
||||
const { data } = await ConstructedInventoriesAPI.readDetail(
|
||||
match.params.id
|
||||
);
|
||||
return data;
|
||||
}, [match.params.id]),
|
||||
|
||||
null
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
fetchInventory();
|
||||
}, [fetchInventory, location.pathname]);
|
||||
|
||||
useEffect(() => {
|
||||
if (inventory) {
|
||||
setBreadcrumb(inventory);
|
||||
}
|
||||
}, [inventory, setBreadcrumb]);
|
||||
|
||||
const tabsArray = [
|
||||
{
|
||||
name: (
|
||||
<>
|
||||
<CaretLeftIcon />
|
||||
{t`Back to Inventories`}
|
||||
</>
|
||||
),
|
||||
link: `/inventories`,
|
||||
id: 99,
|
||||
},
|
||||
{ name: t`Details`, link: `${match.url}/details`, id: 0 },
|
||||
{ name: t`Access`, link: `${match.url}/access`, id: 1 },
|
||||
{ name: t`Hosts`, link: `${match.url}/hosts`, id: 2 },
|
||||
{ name: t`Groups`, link: `${match.url}/groups`, id: 3 },
|
||||
{
|
||||
name: t`Jobs`,
|
||||
link: `${match.url}/jobs`,
|
||||
id: 4,
|
||||
},
|
||||
{ name: t`Job Templates`, link: `${match.url}/job_templates`, id: 5 },
|
||||
];
|
||||
|
||||
if (hasContentLoading) {
|
||||
return (
|
||||
<PageSection>
|
||||
<Card>
|
||||
<ContentLoading />
|
||||
</Card>
|
||||
</PageSection>
|
||||
);
|
||||
}
|
||||
|
||||
if (contentError) {
|
||||
return (
|
||||
<PageSection>
|
||||
<Card>
|
||||
<ContentError error={contentError}>
|
||||
{contentError?.response?.status === 404 && (
|
||||
<span>
|
||||
{t`Constructed Inventory not found.`}{' '}
|
||||
<Link to="/inventories">{t`View all Inventories.`}</Link>
|
||||
</span>
|
||||
)}
|
||||
</ContentError>
|
||||
</Card>
|
||||
</PageSection>
|
||||
);
|
||||
}
|
||||
|
||||
if (inventory && inventory?.kind !== 'constructed') {
|
||||
return <Redirect to={`${getInventoryPath(inventory)}/details`} />;
|
||||
}
|
||||
|
||||
let showCardHeader = true;
|
||||
|
||||
if (
|
||||
['edit', 'add', 'groups/', 'hosts/', 'sources/'].some((name) =>
|
||||
location.pathname.includes(name)
|
||||
)
|
||||
) {
|
||||
showCardHeader = false;
|
||||
}
|
||||
|
||||
return (
|
||||
<PageSection>
|
||||
<Card>
|
||||
{showCardHeader && <RoutedTabs tabsArray={tabsArray} />}
|
||||
<Switch>
|
||||
<Redirect
|
||||
from="/inventories/constructed_inventory/:id"
|
||||
to="/inventories/constructed_inventory/:id/details"
|
||||
exact
|
||||
/>
|
||||
{inventory && [
|
||||
<Route
|
||||
path="/inventories/constructed_inventory/:id/details"
|
||||
key="details"
|
||||
>
|
||||
<ConstructedInventoryDetail
|
||||
inventory={inventory}
|
||||
hasInventoryLoading={hasContentLoading}
|
||||
/>
|
||||
</Route>,
|
||||
<Route
|
||||
key="edit"
|
||||
path="/inventories/constructed_inventory/:id/edit"
|
||||
>
|
||||
<ConstructedInventoryEdit />
|
||||
</Route>,
|
||||
<Route
|
||||
path="/inventories/constructed_inventory/:id/access"
|
||||
key="access"
|
||||
>
|
||||
<ResourceAccessList
|
||||
resource={inventory}
|
||||
apiModel={InventoriesAPI}
|
||||
/>
|
||||
</Route>,
|
||||
<Route
|
||||
path="/inventories/constructed_inventory/:id/hosts"
|
||||
key="constructed_inventory_hosts"
|
||||
>
|
||||
<ConstructedInventoryHosts
|
||||
inventory={inventory}
|
||||
setBreadcrumb={setBreadcrumb}
|
||||
/>
|
||||
</Route>,
|
||||
<Route
|
||||
path="/inventories/constructed_inventory/:id/groups"
|
||||
key="constructed_inventory_groups"
|
||||
>
|
||||
<ConstructedInventoryGroups
|
||||
inventory={inventory}
|
||||
setBreadcrumb={setBreadcrumb}
|
||||
/>
|
||||
</Route>,
|
||||
<Route
|
||||
key="jobs"
|
||||
path="/inventories/constructed_inventory/:id/jobs"
|
||||
>
|
||||
<JobList
|
||||
defaultParams={{
|
||||
or__job__inventory: inventory.id,
|
||||
or__adhoccommand__inventory: inventory.id,
|
||||
or__inventoryupdate__inventory_source__inventory:
|
||||
inventory.id,
|
||||
or__workflowjob__inventory: inventory.id,
|
||||
}}
|
||||
/>
|
||||
</Route>,
|
||||
<Route
|
||||
key="job_templates"
|
||||
path="/inventories/constructed_inventory/:id/job_templates"
|
||||
>
|
||||
<RelatedTemplateList
|
||||
searchParams={{ inventory__id: inventory.id }}
|
||||
/>
|
||||
</Route>,
|
||||
]}
|
||||
<Route path="*" key="not-found">
|
||||
<ContentError isNotFound>
|
||||
{match.params.id && (
|
||||
<Link
|
||||
to={`/inventories/constructed_inventory/${match.params.id}/details`}
|
||||
>
|
||||
{t`View Constructed Inventory Details`}
|
||||
</Link>
|
||||
)}
|
||||
</ContentError>
|
||||
</Route>
|
||||
</Switch>
|
||||
</Card>
|
||||
</PageSection>
|
||||
);
|
||||
}
|
||||
|
||||
export { ConstructedInventory as _ConstructedInventory };
|
||||
export default ConstructedInventory;
|
||||
73
awx/ui/src/screens/Inventory/ConstructedInventory.test.js
Normal file
@@ -0,0 +1,73 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { createMemoryHistory } from 'history';
|
||||
import { ConstructedInventoriesAPI } from 'api';
|
||||
import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
|
||||
import mockInventory from './shared/data.inventory.json';
|
||||
import ConstructedInventory from './ConstructedInventory';
|
||||
|
||||
jest.mock('../../api');
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useRouteMatch: () => ({
|
||||
url: '/constructed_inventories/1',
|
||||
params: { id: 1 },
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('<ConstructedInventory />', () => {
|
||||
let wrapper;
|
||||
|
||||
beforeEach(async () => {
|
||||
ConstructedInventoriesAPI.readDetail.mockResolvedValue({
|
||||
data: mockInventory,
|
||||
});
|
||||
});
|
||||
|
||||
test('should render expected tabs', async () => {
|
||||
const expectedTabs = [
|
||||
'Back to Inventories',
|
||||
'Details',
|
||||
'Access',
|
||||
'Hosts',
|
||||
'Groups',
|
||||
'Jobs',
|
||||
'Job Templates',
|
||||
];
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ConstructedInventory setBreadcrumb={() => {}} />
|
||||
);
|
||||
});
|
||||
wrapper.find('RoutedTabs li').forEach((tab, index) => {
|
||||
expect(tab.text()).toEqual(expectedTabs[index]);
|
||||
});
|
||||
});
|
||||
|
||||
test('should show content error when user attempts to navigate to erroneous route', async () => {
|
||||
const history = createMemoryHistory({
|
||||
initialEntries: ['/inventories/constructed_inventory/1/foobar'],
|
||||
});
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<ConstructedInventory setBreadcrumb={() => {}} />,
|
||||
{
|
||||
context: {
|
||||
router: {
|
||||
history,
|
||||
route: {
|
||||
location: history.location,
|
||||
match: {
|
||||
params: { id: 1 },
|
||||
url: '/inventories/constructed_inventory/1/foobar',
|
||||
path: '/inventories/constructed_inventory/1/foobar',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
});
|
||||
expect(wrapper.find('ContentError').length).toBe(1);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,18 @@
|
||||
/* eslint i18next/no-literal-string: "off" */
|
||||
import React from 'react';
|
||||
import { Card, PageSection } from '@patternfly/react-core';
|
||||
import { CardBody } from 'components/Card';
|
||||
|
||||
function ConstructedInventoryAdd() {
|
||||
return (
|
||||
<PageSection>
|
||||
<Card>
|
||||
<CardBody>
|
||||
<div>Coming Soon!</div>
|
||||
</CardBody>
|
||||
</Card>
|
||||
</PageSection>
|
||||
);
|
||||
}
|
||||
|
||||
export default ConstructedInventoryAdd;
|
||||
@@ -0,0 +1,15 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
|
||||
import ConstructedInventoryAdd from './ConstructedInventoryAdd';
|
||||
|
||||
describe('<ConstructedInventoryAdd />', () => {
|
||||
test('initially renders successfully', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(<ConstructedInventoryAdd />);
|
||||
});
|
||||
expect(wrapper.length).toBe(1);
|
||||
expect(wrapper.find('ConstructedInventoryAdd').length).toBe(1);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1 @@
|
||||
export { default } from './ConstructedInventoryAdd';
|
||||
@@ -0,0 +1,13 @@
|
||||
/* eslint i18next/no-literal-string: "off" */
|
||||
import React from 'react';
|
||||
import { CardBody } from 'components/Card';
|
||||
|
||||
function ConstructedInventoryEdit() {
|
||||
return (
|
||||
<CardBody>
|
||||
<div>Coming Soon!</div>
|
||||
</CardBody>
|
||||
);
|
||||
}
|
||||
|
||||
export default ConstructedInventoryEdit;
|
||||
@@ -0,0 +1,15 @@
|
||||
import React from 'react';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
|
||||
import ConstructedInventoryEdit from './ConstructedInventoryEdit';
|
||||
|
||||
describe('<ConstructedInventoryEdit />', () => {
|
||||
test('initially renders successfully', async () => {
|
||||
let wrapper;
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(<ConstructedInventoryEdit />);
|
||||
});
|
||||
expect(wrapper.length).toBe(1);
|
||||
expect(wrapper.find('ConstructedInventoryEdit').length).toBe(1);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1 @@
|
||||
export { default } from './ConstructedInventoryEdit';
|
||||
@@ -9,14 +9,18 @@ import PersistentFilters from 'components/PersistentFilters';
|
||||
import { InventoryList } from './InventoryList';
|
||||
import Inventory from './Inventory';
|
||||
import SmartInventory from './SmartInventory';
|
||||
import ConstructedInventory from './ConstructedInventory';
|
||||
import InventoryAdd from './InventoryAdd';
|
||||
import SmartInventoryAdd from './SmartInventoryAdd';
|
||||
import ConstructedInventoryAdd from './ConstructedInventoryAdd';
|
||||
import { getInventoryPath } from './shared/utils';
|
||||
|
||||
function Inventories() {
|
||||
const initScreenHeader = useRef({
|
||||
'/inventories': t`Inventories`,
|
||||
'/inventories/inventory/add': t`Create new inventory`,
|
||||
'/inventories/smart_inventory/add': t`Create new smart inventory`,
|
||||
'/inventories/constructed_inventory/add': t`Create new constructed inventory`,
|
||||
});
|
||||
|
||||
const [breadcrumbConfig, setScreenHeader] = useState(
|
||||
@@ -45,10 +49,7 @@ function Inventories() {
|
||||
return;
|
||||
}
|
||||
|
||||
const inventoryKind =
|
||||
inventory.kind === 'smart' ? 'smart_inventory' : 'inventory';
|
||||
|
||||
const inventoryPath = `/inventories/${inventoryKind}/${inventory.id}`;
|
||||
const inventoryPath = getInventoryPath(inventory);
|
||||
const inventoryHostsPath = `${inventoryPath}/hosts`;
|
||||
const inventoryGroupsPath = `${inventoryPath}/groups`;
|
||||
const inventorySourcesPath = `${inventoryPath}/sources`;
|
||||
@@ -109,6 +110,9 @@ function Inventories() {
|
||||
<Route path="/inventories/smart_inventory/add">
|
||||
<SmartInventoryAdd />
|
||||
</Route>
|
||||
<Route path="/inventories/constructed_inventory/add">
|
||||
<ConstructedInventoryAdd />
|
||||
</Route>
|
||||
<Route path="/inventories/inventory/:id">
|
||||
<Config>
|
||||
{({ me }) => (
|
||||
@@ -119,6 +123,9 @@ function Inventories() {
|
||||
<Route path="/inventories/smart_inventory/:id">
|
||||
<SmartInventory setBreadcrumb={setBreadcrumbConfig} />
|
||||
</Route>
|
||||
<Route path="/inventories/constructed_inventory/:id">
|
||||
<ConstructedInventory setBreadcrumb={setBreadcrumbConfig} />
|
||||
</Route>
|
||||
<Route path="/inventories">
|
||||
<PersistentFilters pageKey="inventories">
|
||||
<InventoryList />
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
Link,
|
||||
useLocation,
|
||||
useRouteMatch,
|
||||
useParams,
|
||||
} from 'react-router-dom';
|
||||
import { CaretLeftIcon } from '@patternfly/react-icons';
|
||||
import { Card, PageSection } from '@patternfly/react-core';
|
||||
@@ -23,20 +24,22 @@ import InventoryEdit from './InventoryEdit';
|
||||
import InventoryGroups from './InventoryGroups';
|
||||
import InventoryHosts from './InventoryHosts/InventoryHosts';
|
||||
import InventorySources from './InventorySources';
|
||||
import { getInventoryPath } from './shared/utils';
|
||||
|
||||
function Inventory({ setBreadcrumb }) {
|
||||
const [contentError, setContentError] = useState(null);
|
||||
const [hasContentLoading, setHasContentLoading] = useState(true);
|
||||
const [inventory, setInventory] = useState(null);
|
||||
const location = useLocation();
|
||||
const { id: inventoryId } = useParams();
|
||||
const match = useRouteMatch({
|
||||
path: '/inventories/inventory/:id',
|
||||
path: `/inventories/:inventoryType/:id`,
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
async function fetchData() {
|
||||
try {
|
||||
const { data } = await InventoriesAPI.readDetail(match.params.id);
|
||||
const { data } = await InventoriesAPI.readDetail(inventoryId);
|
||||
setBreadcrumb(data);
|
||||
setInventory(data);
|
||||
} catch (error) {
|
||||
@@ -47,7 +50,7 @@ function Inventory({ setBreadcrumb }) {
|
||||
}
|
||||
|
||||
fetchData();
|
||||
}, [match.params.id, location.pathname, setBreadcrumb]);
|
||||
}, [inventoryId, location.pathname, setBreadcrumb]);
|
||||
|
||||
const tabsArray = [
|
||||
{
|
||||
@@ -111,10 +114,8 @@ function Inventory({ setBreadcrumb }) {
|
||||
showCardHeader = false;
|
||||
}
|
||||
|
||||
if (inventory?.kind === 'smart') {
|
||||
return (
|
||||
<Redirect to={`/inventories/smart_inventory/${inventory.id}/details`} />
|
||||
);
|
||||
if (inventory && inventory?.kind !== '') {
|
||||
return <Redirect to={`${getInventoryPath(inventory)}/details`} />;
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -186,10 +187,8 @@ function Inventory({ setBreadcrumb }) {
|
||||
</Route>,
|
||||
<Route path="*" key="not-found">
|
||||
<ContentError isNotFound>
|
||||
{match.params.id && (
|
||||
<Link
|
||||
to={`/inventories/inventory/${match.params.id}/details`}
|
||||
>
|
||||
{inventoryId && (
|
||||
<Link to={`/inventories/inventory/${inventoryId}/details`}>
|
||||
{t`View Inventory Details`}
|
||||
</Link>
|
||||
)}
|
||||
|
||||
@@ -24,6 +24,7 @@ import useRequest, { useDismissableError } from 'hooks/useRequest';
|
||||
import { Inventory } from 'types';
|
||||
import { relatedResourceDeleteRequests } from 'util/getRelatedResourceDeleteDetails';
|
||||
import InstanceGroupLabels from 'components/InstanceGroupLabels';
|
||||
import { VERBOSITY } from 'components/VerbositySelectField';
|
||||
import getHelpText from '../shared/Inventory.helptext';
|
||||
|
||||
function InventoryDetail({ inventory }) {
|
||||
@@ -102,6 +103,7 @@ function InventoryDetail({ inventory }) {
|
||||
}
|
||||
/>
|
||||
<Detail label={t`Total hosts`} value={inventory.total_hosts} />
|
||||
<Detail label={t`Total groups`} value={inventory.total_groups} />
|
||||
{instanceGroups && (
|
||||
<Detail
|
||||
fullWidth
|
||||
@@ -117,6 +119,21 @@ function InventoryDetail({ inventory }) {
|
||||
helpText={helpText.preventInstanceGroupFallback}
|
||||
/>
|
||||
)}
|
||||
<Detail
|
||||
label={t`Limit`}
|
||||
dataCy="inv-detail-limit"
|
||||
value={inventory.limit}
|
||||
/>
|
||||
<Detail
|
||||
label={t`Cache timeout`}
|
||||
value={inventory.update_cache_timeout}
|
||||
dataCy="inv-detail-cache-timeout"
|
||||
/>
|
||||
<Detail
|
||||
label={t`Verbosity`}
|
||||
dataCy="inv-detail-verbosity"
|
||||
value={VERBOSITY()[inventory.verbosity]}
|
||||
/>
|
||||
{renderOptionsField && (
|
||||
<Detail
|
||||
fullWidth
|
||||
@@ -149,7 +166,7 @@ function InventoryDetail({ inventory }) {
|
||||
<VariablesDetail
|
||||
label={t`Variables`}
|
||||
helpText={helpText.variables()}
|
||||
value={inventory.variables}
|
||||
value={inventory.variables || inventory.source_vars}
|
||||
rows={4}
|
||||
name="variables"
|
||||
dataCy="inventory-detail-variables"
|
||||
@@ -187,7 +204,6 @@ function InventoryDetail({ inventory }) {
|
||||
</DeleteButton>
|
||||
)}
|
||||
</CardActionsRow>
|
||||
{/* Update delete modal to show dependencies https://github.com/ansible/awx/issues/5546 */}
|
||||
{error && (
|
||||
<AlertModal
|
||||
isOpen={error}
|
||||
|
||||
@@ -23,7 +23,7 @@ function InventoryGroup({ setBreadcrumb, inventory }) {
|
||||
const [inventoryGroup, setInventoryGroup] = useState(null);
|
||||
const [contentLoading, setContentLoading] = useState(true);
|
||||
const [contentError, setContentError] = useState(null);
|
||||
const { id: inventoryId, groupId } = useParams();
|
||||
const { id: inventoryId, groupId, inventoryType } = useParams();
|
||||
const location = useLocation();
|
||||
|
||||
useEffect(() => {
|
||||
@@ -50,22 +50,22 @@ function InventoryGroup({ setBreadcrumb, inventory }) {
|
||||
{t`Back to Groups`}
|
||||
</>
|
||||
),
|
||||
link: `/inventories/inventory/${inventory.id}/groups`,
|
||||
link: `/inventories/${inventoryType}/${inventoryId}/groups`,
|
||||
id: 99,
|
||||
},
|
||||
{
|
||||
name: t`Details`,
|
||||
link: `/inventories/inventory/${inventory.id}/groups/${inventoryGroup?.id}/details`,
|
||||
link: `/inventories/${inventoryType}/${inventoryId}/groups/${inventoryGroup?.id}/details`,
|
||||
id: 0,
|
||||
},
|
||||
{
|
||||
name: t`Related Groups`,
|
||||
link: `/inventories/inventory/${inventory.id}/groups/${inventoryGroup?.id}/nested_groups`,
|
||||
link: `/inventories/${inventoryType}/${inventoryId}/groups/${inventoryGroup?.id}/nested_groups`,
|
||||
id: 1,
|
||||
},
|
||||
{
|
||||
name: t`Hosts`,
|
||||
link: `/inventories/inventory/${inventory.id}/groups/${inventoryGroup?.id}/nested_hosts`,
|
||||
link: `/inventories/${inventoryType}/${inventoryId}/groups/${inventoryGroup?.id}/nested_hosts`,
|
||||
id: 2,
|
||||
},
|
||||
];
|
||||
@@ -105,32 +105,32 @@ function InventoryGroup({ setBreadcrumb, inventory }) {
|
||||
{showCardHeader && <RoutedTabs tabsArray={tabsArray} />}
|
||||
<Switch>
|
||||
<Redirect
|
||||
from="/inventories/inventory/:id/groups/:groupId"
|
||||
to="/inventories/inventory/:id/groups/:groupId/details"
|
||||
from="/inventories/:inventoryType/:id/groups/:groupId"
|
||||
to="/inventories/:inventoryType/:id/groups/:groupId/details"
|
||||
exact
|
||||
/>
|
||||
{inventoryGroup && [
|
||||
<Route
|
||||
key="edit"
|
||||
path="/inventories/inventory/:id/groups/:groupId/edit"
|
||||
path="/inventories/:inventoryType/:id/groups/:groupId/edit"
|
||||
>
|
||||
<InventoryGroupEdit inventoryGroup={inventoryGroup} />
|
||||
</Route>,
|
||||
<Route
|
||||
key="details"
|
||||
path="/inventories/inventory/:id/groups/:groupId/details"
|
||||
path="/inventories/:inventoryType/:id/groups/:groupId/details"
|
||||
>
|
||||
<InventoryGroupDetail inventoryGroup={inventoryGroup} />
|
||||
</Route>,
|
||||
<Route
|
||||
key="hosts"
|
||||
path="/inventories/inventory/:id/groups/:groupId/nested_hosts"
|
||||
path="/inventories/:inventoryType/:id/groups/:groupId/nested_hosts"
|
||||
>
|
||||
<InventoryGroupHosts inventoryGroup={inventoryGroup} />
|
||||
</Route>,
|
||||
<Route
|
||||
key="relatedGroups"
|
||||
path="/inventories/inventory/:id/groups/:groupId/nested_groups"
|
||||
path="/inventories/:inventoryType/:id/groups/:groupId/nested_groups"
|
||||
>
|
||||
<InventoryRelatedGroups />
|
||||
</Route>,
|
||||
@@ -138,7 +138,7 @@ function InventoryGroup({ setBreadcrumb, inventory }) {
|
||||
<Route key="not-found" path="*">
|
||||
<ContentError>
|
||||
{inventory && (
|
||||
<Link to={`/inventories/inventory/${inventory.id}/details`}>
|
||||
<Link to={`/inventories/:inventoryType/${inventory.id}/details`}>
|
||||
{t`View Inventory Details`}
|
||||
</Link>
|
||||
)}
|
||||
|
||||
@@ -11,15 +11,16 @@ import {
|
||||
import InventoryGroup from './InventoryGroup';
|
||||
|
||||
jest.mock('../../../api');
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
groupId: 2,
|
||||
}),
|
||||
}));
|
||||
|
||||
describe('<InventoryGroup />', () => {
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
groupId: 2,
|
||||
inventoryType: 'inventory',
|
||||
}),
|
||||
}));
|
||||
|
||||
let wrapper;
|
||||
let history;
|
||||
const inventory = { id: 1, name: 'Foo' };
|
||||
@@ -41,11 +42,11 @@ describe('<InventoryGroup />', () => {
|
||||
},
|
||||
});
|
||||
history = createMemoryHistory({
|
||||
initialEntries: ['/inventories/inventory/1/groups/1/details'],
|
||||
initialEntries: [`/inventories/inventory/1/groups/1/details`],
|
||||
});
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Route path="/inventories/inventory/:id/groups">
|
||||
<Route path="/inventories/:inventoryType/:id/groups">
|
||||
<InventoryGroup setBreadcrumb={() => {}} inventory={inventory} />
|
||||
</Route>,
|
||||
{ context: { router: { history } } }
|
||||
@@ -63,7 +64,7 @@ describe('<InventoryGroup />', () => {
|
||||
expect(routedTabs).toHaveLength(1);
|
||||
|
||||
const tabs = routedTabs.prop('tabsArray');
|
||||
expect(tabs[0].link).toEqual('/inventories/inventory/1/groups');
|
||||
expect(tabs[0].link).toEqual(`/inventories/inventory/1/groups`);
|
||||
expect(tabs[1].name).toEqual('Details');
|
||||
expect(tabs[2].name).toEqual('Related Groups');
|
||||
expect(tabs[3].name).toEqual('Hosts');
|
||||
@@ -71,7 +72,7 @@ describe('<InventoryGroup />', () => {
|
||||
|
||||
test('should show content error when user attempts to navigate to erroneous route', async () => {
|
||||
history = createMemoryHistory({
|
||||
initialEntries: ['/inventories/inventory/1/groups/1/foobar'],
|
||||
initialEntries: [`/inventories/inventory/1/groups/1/foobar`],
|
||||
});
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
@@ -92,3 +93,59 @@ describe('<InventoryGroup />', () => {
|
||||
await waitForElement(wrapper, 'ContentError', (el) => el.length === 1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('constructed inventory', () => {
|
||||
let wrapper;
|
||||
let history;
|
||||
const inventory = { id: 1, name: 'Foo' };
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
groupId: 2,
|
||||
inventoryType: 'constructed_inventory',
|
||||
}),
|
||||
}));
|
||||
|
||||
beforeEach(async () => {
|
||||
GroupsAPI.readDetail.mockResolvedValue({
|
||||
data: {
|
||||
id: 1,
|
||||
name: 'Foo',
|
||||
description: 'Bar',
|
||||
variables: 'bizz: buzz',
|
||||
summary_fields: {
|
||||
inventory: { id: 1 },
|
||||
created_by: { id: 1, username: 'Athena' },
|
||||
modified_by: { id: 1, username: 'Apollo' },
|
||||
},
|
||||
created: '2020-04-25T01:23:45.678901Z',
|
||||
modified: '2020-04-25T01:23:45.678901Z',
|
||||
},
|
||||
});
|
||||
history = createMemoryHistory({
|
||||
initialEntries: [`/inventories/constructed_inventory/1/groups/1/details`],
|
||||
});
|
||||
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Route path="/inventories/:inventoryType/:id/groups">
|
||||
<InventoryGroup setBreadcrumb={() => {}} inventory={inventory} />
|
||||
</Route>,
|
||||
{ context: { router: { history } } }
|
||||
);
|
||||
});
|
||||
await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0);
|
||||
});
|
||||
test('Constructed Inventory expect all tabs to exist, including Back to Groups', async () => {
|
||||
const routedTabs = wrapper.find('RoutedTabs');
|
||||
expect(routedTabs).toHaveLength(1);
|
||||
|
||||
const tabs = routedTabs.prop('tabsArray');
|
||||
expect(tabs[0].link).toEqual(`/inventories/constructed_inventory/1/groups`);
|
||||
expect(tabs[1].name).toEqual('Details');
|
||||
expect(tabs[2].name).toEqual('Related Groups');
|
||||
expect(tabs[3].name).toEqual('Hosts');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import React, { useState } from 'react';
|
||||
import { t } from '@lingui/macro';
|
||||
|
||||
import { useHistory, useParams } from 'react-router-dom';
|
||||
import { Button } from '@patternfly/react-core';
|
||||
|
||||
import { useHistory, useParams } from 'react-router-dom';
|
||||
import { VariablesDetail } from 'components/CodeEditor';
|
||||
import { CardBody, CardActionsRow } from 'components/Card';
|
||||
import ErrorDetail from 'components/ErrorDetail';
|
||||
@@ -12,6 +11,7 @@ import { DetailList, Detail, UserDateDetail } from 'components/DetailList';
|
||||
import InventoryGroupsDeleteModal from '../shared/InventoryGroupsDeleteModal';
|
||||
|
||||
function InventoryGroupDetail({ inventoryGroup }) {
|
||||
const { inventoryType } = useParams();
|
||||
const {
|
||||
summary_fields: { created_by, modified_by, user_capabilities },
|
||||
created,
|
||||
@@ -47,31 +47,33 @@ function InventoryGroupDetail({ inventoryGroup }) {
|
||||
user={modified_by}
|
||||
/>
|
||||
</DetailList>
|
||||
<CardActionsRow>
|
||||
{user_capabilities?.edit && (
|
||||
<Button
|
||||
ouiaId="inventory-group-detail-edit-button"
|
||||
variant="primary"
|
||||
aria-label={t`Edit`}
|
||||
onClick={() =>
|
||||
history.push(
|
||||
`/inventories/inventory/${params.id}/groups/${params.groupId}/edit`
|
||||
)
|
||||
}
|
||||
>
|
||||
{t`Edit`}
|
||||
</Button>
|
||||
)}
|
||||
{user_capabilities?.delete && (
|
||||
<InventoryGroupsDeleteModal
|
||||
groups={[inventoryGroup]}
|
||||
isDisabled={false}
|
||||
onAfterDelete={() =>
|
||||
history.push(`/inventories/inventory/${params.id}/groups`)
|
||||
}
|
||||
/>
|
||||
)}
|
||||
</CardActionsRow>
|
||||
{inventoryType !== 'constructed_inventory' && (
|
||||
<CardActionsRow>
|
||||
{user_capabilities?.edit && (
|
||||
<Button
|
||||
ouiaId="inventory-group-detail-edit-button"
|
||||
variant="primary"
|
||||
aria-label={t`Edit`}
|
||||
onClick={() =>
|
||||
history.push(
|
||||
`/inventories/inventory/${params.id}/groups/${params.groupId}/edit`
|
||||
)
|
||||
}
|
||||
>
|
||||
{t`Edit`}
|
||||
</Button>
|
||||
)}
|
||||
{user_capabilities?.delete && (
|
||||
<InventoryGroupsDeleteModal
|
||||
groups={[inventoryGroup]}
|
||||
isDisabled={false}
|
||||
onAfterDelete={() =>
|
||||
history.push(`/inventories/inventory/${params.id}/groups`)
|
||||
}
|
||||
/>
|
||||
)}
|
||||
</CardActionsRow>
|
||||
)}
|
||||
{error && (
|
||||
<AlertModal
|
||||
variant="error"
|
||||
|
||||
@@ -39,6 +39,14 @@ describe('<InventoryGroupDetail />', () => {
|
||||
let history;
|
||||
|
||||
describe('User has full permissions', () => {
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
groupId: 3,
|
||||
inventoryType: 'inventory',
|
||||
}),
|
||||
}));
|
||||
beforeEach(async () => {
|
||||
await act(async () => {
|
||||
history = createMemoryHistory({
|
||||
@@ -116,6 +124,14 @@ describe('<InventoryGroupDetail />', () => {
|
||||
});
|
||||
|
||||
describe('User has read-only permissions', () => {
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
groupId: 3,
|
||||
inventoryType: 'inventory',
|
||||
}),
|
||||
}));
|
||||
test('should hide edit/delete buttons', async () => {
|
||||
const readOnlyGroup = {
|
||||
...inventoryGroup,
|
||||
@@ -159,4 +175,48 @@ describe('<InventoryGroupDetail />', () => {
|
||||
expect(wrapper.find('button[aria-label="Delete"]').length).toBe(0);
|
||||
});
|
||||
});
|
||||
describe('Cannot edit or delete constructed inventory group', () => {
|
||||
beforeEach(async () => {
|
||||
await act(async () => {
|
||||
history = createMemoryHistory({
|
||||
initialEntries: ['/inventories/inventory/1/groups/1/details'],
|
||||
});
|
||||
wrapper = mountWithContexts(
|
||||
<Route path="/inventories/inventory/:id/groups/:groupId">
|
||||
<InventoryGroupDetail inventoryGroup={inventoryGroup} />
|
||||
</Route>,
|
||||
{
|
||||
context: {
|
||||
router: {
|
||||
history,
|
||||
route: {
|
||||
location: history.location,
|
||||
match: {
|
||||
params: {
|
||||
id: 1,
|
||||
group: 2,
|
||||
inventoryType: 'constructed_inventory',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'ContentLoading',
|
||||
(el) => el.length === 0
|
||||
);
|
||||
});
|
||||
});
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
test('should not show edit button', () => {
|
||||
const editButton = wrapper.find('Button[aria-label="edit"]');
|
||||
expect(editButton.length).toBe(0);
|
||||
expect(wrapper.find('Button[aria-label="delete"]').length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -34,7 +34,7 @@ const QS_CONFIG = getQSConfig('host', {
|
||||
function InventoryGroupHostList() {
|
||||
const [isAdHocLaunchLoading, setIsAdHocLaunchLoading] = useState(false);
|
||||
const [isModalOpen, setIsModalOpen] = useState(false);
|
||||
const { id: inventoryId, groupId } = useParams();
|
||||
const { id: inventoryId, groupId, inventoryType } = useParams();
|
||||
const location = useLocation();
|
||||
|
||||
const {
|
||||
@@ -259,8 +259,8 @@ function InventoryGroupHostList() {
|
||||
key={host.id}
|
||||
rowIndex={index}
|
||||
host={host}
|
||||
detailUrl={`/inventories/inventory/${inventoryId}/hosts/${host.id}/details`}
|
||||
editUrl={`/inventories/inventory/${inventoryId}/hosts/${host.id}/edit`}
|
||||
detailUrl={`/inventories/${inventoryType}/${inventoryId}/hosts/${host.id}/details`}
|
||||
editUrl={`/inventories/${inventoryType}/${inventoryId}/hosts/${host.id}/edit`}
|
||||
isSelected={selected.some((row) => row.id === host.id)}
|
||||
onSelect={() => handleSelect(host)}
|
||||
/>
|
||||
|
||||
@@ -1,25 +1,20 @@
|
||||
import React from 'react';
|
||||
import { bool, func, number, oneOfType, string } from 'prop-types';
|
||||
import { bool, func } from 'prop-types';
|
||||
import { t } from '@lingui/macro';
|
||||
|
||||
import { Button } from '@patternfly/react-core';
|
||||
import { Tr, Td } from '@patternfly/react-table';
|
||||
|
||||
import { Link } from 'react-router-dom';
|
||||
import { Link, useParams } from 'react-router-dom';
|
||||
import { PencilAltIcon } from '@patternfly/react-icons';
|
||||
import { ActionsTd, ActionItem } from 'components/PaginatedTable';
|
||||
import { Group } from 'types';
|
||||
|
||||
function InventoryGroupItem({
|
||||
group,
|
||||
inventoryId,
|
||||
isSelected,
|
||||
onSelect,
|
||||
rowIndex,
|
||||
}) {
|
||||
function InventoryGroupItem({ group, isSelected, onSelect, rowIndex }) {
|
||||
const { id: inventoryId, inventoryType } = useParams();
|
||||
const labelId = `check-action-${group.id}`;
|
||||
const detailUrl = `/inventories/inventory/${inventoryId}/groups/${group.id}/details`;
|
||||
const editUrl = `/inventories/inventory/${inventoryId}/groups/${group.id}/edit`;
|
||||
const detailUrl = `/inventories/${inventoryType}/${inventoryId}/groups/${group.id}/details`;
|
||||
const editUrl = `/inventories/${inventoryType}/${inventoryId}/groups/${group.id}/edit`;
|
||||
|
||||
return (
|
||||
<Tr id={`group-row-${group.id}`} ouiaId={`group-row-${group.id}`}>
|
||||
@@ -36,29 +31,30 @@ function InventoryGroupItem({
|
||||
<b>{group.name}</b>
|
||||
</Link>
|
||||
</Td>
|
||||
<ActionsTd dataLabel={t`Actions`} gridColumns="auto 40px">
|
||||
<ActionItem
|
||||
visible={group.summary_fields.user_capabilities.edit}
|
||||
tooltip={t`Edit group`}
|
||||
>
|
||||
<Button
|
||||
ouiaId={`${group.id}-edit-button`}
|
||||
aria-label={t`Edit Group`}
|
||||
variant="plain"
|
||||
component={Link}
|
||||
to={editUrl}
|
||||
{inventoryType !== 'constructed_inventory' && (
|
||||
<ActionsTd dataLabel={t`Actions`} gridColumns="auto 40px">
|
||||
<ActionItem
|
||||
visible={group.summary_fields.user_capabilities.edit}
|
||||
tooltip={t`Edit group`}
|
||||
>
|
||||
<PencilAltIcon />
|
||||
</Button>
|
||||
</ActionItem>
|
||||
</ActionsTd>
|
||||
<Button
|
||||
ouiaId={`${group.id}-edit-button`}
|
||||
aria-label={t`Edit Group`}
|
||||
variant="plain"
|
||||
component={Link}
|
||||
to={editUrl}
|
||||
>
|
||||
<PencilAltIcon />
|
||||
</Button>
|
||||
</ActionItem>
|
||||
</ActionsTd>
|
||||
)}
|
||||
</Tr>
|
||||
);
|
||||
}
|
||||
|
||||
InventoryGroupItem.propTypes = {
|
||||
group: Group.isRequired,
|
||||
inventoryId: oneOfType([number, string]).isRequired,
|
||||
isSelected: bool.isRequired,
|
||||
onSelect: func.isRequired,
|
||||
};
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import React from 'react';
|
||||
import { Route } from 'react-router-dom';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { mountWithContexts } from '../../../../testUtils/enzymeHelpers';
|
||||
import InventoryGroupItem from './InventoryGroupItem';
|
||||
|
||||
@@ -57,4 +59,39 @@ describe('<InventoryGroupItem />', () => {
|
||||
);
|
||||
expect(wrapper.find('PencilAltIcon').exists()).toBeFalsy();
|
||||
});
|
||||
test('edit button should be hidden from constructed inventory group', async () => {
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({ id: 42, inventoryType: 'constructed_inventory' }),
|
||||
}));
|
||||
const mockGroup = {
|
||||
id: 2,
|
||||
type: 'group',
|
||||
name: 'foo',
|
||||
inventory: 1,
|
||||
summary_fields: {
|
||||
user_capabilities: {
|
||||
edit: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Route path="/inventories/:inventoryType/:id/groups">
|
||||
<table>
|
||||
<tbody>
|
||||
<InventoryGroupItem
|
||||
group={mockGroup}
|
||||
inventoryId={1}
|
||||
isSelected={false}
|
||||
onSelect={() => {}}
|
||||
/>
|
||||
</tbody>
|
||||
</table>
|
||||
</Route>
|
||||
);
|
||||
});
|
||||
expect(wrapper.find('PencilAltIcon').exists()).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -16,11 +16,14 @@ function InventoryGroups({ setBreadcrumb, inventory }) {
|
||||
inventory={inventory}
|
||||
/>
|
||||
</Route>
|
||||
<Route key="details" path="/inventories/inventory/:id/groups/:groupId/">
|
||||
<Route
|
||||
key="details"
|
||||
path="/inventories/:inventoryType/:id/groups/:groupId/"
|
||||
>
|
||||
<InventoryGroup inventory={inventory} setBreadcrumb={setBreadcrumb} />
|
||||
</Route>
|
||||
<Route key="list" path="/inventories/inventory/:id/groups">
|
||||
<InventoryGroupsList />
|
||||
<Route key="list" path="/inventories/:inventoryType/:id/groups">
|
||||
<InventoryGroupsList inventory={inventory} />
|
||||
</Route>
|
||||
</Switch>
|
||||
);
|
||||
|
||||
@@ -5,7 +5,7 @@ import { Tooltip } from '@patternfly/react-core';
|
||||
import { getQSConfig, parseQueryString } from 'util/qs';
|
||||
import useSelected from 'hooks/useSelected';
|
||||
import useRequest from 'hooks/useRequest';
|
||||
import { InventoriesAPI } from 'api';
|
||||
import { ConstructedInventoriesAPI, InventoriesAPI } from 'api';
|
||||
import DataListToolbar from 'components/DataListToolbar';
|
||||
import PaginatedTable, {
|
||||
HeaderRow,
|
||||
@@ -29,7 +29,7 @@ function cannotDelete(item) {
|
||||
|
||||
function InventoryGroupsList() {
|
||||
const location = useLocation();
|
||||
const { id: inventoryId } = useParams();
|
||||
const { id: inventoryId, inventoryType } = useParams();
|
||||
const [isAdHocLaunchLoading, setIsAdHocLaunchLoading] = useState(false);
|
||||
|
||||
const {
|
||||
@@ -104,8 +104,10 @@ function InventoryGroupsList() {
|
||||
};
|
||||
|
||||
const canAdd =
|
||||
actions && Object.prototype.hasOwnProperty.call(actions, 'POST');
|
||||
|
||||
actions &&
|
||||
Object.prototype.hasOwnProperty.call(actions, 'POST') &&
|
||||
inventoryType !== 'constructed_inventory';
|
||||
const canDelete = inventoryType !== 'constructed_inventory';
|
||||
return (
|
||||
<PaginatedTable
|
||||
contentError={contentError}
|
||||
@@ -139,14 +141,15 @@ function InventoryGroupsList() {
|
||||
headerRow={
|
||||
<HeaderRow qsConfig={QS_CONFIG}>
|
||||
<HeaderCell sortKey="name">{t`Name`}</HeaderCell>
|
||||
<HeaderCell>{t`Actions`}</HeaderCell>
|
||||
{inventoryType !== 'constructed_inventory' && (
|
||||
<HeaderCell>{t`Actions`}</HeaderCell>
|
||||
)}
|
||||
</HeaderRow>
|
||||
}
|
||||
renderRow={(item, index) => (
|
||||
<InventoryGroupItem
|
||||
key={item.id}
|
||||
group={item}
|
||||
inventoryId={inventoryId}
|
||||
isSelected={selected.some((row) => row.id === item.id)}
|
||||
onSelect={() => handleSelect(item)}
|
||||
rowIndex={index}
|
||||
@@ -177,20 +180,28 @@ function InventoryGroupsList() {
|
||||
/>,
|
||||
]
|
||||
: []),
|
||||
<Tooltip content={renderTooltip()} position="top" key="delete">
|
||||
<div>
|
||||
<InventoryGroupsDeleteModal
|
||||
groups={selected}
|
||||
isDisabled={
|
||||
selected.length === 0 || selected.some(cannotDelete)
|
||||
}
|
||||
onAfterDelete={() => {
|
||||
fetchData();
|
||||
clearSelected();
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</Tooltip>,
|
||||
...(canDelete
|
||||
? [
|
||||
<Tooltip
|
||||
content={renderTooltip()}
|
||||
position="top"
|
||||
key="delete"
|
||||
>
|
||||
<div>
|
||||
<InventoryGroupsDeleteModal
|
||||
groups={selected}
|
||||
isDisabled={
|
||||
selected.length === 0 || selected.some(cannotDelete)
|
||||
}
|
||||
onAfterDelete={() => {
|
||||
fetchData();
|
||||
clearSelected();
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
</Tooltip>,
|
||||
]
|
||||
: []),
|
||||
]}
|
||||
/>
|
||||
)}
|
||||
|
||||
@@ -10,12 +10,6 @@ import {
|
||||
import InventoryGroupsList from './InventoryGroupsList';
|
||||
|
||||
jest.mock('../../../api');
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
}),
|
||||
}));
|
||||
const mockGroups = [
|
||||
{
|
||||
id: 1,
|
||||
@@ -60,7 +54,14 @@ const mockGroups = [
|
||||
|
||||
describe('<InventoryGroupsList />', () => {
|
||||
let wrapper;
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
groupId: 2,
|
||||
inventoryType: 'inventory',
|
||||
}),
|
||||
}));
|
||||
beforeEach(async () => {
|
||||
InventoriesAPI.readGroups.mockResolvedValue({
|
||||
data: {
|
||||
@@ -96,7 +97,7 @@ describe('<InventoryGroupsList />', () => {
|
||||
});
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Route path="/inventories/inventory/:id/groups">
|
||||
<Route path="/inventories/:inventoryType/:id/groups">
|
||||
<InventoryGroupsList />
|
||||
</Route>,
|
||||
{
|
||||
@@ -316,3 +317,78 @@ describe('<InventoryGroupsList/> error handling', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Constructed Inventory group', () => {
|
||||
let wrapper;
|
||||
let history;
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
groupId: 2,
|
||||
inventoryType: 'constructed_inventory',
|
||||
}),
|
||||
}));
|
||||
|
||||
beforeEach(async () => {
|
||||
InventoriesAPI.readGroups.mockResolvedValue({
|
||||
data: {
|
||||
count: mockGroups.length,
|
||||
results: mockGroups,
|
||||
},
|
||||
});
|
||||
InventoriesAPI.readGroupsOptions.mockResolvedValue({
|
||||
data: {
|
||||
actions: {
|
||||
GET: {},
|
||||
POST: {},
|
||||
},
|
||||
},
|
||||
});
|
||||
InventoriesAPI.readAdHocOptions.mockResolvedValue({
|
||||
data: {
|
||||
actions: {
|
||||
GET: {
|
||||
module_name: {
|
||||
choices: [
|
||||
['command', 'command'],
|
||||
['shell', 'shell'],
|
||||
],
|
||||
},
|
||||
},
|
||||
POST: {},
|
||||
},
|
||||
},
|
||||
});
|
||||
history = createMemoryHistory({
|
||||
initialEntries: ['/inventories/constructed_inventory/3/groups'],
|
||||
});
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Route path="/inventories/:inventoryType/:id/groups">
|
||||
<InventoryGroupsList />
|
||||
</Route>,
|
||||
{
|
||||
context: {
|
||||
router: {
|
||||
history,
|
||||
route: {
|
||||
location: history.location,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
});
|
||||
await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
test('should not show add button', () => {
|
||||
expect(wrapper.find('ToolbarAddButton').length).toBe(0);
|
||||
expect(wrapper.find('ToolbarDeleteButton').length).toBe(0);
|
||||
expect(wrapper.find('AdHocCommands').length).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
Link,
|
||||
useRouteMatch,
|
||||
useLocation,
|
||||
useParams,
|
||||
} from 'react-router-dom';
|
||||
import { Card } from '@patternfly/react-core';
|
||||
import { CaretLeftIcon } from '@patternfly/react-icons';
|
||||
@@ -25,9 +26,9 @@ import InventoryHostGroups from '../InventoryHostGroups';
|
||||
|
||||
function InventoryHost({ setBreadcrumb, inventory }) {
|
||||
const location = useLocation();
|
||||
const match = useRouteMatch('/inventories/inventory/:id/hosts/:hostId');
|
||||
const hostListUrl = `/inventories/inventory/${inventory.id}/hosts`;
|
||||
|
||||
const { hostId, id: inventoryId, inventoryType } = useParams();
|
||||
const match = useRouteMatch('/inventories/:inventoryType/:id/hosts/:hostId');
|
||||
const hostListUrl = `/inventories/${inventoryType}/${inventory.id}/hosts`;
|
||||
const {
|
||||
result: { host },
|
||||
error: contentError,
|
||||
@@ -35,14 +36,11 @@ function InventoryHost({ setBreadcrumb, inventory }) {
|
||||
request: fetchHost,
|
||||
} = useRequest(
|
||||
useCallback(async () => {
|
||||
const response = await InventoriesAPI.readHostDetail(
|
||||
inventory.id,
|
||||
match.params.hostId
|
||||
);
|
||||
const response = await InventoriesAPI.readHostDetail(inventoryId, hostId);
|
||||
return {
|
||||
host: response,
|
||||
};
|
||||
}, [inventory.id, match.params.hostId]),
|
||||
}, [inventoryId, hostId]),
|
||||
{
|
||||
host: null,
|
||||
}
|
||||
@@ -120,37 +118,37 @@ function InventoryHost({ setBreadcrumb, inventory }) {
|
||||
{!isLoading && host && (
|
||||
<Switch>
|
||||
<Redirect
|
||||
from="/inventories/inventory/:id/hosts/:hostId"
|
||||
to="/inventories/inventory/:id/hosts/:hostId/details"
|
||||
from="/inventories/:inventoryType/:id/hosts/:hostId"
|
||||
to="/inventories/:inventoryType/:id/hosts/:hostId/details"
|
||||
exact
|
||||
/>
|
||||
<Route
|
||||
key="details"
|
||||
path="/inventories/inventory/:id/hosts/:hostId/details"
|
||||
path="/inventories/:inventoryType/:id/hosts/:hostId/details"
|
||||
>
|
||||
<InventoryHostDetail host={host} />
|
||||
</Route>
|
||||
<Route
|
||||
key="edit"
|
||||
path="/inventories/inventory/:id/hosts/:hostId/edit"
|
||||
path="/inventories/:inventoryType/:id/hosts/:hostId/edit"
|
||||
>
|
||||
<InventoryHostEdit host={host} inventory={inventory} />
|
||||
</Route>
|
||||
<Route
|
||||
key="facts"
|
||||
path="/inventories/inventory/:id/hosts/:hostId/facts"
|
||||
path="/inventories/:inventoryType/:id/hosts/:hostId/facts"
|
||||
>
|
||||
<InventoryHostFacts host={host} />
|
||||
</Route>
|
||||
<Route
|
||||
key="groups"
|
||||
path="/inventories/inventory/:id/hosts/:hostId/groups"
|
||||
path="/inventories/:inventoryType/:id/hosts/:hostId/groups"
|
||||
>
|
||||
<InventoryHostGroups />
|
||||
</Route>
|
||||
<Route
|
||||
key="jobs"
|
||||
path="/inventories/inventory/:id/hosts/:hostId/jobs"
|
||||
path="/inventories/:inventoryType/:id/hosts/:hostId/jobs"
|
||||
>
|
||||
<JobList defaultParams={{ job__hosts: host.id }} />
|
||||
</Route>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import 'styled-components/macro';
|
||||
import React, { useState } from 'react';
|
||||
import { Link, useHistory } from 'react-router-dom';
|
||||
import { Link, useHistory, useParams } from 'react-router-dom';
|
||||
|
||||
import { t } from '@lingui/macro';
|
||||
import { Button } from '@patternfly/react-core';
|
||||
@@ -16,6 +16,7 @@ import { HostsAPI } from 'api';
|
||||
import HostToggle from 'components/HostToggle';
|
||||
|
||||
function InventoryHostDetail({ host }) {
|
||||
const { inventoryType } = useParams();
|
||||
const {
|
||||
created,
|
||||
description,
|
||||
@@ -92,25 +93,27 @@ function InventoryHostDetail({ host }) {
|
||||
dataCy="inventory-host-detail-variables"
|
||||
/>
|
||||
</DetailList>
|
||||
<CardActionsRow>
|
||||
{user_capabilities?.edit && (
|
||||
<Button
|
||||
ouiaId="inventory-host-detail-edit-button"
|
||||
aria-label={t`edit`}
|
||||
component={Link}
|
||||
to={`/inventories/inventory/${inventory.id}/hosts/${id}/edit`}
|
||||
>
|
||||
{t`Edit`}
|
||||
</Button>
|
||||
)}
|
||||
{user_capabilities?.delete && (
|
||||
<DeleteButton
|
||||
name={name}
|
||||
modalTitle={t`Delete Host`}
|
||||
onConfirm={() => handleHostDelete()}
|
||||
/>
|
||||
)}
|
||||
</CardActionsRow>
|
||||
{inventoryType !== 'constructed_inventory' && (
|
||||
<CardActionsRow>
|
||||
{user_capabilities?.edit && (
|
||||
<Button
|
||||
ouiaId="inventory-host-detail-edit-button"
|
||||
aria-label={t`edit`}
|
||||
component={Link}
|
||||
to={`/inventories/inventory/${inventory.id}/hosts/${id}/edit`}
|
||||
>
|
||||
{t`Edit`}
|
||||
</Button>
|
||||
)}
|
||||
{user_capabilities?.delete && (
|
||||
<DeleteButton
|
||||
name={name}
|
||||
modalTitle={t`Delete Host`}
|
||||
onConfirm={() => handleHostDelete()}
|
||||
/>
|
||||
)}
|
||||
</CardActionsRow>
|
||||
)}
|
||||
{deletionError && (
|
||||
<AlertModal
|
||||
isOpen={deletionError}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import React from 'react';
|
||||
import { Route } from 'react-router-dom';
|
||||
import { createMemoryHistory } from 'history';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { HostsAPI } from 'api';
|
||||
import {
|
||||
@@ -10,93 +12,119 @@ import mockHost from '../shared/data.host.json';
|
||||
|
||||
jest.mock('../../../api');
|
||||
|
||||
describe('<InventoryHostDetail />', () => {
|
||||
describe('User has edit permissions', () => {
|
||||
let wrapper;
|
||||
|
||||
describe('User has edit permissions', () => {
|
||||
beforeAll(() => {
|
||||
beforeEach(async () => {
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(<InventoryHostDetail host={mockHost} />);
|
||||
});
|
||||
});
|
||||
test('should render Details', async () => {
|
||||
function assertDetail(label, value) {
|
||||
expect(wrapper.find(`Detail[label="${label}"] dt`).text()).toBe(label);
|
||||
expect(wrapper.find(`Detail[label="${label}"] dd`).text()).toBe(value);
|
||||
}
|
||||
|
||||
test('should render Details', async () => {
|
||||
function assertDetail(label, value) {
|
||||
expect(wrapper.find(`Detail[label="${label}"] dt`).text()).toBe(label);
|
||||
expect(wrapper.find(`Detail[label="${label}"] dd`).text()).toBe(value);
|
||||
}
|
||||
|
||||
assertDetail('Name', 'localhost');
|
||||
assertDetail('Description', 'localhost description');
|
||||
assertDetail('Created', '10/28/2019, 9:26:54 PM');
|
||||
assertDetail('Last Modified', '10/29/2019, 8:18:41 PM');
|
||||
expect(wrapper.find(`Detail[label="Activity"] Sparkline`)).toHaveLength(
|
||||
1
|
||||
);
|
||||
});
|
||||
|
||||
test('should show edit button for users with edit permission', () => {
|
||||
const editButton = wrapper.find('Button[aria-label="edit"]');
|
||||
expect(editButton.text()).toEqual('Edit');
|
||||
expect(editButton.prop('to')).toBe(
|
||||
'/inventories/inventory/3/hosts/2/edit'
|
||||
);
|
||||
});
|
||||
|
||||
test('expected api call is made for delete', async () => {
|
||||
await act(async () => {
|
||||
wrapper.find('DeleteButton').invoke('onConfirm')();
|
||||
});
|
||||
expect(HostsAPI.destroy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('Error dialog shown for failed deletion', async () => {
|
||||
HostsAPI.destroy.mockImplementationOnce(() =>
|
||||
Promise.reject(new Error())
|
||||
);
|
||||
await act(async () => {
|
||||
wrapper.find('DeleteButton').invoke('onConfirm')();
|
||||
});
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'Modal[title="Error!"]',
|
||||
(el) => el.length === 1
|
||||
);
|
||||
await act(async () => {
|
||||
wrapper.find('Modal[title="Error!"]').invoke('onClose')();
|
||||
});
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'Modal[title="Error!"]',
|
||||
(el) => el.length === 0
|
||||
);
|
||||
});
|
||||
assertDetail('Name', 'localhost');
|
||||
assertDetail('Description', 'localhost description');
|
||||
assertDetail('Created', '10/28/2019, 9:26:54 PM');
|
||||
assertDetail('Last Modified', '10/29/2019, 8:18:41 PM');
|
||||
expect(wrapper.find(`Detail[label="Activity"] Sparkline`)).toHaveLength(1);
|
||||
});
|
||||
|
||||
describe('User has read-only permissions', () => {
|
||||
beforeAll(() => {
|
||||
const readOnlyHost = {
|
||||
...mockHost,
|
||||
summary_fields: {
|
||||
...mockHost.summary_fields,
|
||||
user_capabilities: {
|
||||
...mockHost.summary_fields.user_capabilities,
|
||||
},
|
||||
},
|
||||
};
|
||||
readOnlyHost.summary_fields.user_capabilities.edit = false;
|
||||
readOnlyHost.summary_fields.recent_jobs = [];
|
||||
wrapper = mountWithContexts(<InventoryHostDetail host={readOnlyHost} />);
|
||||
});
|
||||
test('should show edit button for users with edit permission', () => {
|
||||
const editButton = wrapper.find('Button[aria-label="edit"]');
|
||||
expect(editButton.text()).toEqual('Edit');
|
||||
expect(editButton.prop('to')).toBe('/inventories/inventory/3/hosts/2/edit');
|
||||
});
|
||||
|
||||
test('should hide activity stream when there are no recent jobs', async () => {
|
||||
expect(wrapper.find(`Detail[label="Activity"] Sparkline`)).toHaveLength(
|
||||
0
|
||||
);
|
||||
const activity_detail = wrapper.find(`Detail[label="Activity"]`).at(0);
|
||||
expect(activity_detail.prop('isEmpty')).toEqual(true);
|
||||
test('expected api call is made for delete', async () => {
|
||||
await act(async () => {
|
||||
wrapper.find('DeleteButton').invoke('onConfirm')();
|
||||
});
|
||||
expect(HostsAPI.destroy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('should hide edit button for users without edit permission', async () => {
|
||||
expect(wrapper.find('Button[aria-label="edit"]').length).toBe(0);
|
||||
test('Error dialog shown for failed deletion', async () => {
|
||||
HostsAPI.destroy.mockImplementationOnce(() => Promise.reject(new Error()));
|
||||
await act(async () => {
|
||||
wrapper.find('DeleteButton').invoke('onConfirm')();
|
||||
});
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'Modal[title="Error!"]',
|
||||
(el) => el.length === 1
|
||||
);
|
||||
await act(async () => {
|
||||
wrapper.find('Modal[title="Error!"]').invoke('onClose')();
|
||||
});
|
||||
await waitForElement(
|
||||
wrapper,
|
||||
'Modal[title="Error!"]',
|
||||
(el) => el.length === 0
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('User has read-only permissions', () => {
|
||||
let wrapper;
|
||||
beforeEach(async () => {
|
||||
const readOnlyHost = {
|
||||
...mockHost,
|
||||
summary_fields: {
|
||||
...mockHost.summary_fields,
|
||||
user_capabilities: {
|
||||
...mockHost.summary_fields.user_capabilities,
|
||||
},
|
||||
},
|
||||
};
|
||||
readOnlyHost.summary_fields.user_capabilities.edit = false;
|
||||
readOnlyHost.summary_fields.recent_jobs = [];
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(<InventoryHostDetail host={readOnlyHost} />);
|
||||
});
|
||||
});
|
||||
|
||||
test('should hide activity stream when there are no recent jobs', async () => {
|
||||
expect(wrapper.find(`Detail[label="Activity"] Sparkline`)).toHaveLength(0);
|
||||
const activity_detail = wrapper.find(`Detail[label="Activity"]`).at(0);
|
||||
expect(activity_detail.prop('isEmpty')).toEqual(true);
|
||||
});
|
||||
|
||||
test('should hide edit button for users without edit permission', async () => {
|
||||
expect(wrapper.find('Button[aria-label="edit"]').length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Cannot delete a constructed inventory', () => {
|
||||
let wrapper;
|
||||
let history;
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 42,
|
||||
hostId: 3,
|
||||
inventoryType: 'constructed_inventory',
|
||||
}),
|
||||
}));
|
||||
|
||||
beforeEach(async () => {
|
||||
history = createMemoryHistory({
|
||||
initialEntries: [`/inventories/constructed_inventory/1/hosts/1/details`],
|
||||
});
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Route path="/inventories/:inventoryType/:id/hosts/:id/details">
|
||||
<InventoryHostDetail host={mockHost} />
|
||||
</Route>,
|
||||
{ context: { router: { history } } }
|
||||
);
|
||||
});
|
||||
});
|
||||
test('should not show edit button', () => {
|
||||
const editButton = wrapper.find('Button[aria-label="edit"]');
|
||||
expect(editButton.length).toBe(0);
|
||||
expect(wrapper.find('Button[aria-label="delete"]').length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,24 +1,19 @@
|
||||
import React from 'react';
|
||||
import { bool, func, number, oneOfType, string } from 'prop-types';
|
||||
import { bool, func } from 'prop-types';
|
||||
import { t } from '@lingui/macro';
|
||||
import { Button } from '@patternfly/react-core';
|
||||
import { Tr, Td } from '@patternfly/react-table';
|
||||
|
||||
import { Link } from 'react-router-dom';
|
||||
import { Link, useParams } from 'react-router-dom';
|
||||
import { PencilAltIcon } from '@patternfly/react-icons';
|
||||
import { ActionsTd, ActionItem } from 'components/PaginatedTable';
|
||||
import { Group } from 'types';
|
||||
|
||||
function InventoryHostGroupItem({
|
||||
group,
|
||||
inventoryId,
|
||||
isSelected,
|
||||
onSelect,
|
||||
rowIndex,
|
||||
}) {
|
||||
function InventoryHostGroupItem({ group, isSelected, onSelect, rowIndex }) {
|
||||
const { id: inventoryId, inventoryType } = useParams();
|
||||
const labelId = `check-action-${group.id}`;
|
||||
const detailUrl = `/inventories/inventory/${inventoryId}/groups/${group.id}/details`;
|
||||
const editUrl = `/inventories/inventory/${inventoryId}/groups/${group.id}/edit`;
|
||||
const detailUrl = `/inventories/${inventoryType}/${inventoryId}/groups/${group.id}/details`;
|
||||
const editUrl = `/inventories/${inventoryType}/${inventoryId}/groups/${group.id}/edit`;
|
||||
|
||||
return (
|
||||
<Tr
|
||||
@@ -59,7 +54,6 @@ function InventoryHostGroupItem({
|
||||
|
||||
InventoryHostGroupItem.propTypes = {
|
||||
group: Group.isRequired,
|
||||
inventoryId: oneOfType([number, string]).isRequired,
|
||||
isSelected: bool.isRequired,
|
||||
onSelect: func.isRequired,
|
||||
};
|
||||
|
||||
@@ -2,7 +2,7 @@ import React, { useCallback } from 'react';
|
||||
import { string, bool, func } from 'prop-types';
|
||||
import { t } from '@lingui/macro';
|
||||
import { Tr, Td } from '@patternfly/react-table';
|
||||
import { Link } from 'react-router-dom';
|
||||
import { Link, useParams } from 'react-router-dom';
|
||||
import { PencilAltIcon } from '@patternfly/react-icons';
|
||||
import { Button, Chip } from '@patternfly/react-core';
|
||||
import { HostsAPI } from 'api';
|
||||
@@ -22,6 +22,8 @@ function InventoryHostItem({
|
||||
onSelect,
|
||||
rowIndex,
|
||||
}) {
|
||||
const { inventoryType } = useParams();
|
||||
|
||||
const labelId = `check-action-${host.id}`;
|
||||
const initialGroups = host?.summary_fields?.groups ?? {
|
||||
results: [],
|
||||
@@ -95,20 +97,22 @@ function InventoryHostItem({
|
||||
gridColumns="auto 40px"
|
||||
>
|
||||
<HostToggle host={host} />
|
||||
<ActionItem
|
||||
visible={host.summary_fields.user_capabilities?.edit}
|
||||
tooltip={t`Edit host`}
|
||||
>
|
||||
<Button
|
||||
aria-label={t`Edit host`}
|
||||
ouiaId={`${host.id}-edit-button`}
|
||||
variant="plain"
|
||||
component={Link}
|
||||
to={`${editUrl}`}
|
||||
{inventoryType !== 'constructed_inventory' && (
|
||||
<ActionItem
|
||||
visible={host.summary_fields.user_capabilities?.edit}
|
||||
tooltip={t`Edit host`}
|
||||
>
|
||||
<PencilAltIcon />
|
||||
</Button>
|
||||
</ActionItem>
|
||||
<Button
|
||||
aria-label={t`Edit host`}
|
||||
ouiaId={`${host.id}-edit-button`}
|
||||
variant="plain"
|
||||
component={Link}
|
||||
to={`${editUrl}`}
|
||||
>
|
||||
<PencilAltIcon />
|
||||
</Button>
|
||||
</ActionItem>
|
||||
)}
|
||||
</ActionsTd>
|
||||
</Tr>
|
||||
{dismissableError && (
|
||||
|
||||
@@ -26,7 +26,7 @@ const QS_CONFIG = getQSConfig('host', {
|
||||
|
||||
function InventoryHostList() {
|
||||
const [isAdHocLaunchLoading, setIsAdHocLaunchLoading] = useState(false);
|
||||
const { id } = useParams();
|
||||
const { id, inventoryType } = useParams();
|
||||
const { search } = useLocation();
|
||||
|
||||
const {
|
||||
@@ -100,8 +100,10 @@ function InventoryHostList() {
|
||||
};
|
||||
|
||||
const canAdd =
|
||||
actions && Object.prototype.hasOwnProperty.call(actions, 'POST');
|
||||
|
||||
actions &&
|
||||
Object.prototype.hasOwnProperty.call(actions, 'POST') &&
|
||||
inventoryType !== 'constructed_inventory';
|
||||
const canDelete = inventoryType !== 'constructed_inventory';
|
||||
return (
|
||||
<>
|
||||
<PaginatedTable
|
||||
@@ -166,12 +168,16 @@ function InventoryHostList() {
|
||||
/>,
|
||||
]
|
||||
: []),
|
||||
<ToolbarDeleteButton
|
||||
key="delete"
|
||||
onDelete={handleDeleteHosts}
|
||||
itemsToDelete={selected}
|
||||
pluralizedItemName={t`Hosts`}
|
||||
/>,
|
||||
...(canDelete
|
||||
? [
|
||||
<ToolbarDeleteButton
|
||||
key="delete"
|
||||
onDelete={handleDeleteHosts}
|
||||
itemsToDelete={selected}
|
||||
pluralizedItemName={t`Hosts`}
|
||||
/>,
|
||||
]
|
||||
: []),
|
||||
]}
|
||||
/>
|
||||
)}
|
||||
@@ -179,8 +185,8 @@ function InventoryHostList() {
|
||||
<InventoryHostItem
|
||||
key={host.id}
|
||||
host={host}
|
||||
detailUrl={`/inventories/inventory/${id}/hosts/${host.id}/details`}
|
||||
editUrl={`/inventories/inventory/${id}/hosts/${host.id}/edit`}
|
||||
detailUrl={`/inventories/${inventoryType}/${id}/hosts/${host.id}/details`}
|
||||
editUrl={`/inventories/${inventoryType}/${id}/hosts/${host.id}/edit`}
|
||||
isSelected={selected.some((row) => row.id === host.id)}
|
||||
onSelect={() => handleSelect(host)}
|
||||
rowIndex={index}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import React from 'react';
|
||||
import { Route } from 'react-router-dom';
|
||||
import { createMemoryHistory } from 'history';
|
||||
import { act } from 'react-dom/test-utils';
|
||||
import { InventoriesAPI, HostsAPI } from 'api';
|
||||
import {
|
||||
@@ -359,3 +361,80 @@ describe('<InventoryHostList />', () => {
|
||||
expect(wrapper.find('AdHocCommands')).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Should not show add button for constructed inventory host list', () => {
|
||||
let wrapper;
|
||||
let history;
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useParams: () => ({
|
||||
id: 1,
|
||||
groupId: 2,
|
||||
inventoryType: 'constructed_inventory',
|
||||
}),
|
||||
}));
|
||||
|
||||
beforeEach(async () => {
|
||||
InventoriesAPI.readHosts.mockResolvedValue({
|
||||
data: {
|
||||
count: mockHosts.length,
|
||||
results: mockHosts,
|
||||
},
|
||||
});
|
||||
InventoriesAPI.readHostsOptions.mockResolvedValue({
|
||||
data: {
|
||||
actions: {
|
||||
GET: {},
|
||||
POST: {},
|
||||
},
|
||||
related_search_fields: ['first_key__search', 'ansible_facts'],
|
||||
},
|
||||
});
|
||||
|
||||
InventoriesAPI.readAdHocOptions.mockResolvedValue({
|
||||
data: {
|
||||
actions: {
|
||||
GET: {
|
||||
module_name: {
|
||||
choices: [
|
||||
['command', 'command'],
|
||||
['shell', 'shell'],
|
||||
],
|
||||
},
|
||||
},
|
||||
POST: {},
|
||||
},
|
||||
},
|
||||
});
|
||||
history = createMemoryHistory({
|
||||
initialEntries: ['/inventories/constructed_inventory/3/hosts'],
|
||||
});
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<Route path="/inventories/:inventoryType/:id/hosts">
|
||||
<InventoryHostList />
|
||||
</Route>,
|
||||
{
|
||||
context: {
|
||||
router: {
|
||||
history,
|
||||
route: {
|
||||
location: history.location,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
});
|
||||
await waitForElement(wrapper, 'ContentLoading', (el) => el.length === 0);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
test('should not show add button', () => {
|
||||
expect(wrapper.find('ToolbarAddButton').length).toBe(0);
|
||||
expect(wrapper.find('ToolbarDeleteButton').length).toBe(0);
|
||||
expect(wrapper.find('AdHocCommands').length).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,14 +8,14 @@ import InventoryHostList from './InventoryHostList';
|
||||
function InventoryHosts({ setBreadcrumb, inventory }) {
|
||||
return (
|
||||
<Switch>
|
||||
<Route key="host-add" path="/inventories/inventory/:id/hosts/add">
|
||||
<Route key="host-add" path="/inventories/:inventoryType/:id/hosts/add">
|
||||
<InventoryHostAdd inventory={inventory} />
|
||||
</Route>
|
||||
<Route key="host" path="/inventories/inventory/:id/hosts/:hostId">
|
||||
<Route key="host" path="/inventories/:inventoryType/:id/hosts/:hostId">
|
||||
<InventoryHost setBreadcrumb={setBreadcrumb} inventory={inventory} />
|
||||
</Route>
|
||||
<Route key="host-list" path="/inventories/inventory/:id/hosts">
|
||||
<InventoryHostList />
|
||||
<Route key="host-list" path="/inventories/:inventoryType/:id/hosts">
|
||||
<InventoryHostList inventory={inventory} />
|
||||
</Route>
|
||||
</Switch>
|
||||
);
|
||||
|
||||
@@ -1 +1 @@
|
||||
export { default } from './InventoryHostList';
|
||||
export { default } from './InventoryHosts';
|
||||
|
||||
@@ -135,6 +135,7 @@ function InventoryList() {
|
||||
|
||||
const addInventory = t`Add inventory`;
|
||||
const addSmartInventory = t`Add smart inventory`;
|
||||
const addConstructedInventory = t`Add constructed inventory`;
|
||||
const addButton = (
|
||||
<AddDropDownButton
|
||||
ouiaId="add-inventory-button"
|
||||
@@ -158,6 +159,15 @@ function InventoryList() {
|
||||
>
|
||||
{addSmartInventory}
|
||||
</DropdownItem>,
|
||||
<DropdownItem
|
||||
ouiaId="add-constructed-inventory-item"
|
||||
to={`${match.url}/constructed_inventory/add/`}
|
||||
component={Link}
|
||||
key={addConstructedInventory}
|
||||
aria-label={addConstructedInventory}
|
||||
>
|
||||
{addConstructedInventory}
|
||||
</DropdownItem>,
|
||||
]}
|
||||
/>
|
||||
);
|
||||
@@ -261,11 +271,6 @@ function InventoryList() {
|
||||
inventory={inventory}
|
||||
rowIndex={index}
|
||||
fetchInventories={fetchInventories}
|
||||
detailUrl={
|
||||
inventory.kind === 'smart'
|
||||
? `${match.url}/smart_inventory/${inventory.id}/details`
|
||||
: `${match.url}/inventory/${inventory.id}/details`
|
||||
}
|
||||
onSelect={() => {
|
||||
if (!inventory.pending_deletion) {
|
||||
handleSelect(inventory);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import React, { useState, useCallback } from 'react';
|
||||
import { string, bool, func } from 'prop-types';
|
||||
import { bool, func } from 'prop-types';
|
||||
|
||||
import { Button, Label } from '@patternfly/react-core';
|
||||
import { Tr, Td } from '@patternfly/react-table';
|
||||
@@ -12,6 +12,7 @@ import { Inventory } from 'types';
|
||||
import { ActionsTd, ActionItem, TdBreakWord } from 'components/PaginatedTable';
|
||||
import CopyButton from 'components/CopyButton';
|
||||
import StatusLabel from 'components/StatusLabel';
|
||||
import { getInventoryPath } from '../shared/utils';
|
||||
|
||||
function InventoryListItem({
|
||||
inventory,
|
||||
@@ -19,12 +20,10 @@ function InventoryListItem({
|
||||
isSelected,
|
||||
onSelect,
|
||||
onCopy,
|
||||
detailUrl,
|
||||
fetchInventories,
|
||||
}) {
|
||||
InventoryListItem.propTypes = {
|
||||
inventory: Inventory.isRequired,
|
||||
detailUrl: string.isRequired,
|
||||
isSelected: bool.isRequired,
|
||||
onSelect: func.isRequired,
|
||||
};
|
||||
@@ -50,6 +49,12 @@ function InventoryListItem({
|
||||
|
||||
const labelId = `check-action-${inventory.id}`;
|
||||
|
||||
const typeLabel = {
|
||||
'': t`Inventory`,
|
||||
smart: t`Smart Inventory`,
|
||||
constructed: t`Constructed Inventory`,
|
||||
};
|
||||
|
||||
let syncStatus = 'disabled';
|
||||
if (inventory.isSourceSyncRunning) {
|
||||
syncStatus = 'syncing';
|
||||
@@ -93,16 +98,20 @@ function InventoryListItem({
|
||||
{inventory.pending_deletion ? (
|
||||
<b>{inventory.name}</b>
|
||||
) : (
|
||||
<Link to={`${detailUrl}`}>
|
||||
<Link to={`${getInventoryPath(inventory)}/details`}>
|
||||
<b>{inventory.name}</b>
|
||||
</Link>
|
||||
)}
|
||||
</TdBreakWord>
|
||||
<Td dataLabel={t`Status`}>
|
||||
{inventory.kind !== 'smart' &&
|
||||
{inventory.kind === '' &&
|
||||
(inventory.has_inventory_sources ? (
|
||||
<Link
|
||||
to={`/inventories/inventory/${inventory.id}/jobs?job.or__inventoryupdate__inventory_source__inventory__id=${inventory.id}`}
|
||||
to={`${getInventoryPath(
|
||||
inventory
|
||||
)}/jobs?job.or__inventoryupdate__inventory_source__inventory__id=${
|
||||
inventory.id
|
||||
}`}
|
||||
>
|
||||
<StatusLabel
|
||||
status={syncStatus}
|
||||
@@ -113,9 +122,7 @@ function InventoryListItem({
|
||||
<StatusLabel status={syncStatus} tooltipContent={tooltipContent} />
|
||||
))}
|
||||
</Td>
|
||||
<Td dataLabel={t`Type`}>
|
||||
{inventory.kind === 'smart' ? t`Smart Inventory` : t`Inventory`}
|
||||
</Td>
|
||||
<Td dataLabel={t`Type`}>{typeLabel[inventory.kind]}</Td>
|
||||
<TdBreakWord key="organization" dataLabel={t`Organization`}>
|
||||
<Link
|
||||
to={`/organizations/${inventory?.summary_fields?.organization?.id}/details`}
|
||||
@@ -139,9 +146,7 @@ function InventoryListItem({
|
||||
aria-label={t`Edit Inventory`}
|
||||
variant="plain"
|
||||
component={Link}
|
||||
to={`/inventories/${
|
||||
inventory.kind === 'smart' ? 'smart_inventory' : 'inventory'
|
||||
}/${inventory.id}/edit`}
|
||||
to={`${getInventoryPath(inventory)}edit`}
|
||||
>
|
||||
<PencilAltIcon />
|
||||
</Button>
|
||||
|
||||
@@ -33,7 +33,7 @@ function InventoryRelatedGroupList() {
|
||||
const [isAdHocLaunchLoading, setIsAdHocLaunchLoading] = useState(false);
|
||||
const [associateError, setAssociateError] = useState(null);
|
||||
const [disassociateError, setDisassociateError] = useState(null);
|
||||
const { id: inventoryId, groupId } = useParams();
|
||||
const { id: inventoryId, groupId, inventoryType } = useParams();
|
||||
const location = useLocation();
|
||||
|
||||
const {
|
||||
@@ -69,9 +69,10 @@ function InventoryRelatedGroupList() {
|
||||
searchableKeys: getSearchableKeys(actions.data.actions?.GET),
|
||||
canAdd:
|
||||
actions.data.actions &&
|
||||
Object.prototype.hasOwnProperty.call(actions.data.actions, 'POST'),
|
||||
Object.prototype.hasOwnProperty.call(actions.data.actions, 'POST') &&
|
||||
inventoryType !== 'constructed_inventory',
|
||||
};
|
||||
}, [groupId, location.search, inventoryId]),
|
||||
}, [groupId, location.search, inventoryType, inventoryId]),
|
||||
{
|
||||
groups: [],
|
||||
itemCount: 0,
|
||||
|
||||
@@ -8,13 +8,13 @@ function InventoryRelatedGroups() {
<Switch>
<Route
key="addRelatedGroups"
path="/inventories/inventory/:id/groups/:groupId/nested_groups/add"
path="/inventories/:inventoryType/:id/groups/:groupId/nested_groups/add"
>
<InventoryRelatedGroupAdd />
</Route>
<Route
key="relatedGroups"
path="/inventories/inventory/:id/groups/:groupId/nested_groups"
path="/inventories/:inventoryType/:id/groups/:groupId/nested_groups"
>
<InventoryRelatedGroupList />
</Route>

@@ -23,6 +23,7 @@ import RelatedTemplateList from 'components/RelatedTemplateList';
import SmartInventoryDetail from './SmartInventoryDetail';
import SmartInventoryEdit from './SmartInventoryEdit';
import SmartInventoryHosts from './SmartInventoryHosts';
import { getInventoryPath } from './shared/utils';

function SmartInventory({ setBreadcrumb }) {
const location = useLocation();
@@ -101,8 +102,8 @@ function SmartInventory({ setBreadcrumb }) {
);
}

if (inventory?.kind === '') {
return <Redirect to={`/inventories/inventory/${inventory.id}/details`} />;
if (inventory && inventory?.kind !== 'smart') {
return <Redirect to={`${getInventoryPath(inventory)}/details`} />;
}

let showCardHeader = true;

@@ -8,3 +8,12 @@ const parseHostFilter = (value) => {
return value;
};
export default parseHostFilter;

export function getInventoryPath(inventory) {
const url = {
'': `/inventories/inventory/${inventory.id}`,
smart: `/inventories/smart_inventory/${inventory.id}`,
constructed: `/inventories/constructed_inventory/${inventory.id}`,
};
return url[inventory.kind];
}

@@ -1,4 +1,4 @@
import parseHostFilter from './utils';
import parseHostFilter, { getInventoryPath } from './utils';

describe('parseHostFilter', () => {
test('parse host filter', () => {
@@ -19,3 +19,21 @@ describe('parseHostFilter', () => {
});
});
});

describe('getInventoryPath', () => {
test('should return inventory path', () => {
expect(getInventoryPath({ id: 1, kind: '' })).toMatch(
'/inventories/inventory/1'
);
});
test('should return smart inventory path', () => {
expect(getInventoryPath({ id: 2, kind: 'smart' })).toMatch(
'/inventories/smart_inventory/2'
);
});
test('should return constructed inventory path', () => {
expect(getInventoryPath({ id: 3, kind: 'constructed' })).toMatch(
'/inventories/constructed_inventory/3'
);
});
});

@@ -196,7 +196,7 @@ class LookupModule(LookupBase):
if isinstance(rule[field_name], int):
rule[field_name] = [rule[field_name]]
# If it's not a list, we need to split it into a list
if not isinstance(rule[field_name], list):
if isinstance(rule[field_name], list):
rule[field_name] = rule[field_name].split(',')
for value in rule[field_name]:
# If they have a list of strings we want to strip each one in case it is space delimited
@@ -210,8 +210,7 @@ class LookupModule(LookupBase):

def process_list(self, field_name, rule, valid_list, rule_number):
return_values = []
# If it's not a list, we need to split it into a list
if not isinstance(rule[field_name], list):
if isinstance(rule[field_name], list):
rule[field_name] = rule[field_name].split(',')
for value in rule[field_name]:
value = value.strip()

@@ -54,7 +54,7 @@ options:
kind:
description:
- The kind field. Cannot be modified after creation.
choices: ["", "smart"]
|
||||
choices: ["", "smart", "constructed"]
|
||||
type: str
|
||||
host_filter:
|
||||
description:
|
||||
@@ -65,6 +65,11 @@ options:
|
||||
- list of Instance Groups for this Organization to run on.
|
||||
type: list
|
||||
elements: str
|
||||
input_inventories:
|
||||
description:
|
||||
- List of Inventories to use as input for Constructed Inventory.
|
||||
type: list
|
||||
elements: str
|
||||
prevent_instance_group_fallback:
|
||||
description:
|
||||
- Prevent falling back to instance groups set on the organization
|
||||
@@ -111,11 +116,12 @@ def main():
|
||||
description=dict(),
|
||||
organization=dict(required=True),
|
||||
variables=dict(type='dict'),
|
||||
kind=dict(choices=['', 'smart']),
|
||||
kind=dict(choices=['', 'smart', 'constructed']),
|
||||
host_filter=dict(),
|
||||
instance_groups=dict(type="list", elements='str'),
|
||||
prevent_instance_group_fallback=dict(type='bool'),
|
||||
state=dict(choices=['present', 'absent'], default='present'),
|
||||
input_inventories=dict(type='list', elements='str'),
|
||||
)
|
||||
|
||||
# Create a module for ourselves
|
||||
@@ -181,6 +187,13 @@ def main():
|
||||
if inventory and inventory['kind'] == '' and inventory_fields['kind'] == 'smart':
|
||||
module.fail_json(msg='You cannot turn a regular inventory into a "smart" inventory.')
|
||||
|
||||
if kind == 'constructed':
|
||||
input_inventory_names = module.params.get('input_inventories')
|
||||
if input_inventory_names is not None:
|
||||
association_fields['input_inventories'] = []
|
||||
for item in input_inventory_names:
|
||||
association_fields['input_inventories'].append(module.resolve_name_to_id('inventories', item))
|
||||
|
||||
# If the state was present and we can let the module build or update the existing inventory, this will return on its own
|
||||
module.create_or_update_if_needed(
|
||||
inventory,
|
||||
|
||||
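Taken together, the new `kind` choice and `input_inventories` option mean a constructed inventory can be declared from a playbook. The following is only an illustrative sketch; the inventory, organization, and input inventory names are invented for the example:

```yaml
- name: Create a constructed inventory fed by two existing inventories
  awx.awx.inventory:
    name: Shutdown product_dev hosts   # hypothetical inventory name
    organization: Default              # assumes this organization already exists
    kind: constructed
    input_inventories:
      - East                           # assumes these input inventories already exist
      - West
    state: present
```
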
@@ -64,6 +64,10 @@ options:
description:
- If specified, AWX will only import hosts that match this regular expression.
type: str
limit:
description:
- Enter host, group or pattern match
type: str
credential:
description:
- Credential to use for the source.
@@ -167,6 +171,7 @@ def main():
enabled_var=dict(),
enabled_value=dict(),
host_filter=dict(),
limit=dict(),
credential=dict(),
execution_environment=dict(),
custom_virtualenv=dict(),
@@ -272,6 +277,7 @@ def main():
'enabled_var',
'enabled_value',
'host_filter',
'limit',
)

# Layer in all remaining optional information

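For a sense of how the new `limit` option reads in practice, a task along these lines could narrow an existing inventory source to a host/group pattern; the source and inventory names are placeholders, and the task assumes the source already exists:

```yaml
- name: Restrict an existing inventory source to a host/group pattern
  awx.awx.inventory_source:
    name: My cloud source              # hypothetical existing source
    inventory: Demo Inventory
    limit: "product_dev:&shutdown"     # same pattern syntax as a job template limit
```
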
@@ -57,15 +57,7 @@ extends_documentation_fragment: awx.awx.auth

EXAMPLES = """
- name: Create a workflow approval node
workflow_job_template_node:
identifier: approval_test
approval_node:
name: approval_jt_name
timeout: 900
workflow: "Test Workflow"

- name: Launch the workflow with a timeout of 10 seconds
- name: Launch a workflow with a timeout of 10 seconds
workflow_launch:
workflow_template: "Test Workflow"
wait: False
@@ -74,7 +66,7 @@ EXAMPLES = """
- name: Wait for approval node to activate and approve
workflow_approval:
workflow_job_id: "{{ workflow.id }}"
name: approval_jt_name
name: Approve Me
interval: 10
timeout: 20
action: deny

@@ -183,21 +183,7 @@ options:
inventory:
description:
- Inventory applied as a prompt, if job template prompts for inventory
type: dict
suboptions:
name:
description:
- Name Inventory to be applied to job as launch-time prompts.
type: str
organization:
description:
- Name of key for use in model for organizational reference
type: dict
suboptions:
name:
description:
- The organization of the credentials exists in.
type: str
type: str
scm_branch:
description:
- SCM branch applied as a prompt, if job template prompts for SCM branch
@@ -558,10 +544,6 @@ EXAMPLES = '''
type: job_template
execution_environment:
name: My EE
inventory:
name: Test inventory
organization:
name: Default
related:
credentials:
- name: cyberark
@@ -631,6 +613,10 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id):
if workflow_node['unified_job_template']['type'] != 'workflow_approval':
module.fail_json(msg="Unable to Find unified_job_template: {0}".format(search_fields))

inventory = workflow_node.get('inventory')
if inventory:
workflow_node_fields['inventory'] = module.resolve_name_to_id('inventories', inventory)

# Lookup Values for other fields

for field_name in (
@@ -659,17 +645,6 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id):
'execution_environments', name_or_id=workflow_node['execution_environment']['name']
)['id']

# Two lookup methods are used based on a fix added in 21.11.0, and the awx export model
if 'inventory' in workflow_node:
if 'name' in workflow_node['inventory']:
inv_lookup_data = {}
if 'organization' in workflow_node['inventory']:
inv_lookup_data['organization'] = module.resolve_name_to_id('organizations', workflow_node['inventory']['organization']['name'])
workflow_node_fields['inventory'] = module.get_one(
'inventories', name_or_id=workflow_node['inventory']['name'], data=inv_lookup_data)['id']
else:
workflow_node_fields['inventory'] = module.get_one('inventories', name_or_id=workflow_node['inventory'])['id']

# Set Search fields
search_fields['workflow_job_template'] = workflow_node_fields['workflow_job_template'] = workflow_id

@@ -16,11 +16,13 @@ import glob
# Normally a read-only endpoint should not have a module (i.e. /api/v2/me) but sometimes we reuse a name
# For example, we have a role module but /api/v2/roles is a read only endpoint.
# This list indicates which read-only endpoints have associated modules with them.
read_only_endpoints_with_modules = ['settings', 'role', 'project_update', 'workflow_approval']
read_only_endpoints_with_modules = ['settings', 'role', 'project_update']

# If a module should not be created for an endpoint and the endpoint is not read-only add it here
# THINK HARD ABOUT DOING THIS
no_module_for_endpoint = []
no_module_for_endpoint = [
'constructed_inventories', # This is a view for inventory with kind=constructed
]

# Some modules work on the related fields of an endpoint. These modules will not have an auto-associated endpoint
no_endpoint_for_module = [

@@ -95,22 +95,6 @@
- results is failed
- "'In rule 2 end_on must either be an integer or in the format YYYY-MM-DD [HH:MM:SS]' in results.msg"

- name: Every Mondays
set_fact:
complex_rule: "{{ query(ruleset_plugin_name, '2022-04-30 10:30:45', rules=rrules, timezone='UTC' ) }}"
ignore_errors: True
register: results
vars:
rrules:
- frequency: 'day'
interval: 1
byweekday: 'monday'

- assert:
that:
- results is success
- "'DTSTART;TZID=UTC:20220430T103045 RRULE:FREQ=DAILY;BYDAY=MO;INTERVAL=1' == complex_rule"

- name: call rruleset with an invalid byweekday
set_fact:

@@ -1,57 +0,0 @@
---
- name: Generate a random string for names
set_fact:
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
test_prefix: AWX-Collection-tests-workflow_approval

- name: Generate random names for test objects
set_fact:
org_name: "{{ test_prefix }}-org-{{ test_id }}"
approval_node_name: "{{ test_prefix }}-node-{{ test_id }}"
wfjt_name: "{{ test_prefix }}-wfjt-{{ test_id }}"

- block:
- name: Create a new organization for test isolation
organization:
name: "{{ org_name }}"

- name: Create a workflow job template
workflow_job_template:
name: "{{ wfjt_name }}"
organization: "{{ org_name }}"

- name: Create approval node
workflow_job_template_node:
identifier: approval_test
approval_node:
name: "{{ approval_node_name }}" # Referenced later on
timeout: 900
workflow: "{{ wfjt_name }}"

# Launch and approve the workflow
- name: Launch the workflow
workflow_launch:
workflow_template: "{{ wfjt_name }}"
wait: False
register: workflow_job

- name: Wait for approval node to activate and approve
workflow_approval:
workflow_job_id: "{{ workflow_job.id }}"
name: "{{ approval_node_name }}"
interval: 10
timeout: 20
action: approve
register: result

- assert:
that:
- "result is changed"
- "result is not failed"

always:
- name: Delete the workflow job template
workflow_job_template:
name: "{{ wfjt_name }}"
state: absent
ignore_errors: True
@@ -493,7 +493,6 @@
workflow_job_template:
name: "copy_{{ wfjt_name }}"
organization: Default
ask_inventory_on_launch: true
survey_spec:
name: Basic Survey
description: Basic Survey
@@ -738,10 +737,6 @@
timeout: 23
execution_environment:
name: "{{ ee1 }}"
inventory:
name: Test inventory
organization:
name: Default
related:
credentials:
- name: "{{ scm_cred_name }}"

@@ -125,14 +125,23 @@ class Inventory(HasCopy, HasCreate, HasInstanceGroups, HasVariables, base.Base):
return inv_updates

page.register_page([resources.inventory, (resources.inventories, 'post'), (resources.inventory_copy, 'post')], Inventory)
page.register_page(
[
resources.inventory,
resources.constructed_inventory,
(resources.inventories, 'post'),
(resources.inventory_copy, 'post'),
(resources.constructed_inventories, 'post'),
],
Inventory,
)

class Inventories(page.PageList, Inventory):
pass

page.register_page([resources.inventories, resources.related_inventories], Inventories)
page.register_page([resources.inventories, resources.related_inventories, resources.constructed_inventories], Inventories)

class Group(HasCreate, HasVariables, base.Base):

@@ -57,7 +57,9 @@ class Resources(object):
_instance_related_jobs = r'instances/\d+/jobs/'
_instances = 'instances/'
_inventories = 'inventories/'
_constructed_inventories = 'constructed_inventories/'
_inventory = r'inventories/\d+/'
_constructed_inventory = r'constructed_inventories/\d+/'
_inventory_access_list = r'inventories/\d+/access_list/'
_inventory_copy = r'inventories/\d+/copy/'
_inventory_labels = r'inventories/\d+/labels/'

@@ -75,8 +75,7 @@ In the root of awx-operator:
-e image_version=devel \
-e image_pull_policy=Always \
-e service_type=nodeport \
-e namespace=awx \
-e nodeport_port=30080
-e namespace=awx
```
Check the operator with the following commands:

86
docs/inventory/constructed_inventory.md
Normal file
86
docs/inventory/constructed_inventory.md
Normal file
@@ -0,0 +1,86 @@
### Constructed inventory in AWX

Constructed inventory is a separate "kind" of inventory, alongside
normal (manual) inventories and "smart" inventories.
Its functionality overlaps with smart inventory, and the intent is for
smart inventory to be deprecated and eventually removed.

#### Demo Problem

This is branched from the original demo at:

https://github.com/AlanCoding/Ansible-inventory-file-examples/tree/master/issues/AWX371

Consider that we have 2 original "source" inventories named "East" and "West".

```
# East inventory original contents
host1 account_alias=product_dev
host2 account_alias=product_dev state=shutdown
host3 account_alias=sustaining
```

```
# West inventory original contents
host4 account_alias=product_dev
host6 account_alias=product_dev state=shutdown
host5 account_alias=sustaining state=shutdown
```

The user's intent is to operate on _shutdown_ hosts in the _product_dev_ group,
so these are two conditions that must both hold (a logical AND) when filtering.

To accomplish this, the user will create a constructed inventory with
the following properties.

`source_vars` =

```yaml
plugin: constructed
strict: true
use_vars_plugins: true # https://github.com/ansible/ansible/issues/75365
groups:
  shutdown: resolved_state == "shutdown"
  shutdown_in_product_dev: resolved_state == "shutdown" and account_alias == "product_dev"
compose:
  resolved_state: state | default("running")
```

`limit` = "shutdown_in_product_dev"

Then, when running a job template against the constructed inventory, it should
act on host2 and host6, because those are the two hosts that fit the criteria.

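For anyone who wants to sanity-check the construction logic outside AWX, the same `source_vars` can be dropped into a local file and fed to `ansible-inventory` together with the two source inventories. This is only an illustrative sketch; the `east.ini`, `west.ini`, and `constructed.yml` file names are made up, and the file contents simply mirror the example above:

```yaml
# constructed.yml -- same contents as the source_vars above
plugin: constructed
strict: true
use_vars_plugins: true
groups:
  shutdown: resolved_state == "shutdown"
  shutdown_in_product_dev: resolved_state == "shutdown" and account_alias == "product_dev"
compose:
  resolved_state: state | default("running")
```

Running `ansible-inventory -i east.ini -i west.ini -i constructed.yml --graph shutdown_in_product_dev` should then list only host2 and host6, which mirrors what a job template sees inside AWX.
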
#### Mechanics

The constructed inventory contents are materialized by an inventory update
which runs via `ansible-inventory`.
This is always configured to update on launch before a job,
but the user can still select a cache timeout value in case this takes too long.

When creating a constructed inventory, the API enforces that it always has
exactly one inventory source associated with it.
All inventory updates have an associated inventory source, and the fields
needed for constructed inventory (`source_vars` and `limit`) normally live
on the inventory source model.

#### Capabilities

In addition to filtering on hostvars, users will be able to filter based on
facts, which are prepared before the update in the same way as for jobs.

For filtering on related objects in the database, users will need to use "meta"
vars that are automatically prepared by the server.
These have names such as:
- `awx_inventory_name`
- `awx_inventory_id`

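As a hedged sketch of what that can look like, the following `source_vars` groups hosts both by a cached fact and by the input inventory they came from. The fact lookup and the exact meaning of `awx_inventory_name` are illustrative assumptions here, not confirmed behavior:

```yaml
plugin: constructed
strict: true
groups:
  # assumes cached facts are exposed to the constructed update under ansible_facts
  rhel_hosts: ansible_facts is defined and ansible_facts.get("distribution") == "RedHat"
  # assumes awx_inventory_name resolves to the name of the input inventory a host came from
  hosts_from_east: awx_inventory_name == "East"
```
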
#### Best Practices

It is very important to set the `strict` parameter to `True` so that users
can debug problems with their templates, because these can get complicated.
If the template fails to render, users will get an error in the
associated inventory update for that constructed inventory.

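To make the value of `strict` concrete, here is a small, purely illustrative config: the misspelled `acount_alias` variable is undefined, so with `strict: true` the inventory update fails with a template error instead of silently producing an empty group.

```yaml
plugin: constructed
strict: true
groups:
  # "acount_alias" is a deliberate typo; strict: true surfaces it as a template error
  shutdown_in_product_dev: resolved_state == "shutdown" and acount_alias == "product_dev"
compose:
  resolved_state: state | default("running")
```
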
When encountering errors, it may be prudent to increase `verbosity` to get
more details.

@@ -199,11 +199,11 @@ ADD tools/ansible/roles/dockerfile/files/rsyslog.conf /var/lib/awx/rsyslog/rsysl
ADD tools/ansible/roles/dockerfile/files/wait-for-migrations /usr/local/bin/wait-for-migrations
ADD tools/ansible/roles/dockerfile/files/stop-supervisor /usr/local/bin/stop-supervisor

ADD tools/ansible/roles/dockerfile/files/uwsgi.ini /etc/tower/uwsgi.ini

## File mappings
{% if build_dev|bool %}
ADD tools/docker-compose/launch_awx.sh /usr/bin/launch_awx.sh
ADD tools/docker-compose/nginx.conf /etc/nginx/nginx.conf
ADD tools/docker-compose/nginx.vh.default.conf /etc/nginx/conf.d/nginx.vh.default.conf
ADD tools/docker-compose/start_tests.sh /start_tests.sh
ADD tools/docker-compose/bootstrap_development.sh /usr/bin/bootstrap_development.sh
ADD tools/docker-compose/entrypoint.sh /entrypoint.sh
@@ -213,6 +213,7 @@ ADD https://raw.githubusercontent.com/containers/libpod/master/contrib/podmanima
{% else %}
ADD tools/ansible/roles/dockerfile/files/launch_awx.sh /usr/bin/launch_awx.sh
ADD tools/ansible/roles/dockerfile/files/launch_awx_task.sh /usr/bin/launch_awx_task.sh
ADD tools/ansible/roles/dockerfile/files/uwsgi.ini /etc/tower/uwsgi.ini
ADD {{ template_dest }}/supervisor.conf /etc/supervisord.conf
ADD {{ template_dest }}/supervisor_task.conf /etc/supervisord_task.conf
{% endif %}

@@ -30,8 +30,8 @@ environment =
DEV_RELOAD_COMMAND='supervisorctl -c /etc/supervisord_task.conf restart all; supervisorctl restart tower-processes:daphne tower-processes:wsbroadcast'
{% else %}
command = /var/lib/awx/venv/awx/bin/uwsgi /etc/tower/uwsgi.ini
{% endif %}
directory = /var/lib/awx
{% endif %}
autorestart = true
startsecs = 30
stopasgroup=true

@@ -33,6 +33,8 @@ Notable files:
### Prerequisites

- [Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the `docker` group, refer to the documentation for details)
- [docker-compose](https://pypi.org/project/docker-compose/) Python module.
- This also installs the `docker` Python module, which is incompatible with [`docker-py`](https://pypi.org/project/docker-py/). If you have previously installed `docker-py`, please uninstall it.
- [Docker Compose](https://docs.docker.com/compose/install/).
- [Ansible](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) will need to be installed as we use it to template files needed for the docker-compose.
- OpenSSL.

Some files were not shown because too many files have changed in this diff.