Merge pull request #13712 from ansible/feature_usage-collection

Allow soft deletion of HostMetrics and add usage collection utility
Hao Liu 2023-03-28 12:16:02 -04:00 committed by GitHub
commit 7f085e159f
33 changed files with 1259 additions and 205 deletions

View File

@ -56,6 +56,8 @@ from awx.main.models import (
ExecutionEnvironment,
Group,
Host,
HostMetric,
HostMetricSummaryMonthly,
Instance,
InstanceGroup,
InstanceLink,
@ -5386,6 +5388,32 @@ class InstanceHealthCheckSerializer(BaseSerializer):
fields = read_only_fields
class HostMetricSerializer(BaseSerializer):
show_capabilities = ['delete']
class Meta:
model = HostMetric
fields = (
"id",
"hostname",
"url",
"first_automation",
"last_automation",
"last_deleted",
"automated_counter",
"deleted_counter",
"deleted",
"used_in_inventories",
)
class HostMetricSummaryMonthlySerializer(BaseSerializer):
class Meta:
model = HostMetricSummaryMonthly
read_only_fields = ("id", "date", "license_consumed", "license_capacity", "hosts_added", "hosts_deleted", "indirectly_managed_hosts")
fields = read_only_fields
class InstanceGroupSerializer(BaseSerializer):
show_capabilities = ['edit', 'delete']
capacity = serializers.SerializerMethodField()

View File

@ -0,0 +1,18 @@
{% ifmeth GET %}
# Retrieve {{ model_verbose_name|title|anora }}:
Make a GET request to this resource to retrieve a single {{ model_verbose_name }}
record containing the following fields:
{% include "api/_result_fields_common.md" %}
{% endifmeth %}
{% ifmeth DELETE %}
# Delete {{ model_verbose_name|title|anora }}:
Make a DELETE request to this resource to soft-delete this {{ model_verbose_name }}.
A soft deletion marks the `deleted` field as true and excludes the host
metric from license calculations.
The deletion is undone automatically if the same hostname is automated again later.
{% endifmeth %}
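
For illustration, a minimal sketch of exercising this endpoint with Python's `requests` (the controller URL, credentials, and metric ID are hypothetical):

```python
import requests

BASE = 'https://awx.example.com/api/v2'  # hypothetical AWX URL
AUTH = ('admin', 'password')             # hypothetical credentials

# GET: retrieve a single host metric record
metric = requests.get(f'{BASE}/host_metrics/1/', auth=AUTH).json()
print(metric['deleted'])  # False while the metric is active

# DELETE: soft-deletes the record; the row is kept, `deleted` flips to
# true, and the host stops counting against the license (204 No Content)
resp = requests.delete(f'{BASE}/host_metrics/1/', auth=AUTH)
assert resp.status_code == 204
```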

View File

@ -0,0 +1,10 @@
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
from django.urls import re_path
from awx.api.views import HostMetricList, HostMetricDetail
urls = [re_path(r'^$', HostMetricList.as_view(), name='host_metric_list'), re_path(r'^(?P<pk>[0-9]+)/$', HostMetricDetail.as_view(), name='host_metric_detail')]
__all__ = ['urls']
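
A quick sketch of how the two named routes resolve (the pk is arbitrary; the paths follow the `/api/v2/host_metrics/` mount shown later in this diff):

```python
from django.urls import reverse

reverse('api:host_metric_list')                      # -> /api/v2/host_metrics/
reverse('api:host_metric_detail', kwargs={'pk': 1})  # -> /api/v2/host_metrics/1/
```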

View File

@ -30,6 +30,7 @@ from awx.api.views import (
OAuth2TokenList,
ApplicationOAuth2TokenList,
OAuth2ApplicationDetail,
# HostMetricSummaryMonthlyList, # It will be enabled in a future version of AWX
)
from awx.api.views.bulk import (
@ -50,6 +51,7 @@ from .inventory import urls as inventory_urls
from .execution_environments import urls as execution_environment_urls
from .team import urls as team_urls
from .host import urls as host_urls
from .host_metric import urls as host_metric_urls
from .group import urls as group_urls
from .inventory_source import urls as inventory_source_urls
from .inventory_update import urls as inventory_update_urls
@ -118,6 +120,9 @@ v2_urls = [
re_path(r'^teams/', include(team_urls)),
re_path(r'^inventories/', include(inventory_urls)),
re_path(r'^hosts/', include(host_urls)),
re_path(r'^host_metrics/', include(host_metric_urls)),
# It will be enabled in a future version of AWX
# re_path(r'^host_metric_summary_monthly/$', HostMetricSummaryMonthlyList.as_view(), name='host_metric_summary_monthly_list'),
re_path(r'^groups/', include(group_urls)),
re_path(r'^inventory_sources/', include(inventory_source_urls)),
re_path(r'^inventory_updates/', include(inventory_update_urls)),

View File

@ -17,7 +17,6 @@ from collections import OrderedDict
from urllib3.exceptions import ConnectTimeoutError
# Django
from django.conf import settings
from django.core.exceptions import FieldError, ObjectDoesNotExist
@ -1548,6 +1547,41 @@ class HostRelatedSearchMixin(object):
return ret
class HostMetricList(ListAPIView):
name = _("Host Metrics List")
model = models.HostMetric
serializer_class = serializers.HostMetricSerializer
permission_classes = (IsSystemAdminOrAuditor,)
search_fields = ('hostname', 'deleted')
def get_queryset(self):
return self.model.objects.all()
class HostMetricDetail(RetrieveDestroyAPIView):
name = _("Host Metric Detail")
model = models.HostMetric
serializer_class = serializers.HostMetricSerializer
permission_classes = (IsSystemAdminOrAuditor,)
def delete(self, request, *args, **kwargs):
self.get_object().soft_delete()
return Response(status=status.HTTP_204_NO_CONTENT)
# It will be enabled in a future version of AWX
# class HostMetricSummaryMonthlyList(ListAPIView):
# name = _("Host Metrics Summary Monthly")
# model = models.HostMetricSummaryMonthly
# serializer_class = serializers.HostMetricSummaryMonthlySerializer
# permission_classes = (IsSystemAdminOrAuditor,)
# search_fields = ('date',)
#
# def get_queryset(self):
# return self.model.objects.all()
class HostList(HostRelatedSearchMixin, ListCreateAPIView):
always_allow_superuser = False
model = models.Host

View File

@ -102,6 +102,9 @@ class ApiVersionRootView(APIView):
data['inventory_updates'] = reverse('api:inventory_update_list', request=request)
data['groups'] = reverse('api:group_list', request=request)
data['hosts'] = reverse('api:host_list', request=request)
data['host_metrics'] = reverse('api:host_metric_list', request=request)
# It will be enabled in a future version of AWX
# data['host_metric_summary_monthly'] = reverse('api:host_metric_summary_monthly_list', request=request)
data['job_templates'] = reverse('api:job_template_list', request=request)
data['jobs'] = reverse('api:job_list', request=request)
data['ad_hoc_commands'] = reverse('api:ad_hoc_command_list', request=request)

View File

@ -83,7 +83,7 @@ def _identify_lower(key, since, until, last_gather):
return lower, last_entries
@register('config', '1.4', description=_('General platform configuration.'))
@register('config', '1.5', description=_('General platform configuration.'))
def config(since, **kwargs):
license_info = get_license()
install_type = 'traditional'
@ -119,6 +119,7 @@ def config(since, **kwargs):
'compliant': license_info.get('compliant'),
'date_warning': license_info.get('date_warning'),
'date_expired': license_info.get('date_expired'),
'subscription_usage_model': getattr(settings, 'SUBSCRIPTION_USAGE_MODEL', ''), # 1.5+
'free_instances': license_info.get('free_instances', 0),
'total_licensed_instances': license_info.get('instance_count', 0),
'license_expiry': license_info.get('time_remaining', 0),

View File

@ -10,7 +10,7 @@ from rest_framework import serializers
# AWX
from awx.conf import fields, register, register_validate
from awx.main.models import ExecutionEnvironment
from awx.main.constants import SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS
logger = logging.getLogger('awx.main.conf')
@ -805,6 +805,23 @@ register(
category_slug='system',
)
register(
'SUBSCRIPTION_USAGE_MODEL',
field_class=fields.ChoiceField,
choices=[
('', _('Default model for AWX - no subscription. Deletion of host_metrics is not considered when counting managed hosts')),
(
SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS,
_('Usage based on unique managed nodes across a large historical time frame, with delete functionality for managed nodes that are no longer used'),
),
],
default='',
allow_blank=True,
label=_('Defines subscription usage model and shows Host Metrics'),
category=_('System'),
category_slug='system',
)
def logging_validate(serializer, attrs):
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
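
The new setting is consumed by comparing it to the constant; a condensed sketch of the gating pattern, mirroring the licensing hunk later in this diff:

```python
from django.conf import settings

from awx.main.constants import SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS
from awx.main.models import HostMetric

if getattr(settings, 'SUBSCRIPTION_USAGE_MODEL', '') == SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS:
    # unique-managed-hosts model: soft-deleted metrics are excluded
    automated_instances = HostMetric.active_objects.count()
else:
    # default model: every host metric ever recorded counts
    automated_instances = HostMetric.objects.count()
```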

View File

@ -106,3 +106,6 @@ JOB_VARIABLE_PREFIXES = [
ANSIBLE_RUNNER_NEEDS_UPDATE_MESSAGE = (
'\u001b[31m \u001b[1m This can be caused if the version of ansible-runner in your execution environment is out of date.\u001b[0m'
)
# Values for setting SUBSCRIPTION_USAGE_MODEL
SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS = 'unique_managed_hosts'

View File

@ -1,53 +1,230 @@
from django.core.management.base import BaseCommand
import datetime
from django.core.serializers.json import DjangoJSONEncoder
from awx.main.models.inventory import HostMetric
from awx.main.models.inventory import HostMetric, HostMetricSummaryMonthly
from awx.main.analytics.collectors import config
import json
import sys
import tempfile
import tarfile
import csv
CSV_PREFERRED_ROW_COUNT = 500000
BATCHED_FETCH_COUNT = 10000
class Command(BaseCommand):
help = 'Gather host metric data for offline licensing usage'
def host_metric_queryset(self, result, offset=0, limit=BATCHED_FETCH_COUNT):
list_of_queryset = list(
result.values(
'id',
'hostname',
'first_automation',
'last_automation',
'last_deleted',
'automated_counter',
'deleted_counter',
'deleted',
'used_in_inventories',
).order_by('first_automation')[offset : offset + limit]
)
return list_of_queryset
def host_metric_summary_monthly_queryset(self, result, offset=0, limit=BATCHED_FETCH_COUNT):
list_of_queryset = list(
result.values(
'id',
'date',
'license_consumed',
'license_capacity',
'hosts_added',
'hosts_deleted',
'indirectly_managed_hosts',
).order_by(
'date'
)[offset : offset + limit]
)
return list_of_queryset
def paginated_db_retrieval(self, type, filter_kwargs, rows_per_file):
# Yield successive batches of up to rows_per_file rows until the query is exhausted.
offset = 0
list_of_queryset = []
while True:
if type == 'host_metric':
result = HostMetric.objects.filter(**filter_kwargs)
list_of_queryset = self.host_metric_queryset(result, offset, rows_per_file)
elif type == 'host_metric_summary_monthly':
result = HostMetricSummaryMonthly.objects.filter(**filter_kwargs)
list_of_queryset = self.host_metric_summary_monthly_queryset(result, offset, rows_per_file)
if not list_of_queryset:
break
else:
yield list_of_queryset
offset += len(list_of_queryset)
def controlled_db_retrieval(self, type, filter_kwargs, offset=0, fetch_count=BATCHED_FETCH_COUNT):
if type == 'host_metric':
result = HostMetric.objects.filter(**filter_kwargs)
return self.host_metric_queryset(result, offset, fetch_count)
elif type == 'host_metric_summary_monthly':
result = HostMetricSummaryMonthly.objects.filter(**filter_kwargs)
return self.host_metric_summary_monthly_queryset(result, offset, fetch_count)
def write_to_csv(self, csv_file, list_of_queryset, always_header, first_write=False, mode='a'):
with open(csv_file, mode, newline='') as output_file:
try:
keys = list_of_queryset[0].keys() if list_of_queryset else []
dict_writer = csv.DictWriter(output_file, keys)
if always_header or first_write:
dict_writer.writeheader()
dict_writer.writerows(list_of_queryset)
except Exception as e:
print(e)
def csv_for_tar(self, temp_dir, type, filter_kwargs, rows_per_file, always_header=True):
for index, list_of_queryset in enumerate(self.paginated_db_retrieval(type, filter_kwargs, rows_per_file)):
csv_file = f'{temp_dir}/{type}{index+1}.csv'
arcname_file = f'{type}{index+1}.csv'
first_write = index == 0
self.write_to_csv(csv_file, list_of_queryset, always_header, first_write, 'w')
yield csv_file, arcname_file
def csv_for_tar_batched_fetch(self, temp_dir, type, filter_kwargs, rows_per_file, always_header=True):
# Fetch rows from the DB in batches of at most BATCHED_FETCH_COUNT, appending to the current CSV;
# once rows_per_file rows have been written, yield the finished file and start the next one.
csv_iteration = 1
offset = 0
rows_written_per_csv = 0
to_fetch = BATCHED_FETCH_COUNT
while True:
list_of_queryset = self.controlled_db_retrieval(type, filter_kwargs, offset, to_fetch)
if not list_of_queryset:
break
csv_file = f'{temp_dir}/{type}{csv_iteration}.csv'
arcname_file = f'{type}{csv_iteration}.csv'
self.write_to_csv(csv_file, list_of_queryset, always_header)
offset += to_fetch
rows_written_per_csv += to_fetch
always_header = False
remaining_rows_per_csv = rows_per_file - rows_written_per_csv
if not remaining_rows_per_csv:
yield csv_file, arcname_file
rows_written_per_csv = 0
always_header = True
to_fetch = BATCHED_FETCH_COUNT
csv_iteration += 1
elif remaining_rows_per_csv < BATCHED_FETCH_COUNT:
to_fetch = remaining_rows_per_csv
if rows_written_per_csv:
yield csv_file, arcname_file
def config_for_tar(self, options, temp_dir):
config_json = json.dumps(config(options.get('since')))
config_file = f'{temp_dir}/config.json'
arcname_file = 'config.json'
with open(config_file, 'w') as f:
f.write(config_json)
return config_file, arcname_file
def output_json(self, options, filter_kwargs):
with tempfile.TemporaryDirectory() as temp_dir:
for csv_detail in self.csv_for_tar(temp_dir, options.get('json', 'host_metric'), filter_kwargs, BATCHED_FETCH_COUNT, True):
csv_file = csv_detail[0]
with open(csv_file) as f:
reader = csv.DictReader(f)
rows = list(reader)
json_result = json.dumps(rows, cls=DjangoJSONEncoder)
print(json_result)
def output_csv(self, options, filter_kwargs):
with tempfile.TemporaryDirectory() as temp_dir:
for csv_detail in self.csv_for_tar(temp_dir, options.get('csv', 'host_metric'), filter_kwargs, BATCHED_FETCH_COUNT, False):
csv_file = csv_detail[0]
with open(csv_file) as f:
sys.stdout.write(f.read())
def output_tarball(self, options, filter_kwargs):
always_header = True
rows_per_file = options['rows_per_file'] or CSV_PREFERRED_ROW_COUNT
tar = tarfile.open("./host_metrics.tar.gz", "w:gz")
if rows_per_file <= BATCHED_FETCH_COUNT:
csv_function = self.csv_for_tar
else:
csv_function = self.csv_for_tar_batched_fetch
with tempfile.TemporaryDirectory() as temp_dir:
for csv_detail in csv_function(temp_dir, 'host_metric', filter_kwargs, rows_per_file, always_header):
tar.add(csv_detail[0], arcname=csv_detail[1])
for csv_detail in csv_function(temp_dir, 'host_metric_summary_monthly', filter_kwargs, rows_per_file, always_header):
tar.add(csv_detail[0], arcname=csv_detail[1])
config_file, arcname_file = self.config_for_tar(options, temp_dir)
tar.add(config_file, arcname=arcname_file)
tar.close()
def add_arguments(self, parser):
parser.add_argument('--since', type=datetime.datetime.fromisoformat, help='Start Date in ISO format YYYY-MM-DD')
parser.add_argument('--until', type=datetime.datetime.fromisoformat, help='End Date in ISO format YYYY-MM-DD')
parser.add_argument('--json', action='store_true', help='Select output as JSON')
parser.add_argument('--json', type=str, const='host_metric', nargs='?', help='Select output as JSON for host_metric or host_metric_summary_monthly')
parser.add_argument('--csv', type=str, const='host_metric', nargs='?', help='Select output as CSV for host_metric or host_metric_summary_monthly')
parser.add_argument('--tarball', action='store_true', help=f'Package CSV files into a tarball with up to {CSV_PREFERRED_ROW_COUNT} rows per file')
parser.add_argument('--rows_per_file', type=int, help=f'Split rows into chunks of at most {CSV_PREFERRED_ROW_COUNT} per file')
def handle(self, *args, **options):
since = options.get('since')
until = options.get('until')
if since is None and until is None:
print("No Arguments received")
return None
if since is not None and since.tzinfo is None:
since = since.replace(tzinfo=datetime.timezone.utc)
if until is not None and until.tzinfo is None:
until = until.replace(tzinfo=datetime.timezone.utc)
filter_kwargs = {}
if since is not None:
filter_kwargs['last_automation__gte'] = since
if until is not None:
filter_kwargs['last_automation__lte'] = until
result = HostMetric.objects.filter(**filter_kwargs)
filter_kwargs_host_metrics_summary = {}
if since is not None:
filter_kwargs_host_metrics_summary['date__gte'] = since
if options['rows_per_file'] and options.get('rows_per_file') > CSV_PREFERRED_ROW_COUNT:
print(f"rows_per_file exceeds the allowable limit of {CSV_PREFERRED_ROW_COUNT}.")
return
# if --json flag is set, output the result in json format
if options['json']:
list_of_queryset = list(result.values('hostname', 'first_automation', 'last_automation'))
json_result = json.dumps(list_of_queryset, cls=DjangoJSONEncoder)
print(json_result)
self.output_json(options, filter_kwargs)
elif options['csv']:
self.output_csv(options, filter_kwargs)
elif options['tarball']:
self.output_tarball(options, filter_kwargs)
# --json flag is not set, output in plain text
else:
print(f"Total Number of hosts automated: {len(result)}")
for item in result:
print(f"Printing up to {BATCHED_FETCH_COUNT} automated hosts:")
result = HostMetric.objects.filter(**filter_kwargs)
list_of_queryset = self.host_metric_queryset(result, 0, BATCHED_FETCH_COUNT)
for item in list_of_queryset:
print(
"Hostname : {hostname} | first_automation : {first_automation} | last_automation : {last_automation}".format(
hostname=item.hostname, first_automation=item.first_automation, last_automation=item.last_automation
hostname=item['hostname'], first_automation=item['first_automation'], last_automation=item['last_automation']
)
)
return
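
Assuming the command is registered as `host_metric` (the file path is not shown in this view), the utility can be driven from code or the CLI via Django's management framework; a sketch:

```python
from django.core.management import call_command

# Plain-text summary of hosts automated since a given date
call_command('host_metric', '--since', '2023-01-01')

# JSON or CSV output, for either host_metric or host_metric_summary_monthly
call_command('host_metric', '--since', '2023-01-01', '--json', 'host_metric_summary_monthly')

# CSVs packaged into ./host_metrics.tar.gz, chunked below the 500,000-row ceiling
call_command('host_metric', '--since', '2023-01-01', '--tarball', '--rows_per_file', '100000')
```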

View File

@ -79,6 +79,11 @@ class HostManager(models.Manager):
return qs
class HostMetricActiveManager(models.Manager):
def get_queryset(self):
return super().get_queryset().filter(deleted=False)
def get_ig_ig_mapping(ig_instance_mapping, instance_ig_mapping):
# Create IG mapping by union of all groups their instances are members of
ig_ig_mapping = {}

View File

@ -0,0 +1,43 @@
# Generated by Django 3.2.16 on 2023-02-03 09:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0179_change_cyberark_plugin_names'),
]
operations = [
migrations.AlterField(model_name='hostmetric', name='hostname', field=models.CharField(max_length=512, primary_key=False, serialize=True, unique=True)),
migrations.AddField(
model_name='hostmetric',
name='last_deleted',
field=models.DateTimeField(db_index=True, null=True, help_text='When the host was last deleted'),
),
migrations.AddField(
model_name='hostmetric',
name='automated_counter',
field=models.BigIntegerField(default=0, help_text='How many times the host was automated'),
),
migrations.AddField(
model_name='hostmetric',
name='deleted_counter',
field=models.IntegerField(default=0, help_text='How many times the host was deleted'),
),
migrations.AddField(
model_name='hostmetric',
name='deleted',
field=models.BooleanField(
default=False, help_text='Boolean flag saying whether the host is deleted and therefore not counted toward subscription consumption'
),
),
migrations.AddField(
model_name='hostmetric',
name='used_in_inventories',
field=models.IntegerField(null=True, help_text='How many inventories contain this host'),
),
migrations.AddField(
model_name='hostmetric', name='id', field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
),
]

View File

@ -0,0 +1,33 @@
# Generated by Django 3.2.16 on 2023-02-10 12:26
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0180_add_hostmetric_fields'),
]
operations = [
migrations.CreateModel(
name='HostMetricSummaryMonthly',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateField(unique=True)),
('license_consumed', models.BigIntegerField(default=0, help_text='How many unique hosts are consumed from the license')),
('license_capacity', models.BigIntegerField(default=0, help_text="License capacity as max. number of unique hosts")),
(
'hosts_added',
models.IntegerField(default=0, help_text='How many hosts were added in the associated month, consuming more license capacity'),
),
(
'hosts_deleted',
models.IntegerField(default=0, help_text='How many hosts were deleted in the associated month, freeing the license capacity'),
),
(
'indirectly_managed_hosts',
models.IntegerField(default=0, help_text='Manually entered number of indirectly managed hosts for a certain month'),
),
],
),
]

View File

@ -16,6 +16,7 @@ from awx.main.models.inventory import ( # noqa
Group,
Host,
HostMetric,
HostMetricSummaryMonthly,
Inventory,
InventorySource,
InventoryUpdate,

View File

@ -536,7 +536,7 @@ class JobEvent(BasePlaybookEvent):
return
job = self.job
from awx.main.models import Host, JobHostSummary, HostMetric # circular import
from awx.main.models import Host, JobHostSummary # circular import
all_hosts = Host.objects.filter(pk__in=self.host_map.values()).only('id', 'name')
existing_host_ids = set(h.id for h in all_hosts)
@ -575,12 +575,26 @@ class JobEvent(BasePlaybookEvent):
Host.objects.bulk_update(list(updated_hosts), ['last_job_id', 'last_job_host_summary_id'], batch_size=100)
# bulk-create
current_time = now()
HostMetric.objects.bulk_create(
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
# Create/update Host Metrics
self._update_host_metrics(updated_hosts_list)
@staticmethod
def _update_host_metrics(updated_hosts_list):
from awx.main.models import HostMetric # circular import
# bulk-create
current_time = now()
HostMetric.objects.bulk_create(
[HostMetric(hostname=hostname, last_automation=current_time) for hostname in updated_hosts_list], ignore_conflicts=True, batch_size=100
)
# bulk-update
batch_start, batch_size = 0, 1000
while batch_start <= len(updated_hosts_list):
batched_host_list = updated_hosts_list[batch_start : (batch_start + batch_size)]
HostMetric.objects.filter(hostname__in=batched_host_list).update(
last_automation=current_time, automated_counter=models.F('automated_counter') + 1, deleted=False
)
HostMetric.objects.filter(hostname__in=updated_hosts_list).update(last_automation=current_time)
batch_start += batch_size
@property
def job_verbosity(self):

View File

@ -32,7 +32,7 @@ from awx.main.fields import (
SmartFilterField,
OrderedManyToManyField,
)
from awx.main.managers import HostManager
from awx.main.managers import HostManager, HostMetricActiveManager
from awx.main.models.base import BaseModel, CommonModelNameNotUnique, VarsDictProperty, CLOUD_INVENTORY_SOURCES, prevent_search, accepts_json
from awx.main.models.events import InventoryUpdateEvent, UnpartitionedInventoryUpdateEvent
from awx.main.models.unified_jobs import UnifiedJob, UnifiedJobTemplate
@ -53,7 +53,7 @@ from awx.main.utils.execution_environments import to_container_path
from awx.main.utils.licensing import server_product_name
__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'SmartInventoryMembership']
__all__ = ['Inventory', 'Host', 'Group', 'InventorySource', 'InventoryUpdate', 'SmartInventoryMembership', 'HostMetric', 'HostMetricSummaryMonthly']
logger = logging.getLogger('awx.main.models.inventory')
@ -820,9 +820,47 @@ class Group(CommonModelNameNotUnique, RelatedJobsMixin):
class HostMetric(models.Model):
hostname = models.CharField(primary_key=True, max_length=512)
hostname = models.CharField(unique=True, max_length=512)
first_automation = models.DateTimeField(auto_now_add=True, null=False, db_index=True, help_text=_('When the host was first automated against'))
last_automation = models.DateTimeField(db_index=True, help_text=_('When the host was last automated against'))
last_deleted = models.DateTimeField(null=True, db_index=True, help_text=_('When the host was last deleted'))
automated_counter = models.BigIntegerField(default=0, help_text=_('How many times the host was automated'))
deleted_counter = models.IntegerField(default=0, help_text=_('How many times the host was deleted'))
deleted = models.BooleanField(
default=False, help_text=_('Boolean flag saying whether the host is deleted and therefore not counted toward subscription consumption')
)
used_in_inventories = models.IntegerField(null=True, help_text=_('How many inventories contain this host'))
objects = models.Manager()
active_objects = HostMetricActiveManager()
def get_absolute_url(self, request=None):
return reverse('api:host_metric_detail', kwargs={'pk': self.pk}, request=request)
def soft_delete(self):
if not self.deleted:
self.deleted_counter = (self.deleted_counter or 0) + 1
self.last_deleted = now()
self.deleted = True
self.save(update_fields=['deleted', 'deleted_counter', 'last_deleted'])
def soft_restore(self):
if self.deleted:
self.deleted = False
self.save(update_fields=['deleted'])
class HostMetricSummaryMonthly(models.Model):
"""
HostMetric summaries computed by scheduled task <TODO> monthly
"""
date = models.DateField(unique=True)
license_consumed = models.BigIntegerField(default=0, help_text=_("How many unique hosts are consumed from the license"))
license_capacity = models.BigIntegerField(default=0, help_text=_("License capacity as max. number of unique hosts"))
hosts_added = models.IntegerField(default=0, help_text=_("How many hosts were added in the associated month, consuming more license capacity"))
hosts_deleted = models.IntegerField(default=0, help_text=_("How many hosts were deleted in the associated month, freeing the license capacity"))
indirectly_managed_hosts = models.IntegerField(default=0, help_text=_("Manually entered number of indirectly managed hosts for a certain month"))
class InventorySourceOptions(BaseModel):
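
A short sketch of the soft-delete lifecycle these methods implement (the hostname is hypothetical):

```python
from awx.main.models import HostMetric

hm = HostMetric.objects.get(hostname='node01.example.org')  # hypothetical host
hm.soft_delete()   # sets deleted=True, bumps deleted_counter, stamps last_deleted
hm.soft_delete()   # no-op: repeat deletes are not re-counted

hm.soft_restore()  # clears the flag; deleted_counter and last_deleted are kept
```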

View File

@ -3,178 +3,209 @@ import pytest
from django.utils.timezone import now
from awx.main.models import Job, JobEvent, Inventory, Host, JobHostSummary
from django.db.models import Q
from awx.main.models import Job, JobEvent, Inventory, Host, JobHostSummary, HostMetric
@pytest.mark.django_db
@mock.patch('awx.main.models.events.emit_event_detail')
def test_parent_changed(emit):
j = Job()
j.save()
JobEvent.create_from_data(job_id=j.pk, uuid='abc123', event='playbook_on_task_start').save()
assert JobEvent.objects.count() == 1
for e in JobEvent.objects.all():
assert e.changed is False
class TestEvents:
def setup_method(self):
self.hostnames = []
self.host_map = dict()
self.inventory = None
self.job = None
JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='runner_on_ok', event_data={'res': {'changed': ['localhost']}}).save()
# the `playbook_on_stats` event is where we update the parent changed linkage
JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
events = JobEvent.objects.filter(event__in=['playbook_on_task_start', 'runner_on_ok'])
assert events.count() == 2
for e in events.all():
assert e.changed is True
@mock.patch('awx.main.models.events.emit_event_detail')
def test_parent_changed(self, emit):
j = Job()
j.save()
JobEvent.create_from_data(job_id=j.pk, uuid='abc123', event='playbook_on_task_start').save()
assert JobEvent.objects.count() == 1
for e in JobEvent.objects.all():
assert e.changed is False
JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='runner_on_ok', event_data={'res': {'changed': ['localhost']}}).save()
# the `playbook_on_stats` event is where we update the parent changed linkage
JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
events = JobEvent.objects.filter(event__in=['playbook_on_task_start', 'runner_on_ok'])
assert events.count() == 2
for e in events.all():
assert e.changed is True
@pytest.mark.django_db
@pytest.mark.parametrize('event', JobEvent.FAILED_EVENTS)
@mock.patch('awx.main.models.events.emit_event_detail')
def test_parent_failed(emit, event):
j = Job()
j.save()
JobEvent.create_from_data(job_id=j.pk, uuid='abc123', event='playbook_on_task_start').save()
assert JobEvent.objects.count() == 1
for e in JobEvent.objects.all():
assert e.failed is False
@pytest.mark.parametrize('event', JobEvent.FAILED_EVENTS)
@mock.patch('awx.main.models.events.emit_event_detail')
def test_parent_failed(self, emit, event):
j = Job()
j.save()
JobEvent.create_from_data(job_id=j.pk, uuid='abc123', event='playbook_on_task_start').save()
assert JobEvent.objects.count() == 1
for e in JobEvent.objects.all():
assert e.failed is False
JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event=event).save()
JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event=event).save()
# the `playbook_on_stats` event is where we update the parent failed linkage
JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
events = JobEvent.objects.filter(event__in=['playbook_on_task_start', event])
assert events.count() == 2
for e in events.all():
assert e.failed is True
# the `playbook_on_stats` event is where we update the parent failed linkage
JobEvent.create_from_data(job_id=j.pk, parent_uuid='abc123', event='playbook_on_stats').save()
events = JobEvent.objects.filter(event__in=['playbook_on_task_start', event])
assert events.count() == 2
for e in events.all():
assert e.failed is True
def test_host_summary_generation(self):
self._generate_hosts(100)
self._create_job_event(ok=dict((hostname, len(hostname)) for hostname in self.hostnames))
@pytest.mark.django_db
def test_host_summary_generation():
hostnames = [f'Host {i}' for i in range(100)]
inv = Inventory()
inv.save()
Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
j = Job(inventory=inv)
j.save()
host_map = dict((host.name, host.id) for host in inv.hosts.all())
JobEvent.create_from_data(
job_id=j.pk,
assert self.job.job_host_summaries.count() == len(self.hostnames)
assert sorted([s.host_name for s in self.job.job_host_summaries.all()]) == sorted(self.hostnames)
for s in self.job.job_host_summaries.all():
assert self.host_map[s.host_name] == s.host_id
assert s.ok == len(s.host_name)
assert s.changed == 0
assert s.dark == 0
assert s.failures == 0
assert s.ignored == 0
assert s.processed == 0
assert s.rescued == 0
assert s.skipped == 0
for host in Host.objects.all():
assert host.last_job_id == self.job.id
assert host.last_job_host_summary.host == host
def test_host_summary_generation_with_deleted_hosts(self):
self._generate_hosts(10)
# delete half of the hosts during the playbook run
for h in self.inventory.hosts.all()[:5]:
h.delete()
self._create_job_event(ok=dict((hostname, len(hostname)) for hostname in self.hostnames))
ids = sorted([s.host_id or -1 for s in self.job.job_host_summaries.order_by('id').all()])
names = sorted([s.host_name for s in self.job.job_host_summaries.all()])
assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10]
assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5', 'Host 6', 'Host 7', 'Host 8', 'Host 9']
def test_host_summary_generation_with_limit(self):
# Make an inventory with 10 hosts, run a playbook with a --limit
# pointed at *one* host,
# Verify that *only* that host has an associated JobHostSummary and that
# *only* that host has an updated value for .last_job.
self._generate_hosts(10)
# by making the playbook_on_stats *only* include Host 1, we're emulating
# the behavior of a `--limit=Host 1`
matching_host = Host.objects.get(name='Host 1')
self._create_job_event(ok={matching_host.name: len(matching_host.name)}) # effectively, limit=Host 1
# since the playbook_on_stats only references one host,
# there should *only* be one JobHostSummary record (and it should
# be related to the appropriate Host)
assert JobHostSummary.objects.count() == 1
for h in Host.objects.all():
if h.name == 'Host 1':
assert h.last_job_id == self.job.id
assert h.last_job_host_summary_id == JobHostSummary.objects.first().id
else:
# all other hosts in the inventory should remain untouched
assert h.last_job_id is None
assert h.last_job_host_summary_id is None
def test_host_metrics_insert(self):
self._generate_hosts(10)
self._create_job_event(
ok=dict((hostname, len(hostname)) for hostname in self.hostnames[0:3]),
failures=dict((hostname, len(hostname)) for hostname in self.hostnames[3:6]),
processed=dict((hostname, len(hostname)) for hostname in self.hostnames[6:9]),
skipped=dict((hostname, len(hostname)) for hostname in [self.hostnames[9]]),
)
metrics = HostMetric.objects.all()
assert len(metrics) == 10
for hm in metrics:
assert hm.automated_counter == 1
assert hm.last_automation is not None
assert hm.deleted is False
def test_host_metrics_update(self):
self._generate_hosts(12)
self._create_job_event(ok=dict((hostname, len(hostname)) for hostname in self.hostnames))
# Soft delete 6 host metrics
for hm in HostMetric.objects.filter(id__in=[1, 3, 5, 7, 9, 11]):
hm.soft_delete()
assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=0) & Q(last_deleted__isnull=True))) == 6
assert len(HostMetric.objects.filter(Q(deleted=True) & Q(deleted_counter=1) & Q(last_deleted__isnull=False))) == 6
# hostnames in 'ignored' and 'rescued' stats are ignored
self.job = Job(inventory=self.inventory)
self.job.save()
self._create_job_event(
ignored=dict((hostname, len(hostname)) for hostname in self.hostnames[0:6]),
rescued=dict((hostname, len(hostname)) for hostname in self.hostnames[6:11]),
)
assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=0) & Q(last_deleted__isnull=True))) == 6
assert len(HostMetric.objects.filter(Q(deleted=True) & Q(deleted_counter=1) & Q(last_deleted__isnull=False))) == 6
# hostnames in 'changed', 'dark', 'failures', 'ok', 'processed', 'skipped' are processed
self.job = Job(inventory=self.inventory)
self.job.save()
self._create_job_event(
changed=dict((hostname, len(hostname)) for hostname in self.hostnames[0:2]),
dark=dict((hostname, len(hostname)) for hostname in self.hostnames[2:4]),
failures=dict((hostname, len(hostname)) for hostname in self.hostnames[4:6]),
ok=dict((hostname, len(hostname)) for hostname in self.hostnames[6:8]),
processed=dict((hostname, len(hostname)) for hostname in self.hostnames[8:10]),
skipped=dict((hostname, len(hostname)) for hostname in self.hostnames[10:12]),
)
assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=0) & Q(last_deleted__isnull=True))) == 6
assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=1) & Q(last_deleted__isnull=False))) == 6
def _generate_hosts(self, cnt, id_from=0):
self.hostnames = [f'Host {i}' for i in range(id_from, id_from + cnt)]
self.inventory = Inventory()
self.inventory.save()
Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=self.inventory.id) for h in self.hostnames])
self.job = Job(inventory=self.inventory)
self.job.save()
# host map is a data structure that tracks a mapping of host name --> ID
# for the inventory, _regardless_ of whether or not there's a limit
# applied to the actual playbook run
self.host_map = dict((host.name, host.id) for host in self.inventory.hosts.all())
def _create_job_event(
self,
parent_uuid='abc123',
event='playbook_on_stats',
event_data={
'ok': dict((hostname, len(hostname)) for hostname in hostnames),
'changed': {},
'dark': {},
'failures': {},
'ignored': {},
'processed': {},
'rescued': {},
'skipped': {},
},
host_map=host_map,
).save()
assert j.job_host_summaries.count() == len(hostnames)
assert sorted([s.host_name for s in j.job_host_summaries.all()]) == sorted(hostnames)
for s in j.job_host_summaries.all():
assert host_map[s.host_name] == s.host_id
assert s.ok == len(s.host_name)
assert s.changed == 0
assert s.dark == 0
assert s.failures == 0
assert s.ignored == 0
assert s.processed == 0
assert s.rescued == 0
assert s.skipped == 0
for host in Host.objects.all():
assert host.last_job_id == j.id
assert host.last_job_host_summary.host == host
@pytest.mark.django_db
def test_host_summary_generation_with_deleted_hosts():
hostnames = [f'Host {i}' for i in range(10)]
inv = Inventory()
inv.save()
Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
j = Job(inventory=inv)
j.save()
host_map = dict((host.name, host.id) for host in inv.hosts.all())
# delete half of the hosts during the playbook run
for h in inv.hosts.all()[:5]:
h.delete()
JobEvent.create_from_data(
job_id=j.pk,
parent_uuid='abc123',
event='playbook_on_stats',
event_data={
'ok': dict((hostname, len(hostname)) for hostname in hostnames),
'changed': {},
'dark': {},
'failures': {},
'ignored': {},
'processed': {},
'rescued': {},
'skipped': {},
},
host_map=host_map,
).save()
ids = sorted([s.host_id or -1 for s in j.job_host_summaries.order_by('id').all()])
names = sorted([s.host_name for s in j.job_host_summaries.all()])
assert ids == [-1, -1, -1, -1, -1, 6, 7, 8, 9, 10]
assert names == ['Host 0', 'Host 1', 'Host 2', 'Host 3', 'Host 4', 'Host 5', 'Host 6', 'Host 7', 'Host 8', 'Host 9']
@pytest.mark.django_db
def test_host_summary_generation_with_limit():
# Make an inventory with 10 hosts, run a playbook with a --limit
# pointed at *one* host,
# Verify that *only* that host has an associated JobHostSummary and that
# *only* that host has an updated value for .last_job.
hostnames = [f'Host {i}' for i in range(10)]
inv = Inventory()
inv.save()
Host.objects.bulk_create([Host(created=now(), modified=now(), name=h, inventory_id=inv.id) for h in hostnames])
j = Job(inventory=inv)
j.save()
# host map is a data structure that tracks a mapping of host name --> ID
# for the inventory, _regardless_ of whether or not there's a limit
# applied to the actual playbook run
host_map = dict((host.name, host.id) for host in inv.hosts.all())
# by making the playbook_on_stats *only* include Host 1, we're emulating
# the behavior of a `--limit=Host 1`
matching_host = Host.objects.get(name='Host 1')
JobEvent.create_from_data(
job_id=j.pk,
parent_uuid='abc123',
event='playbook_on_stats',
event_data={
'ok': {matching_host.name: len(matching_host.name)}, # effectively, limit=Host 1
'changed': {},
'dark': {},
'failures': {},
'ignored': {},
'processed': {},
'rescued': {},
'skipped': {},
},
host_map=host_map,
).save()
# since the playbook_on_stats only references one host,
# there should *only* be one JobHostSummary record (and it should
# be related to the appropriate Host)
assert JobHostSummary.objects.count() == 1
for h in Host.objects.all():
if h.name == 'Host 1':
assert h.last_job_id == j.id
assert h.last_job_host_summary_id == JobHostSummary.objects.first().id
else:
# all other hosts in the inventory should remain untouched
assert h.last_job_id is None
assert h.last_job_host_summary_id is None
ok=None,
changed=None,
dark=None,
failures=None,
ignored=None,
processed=None,
rescued=None,
skipped=None,
):
JobEvent.create_from_data(
job_id=self.job.pk,
parent_uuid=parent_uuid,
event=event,
event_data={
'ok': ok or {},
'changed': changed or {},
'dark': dark or {},
'failures': failures or {},
'ignored': ignored or {},
'processed': processed or {},
'rescued': rescued or {},
'skipped': skipped or {},
},
host_map=self.host_map,
).save()

View File

@ -20,3 +20,53 @@ def test_host_metrics_generation():
date_today = now().strftime('%Y-%m-%d')
result = HostMetric.objects.filter(first_automation__startswith=date_today).count()
assert result == len(hostnames)
@pytest.mark.django_db
def test_soft_delete():
hostnames = [f'Host to delete {i}' for i in range(2)]
current_time = now()
HostMetric.objects.bulk_create([HostMetric(hostname=h, last_automation=current_time, automated_counter=42) for h in hostnames])
hm = HostMetric.objects.get(hostname="Host to delete 0")
assert hm.last_deleted is None
last_deleted = None
for _ in range(3):
# soft delete 1st
# 2nd/3rd delete don't have an effect
hm.soft_delete()
if last_deleted is None:
last_deleted = hm.last_deleted
assert hm.deleted is True
assert hm.deleted_counter == 1
assert hm.last_deleted == last_deleted
assert hm.automated_counter == 42
# 2nd record is not touched
hm = HostMetric.objects.get(hostname="Host to delete 1")
assert hm.deleted is False
assert hm.deleted_counter == 0
assert hm.last_deleted is None
assert hm.automated_counter == 42
@pytest.mark.django_db
def test_soft_restore():
current_time = now()
HostMetric.objects.create(hostname="Host 1", last_automation=current_time, deleted=True)
HostMetric.objects.create(hostname="Host 2", last_automation=current_time, deleted=True, last_deleted=current_time)
HostMetric.objects.create(hostname="Host 3", last_automation=current_time, deleted=False, last_deleted=current_time)
HostMetric.objects.all().update(automated_counter=42, deleted_counter=10)
# 1. deleted, last_deleted not null
for hm in HostMetric.objects.all():
for _ in range(3):
hm.soft_restore()
assert hm.deleted is False
assert hm.automated_counter == 42 and hm.deleted_counter == 10
if hm.hostname == "Host 1":
assert hm.last_deleted is None
else:
assert hm.last_deleted == current_time

View File

@ -35,6 +35,7 @@ from cryptography import x509
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from awx.main.constants import SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS
MAX_INSTANCES = 9999999
@ -382,8 +383,15 @@ class Licenser(object):
current_instances = Host.objects.active_count()
license_date = int(attrs.get('license_date', 0) or 0)
automated_instances = HostMetric.objects.count()
first_host = HostMetric.objects.only('first_automation').order_by('first_automation').first()
subscription_model = getattr(settings, 'SUBSCRIPTION_USAGE_MODEL', '')
if subscription_model == SUBSCRIPTION_USAGE_MODEL_UNIQUE_HOSTS:
automated_instances = HostMetric.active_objects.count()
first_host = HostMetric.active_objects.only('first_automation').order_by('first_automation').first()
else:
automated_instances = HostMetric.objects.count()
first_host = HostMetric.objects.only('first_automation').order_by('first_automation').first()
if first_host:
automated_since = int(first_host.first_automation.timestamp())
else:

View File

@ -1028,3 +1028,8 @@ AWX_MOUNT_ISOLATED_PATHS_ON_K8S = False
CLUSTER_HOST_ID = socket.gethostname()
UI_NEXT = True
# License compliance for total host count. Possible values:
# - '': No model - Subscription not counted from Host Metrics
# - 'unique_managed_hosts': Compliant = automated - deleted hosts (using /api/v2/host_metrics/)
SUBSCRIPTION_USAGE_MODEL = ''

View File

@ -44,6 +44,7 @@ import WorkflowApprovalTemplates from './models/WorkflowApprovalTemplates';
import WorkflowJobTemplateNodes from './models/WorkflowJobTemplateNodes';
import WorkflowJobTemplates from './models/WorkflowJobTemplates';
import WorkflowJobs from './models/WorkflowJobs';
import HostMetrics from './models/HostMetrics';
const ActivityStreamAPI = new ActivityStream();
const AdHocCommandsAPI = new AdHocCommands();
@ -91,6 +92,7 @@ const WorkflowApprovalTemplatesAPI = new WorkflowApprovalTemplates();
const WorkflowJobTemplateNodesAPI = new WorkflowJobTemplateNodes();
const WorkflowJobTemplatesAPI = new WorkflowJobTemplates();
const WorkflowJobsAPI = new WorkflowJobs();
const HostMetricsAPI = new HostMetrics();
export {
ActivityStreamAPI,
@ -139,4 +141,5 @@ export {
WorkflowJobTemplateNodesAPI,
WorkflowJobTemplatesAPI,
WorkflowJobsAPI,
HostMetricsAPI,
};

View File

@ -0,0 +1,10 @@
import Base from '../Base';
class HostMetrics extends Base {
constructor(http) {
super(http);
this.baseUrl = 'api/v2/host_metrics/';
}
}
export default HostMetrics;

View File

@ -18,6 +18,10 @@ class Settings extends Base {
return this.http.get(`${this.baseUrl}all/`);
}
readSystem() {
return this.http.get(`${this.baseUrl}system/`);
}
updateCategory(category, data) {
return this.http.patch(`${this.baseUrl}${category}/`, data);
}

View File

@ -57,6 +57,7 @@ function DataListToolbar({
enableRelatedFuzzyFiltering,
handleIsAnsibleFactsSelected,
isFilterCleared,
advancedSearchDisabled,
}) {
const showExpandCollapse = onCompact && onExpand;
const [isKebabOpen, setIsKebabOpen] = useState(false);
@ -86,6 +87,10 @@ function DataListToolbar({
}),
[setIsKebabModalOpen]
);
const columns = [...searchColumns];
if (!advancedSearchDisabled) {
columns.push({ name: t`Advanced`, key: 'advanced' });
}
return (
<Toolbar
id={`${qsConfig.namespace}-list-toolbar`}
@ -134,10 +139,7 @@ function DataListToolbar({
<ToolbarItem>
<Search
qsConfig={qsConfig}
columns={[
...searchColumns,
{ name: t`Advanced`, key: 'advanced' },
]}
columns={columns}
searchableKeys={searchableKeys}
relatedSearchableKeys={relatedSearchableKeys}
onSearch={onSearch}
@ -224,6 +226,7 @@ DataListToolbar.propTypes = {
additionalControls: PropTypes.arrayOf(PropTypes.node),
enableNegativeFiltering: PropTypes.bool,
enableRelatedFuzzyFiltering: PropTypes.bool,
advancedSearchDisabled: PropTypes.bool,
};
DataListToolbar.defaultProps = {
@ -243,6 +246,7 @@ DataListToolbar.defaultProps = {
additionalControls: [],
enableNegativeFiltering: true,
enableRelatedFuzzyFiltering: true,
advancedSearchDisabled: false,
};
export default DataListToolbar;

View File

@ -8,6 +8,7 @@ import useRequest, { useDismissableError } from 'hooks/useRequest';
import AlertModal from 'components/AlertModal';
import ErrorDetail from 'components/ErrorDetail';
import { useSession } from './Session';
import { SettingsAPI } from '../api';
// eslint-disable-next-line import/prefer-default-export
export const ConfigContext = React.createContext({});
@ -40,6 +41,11 @@ export const ConfigProvider = ({ children }) => {
},
},
] = await Promise.all([ConfigAPI.read(), MeAPI.read()]);
let systemConfig = {};
if (me?.is_superuser || me?.is_system_auditor) {
const { data: systemConfigResults } = await SettingsAPI.readSystem();
systemConfig = systemConfigResults;
}
const [
{
@ -62,10 +68,21 @@ export const ConfigProvider = ({ children }) => {
role_level: 'execution_environment_admin_role',
}),
]);
return { ...data, me, adminOrgCount, notifAdminCount, execEnvAdminCount };
return {
...data,
me,
adminOrgCount,
notifAdminCount,
execEnvAdminCount,
systemConfig,
};
}, []),
{ adminOrgCount: 0, notifAdminCount: 0, execEnvAdminCount: 0 }
{
adminOrgCount: 0,
notifAdminCount: 0,
execEnvAdminCount: 0,
systemConfig: {},
}
);
const { error, dismissError } = useDismissableError(configError);
@ -112,6 +129,7 @@ export const useUserProfile = () => {
isOrgAdmin: config.adminOrgCount,
isNotificationAdmin: config.notifAdminCount,
isExecEnvAdmin: config.execEnvAdminCount,
systemConfig: config.systemConfig,
};
};

View File

@ -23,6 +23,7 @@ import TopologyView from 'screens/TopologyView';
import Users from 'screens/User';
import WorkflowApprovals from 'screens/WorkflowApproval';
import { Jobs } from 'screens/Job';
import HostMetrics from 'screens/HostMetrics';
function getRouteConfig(userProfile = {}) {
let routeConfig = [
@ -55,6 +56,11 @@ function getRouteConfig(userProfile = {}) {
path: '/workflow_approvals',
screen: WorkflowApprovals,
},
{
title: <Trans>Host Metrics</Trans>,
path: '/host_metrics',
screen: HostMetrics,
},
],
},
{
@ -178,9 +184,15 @@ function getRouteConfig(userProfile = {}) {
const deleteRouteGroup = (name) => {
routeConfig = routeConfig.filter(({ groupId }) => !groupId.includes(name));
};
if (
userProfile?.systemConfig?.SUBSCRIPTION_USAGE_MODEL !==
'unique_managed_hosts'
) {
deleteRoute('host_metrics');
}
if (userProfile?.isSuperUser || userProfile?.isSystemAuditor)
return routeConfig;
deleteRoute('host_metrics');
deleteRouteGroup('settings');
deleteRoute('management_jobs');
if (userProfile?.isOrgAdmin) return routeConfig;

View File

@ -7,6 +7,7 @@ const userProfile = {
isOrgAdmin: false,
isNotificationAdmin: false,
isExecEnvAdmin: false,
systemConfig: { SUBSCRIPTION_USAGE_MODEL: 'unique_managed_hosts' },
};
const filterPaths = (sidebar) => {
@ -29,6 +30,7 @@ describe('getRouteConfig', () => {
'/schedules',
'/activity_stream',
'/workflow_approvals',
'/host_metrics',
'/templates',
'/credentials',
'/projects',
@ -58,6 +60,7 @@ describe('getRouteConfig', () => {
'/schedules',
'/activity_stream',
'/workflow_approvals',
'/host_metrics',
'/templates',
'/credentials',
'/projects',

View File

@ -0,0 +1,156 @@
import React, { useCallback, useEffect, useState } from 'react';
import { t } from '@lingui/macro';
import ScreenHeader from 'components/ScreenHeader/ScreenHeader';
import { HostMetricsAPI } from 'api';
import useRequest from 'hooks/useRequest';
import PaginatedTable, {
HeaderRow,
HeaderCell,
} from 'components/PaginatedTable';
import DataListToolbar from 'components/DataListToolbar';
import { getQSConfig, parseQueryString } from 'util/qs';
import { Card, PageSection } from '@patternfly/react-core';
import { useLocation } from 'react-router-dom';
import useSelected from 'hooks/useSelected';
import HostMetricsListItem from './HostMetricsListItem';
import HostMetricsDeleteButton from './HostMetricsDeleteButton';
const QS_CONFIG = getQSConfig('host_metrics', {
page: 1,
page_size: 20,
order_by: 'hostname',
deleted: false,
});
function HostMetrics() {
const location = useLocation();
const [breadcrumbConfig] = useState({
'/host_metrics': t`Host Metrics`,
});
const {
result: { count, results },
isLoading,
error,
request: readHostMetrics,
} = useRequest(
useCallback(async () => {
const params = parseQueryString(QS_CONFIG, location.search);
const list = await HostMetricsAPI.read(params);
return {
count: list.data.count,
results: list.data.results,
};
}, [location]),
{ results: [], count: 0 }
);
useEffect(() => {
readHostMetrics();
}, [readHostMetrics]);
const { selected, isAllSelected, handleSelect, selectAll, clearSelected } =
useSelected(results);
return (
<>
<ScreenHeader streamType="none" breadcrumbConfig={breadcrumbConfig} />
<PageSection>
<Card>
<PaginatedTable
contentError={error}
hasContentLoading={isLoading}
items={results}
itemCount={count}
pluralizedItemName={t`Host Metrics`}
renderRow={(item, index) => (
<HostMetricsListItem
key={item.id}
item={item}
isSelected={selected.some(
(row) => row.hostname === item.hostname
)}
onSelect={() => handleSelect(item)}
rowIndex={index}
/>
)}
qsConfig={QS_CONFIG}
toolbarSearchColumns={[
{
name: t`Hostname`,
key: 'hostname__icontains',
isDefault: true,
},
]}
toolbarSearchableKeys={[]}
toolbarRelatedSearchableKeys={[]}
renderToolbar={(props) => (
<DataListToolbar
{...props}
advancedSearchDisabled
fillWidth
isAllSelected={isAllSelected}
onSelectAll={selectAll}
additionalControls={[
<HostMetricsDeleteButton
key="delete"
onDelete={() =>
Promise.all(
selected.map((hostMetric) =>
HostMetricsAPI.destroy(hostMetric.id)
)
).then(() => {
readHostMetrics();
clearSelected();
})
}
itemsToDelete={selected}
pluralizedItemName={t`Host Metrics`}
/>,
]}
/>
)}
headerRow={
<HeaderRow qsConfig={QS_CONFIG}>
<HeaderCell sortKey="hostname">{t`Hostname`}</HeaderCell>
<HeaderCell
sortKey="first_automation"
tooltip={t`When the host was first automated`}
>
{t`First automated`}
</HeaderCell>
<HeaderCell
sortKey="last_automation"
tooltip={t`When the host was last automated`}
>
{t`Last automated`}
</HeaderCell>
<HeaderCell
sortKey="automated_counter"
tooltip={t`How many times the host was automated`}
>
{t`Automation`}
</HeaderCell>
<HeaderCell
sortKey="used_in_inventories"
tooltip={t`How many inventories the host is in, recomputed on a weekly schedule`}
>
{t`Inventories`}
</HeaderCell>
<HeaderCell
sortKey="deleted_counter"
tooltip={t`How many times the host was deleted`}
>
{t`Deleted`}
</HeaderCell>
</HeaderRow>
}
/>
</Card>
</PageSection>
</>
);
}
export { HostMetrics as _HostMetrics };
export default HostMetrics;

View File

@ -0,0 +1,69 @@
import React from 'react';
import { act } from 'react-dom/test-utils';
import { HostMetricsAPI } from 'api';
import {
mountWithContexts,
waitForElement,
} from '../../../testUtils/enzymeHelpers';
import HostMetrics from './HostMetrics';
jest.mock('../../api');
const mockHostMetrics = [
{
hostname: 'Host name',
first_automation: 'now',
last_automation: 'now',
automated_counter: 1,
used_in_inventories: 1,
deleted_counter: 1,
id: 1,
url: '',
},
];
function waitForLoaded(wrapper) {
return waitForElement(
wrapper,
'HostList',
(el) => el.find('ContentLoading').length === 0
);
}
describe('<HostMetrics />', () => {
beforeEach(() => {
HostMetricsAPI.read.mockResolvedValue({
data: {
count: mockHostMetrics.length,
results: mockHostMetrics,
},
});
});
afterEach(() => {
jest.clearAllMocks();
});
test('initially renders successfully', async () => {
await act(async () => {
mountWithContexts(
<HostMetrics
match={{ path: '/hosts', url: '/hosts' }}
location={{ search: '', pathname: '/hosts' }}
/>
);
});
});
test('HostMetrics are retrieved from the api and the component finishes loading', async () => {
let wrapper;
await act(async () => {
wrapper = mountWithContexts(<HostMetrics />);
});
await waitForLoaded(wrapper);
expect(HostMetricsAPI.read).toHaveBeenCalled();
expect(wrapper.find('HostMetricsListItem')).toHaveLength(1);
});
});

View File

@ -0,0 +1,205 @@
import React, { useState } from 'react';
import { func, node, string, arrayOf, shape } from 'prop-types';
import styled from 'styled-components';
import { Alert, Badge, Button, Tooltip } from '@patternfly/react-core';
import { t } from '@lingui/macro';
import { getRelatedResourceDeleteCounts } from 'util/getRelatedResourceDeleteDetails';
import AlertModal from '../../components/AlertModal';
import ErrorDetail from '../../components/ErrorDetail';
const WarningMessage = styled(Alert)`
margin-top: 10px;
`;
const Label = styled.span`
&& {
margin-right: 10px;
}
`;
const ItemToDelete = shape({
hostname: string.isRequired,
});
function HostMetricsDeleteButton({
itemsToDelete,
pluralizedItemName,
onDelete,
deleteDetailsRequests,
warningMessage,
deleteMessage,
}) {
const [isModalOpen, setIsModalOpen] = useState(false);
const [deleteDetails, setDeleteDetails] = useState(null);
const [isLoading, setIsLoading] = useState(false);
const [deleteMessageError, setDeleteMessageError] = useState();
const handleDelete = () => {
onDelete();
toggleModal();
};
const toggleModal = async (isOpen) => {
setIsLoading(true);
setDeleteDetails(null);
if (
isOpen &&
itemsToDelete.length === 1 &&
deleteDetailsRequests?.length > 0
) {
const { results, error } = await getRelatedResourceDeleteCounts(
deleteDetailsRequests
);
if (error) {
setDeleteMessageError(error);
} else {
setDeleteDetails(results);
}
}
setIsLoading(false);
setIsModalOpen(isOpen);
};
const renderTooltip = () => {
if (itemsToDelete.length) {
return t`Soft delete`;
}
return t`Select a row to delete`;
};
const modalTitle = t`Soft delete ${pluralizedItemName}?`;
const isDisabled = itemsToDelete.length === 0;
const buildDeleteWarning = () => {
const deleteMessages = [];
if (warningMessage) {
deleteMessages.push(warningMessage);
}
if (deleteMessage) {
if (itemsToDelete.length > 1 || deleteDetails) {
deleteMessages.push(deleteMessage);
}
}
return (
<div>
{deleteMessages.map((message) => (
<div aria-label={message} key={message}>
{message}
</div>
))}
{deleteDetails &&
Object.entries(deleteDetails).map(([key, value]) => (
<div key={key} aria-label={`${key}: ${value}`}>
<Label>{key}</Label>
<Badge>{value}</Badge>
</div>
))}
</div>
);
};
if (deleteMessageError) {
return (
<AlertModal
isOpen={deleteMessageError}
title={t`Error!`}
onClose={() => {
toggleModal(false);
setDeleteMessageError();
}}
>
<ErrorDetail error={deleteMessageError} />
</AlertModal>
);
}
const shouldShowDeleteWarning =
warningMessage ||
(itemsToDelete.length === 1 && deleteDetails) ||
(itemsToDelete.length > 1 && deleteMessage);
return (
<>
<Tooltip content={renderTooltip()} position="top">
<div>
<Button
variant="secondary"
isLoading={isLoading}
ouiaId="delete-button"
spinnerAriaValueText={isLoading ? 'Loading' : undefined}
aria-label={t`Delete`}
onClick={() => toggleModal(true)}
isDisabled={isDisabled}
>
{t`Delete`}
</Button>
</div>
</Tooltip>
{isModalOpen && (
<AlertModal
variant="danger"
title={modalTitle}
isOpen={isModalOpen}
onClose={() => toggleModal(false)}
actions={[
<Button
ouiaId="delete-modal-confirm"
key="delete"
variant="danger"
aria-label={t`confirm delete`}
isDisabled={Boolean(
deleteDetails && itemsToDelete[0]?.type === 'credential_type'
)}
onClick={handleDelete}
>
{t`Delete`}
</Button>,
<Button
ouiaId="delete-cancel"
key="cancel"
variant="link"
aria-label={t`cancel delete`}
onClick={() => toggleModal(false)}
>
{t`Cancel`}
</Button>,
]}
>
<div>{t`This action will soft delete the following:`}</div>
{itemsToDelete.map((item) => (
<span
key={item.hostname}
id={`item-to-be-deleted-${item.hostname}`}
>
<strong>{item.hostname}</strong>
<br />
</span>
))}
{shouldShowDeleteWarning && (
<WarningMessage
variant="warning"
isInline
title={buildDeleteWarning()}
/>
)}
</AlertModal>
)}
</>
);
}
HostMetricsDeleteButton.propTypes = {
onDelete: func.isRequired,
itemsToDelete: arrayOf(ItemToDelete).isRequired,
pluralizedItemName: string,
warningMessage: node,
};
HostMetricsDeleteButton.defaultProps = {
pluralizedItemName: 'Items',
warningMessage: null,
};
export default HostMetricsDeleteButton;

View File

@ -0,0 +1,36 @@
import 'styled-components/macro';
import React from 'react';
import { Tr, Td } from '@patternfly/react-table';
import { formatDateString } from 'util/dates';
import { HostMetrics } from 'types';
import { t } from '@lingui/macro';
import { bool, func } from 'prop-types';
function HostMetricsListItem({ item, isSelected, onSelect, rowIndex }) {
return (
<Tr
id={`host_metrics-row-${item.hostname}`}
ouiaId={`host-metrics-row-${item.hostname}`}
>
<Td select={{ rowIndex, isSelected, onSelect }} dataLabel={t`Selected`} />
<Td dataLabel={t`Hostname`}>{item.hostname}</Td>
<Td dataLabel={t`First automation`}>
{formatDateString(item.first_automation)}
</Td>
<Td dataLabel={t`Last automation`}>
{formatDateString(item.last_automation)}
</Td>
<Td dataLabel={t`Automation`}>{item.automated_counter}</Td>
<Td dataLabel={t`Inventories`}>{item.used_in_inventories || 0}</Td>
<Td dataLabel={t`Deleted`}>{item.deleted_counter}</Td>
</Tr>
);
}
HostMetricsListItem.propTypes = {
item: HostMetrics.isRequired,
isSelected: bool.isRequired,
onSelect: func.isRequired,
};
export default HostMetricsListItem;

View File

@ -0,0 +1 @@
export { default } from './HostMetrics';

View File

@ -439,3 +439,12 @@ export const Toast = shape({
hasTimeout: bool,
message: string,
});
export const HostMetrics = shape({
hostname: string.isRequired,
first_automation: string.isRequired,
last_automation: string.isRequired,
automated_counter: number.isRequired,
used_in_inventories: number,
deleted_counter: number,
});