mirror of
https://github.com/ansible/awx.git
synced 2026-01-20 06:01:25 -03:30
Merge pull request #3284 from ansible/analytics
Analytics Reviewed-by: https://github.com/softwarefactory-project-zuul[bot]
This commit is contained in:
commit
df9a012013
1
awx/main/analytics/__init__.py
Normal file
1
awx/main/analytics/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
from .core import register, gather, ship # noqa
|
||||
260
awx/main/analytics/collectors.py
Normal file
260
awx/main/analytics/collectors.py
Normal file
@ -0,0 +1,260 @@
|
||||
import os.path
|
||||
|
||||
from django.db import connection
|
||||
from django.db.models import Count
|
||||
from django.conf import settings
|
||||
from django.utils.timezone import now
|
||||
|
||||
from awx.conf.license import get_license
|
||||
from awx.main.utils import (get_awx_version, get_ansible_version,
|
||||
get_custom_venv_choices, camelcase_to_underscore)
|
||||
from awx.main import models
|
||||
from django.contrib.sessions.models import Session
|
||||
from awx.main.analytics import register
|
||||
|
||||
'''
|
||||
This module is used to define metrics collected by awx.main.analytics.gather()
|
||||
Each function is decorated with a key name, and should return a data
|
||||
structure that can be serialized to JSON
|
||||
|
||||
@register('something')
|
||||
def something(since):
|
||||
# the generated archive will contain a `something.json` w/ this JSON
|
||||
return {'some': 'json'}
|
||||
|
||||
All functions - when called - will be passed a datetime.datetime object,
|
||||
`since`, which represents the last time analytics were gathered (some metrics
|
||||
functions - like those that return metadata about playbook runs, may return
|
||||
data _since_ the last report date - i.e., new data in the last 24 hours)
|
||||
'''
|
||||
|
||||
|
||||
@register('config')
def config(since):
    """
    Return one-time configuration/system metadata for the analytics bundle.

    `since` is accepted for the collector interface but unused; all values
    are current settings/license facts, not deltas.
    """
    license_info = get_license(show_key=False)
    return {
        'system_uuid': settings.SYSTEM_UUID,
        'tower_url_base': settings.TOWER_URL_BASE,
        'tower_version': get_awx_version(),
        'ansible_version': get_ansible_version(),
        'license_type': license_info.get('license_type', 'UNLICENSED'),
        # BUG FIX: the lookup key previously contained a space
        # ('free instances'); the license dict uses underscore-style keys,
        # so the old lookup always fell through to the default 0.
        'free_instances': license_info.get('free_instances', 0),
        'license_expiry': license_info.get('time_remaining', 0),
        'pendo_tracking': settings.PENDO_TRACKING_STATE,
        'authentication_backends': settings.AUTHENTICATION_BACKENDS,
        'logging_aggregators': settings.LOG_AGGREGATOR_LOGGERS,
        'external_logger_enabled': settings.LOG_AGGREGATOR_ENABLED,
        # LOG_AGGREGATOR_TYPE may be absent entirely; default to None.
        'external_logger_type': getattr(settings, 'LOG_AGGREGATOR_TYPE', None),
    }
|
||||
|
||||
|
||||
@register('counts')
def counts(since):
    """
    Return total object counts for the primary AWX resource types, plus
    session and job activity totals.

    `since` is accepted for the collector interface but unused; every value
    is a current total, not a delta.
    """
    counts = {}
    # One count per model, keyed by the underscored model name
    # (e.g. JobTemplate -> 'job_template').
    for cls in (models.Organization, models.Team, models.User,
                models.Inventory, models.Credential, models.Project,
                models.JobTemplate, models.WorkflowJobTemplate,
                models.Host, models.Schedule, models.CustomInventoryScript,
                models.NotificationTemplate):
        counts[camelcase_to_underscore(cls.__name__)] = cls.objects.count()

    # Custom virtualenvs, excluding the stock 'ansible' venv.
    venvs = get_custom_venv_choices()
    counts['custom_virtualenvs'] = len([
        v for v in venvs
        if os.path.basename(v.rstrip('/')) != 'ansible'
    ])

    # Inventory totals grouped by kind; kind '' denotes a normal inventory
    # and is re-labeled 'normal' for the report.
    inv_counts = dict(models.Inventory.objects.order_by().values_list('kind').annotate(Count('kind')))
    inv_counts['normal'] = inv_counts.get('', 0)
    inv_counts.pop('', None)
    inv_counts['smart'] = inv_counts.get('smart', 0)
    counts['inventories'] = inv_counts

    counts['unified_job'] = models.UnifiedJob.objects.exclude(launch_type='sync').count() # excludes implicit project_updates
    counts['active_host_count'] = models.Host.objects.active_count()
    # A session is "active" while its expiry is in the future; sessions with
    # no UserSessionMembership row are counted as anonymous.
    active_sessions = Session.objects.filter(expire_date__gte=now()).count()
    active_user_sessions = models.UserSessionMembership.objects.select_related('session').filter(session__expire_date__gte=now()).count()
    active_anonymous_sessions = active_sessions - active_user_sessions
    counts['active_sessions'] = active_sessions
    counts['active_user_sessions'] = active_user_sessions
    counts['active_anonymous_sessions'] = active_anonymous_sessions
    counts['running_jobs'] = models.UnifiedJob.objects.exclude(launch_type='sync').filter(status__in=('running', 'waiting',)).count()
    return counts
|
||||
|
||||
|
||||
@register('org_counts')
def org_counts(since):
    """
    Return per-organization membership metrics, keyed by organization id.

    Each entry carries the org name plus distinct user and team counts.
    `since` is accepted for the collector interface but unused.
    """
    annotated = models.Organization.objects.annotate(
        num_users=Count('member_role__members', distinct=True),
        num_teams=Count('teams', distinct=True),
    ).values('name', 'id', 'num_users', 'num_teams')
    return {
        org['id']: {
            'name': org['name'],
            'users': org['num_users'],
            'teams': org['num_teams'],
        }
        for org in annotated
    }
|
||||
|
||||
|
||||
@register('cred_type_counts')
def cred_type_counts(since):
    """
    Return per-credential-type usage metrics, keyed by credential type id.

    Each entry carries the type name, its distinct credential count, and
    whether the type is managed by Tower. `since` is unused.
    """
    annotated = models.CredentialType.objects.annotate(
        num_credentials=Count('credentials', distinct=True),
    ).values('name', 'id', 'managed_by_tower', 'num_credentials')
    return {
        cred_type['id']: {
            'name': cred_type['name'],
            'credential_count': cred_type['num_credentials'],
            'managed_by_tower': cred_type['managed_by_tower'],
        }
        for cred_type in annotated
    }
|
||||
|
||||
|
||||
@register('inventory_counts')
def inventory_counts(since):
    """
    Return per-inventory host and source counts, keyed by inventory id.

    Normal (kind='') inventories are counted in one annotated query; smart
    inventories are counted with per-row follow-up queries. `since` is
    accepted for the collector interface but unused.
    """
    counts = {}
    for inv in models.Inventory.objects.filter(kind='').annotate(num_sources=Count('inventory_sources', distinct=True),
                                                                 num_hosts=Count('hosts', distinct=True)).only('id', 'name', 'kind'):
        counts[inv.id] = {'name': inv.name,
                          'kind': inv.kind,
                          'hosts': inv.num_hosts,
                          'sources': inv.num_sources
                          }

    # NOTE(review): smart inventory entries use 'num_hosts'/'num_sources'
    # while normal inventories above use 'hosts'/'sources'. Confirm whether
    # this key mismatch is intentional before relying on a uniform schema
    # downstream.
    for smart_inv in models.Inventory.objects.filter(kind='smart'):
        counts[smart_inv.id] = {'name': smart_inv.name,
                                'kind': smart_inv.kind,
                                'num_hosts': smart_inv.hosts.count(),
                                'num_sources': smart_inv.inventory_sources.count()
                                }
    return counts
|
||||
|
||||
|
||||
@register('projects_by_scm_type')
def projects_by_scm_type(since):
    """
    Return project totals grouped by SCM type.

    Every declared SCM type appears in the result (zero when unused); the
    empty-string type is reported as 'manual'. `since` is unused.
    """
    # Seed with zeros so types that have no projects still show up.
    tallies = {choice[0] or 'manual': 0 for choice in models.Project.SCM_TYPE_CHOICES}
    grouped = models.Project.objects.values('scm_type').annotate(
        count=Count('scm_type')
    ).order_by('scm_type')
    for row in grouped:
        tallies[row['scm_type'] or 'manual'] = row['count']
    return tallies
|
||||
|
||||
|
||||
@register('instance_info')
def instance_info(since):
    """
    Return details for every cluster instance, keyed by instance UUID.

    `since` is accepted for the collector interface but unused.
    """
    info = {}
    instances = models.Instance.objects.values_list('hostname').annotate().values(
        'uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'hostname', 'last_isolated_check', 'enabled')
    for instance in instances:
        # BUG FIX: the loop previously rebound `info` itself on each
        # iteration, so only the final instance's data survived. Key each
        # entry by the instance uuid instead.
        info[instance['uuid']] = {
            'uuid': instance['uuid'],
            'version': instance['version'],
            'capacity': instance['capacity'],
            'cpu': instance['cpu'],
            'memory': instance['memory'],
            'managed_by_policy': instance['managed_by_policy'],
            'last_isolated_check': instance['last_isolated_check'],
            'enabled': instance['enabled'],
        }
    return info
|
||||
|
||||
|
||||
@register('job_counts')
def job_counts(since):
    """
    Return aggregate unified-job totals: overall count plus breakdowns by
    status and by launch type. Implicit project updates (launch_type
    'sync') are excluded throughout. `since` is unused.
    """
    jobs = models.UnifiedJob.objects.exclude(launch_type='sync')
    return {
        'total_jobs': jobs.count(),
        'status': dict(jobs.values_list('status').annotate(Count('status'))),
        'launch_type': dict(jobs.values_list('launch_type').annotate(Count('launch_type'))),
    }
|
||||
|
||||
|
||||
@register('job_instance_counts')
def job_instance_counts(since):
    """
    Return per-execution-node breakdowns of unified jobs, by launch type
    and by status. Implicit project updates (launch_type 'sync') are
    excluded. `since` is unused.
    """
    counts = {}
    # BUG FIX: the two loops below previously stored launch-type tallies
    # under the 'status' key and status tallies under 'launch_type'. The
    # labels now match the field actually aggregated.
    job_types = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
        'execution_node', 'launch_type').annotate(job_launch_type=Count('launch_type'))
    for job in job_types:
        counts.setdefault(job[0], {}).setdefault('launch_type', {})[job[1]] = job[2]

    job_statuses = models.UnifiedJob.objects.exclude(launch_type='sync').values_list(
        'execution_node', 'status').annotate(job_status=Count('status'))
    for job in job_statuses:
        counts.setdefault(job[0], {}).setdefault('status', {})[job[1]] = job[2]
    return counts
|
||||
|
||||
|
||||
# Copies Job Events from db to a .csv to be shipped
def copy_tables(since, full_path):
    """
    Dump the job events, unified jobs, and unified job template tables to
    CSV files under `full_path` using PostgreSQL COPY.

    :param since: datetime lower bound; only rows created after it are
                  exported (job templates are exported in full)
    :param full_path: directory in which the ``*_table.csv`` files are written
    """
    def _copy_table(table, query, path):
        # Stream the COPY output straight into <table>_table.csv.
        file_path = os.path.join(path, table + '_table.csv')
        # BUG FIX: the file was previously opened without a context manager,
        # leaking the handle if copy_expert() raised.
        with open(file_path, 'w', encoding='utf-8') as csv_file:
            with connection.cursor() as cursor:
                cursor.copy_expert(query, csv_file)
        return file_path

    # `since` is a datetime under our control (not user input), so the
    # str.format interpolation below is not an injection vector.
    events_query = '''COPY (SELECT main_jobevent.id,
                      main_jobevent.created,
                      main_jobevent.uuid,
                      main_jobevent.parent_uuid,
                      main_jobevent.event,
                      main_jobevent.event_data::json->'task_action',
                      main_jobevent.failed,
                      main_jobevent.changed,
                      main_jobevent.playbook,
                      main_jobevent.play,
                      main_jobevent.task,
                      main_jobevent.role,
                      main_jobevent.job_id,
                      main_jobevent.host_id,
                      main_jobevent.host_name
                      FROM main_jobevent
                      WHERE main_jobevent.created > {}
                      ORDER BY main_jobevent.id ASC) to stdout'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
    _copy_table(table='events', query=events_query, path=full_path)

    unified_job_query = '''COPY (SELECT main_unifiedjob.id,
                           main_unifiedjob.polymorphic_ctype_id,
                           django_content_type.model,
                           main_unifiedjob.created,
                           main_unifiedjob.name,
                           main_unifiedjob.unified_job_template_id,
                           main_unifiedjob.launch_type,
                           main_unifiedjob.schedule_id,
                           main_unifiedjob.execution_node,
                           main_unifiedjob.controller_node,
                           main_unifiedjob.cancel_flag,
                           main_unifiedjob.status,
                           main_unifiedjob.failed,
                           main_unifiedjob.started,
                           main_unifiedjob.finished,
                           main_unifiedjob.elapsed,
                           main_unifiedjob.job_explanation,
                           main_unifiedjob.instance_group_id
                           FROM main_unifiedjob, django_content_type
                           WHERE main_unifiedjob.created > {} AND
                           main_unifiedjob.polymorphic_ctype_id = django_content_type.id AND
                           main_unifiedjob.launch_type != 'sync'
                           ORDER BY main_unifiedjob.id ASC) to stdout'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
    _copy_table(table='unified_jobs', query=unified_job_query, path=full_path)

    unified_job_template_query = '''COPY (SELECT main_unifiedjobtemplate.id,
                                    main_unifiedjobtemplate.polymorphic_ctype_id,
                                    django_content_type.model,
                                    main_unifiedjobtemplate.created,
                                    main_unifiedjobtemplate.modified,
                                    main_unifiedjobtemplate.created_by_id,
                                    main_unifiedjobtemplate.modified_by_id,
                                    main_unifiedjobtemplate.name,
                                    main_unifiedjobtemplate.current_job_id,
                                    main_unifiedjobtemplate.last_job_id,
                                    main_unifiedjobtemplate.last_job_failed,
                                    main_unifiedjobtemplate.last_job_run,
                                    main_unifiedjobtemplate.next_job_run,
                                    main_unifiedjobtemplate.next_schedule_id,
                                    main_unifiedjobtemplate.status
                                    FROM main_unifiedjobtemplate, django_content_type
                                    WHERE main_unifiedjobtemplate.polymorphic_ctype_id = django_content_type.id
                                    ORDER BY main_unifiedjobtemplate.id ASC) to stdout'''.format(since.strftime("'%Y-%m-%d %H:%M:%S'"))
    _copy_table(table='unified_job_template', query=unified_job_template_query, path=full_path)
    return
|
||||
|
||||
138
awx/main/analytics/core.py
Normal file
138
awx/main/analytics/core.py
Normal file
@ -0,0 +1,138 @@
|
||||
import inspect
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import os.path
|
||||
import tempfile
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.timezone import now, timedelta
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
from awx.conf.license import get_license
|
||||
from awx.main.models import Job
|
||||
from awx.main.access import access_registry
|
||||
from awx.main.models.ha import TowerAnalyticsState
|
||||
|
||||
|
||||
__all__ = ['register', 'gather', 'ship']
|
||||
|
||||
|
||||
logger = logging.getLogger('awx.main.analytics')
|
||||
|
||||
|
||||
def _valid_license():
    """
    Return True when a usable (non-'open') license passes the Job access
    check; False (with a logged exception) otherwise.
    """
    try:
        license_info = get_license(show_key=False)
        # AWX/open installs never ship analytics.
        if license_info.get('license_type', 'UNLICENSED') == 'open':
            return False
        access_registry[Job](None).check_license()
    except PermissionDenied:
        logger.exception("A valid license was not found:")
        return False
    return True
|
||||
|
||||
|
||||
def register(key):
    """
    A decorator used to register a function as a metric collector.

    Decorated functions should return JSON-serializable objects.

    @register('projects_by_scm_type')
    def projects_by_scm_type():
        return {'git': 5, 'svn': 1, 'hg': 0}
    """
    def _attach(collector):
        # gather() discovers collectors by the presence of this attribute.
        setattr(collector, '__awx_analytics_key__', key)
        return collector
    return _attach
|
||||
|
||||
|
||||
def gather(dest=None, module=None):
    """
    Gather all defined metrics and write them as JSON files in a .tgz

    :param dest: the (optional) absolute path to write a compressed tarball
    :param module: the module to search for registered analytic collector
                   functions; defaults to awx.main.analytics.collectors
    :returns: the tarball path on success, or an "Error: ..." string when
              licensing/settings preclude gathering
    """
    run_now = now()
    state = TowerAnalyticsState.get_solo()
    last_run = state.last_run
    logger.debug("Last analytics run was: {}".format(last_run))
    # Stamp this run up front so the next invocation sees it.
    state.last_run = run_now
    state.save()

    # Never collect more than the last 7 days of data.
    # BUG FIX: check the falsy case *before* comparing — the original order
    # (`last_run < max_interval or not last_run`) would raise TypeError on a
    # None last_run before the guard could run.
    max_interval = now() - timedelta(days=7)
    if not last_run or last_run < max_interval:
        last_run = max_interval

    if _valid_license() is False:
        # BUG FIX: logger.exception() outside an except block logs a bogus
        # traceback; this is a plain error condition.
        logger.error("Invalid License provided, or No License Provided")
        return "Error: Invalid License provided, or No License Provided"

    if not settings.INSIGHTS_DATA_ENABLED:
        logger.error("Insights analytics not enabled")
        return "Error: Insights analytics not enabled"

    # BUG FIX: import collectors unconditionally — it was previously only
    # imported when module was None, so the copy_tables() call below hit a
    # NameError whenever a module was passed explicitly.
    from awx.main.analytics import collectors
    if module is None:
        module = collectors

    dest = dest or tempfile.mkdtemp(prefix='awx_analytics')
    for name, func in inspect.getmembers(module):
        if inspect.isfunction(func) and hasattr(func, '__awx_analytics_key__'):
            key = func.__awx_analytics_key__
            path = '{}.json'.format(os.path.join(dest, key))
            with open(path, 'w', encoding='utf-8') as f:
                try:
                    json.dump(func(last_run), f)
                except Exception:
                    # A failing collector must not poison the bundle; log it
                    # and drop its (partial) output file.
                    logger.exception("Could not generate metric {}.json".format(key))
                    f.close()
                    os.remove(f.name)
    try:
        collectors.copy_tables(since=last_run, full_path=dest)
    except Exception:
        logger.exception("Could not copy tables")

    # can't use isoformat() since it has colons, which GNU tar doesn't like
    tarname = '_'.join([
        settings.SYSTEM_UUID,
        run_now.strftime('%Y-%m-%d-%H%M%S%z')
    ])
    tgz = shutil.make_archive(
        os.path.join(os.path.dirname(dest), tarname),
        'gztar',
        dest
    )
    shutil.rmtree(dest)
    return tgz
|
||||
|
||||
|
||||
def ship(path):
    """
    Ship gathered metrics via the Insights agent.

    :param path: path to the tarball produced by gather()
    """
    agent = 'insights-client'
    # Bail out early when the agent binary isn't installed.
    if shutil.which(agent) is None:
        logger.error('could not find {} on PATH'.format(agent))
        return
    logger.debug('shipping analytics file: {}'.format(path))
    cmd = [
        agent, '--payload', path, '--content-type', settings.INSIGHTS_AGENT_MIME
    ]
    try:
        # Cap the upload at five minutes.
        output = smart_str(subprocess.check_output(cmd, timeout=60 * 5))
        logger.debug(output)
    except subprocess.CalledProcessError:
        logger.exception('{} failure:'.format(cmd))
    except subprocess.TimeoutExpired:
        logger.exception('{} timeout:'.format(cmd))
|
||||
@ -301,6 +301,16 @@ register(
|
||||
placeholder={'HTTP_PROXY': 'myproxy.local:8080'},
|
||||
)
|
||||
|
||||
# Master switch for Insights analytics gathering; gather() refuses to run
# when this is False. Like other registered settings, it may be overridden
# via the database/API.
register(
    'INSIGHTS_DATA_ENABLED',
    field_class=fields.BooleanField,
    default=False,
    label=_('Gather data for Automation Insights'),
    help_text=_('Enables Tower to gather data on automation and send it to Red Hat Insights.'),
    category=_('System'),
    category_slug='system',
)
|
||||
|
||||
register(
|
||||
'AWX_ROLES_ENABLED',
|
||||
field_class=fields.BooleanField,
|
||||
|
||||
30
awx/main/management/commands/gather_analytics.py
Normal file
30
awx/main/management/commands/gather_analytics.py
Normal file
@ -0,0 +1,30 @@
|
||||
import logging
|
||||
from awx.main.analytics import gather, ship
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Gather AWX analytics data, optionally shipping it via insights-client."""

    help = 'Gather AWX analytics data'

    def add_arguments(self, parser):
        # --ship forwards the gathered tarball to the Insights agent.
        parser.add_argument('--ship', dest='ship', action='store_true',
                            help='Enable to ship metrics via insights-client')

    def init_logging(self):
        # Route analytics log output straight to the console, unformatted.
        self.logger = logging.getLogger('awx.main.analytics')
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        console.setFormatter(logging.Formatter('%(message)s'))
        self.logger.addHandler(console)
        self.logger.propagate = False

    def handle(self, *args, **options):
        # Gather first, then attach the console handler and report the
        # resulting tarball path (mirrors the original call order).
        tgz = gather()
        self.init_logging()
        self.logger.debug(tgz)
        if options.get('ship'):
            ship(tgz)
|
||||
25
awx/main/migrations/0064_v350_analytics_state.py
Normal file
25
awx/main/migrations/0064_v350_analytics_state.py
Normal file
@ -0,0 +1,25 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.16 on 2019-01-28 14:27
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Add the TowerAnalyticsState singleton table, which records the
    timestamp of the most recent analytics gather run."""

    dependencies = [
        ('main', '0063_v350_org_host_limits'),
    ]

    operations = [
        migrations.CreateModel(
            name='TowerAnalyticsState',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # auto_now_add stamps row creation; gather() overwrites it
                # on each run.
                ('last_run', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
|
||||
@ -26,7 +26,7 @@ from awx.main.models.unified_jobs import UnifiedJob
|
||||
from awx.main.utils import get_cpu_capacity, get_mem_capacity, get_system_task_capacity
|
||||
from awx.main.models.mixins import RelatedJobsMixin
|
||||
|
||||
__all__ = ('Instance', 'InstanceGroup', 'JobOrigin', 'TowerScheduleState',)
|
||||
__all__ = ('Instance', 'InstanceGroup', 'JobOrigin', 'TowerScheduleState', 'TowerAnalyticsState')
|
||||
|
||||
|
||||
class HasPolicyEditsMixin(HasEditsMixin):
|
||||
@ -251,6 +251,10 @@ class TowerScheduleState(SingletonModel):
|
||||
schedule_last_run = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
|
||||
class TowerAnalyticsState(SingletonModel):
    """Singleton recording when analytics were last gathered."""

    # Stamped at row creation (auto_now_add); overwritten by
    # awx.main.analytics.core.gather() on each run.
    last_run = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
|
||||
class JobOrigin(models.Model):
|
||||
"""A model representing the relationship between a unified job and
|
||||
the instance that was responsible for starting that job.
|
||||
|
||||
@ -156,7 +156,9 @@ class Profile(CreatedModifiedModel):
|
||||
|
||||
class UserSessionMembership(BaseModel):
|
||||
'''
|
||||
A lookup table for session membership given user.
|
||||
A lookup table for API session membership given user. Note, there is a
|
||||
different session created by channels for websockets using the same
|
||||
underlying model.
|
||||
'''
|
||||
|
||||
class Meta:
|
||||
|
||||
@ -70,6 +70,7 @@ from awx.main.utils.safe_yaml import safe_dump, sanitize_jinja
|
||||
from awx.main.utils.reload import stop_local_services
|
||||
from awx.main.utils.pglock import advisory_lock
|
||||
from awx.main.consumers import emit_channel_notification
|
||||
from awx.main import analytics
|
||||
from awx.conf import settings_registry
|
||||
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
@ -321,6 +322,19 @@ def send_notifications(notification_list, job_id=None):
|
||||
logger.exception('Error saving notification {} result.'.format(notification.id))
|
||||
|
||||
|
||||
@task()
def gather_analytics():
    """Periodic task: gather the analytics bundle, ship it, and always
    clean up the tarball afterwards."""
    if settings.PENDO_TRACKING_STATE == 'off':
        return
    # BUG FIX: initialize tgz before the try block — if analytics.gather()
    # raised, the finally clause referenced an unbound name and the
    # resulting NameError masked the original exception.
    tgz = None
    try:
        tgz = analytics.gather()
        logger.debug('gathered analytics: {}'.format(tgz))
        analytics.ship(tgz)
    finally:
        if tgz and os.path.exists(tgz):
            os.remove(tgz)
|
||||
|
||||
|
||||
@task()
|
||||
def run_administrative_checks():
|
||||
logger.warn("Running administrative checks.")
|
||||
|
||||
0
awx/main/tests/functional/analytics/__init__.py
Normal file
0
awx/main/tests/functional/analytics/__init__.py
Normal file
59
awx/main/tests/functional/analytics/test_core.py
Normal file
59
awx/main/tests/functional/analytics/test_core.py
Normal file
@ -0,0 +1,59 @@
|
||||
import importlib
|
||||
import json
|
||||
import os
|
||||
import tarfile
|
||||
from unittest import mock
|
||||
import pytest
|
||||
|
||||
from django.conf import settings
|
||||
from awx.main.analytics import gather, register
|
||||
|
||||
|
||||
@register('example')
def example(since):
    # Well-behaved collector: returns JSON-serializable data, so gather()
    # should include example.json in the bundle.
    return {'awx': 123}
|
||||
|
||||
|
||||
@register('bad_json')
def bad_json(since):
    # A set is not JSON-serializable; gather() should drop bad_json.json.
    return set()
|
||||
|
||||
|
||||
@register('throws_error')
def throws_error(since):
    # A raising collector must be logged and skipped by gather(), not
    # abort the whole run.
    raise ValueError()
|
||||
|
||||
|
||||
def _valid_license():
    # Module-level stub so this test module mirrors core's shape; the real
    # check is patched out by the mock_valid_license fixture below.
    pass
|
||||
|
||||
|
||||
@pytest.fixture
def mock_valid_license():
    # Patch core._valid_license so gather() proceeds without a real license.
    with mock.patch('awx.main.analytics.core._valid_license') as license:
        license.return_value = True
        yield license
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_gather(mock_valid_license):
    """gather() bundles valid collector output and skips failing collectors."""
    settings.INSIGHTS_DATA_ENABLED = True

    # Point gather() at *this* module so only the example collectors
    # defined above are scanned.
    tgz = gather(module=importlib.import_module(__name__))
    files = {}
    with tarfile.open(tgz, "r:gz") as archive:
        for member in archive.getmembers():
            files[member.name] = archive.extractfile(member)

        # functions that returned valid JSON should show up
        assert './example.json' in files.keys()
        assert json.loads(files['./example.json'].read()) == {'awx': 123}

        # functions that don't return serializable objects should not
        assert './bad_json.json' not in files.keys()
        assert './throws_error.json' not in files.keys()
    # Best-effort cleanup of the generated tarball.
    try:
        os.remove(tgz)
    except Exception:
        pass
|
||||
|
||||
53
awx/main/tests/functional/analytics/test_counts.py
Normal file
53
awx/main/tests/functional/analytics/test_counts.py
Normal file
@ -0,0 +1,53 @@
|
||||
import pytest
|
||||
|
||||
from awx.main import models
|
||||
from awx.main.analytics import collectors
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_empty():
    """With a pristine database, every tally from counts() is zero."""
    assert collectors.counts(None) == {
        "active_user_sessions": 0,
        "active_anonymous_sessions": 0,
        "active_sessions": 0,
        "active_host_count": 0,
        "credential": 0,
        "custom_inventory_script": 0,
        "custom_virtualenvs": 0,  # the stock 'ansible' venv is excluded
        "host": 0,
        "inventory": 0,
        "inventories": {'normal': 0, 'smart': 0},
        "job_template": 0,
        "notification_template": 0,
        "organization": 0,
        "project": 0,
        "running_jobs": 0,
        "schedule": 0,
        "team": 0,
        "user": 0,
        "workflow_job_template": 0,
        "unified_job": 0
    }
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_database_counts(organization_factory, job_template_factory,
                         workflow_job_template_factory):
    """Create one of each counted resource and verify counts() reports 1."""
    objects = organization_factory('org', superusers=['admin'])
    template = job_template_factory('test', organization=objects.organization,
                                    inventory='test_inv', project='test_project',
                                    credential='test_cred')
    workflow_job_template_factory('test')
    models.Team(organization=objects.organization).save()
    models.Host(inventory=template.inventory).save()
    models.Schedule(
        rrule='DTSTART;TZID=America/New_York:20300504T150000',
        unified_job_template=template.job_template
    ).save()
    models.CustomInventoryScript(organization=objects.organization).save()

    results = collectors.counts(None)
    singletons = ('organization', 'team', 'user', 'inventory', 'credential',
                  'project', 'job_template', 'workflow_job_template', 'host',
                  'schedule', 'custom_inventory_script')
    for key in singletons:
        assert results[key] == 1
|
||||
@ -0,0 +1,32 @@
|
||||
import pytest
|
||||
import random
|
||||
|
||||
from awx.main.models import Project
|
||||
from awx.main.analytics import collectors
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_empty():
    """With no projects, every SCM type tallies to zero."""
    expected = dict.fromkeys(['manual', 'git', 'svn', 'hg', 'insights'], 0)
    assert collectors.projects_by_scm_type(None) == expected
|
||||
|
||||
|
||||
@pytest.mark.django_db
@pytest.mark.parametrize('scm_type', [t[0] for t in Project.SCM_TYPE_CHOICES])
def test_multiple(scm_type):
    """A random number of projects of one SCM type is tallied exactly."""
    expected = dict.fromkeys(['manual', 'git', 'svn', 'hg', 'insights'], 0)
    total = random.randint(0, 10)
    for _ in range(total):
        Project(scm_type=scm_type).save()
    # The empty-string SCM type is reported under the 'manual' label.
    expected[scm_type or 'manual'] = total
    assert collectors.projects_by_scm_type(None) == expected
|
||||
@ -5,6 +5,7 @@ import os
|
||||
import re # noqa
|
||||
import sys
|
||||
from datetime import timedelta
|
||||
from celery.schedules import crontab
|
||||
|
||||
# global settings
|
||||
from django.conf import global_settings
|
||||
@ -486,6 +487,10 @@ CELERYBEAT_SCHEDULE = {
|
||||
'task': 'awx.main.tasks.purge_old_stdout_files',
|
||||
'schedule': timedelta(days=7)
|
||||
},
|
||||
'gather_analytics': {
|
||||
'task': 'awx.main.tasks.gather_analytics',
|
||||
'schedule': crontab(hour=0)
|
||||
},
|
||||
'task_manager': {
|
||||
'task': 'awx.main.scheduler.tasks.run_task_manager',
|
||||
'schedule': timedelta(seconds=20),
|
||||
@ -667,6 +672,11 @@ AWX_AUTO_DEPROVISION_INSTANCES = False
|
||||
# Note: This setting may be overridden by database settings.
|
||||
PENDO_TRACKING_STATE = "off"
|
||||
|
||||
# Enables Insights data collection for Ansible Tower.
|
||||
# Note: This setting may be overridden by database settings.
|
||||
INSIGHTS_DATA_ENABLED = False
|
||||
|
||||
|
||||
# Default list of modules allowed for ad hoc commands.
|
||||
# Note: This setting may be overridden by database settings.
|
||||
AD_HOC_COMMANDS = [
|
||||
@ -958,6 +968,7 @@ TOWER_ADMIN_ALERTS = True
|
||||
TOWER_URL_BASE = "https://towerhost"
|
||||
|
||||
INSIGHTS_URL_BASE = "https://example.org"
|
||||
INSIGHTS_AGENT_MIME = 'application/example'
|
||||
|
||||
TOWER_SETTINGS_MANIFEST = {}
|
||||
|
||||
|
||||
@ -88,6 +88,7 @@ AWX_ISOLATED_LAUNCH_TIMEOUT = 30
|
||||
# Disable Pendo on the UI for development/test.
|
||||
# Note: This setting may be overridden by database settings.
|
||||
PENDO_TRACKING_STATE = "off"
|
||||
INSIGHTS_DATA_ENABLED = False
|
||||
|
||||
# Use Django-Jenkins if installed. Only run tests for awx.main app.
|
||||
try:
|
||||
|
||||
@ -58,6 +58,9 @@ export default ['i18n', function(i18n) {
|
||||
CUSTOM_VENV_PATHS: {
|
||||
type: 'textarea',
|
||||
reset: 'CUSTOM_VENV_PATHS'
|
||||
},
|
||||
INSIGHTS_DATA_ENABLED: {
|
||||
type: 'toggleSwitch'
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
@ -140,6 +140,11 @@
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.License-detailsGroup--withSeparator {
|
||||
border-top: 1px solid @default-icon-hov;
|
||||
.License-analyticsCheckbox {
|
||||
padding-top: 5px;
|
||||
}
|
||||
|
||||
.License-analyticsCheckboxGroup {
|
||||
padding: 10px 0;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
@ -9,9 +9,9 @@ import {N_} from "../i18n";
|
||||
export default
|
||||
['Wait', '$state', '$scope', '$rootScope',
|
||||
'ProcessErrors', 'CheckLicense', 'moment','$window',
|
||||
'ConfigService', 'FeaturesService', 'pendoService', 'i18n', 'config',
|
||||
'ConfigService', 'FeaturesService', 'pendoService', 'insightsEnablementService', 'i18n', 'config',
|
||||
function(Wait, $state, $scope, $rootScope, ProcessErrors, CheckLicense, moment,
|
||||
$window, ConfigService, FeaturesService, pendoService, i18n, config) {
|
||||
$window, ConfigService, FeaturesService, pendoService, insightsEnablementService, i18n, config) {
|
||||
|
||||
const calcDaysRemaining = function(seconds) {
|
||||
// calculate the number of days remaining on the license
|
||||
@ -54,7 +54,8 @@ export default
|
||||
$scope.valid = CheckLicense.valid($scope.license.license_info);
|
||||
$scope.compliant = $scope.license.license_info.compliant;
|
||||
$scope.newLicense = {
|
||||
pendo: true
|
||||
pendo: true,
|
||||
insights: true
|
||||
};
|
||||
};
|
||||
|
||||
@ -114,6 +115,13 @@ export default
|
||||
} else {
|
||||
pendoService.updatePendoTrackingState('off');
|
||||
}
|
||||
|
||||
if ($scope.newLicense.insights) {
|
||||
insightsEnablementService.updateInsightsTrackingState(true);
|
||||
} else {
|
||||
insightsEnablementService.updateInsightsTrackingState(false);
|
||||
}
|
||||
|
||||
$state.go('dashboard', {
|
||||
licenseMissing: false
|
||||
});
|
||||
|
||||
@ -115,25 +115,38 @@
|
||||
<div id="eula_notice"
|
||||
class="License-eulaNotice">{{ license.eula }}</div>
|
||||
<div class="form-group License-detailsGroup">
|
||||
<div class="checkbox">
|
||||
<div class="License-analyticsCheckbox checkbox">
|
||||
<label class="License-details--label">
|
||||
<input type="checkbox" ng-model="newLicense.eula" ng-disabled="!user_is_superuser" required>
|
||||
<translate>I agree to the End User License Agreement</translate>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group License-detailsGroup License-detailsGroup--withSeparator" ng-if="licenseMissing">
|
||||
<div class="checkbox">
|
||||
<label class="License-details--label">
|
||||
<input type="checkbox" ng-model="newLicense.pendo" ng-disabled="!user_is_superuser" required>
|
||||
<translate>By default, Tower collects and transmits analytics data on Tower usage to Red Hat. This data is used to enhance future releases of the Tower Software and help streamline customer experience and success. For more information, see
|
||||
<a target="_blank"
|
||||
href="http://docs.ansible.com/ansible-tower/latest/html/installandreference/user-data.html#index-0">
|
||||
this Tower documentation page
|
||||
</a>. Uncheck this box to disable this feature.
|
||||
</translate>
|
||||
</label>
|
||||
<div class="License-subTitleText" ng-if="licenseMissing">
|
||||
<translate>Tracking and Analytics</translate>
|
||||
</div>
|
||||
<div class="form-group License-detailsGroup" ng-if="licenseMissing">
|
||||
<span class="License-helperText">
|
||||
<translate>By default, Tower collects and transmits analytics data on Tower usage to Red Hat. You can uncheck these boxes to disable sending data to these services:</translate>
|
||||
</span>
|
||||
<div class="License-analyticsCheckboxGroup">
|
||||
<div class="License-analyticsCheckbox checkbox">
|
||||
<input type="checkbox" ng-model="newLicense.pendo" ng-disabled="!user_is_superuser" required>
|
||||
<translate>Pendo</translate>
|
||||
</div>
|
||||
<div class="License-analyticsCheckbox checkbox">
|
||||
<input type="checkbox" ng-model="newLicense.insights" ng-disabled="!user_is_superuser" required>
|
||||
<translate>Insights</translate>
|
||||
</div>
|
||||
</div>
|
||||
<span class="License-helperText">
|
||||
<translate>For more information about track and analytics, see
|
||||
<a target="_blank"
|
||||
href="http://docs.ansible.com/ansible-tower/latest/html/installandreference/user-data.html#index-0">
|
||||
this Tower documentation page
|
||||
</a>.
|
||||
</translate>
|
||||
</span>
|
||||
</div>
|
||||
<div>
|
||||
<button ng-click="submit()" class="btn btn-success pull-right" ng-disabled="newLicense.file.license_key == null || newLicense.eula == null || !user_is_superuser" translate>Submit</button>
|
||||
|
||||
@ -0,0 +1,27 @@
|
||||
/*************************************************
|
||||
* Copyright (c) 2015 Ansible, Inc.
|
||||
*
|
||||
* All Rights Reserved
|
||||
*************************************************/
|
||||
|
||||
|
||||
// Angular service that toggles the INSIGHTS_DATA_ENABLED system setting
// via a PATCH to the settings API.
export default ['$rootScope', 'Rest', 'GetBasePath', 'ProcessErrors',
    function ($rootScope, Rest, GetBasePath, ProcessErrors) {
        return {
            // Accepts only strict booleans; any other value throws so a
            // caller bug cannot silently clear the setting.
            updateInsightsTrackingState: function(tracking_type) {
                if (tracking_type === true || tracking_type === false) {
                    Rest.setUrl(`${GetBasePath('settings')}system`);
                    Rest.patch({ INSIGHTS_DATA_ENABLED: tracking_type })
                        .catch(function ({data, status}) {
                            // Surface API failures through the shared
                            // error handler.
                            ProcessErrors($rootScope, data, status, null, {
                                hdr: 'Error!',
                                msg: 'Failed to patch INSIGHTS_DATA_ENABLED in settings: ' +
                                    status });
                        });
                } else {
                    throw new Error(`Can't update insights data enabled in settings to
                        "${tracking_type}"`);
                }
            }
        };
    }];
|
||||
@ -8,10 +8,12 @@ import authenticationService from './authentication.service';
|
||||
import isAdmin from './isAdmin.factory';
|
||||
import timer from './timer.factory';
|
||||
import pendoService from './pendo.service';
|
||||
import insightsEnablementService from './insightsEnablement.service';
|
||||
|
||||
export default
|
||||
angular.module('authentication', [])
|
||||
.factory('Authorization', authenticationService)
|
||||
.factory('IsAdmin', isAdmin)
|
||||
.factory('Timer', timer)
|
||||
.service('pendoService', pendoService);
|
||||
.service('pendoService', pendoService)
|
||||
.service('insightsEnablementService', insightsEnablementService);
|
||||
|
||||
@ -17,8 +17,8 @@ register(
|
||||
('anonymous', _('Anonymous')),
|
||||
('detailed', _('Detailed')),
|
||||
],
|
||||
label=_('Analytics Tracking State'),
|
||||
help_text=_('Enable or Disable Analytics Tracking.'),
|
||||
label=_('Pendo Analytics Tracking State'),
|
||||
help_text=_('Enable or Disable Pendo Analytics Tracking.'),
|
||||
category=_('UI'),
|
||||
category_slug='ui',
|
||||
)
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user