Fixing up unit tests from HA/Clustering work

* Also purge old fact migration tests
* Upgrade some services on various container images
This commit is contained in:
Matthew Jones 2016-10-03 10:39:59 -04:00
parent b4c852cf53
commit 959342a5fd
13 changed files with 59 additions and 290 deletions

View File

@ -41,6 +41,47 @@ TEST_PLAYBOOK = '''- hosts: mygroup
command: test 1 = 1
'''
class QueueTestMixin(object):
    """Mixin that manages a throwaway RabbitMQ broker plus a
    CallbackReceiver child process for tests that use the callback queue.

    Call start_queue() in setUp and terminate_queue() in tearDown, or
    inherit from QueueStartStopTestMixin which does both automatically.
    """

    def start_queue(self):
        """Start the broker and run a CallbackReceiver in a subprocess."""
        self.start_rabbit()
        receiver = CallbackReceiver()
        # args=(False,) is passed straight through to run_subscriber;
        # NOTE(review): presumed to disable the worker pool — confirm
        # against CallbackReceiver.run_subscriber.
        self.queue_process = Process(target=receiver.run_subscriber,
                                     args=(False,))
        self.queue_process.start()

    def terminate_queue(self):
        """Kill the receiver subprocess (if started) and stop the broker."""
        if hasattr(self, 'queue_process'):
            self.queue_process.terminate()
        self.stop_rabbit()

    def start_rabbit(self):
        """Launch a dedicated rabbitmq-server for this test run (no-op if
        one is already running)."""
        # NOTE(review): the attribute is still called 'redis_process'
        # (leftover from the old redis-based queue) but it holds the
        # rabbitmq server process; kept for backward compatibility.
        if not getattr(self, 'redis_process', None):
            env = dict(os.environ)
            # rabbitmq-server lives in /usr/sbin, which is not on a normal
            # (non-root) user's PATH by default.
            env['PATH'] = '%s:/usr/sbin/' % env['PATH']
            # Dedicated node name/port so we never collide with a
            # system-wide rabbitmq instance.
            env['RABBITMQ_NODENAME'] = 'towerunittest'
            env['RABBITMQ_NODE_PORT'] = '55672'
            # 'exec' replaces the wrapping /bin/bash so that kill() in
            # stop_rabbit() signals rabbitmq-server itself instead of the
            # intermediate shell (which would leave the broker running).
            self.redis_process = Popen('exec rabbitmq-server > /dev/null',
                                       shell=True, executable='/bin/bash',
                                       env=env)

    def stop_rabbit(self):
        """Kill the broker started by start_rabbit(), if any."""
        if getattr(self, 'redis_process', None):
            self.redis_process.kill()
            self.redis_process = None
# If start_queue() is called without a matching terminate_queue(), the
# observed effect is a hang during test-cleanup database delete. To ensure
# terminate_queue() is always called whenever start_queue() is called, just
# inherit from this class when you want to use the queue.
class QueueStartStopTestMixin(QueueTestMixin):
    """Start the callback queue in setUp() and guarantee its teardown."""

    def setUp(self):
        super(QueueStartStopTestMixin, self).setUp()
        self.start_queue()

    def tearDown(self):
        # try/finally guarantees the queue is terminated even when a
        # parent tearDown() raises; otherwise cleanup hangs (see above).
        try:
            super(QueueStartStopTestMixin, self).tearDown()
        finally:
            self.terminate_queue()
class MockCommonlySlowTestMixin(object):
def __init__(self, *args, **kwargs):
@ -87,17 +128,9 @@ class BaseTestMixin(MockCommonlySlowTestMixin):
# Set flag so that task chain works with unit tests.
settings.CELERY_UNIT_TEST = True
settings.SYSTEM_UUID='00000000-0000-0000-0000-000000000000'
settings.BROKER_URL='redis://localhost:16379/'
settings.BROKER_URL='redis://localhost:55672/'
settings.CALLBACK_QUEUE = 'callback_tasks_unit'
# Create unique random consumer and queue ports for zeromq callback.
if settings.CALLBACK_CONSUMER_PORT:
callback_port = random.randint(55700, 55799)
settings.CALLBACK_CONSUMER_PORT = 'tcp://127.0.0.1:%d' % callback_port
os.environ['CALLBACK_CONSUMER_PORT'] = settings.CALLBACK_CONSUMER_PORT
callback_queue_path = '/tmp/callback_receiver_test_%d.ipc' % callback_port
self._temp_paths.append(callback_queue_path)
settings.CALLBACK_QUEUE_PORT = 'ipc://%s' % callback_queue_path
settings.TASK_COMMAND_PORT = 'ipc:///tmp/task_command_receiver_%d.ipc' % callback_port
# Disable socket notifications for unit tests.
settings.SOCKETIO_NOTIFICATION_PORT = None
# Make temp job status directory for unit tests.
@ -140,7 +173,7 @@ class BaseTestMixin(MockCommonlySlowTestMixin):
rnd_str = '____' + str(random.randint(1, 9999999))
return __name__ + '-generated-' + string + rnd_str
def create_test_license_file(self, instance_count=10000, license_date=int(time.time() + 3600), features=None):
def create_test_license_file(self, instance_count=10000, license_date=int(time.time() + 3600), features={}):
settings.LICENSE = TaskEnhancer(
company_name='AWX',
contact_name='AWX Admin',
@ -343,7 +376,7 @@ class BaseTestMixin(MockCommonlySlowTestMixin):
return cred
def setup_instances(self):
    """Register this node in the Instance table so tests can schedule
    work against it.

    (The dump carried both the pre- and post-clustering lines; the
    'primary=True' flag was removed by the HA/Clustering work, so only
    the post-change form is kept.)
    """
    instance = Instance(uuid=settings.SYSTEM_UUID, hostname='127.0.0.1')
    instance.save()
def setup_users(self, just_super_user=False):

View File

@ -1,84 +0,0 @@
# Python
import pytest
from datetime import timedelta
# Django
from django.utils import timezone
from django.conf import settings
# AWX
from awx.fact.models.fact import Fact, FactHost
# MongoEngine
from mongoengine.connection import ConnectionError
@pytest.fixture(autouse=True)
def mongo_db(request):
    """Drop the MongoDB test database before any test marked 'mongo_db'."""
    marker = request.keywords.get('mongo_db', None)
    if marker:
        # The old 'except ConnectionError: raise' handler was a no-op;
        # an unreachable MongoDB should simply propagate and fail loudly.
        db = Fact._get_db()
        db.connection.drop_database(settings.MONGO_DB)
@pytest.fixture
def inventories(organization):
    """Factory fixture: create ``inventory_count`` inventories owned by
    the ``organization`` fixture and return them as a list."""
    def factory(inventory_count=1):
        return [
            organization.inventories.create(name="test-inv-%d" % index,
                                            description="test-inv-desc")
            for index in xrange(0, inventory_count)
        ]
    return factory
'''
hosts naming convention should align with hosts_mongo
'''
@pytest.fixture
def hosts(organization):
    """Factory fixture: create ``host_count`` hosts in each given
    inventory; names follow '<inventory>-host-<n>' so they line up with
    the hosts_mongo fixture."""
    def rf(host_count=1, inventories=None):
        # None default avoids the shared mutable-default-argument pitfall
        # ([] was evaluated once and shared across all calls).
        if inventories is None:
            inventories = []
        hosts = []
        for inv in inventories:
            for i in xrange(0, host_count):
                name = '%s-host-%s' % (inv.name, i)
                host = inv.hosts.create(name=name)
                hosts.append(host)
        return hosts
    return rf
@pytest.fixture
def hosts_mongo(organization):
    """Factory fixture: get-or-create ``host_count`` FactHost (MongoDB)
    records per inventory, named to match the ``hosts`` fixture."""
    def rf(host_count=1, inventories=None):
        # None default avoids the shared mutable-default-argument pitfall.
        if inventories is None:
            inventories = []
        hosts = []
        for inv in inventories:
            for i in xrange(0, host_count):
                name = '%s-host-%s' % (inv.name, i)
                (host, created) = FactHost.objects.get_or_create(hostname=name, inventory_id=inv.id)
                hosts.append(host)
        return hosts
    return rf
@pytest.fixture
def fact_scans(organization, fact_ansible_json, fact_packages_json, fact_services_json):
    """Factory fixture: record ``fact_scans`` daily fact scans (ansible,
    services, packages modules) for every FactHost in the given
    inventories, starting at ``timestamp_epoch``."""
    def rf(fact_scans=1, inventories=None, timestamp_epoch=None):
        # None defaults fix two defects: the shared mutable [] default,
        # and timezone.now() being evaluated once at import time instead
        # of at each call.
        if inventories is None:
            inventories = []
        if timestamp_epoch is None:
            timestamp_epoch = timezone.now()
        facts_json = {
            'ansible': fact_ansible_json,
            'packages': fact_packages_json,
            'services': fact_services_json,
        }
        facts = []
        module_names = ['ansible', 'services', 'packages']
        for inv in inventories:
            for host_obj in FactHost.objects.filter(inventory_id=inv.id):
                timestamp_current = timestamp_epoch
                for i in xrange(0, fact_scans):
                    for module_name in module_names:
                        facts.append(Fact.add_fact(timestamp_current, facts_json[module_name], host_obj, module_name))
                    # Successive scans are one day apart.
                    timestamp_current += timedelta(days=1)
        return facts
    return rf

View File

@ -1,63 +0,0 @@
import pytest
import datetime
from django.apps import apps
from django.conf import settings
from awx.main.models.inventory import Host
from awx.main.models.fact import Fact
from awx.main.migrations import _system_tracking as system_tracking
def micro_to_milli(micro):
    """Return the sub-millisecond remainder of a microsecond count.

    E.g. 123456 microseconds -> 456. Used to truncate a timestamp's
    microsecond field to millisecond precision. ``micro`` is expected to
    be a non-negative microsecond value (datetime.microsecond is always
    in 0..999999), for which the modulo form is equivalent to the old
    C-style ``micro - int(micro / 1000) * 1000`` and clearer.
    """
    return micro % 1000
@pytest.mark.skipif(not getattr(settings, 'MONGO_DB', None), reason="MongoDB not configured")
@pytest.mark.django_db
@pytest.mark.mongo_db
def test_migrate_facts(inventories, hosts, hosts_mongo, fact_scans):
    """All mongo facts migrate when every host exists in both stores."""
    invs = inventories(2)
    hosts(2, invs)
    hosts_mongo(2, invs)
    expected_facts = fact_scans(2, invs)

    (migrated, skipped) = system_tracking.migrate_facts(apps, None)

    # 4 hosts x 2 fact scans x 3 modules per scan = 24 migrated facts.
    assert migrated == 24
    assert skipped == 0

    for mongo_fact, _ in expected_facts:
        matching_host = Host.objects.get(inventory_id=mongo_fact.host.inventory_id,
                                         name=mongo_fact.host.hostname)
        # Migration truncates timestamps to millisecond precision.
        truncated = mongo_fact.timestamp - datetime.timedelta(
            microseconds=micro_to_milli(mongo_fact.timestamp.microsecond))
        matches = Fact.objects.filter(host_id=matching_host.id,
                                      timestamp=truncated,
                                      module=mongo_fact.module)
        assert len(matches) == 1
        assert matches[0] is not None
@pytest.mark.skipif(not getattr(settings, 'MONGO_DB', None), reason="MongoDB not configured")
@pytest.mark.django_db
@pytest.mark.mongo_db
def test_migrate_facts_hostname_does_not_exist(inventories, hosts, hosts_mongo, fact_scans):
    """Facts for mongo-only hosts (no matching Host row) are not migrated."""
    invs = inventories(2)
    existing_hosts = hosts(1, invs)
    hosts_mongo(2, invs)
    expected_facts = fact_scans(2, invs)

    (migrated, skipped) = system_tracking.migrate_facts(apps, None)

    # Half of the 24 facts belong to mongo-only hosts and are skipped.
    assert migrated == 12
    assert skipped == 12

    for mongo_fact, _ in expected_facts:
        # Facts that don't match the only host will not be migrated.
        if mongo_fact.host.hostname != existing_hosts[0].name:
            continue
        matching_host = Host.objects.get(inventory_id=mongo_fact.host.inventory_id,
                                         name=mongo_fact.host.hostname)
        truncated = mongo_fact.timestamp - datetime.timedelta(
            microseconds=micro_to_milli(mongo_fact.timestamp.microsecond))
        matches = Fact.objects.filter(host_id=matching_host.id,
                                      timestamp=truncated,
                                      module=mongo_fact.module)
        assert len(matches) == 1
        assert matches[0] is not None

View File

@ -616,12 +616,12 @@ class BaseJobTestMixin(BaseTestMixin):
def setUp(self):
    """Bring up rabbitmq, instances, fixture data, and the callback queue.

    (The dump carried both the pre-change start_redis() and post-change
    start_rabbit() lines from the HA/Clustering diff; only the
    post-change rabbit call is kept.)
    """
    super(BaseJobTestMixin, self).setUp()
    self.start_rabbit()
    self.setup_instances()
    self.populate()
    self.start_queue()
def tearDown(self):
    """Stop rabbitmq and the callback queue (post-change form of the
    diff; the removed stop_redis() call is dropped)."""
    super(BaseJobTestMixin, self).tearDown()
    self.stop_rabbit()
    # terminate_queue() also calls stop_rabbit(); that is harmless
    # because stop_rabbit() guards on the process attribute being set.
    self.terminate_queue()

View File

@ -389,13 +389,13 @@ class CleanupActivityStreamTest(BaseCommandMixin, BaseTest):
'''
def setUp(self):
    """Create test inventories with a live rabbitmq broker (post-change
    form of the diff; the removed start_redis() call is dropped)."""
    super(CleanupActivityStreamTest, self).setUp()
    self.start_rabbit()
    self.create_test_inventories()
def tearDown(self):
    """Stop rabbitmq before the base teardown (post-change form of the
    diff; the removed stop_redis() call is dropped)."""
    self.stop_rabbit()
    super(CleanupActivityStreamTest, self).tearDown()
def test_cleanup(self):
# Should already have entries due to test case setup. With no
@ -457,7 +457,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
def setUp(self):
super(InventoryImportTest, self).setUp()
self.start_redis()
self.start_rabbit()
self.setup_instances()
self.create_test_inventories()
self.create_test_ini()
@ -465,7 +465,7 @@ class InventoryImportTest(BaseCommandMixin, BaseLiveServerTest):
def tearDown(self):
    """Stop the rabbitmq broker after each test (post-change form of the
    diff; the removed stop_redis() call is dropped)."""
    super(InventoryImportTest, self).tearDown()
    self.stop_rabbit()
def create_test_ini(self, inv_dir=None, ini_content=None):
ini_content = ini_content or TEST_INVENTORY_INI

View File

@ -1,116 +0,0 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
# Python
from mock import MagicMock, Mock
# Django
from django.test import SimpleTestCase
# AWX
from awx.fact.models.fact import * # noqa
from awx.main.management.commands.run_socketio_service import SocketSessionManager, SocketSession, SocketController
__all__ = ['SocketSessionManagerUnitTest', 'SocketControllerUnitTest',]
class WeakRefable():
    """Trivial object that supports weak references.

    Passed as the socket argument when these tests build SocketSession
    instances directly (see create_sessions below), where a plain
    object() stand-in would not be weak-referenceable.
    """
    pass
class SocketSessionManagerUnitTest(SimpleTestCase):
    """Unit tests for SocketSessionManager session bookkeeping."""

    def setUp(self):
        self.session_manager = SocketSessionManager()
        super(SocketSessionManagerUnitTest, self).setUp()

    def create_sessions(self, count, token_key=None):
        # Build `count` sessions; share one token when token_key is given,
        # otherwise each session gets its own token (== its index).
        self.count = count
        self.sessions = []
        for index in range(0, count):
            session = SocketSession(index, token_key or index, WeakRefable())
            self.sessions.append(session)
            self.session_manager.add_session(session)

    def test_multiple_session_diff_token(self):
        self.create_sessions(10)
        token_map = self.session_manager.socket_session_token_key_map
        for session in self.sessions:
            self.assertIn(session.token_key, token_map)
            self.assertEqual(session,
                             token_map[session.token_key][session.session_id])

    def test_multiple_session_same_token(self):
        self.create_sessions(10, token_key='foo')
        sessions_by_id = self.session_manager.lookup("foo")
        self.assertEqual(len(sessions_by_id), 10)
        for session in self.sessions:
            self.assertIn(session.session_id, sessions_by_id)
            self.assertEqual(session, sessions_by_id[session.session_id])

    def test_prune_sessions_max(self):
        # Adding more than SESSIONS_MAX sessions must prune the overflow.
        self.create_sessions(self.session_manager.SESSIONS_MAX + 10)
        self.assertEqual(len(self.session_manager.socket_sessions),
                         self.session_manager.SESSIONS_MAX)
class SocketControllerUnitTest(SimpleTestCase):
    """Unit tests for SocketController packet broadcast/routing."""

    def setUp(self):
        self.socket_controller = SocketController(SocketSessionManager())
        self.socket_controller.set_server(Mock())
        super(SocketControllerUnitTest, self).setUp()

    def create_clients(self, count, token_key=None):
        # token_key may be None (unique token per client == index), a
        # scalar (shared token), or a list (one token per client).
        self.count = count
        self.sessions = []
        self.sockets = []
        self.sockets_dict = {}
        for index in range(0, count):
            if isinstance(token_key, list):
                actual_token = token_key[index]
            else:
                actual_token = token_key or index
            socket = MagicMock(session=dict())
            socket_session = SocketSession(index, actual_token, socket)
            self.sockets.append(socket)
            self.sessions.append(socket_session)
            self.sockets_dict[index] = socket
            self.socket_controller.add_session(socket_session)
            socket.session['socket_session'] = socket_session
            socket.send_packet = Mock()
        self.socket_controller.server.sockets = self.sockets_dict

    def test_broadcast_packet(self):
        self.create_clients(10)
        packet = {
            "hello": "world"
        }
        self.socket_controller.broadcast_packet(packet)
        # Every connected client receives a broadcast.
        for socket in self.sockets:
            socket.send_packet.assert_called_with(packet)

    def test_send_packet(self):
        self.create_clients(5, token_key=[0, 1, 2, 3, 4])
        packet = {
            "hello": "world"
        }
        self.socket_controller.send_packet(packet, 2)
        # Only the client whose token is 2 receives the packet.
        self.sockets[2].send_packet.assert_called_once_with(packet)
        for index in (0, 1, 3, 4):
            self.assertEqual(0, len(self.sockets[index].send_packet.mock_calls))

    def test_send_packet_multiple_sessions_one_token(self):
        self.create_clients(5, token_key=[0, 1, 1, 1, 2])
        packet = {
            "hello": "world"
        }
        self.socket_controller.send_packet(packet, 1)
        # Every session sharing token 1 receives the packet; others do not.
        for index in (1, 2, 3):
            self.sockets[index].send_packet.assert_called_once_with(packet)
        for index in (0, 4):
            self.assertEqual(0, len(self.sockets[index].send_packet.mock_calls))

View File

@ -43,7 +43,7 @@ print json.dumps(inventory)
class InventoryTest(BaseTest):
def setUp(self):
self.start_redis()
self.start_rabbit()
super(InventoryTest, self).setUp()
self.setup_instances()
self.setup_users()
@ -63,7 +63,7 @@ class InventoryTest(BaseTest):
def tearDown(self):
    """Stop the rabbitmq broker after each test (post-change form of the
    diff; the removed stop_redis() call is dropped)."""
    super(InventoryTest, self).tearDown()
    self.stop_rabbit()
def test_get_inventory_list(self):
url = reverse('api:inventory_list')

View File

@ -50,7 +50,7 @@ class ScheduleTest(BaseTest):
def setUp(self):
super(ScheduleTest, self).setUp()
self.start_redis()
self.start_rabbit()
self.setup_instances()
self.setup_users()
self.organizations = self.make_organizations(self.super_django_user, 2)
@ -92,7 +92,7 @@ class ScheduleTest(BaseTest):
def tearDown(self):
    """Stop the rabbitmq broker after each test (post-change form of the
    diff; the removed stop_redis() call is dropped)."""
    super(ScheduleTest, self).tearDown()
    self.stop_rabbit()
def test_schedules_list(self):
url = reverse('api:schedule_list')

View File

@ -62,7 +62,7 @@ class InventoryScriptTest(BaseScriptTest):
def setUp(self):
super(InventoryScriptTest, self).setUp()
self.start_redis()
self.start_rabbit()
self.setup_instances()
self.setup_users()
self.organizations = self.make_organizations(self.super_django_user, 2)
@ -128,7 +128,7 @@ class InventoryScriptTest(BaseScriptTest):
def tearDown(self):
    """Stop the rabbitmq broker after each test (post-change form of the
    diff; the removed stop_redis() call is dropped)."""
    super(InventoryScriptTest, self).tearDown()
    self.stop_rabbit()
def run_inventory_script(self, *args, **options):
rest_api_url = self.live_server_url

View File

@ -1,5 +1,3 @@
Unit tests, acceptance information, and instructions are still pending. The following documentation will likely be moved to the feature epic card and reproduced in our development documentation.
# Notification System Overview
A Notifier is an instance of a notification type (Email, Slack, Webhook, etc) with a name, description, and a defined configuration (A few examples: Username, password, server, recipients for the Email type. Token and list of channels for Slack. Url and Headers for webhooks)

View File

@ -11,7 +11,7 @@ RUN yum -y update && yum -y install curl epel-release
RUN curl --silent --location https://rpm.nodesource.com/setup_6.x | bash -
RUN yum -y localinstall http://download.postgresql.org/pub/repos/yum/9.4/redhat/rhel-6-x86_64/pgdg-centos94-9.4-3.noarch.rpm
ADD tools/docker-compose/proot.repo /etc/yum.repos.d/proot.repo
RUN yum -y update && yum -y install openssh-server ansible mg vim tmux git mercurial subversion python-devel python-psycopg2 make postgresql postgresql-devel nodejs python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel zeromq-devel proot python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel
RUN yum -y update && yum -y install openssh-server ansible mg vim tmux git mercurial subversion python-devel python-psycopg2 make postgresql postgresql-devel nodejs python-psutil libxml2-devel libxslt-devel libstdc++.so.6 gcc cyrus-sasl-devel cyrus-sasl openldap-devel libffi-devel zeromq-devel proot python-pip xmlsec1-devel swig krb5-devel xmlsec1-openssl xmlsec1 xmlsec1-openssl-devel libtool-ltdl-devel rabbitmq-server
RUN pip install flake8 pytest==2.9.2 pytest-pythonpath pytest-django pytest-cov pytest-mock dateutils django-debug-toolbar==1.4 pyflakes==1.0.0 virtualenv
RUN /usr/bin/ssh-keygen -q -t rsa -N "" -f /root/.ssh/id_rsa
RUN mkdir -p /etc/tower

View File

@ -20,6 +20,7 @@ RUN yum install -y \
/usr/bin/pg_config \
openldap-devel \
postgresql-devel \
rabbitmq-server \
libtool-ltdl-devel
# NOTE: The following steps work for tower-3.0.0