rename field to store_facts and fix up tests

Chris Meyers
2017-04-28 13:45:57 -04:00
parent 6dec9d1014
commit 0be384af18
7 changed files with 90 additions and 39 deletions

View File

@@ -2088,7 +2088,7 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
         fields = ('*', 'job_type', 'inventory', 'project', 'playbook',
                   'credential', 'cloud_credential', 'network_credential', 'forks', 'limit',
                   'verbosity', 'extra_vars', 'job_tags', 'force_handlers',
-                  'skip_tags', 'start_at_task', 'timeout', 'gather_facts',)
+                  'skip_tags', 'start_at_task', 'timeout', 'store_facts',)

     def get_related(self, obj):
         res = super(JobOptionsSerializer, self).get_related(obj)
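
Because the serializer now exposes store_facts in place of gather_facts, API clients that build job or job-template payloads have to rename that key. A minimal, hypothetical migration helper (the payload shape is illustrative, not taken from this diff):

def migrate_payload(payload):
    """Rename the retired flag to the new one in a client-side payload dict."""
    data = dict(payload)
    if 'gather_facts' in data:
        data['store_facts'] = data.pop('gather_facts')
    return data

print(migrate_payload({'name': 'nightly-facts-run', 'gather_facts': True}))
# -> {'name': 'nightly-facts-run', 'store_facts': True}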

View File

@@ -53,6 +53,24 @@ class FactBrokerWorker(ConsumerMixin):
         facts = self._extract_module_facts(module, facts)
         return (module, facts)

+    def _do_fact_scan_create_update(self, host_obj, module_name, facts, timestamp):
+        try:
+            fact_obj = Fact.objects.get(host__id=host_obj.id, module=module_name, timestamp=timestamp)
+            fact_obj.facts = facts
+            fact_obj.save()
+            logger.info('Updated existing fact <%s>' % (fact_obj.id))
+        except Fact.DoesNotExist:
+            # Create new Fact entry
+            fact_obj = Fact.add_fact(host_obj.id, module_name, timestamp, facts)
+            logger.info('Created new fact <fact_id, module> <%s, %s>' % (fact_obj.id, module_name))
+            analytics_logger.info('Received message with fact data', extra=dict(
+                module_name=module_name, facts_data=facts))
+        return fact_obj
+
+    def _do_gather_facts_update(self, host_obj, module_name, facts, timestamp):
+        host_obj.update_ansible_facts(module=module_name, facts=facts, timestamp=timestamp)
+        return host_obj
+
     def process_fact_message(self, body, message):
         hostname = body['host']
         inventory_id = body['inventory_id']
@@ -99,20 +117,10 @@ class FactBrokerWorker(ConsumerMixin):
         ret = None

         # Update existing Fact entry
         if is_fact_scan is True:
-            try:
-                fact_obj = Fact.objects.get(host__id=host_obj.id, module=module_name, timestamp=self.timestamp)
-                fact_obj.facts = facts
-                fact_obj.save()
-                logger.info('Updated existing fact <%s>' % (fact_obj.id))
-            except Fact.DoesNotExist:
-                # Create new Fact entry
-                fact_obj = Fact.add_fact(host_obj.id, module_name, self.timestamp, facts)
-                logger.info('Created new fact <fact_id, module> <%s, %s>' % (fact_obj.id, module_name))
-                analytics_logger.info('Received message with fact data', extra=dict(
-                    module_name=module_name, facts_data=facts))
-            ret = fact_obj
-
-        if job.gather_facts is True:
-            host_obj.update_ansible_facts(module=module_name, facts=facts, timestamp=self.timestamp)
+            ret = self._do_fact_scan_create_update(host_obj, module_name, facts, self.timestamp)
+
+        if job.store_facts is True:
+            self._do_gather_facts_update(host_obj, module_name, facts, self.timestamp)

         message.ack()
         return ret
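
After the refactor, process_fact_message has two independent branches: fact-scan messages still create or update a Fact row, while jobs with store_facts enabled additionally push the latest facts onto the Host. A runnable, stubbed-out sketch of that control flow (the names mirror the diff, but message parsing and the ORM are replaced with callables):

class StubJob:
    def __init__(self, job_type, store_facts):
        self.job_type = job_type
        self.store_facts = store_facts

def handle_fact_message(job, is_fact_scan, do_scan_update, do_host_update, ack):
    ret = None
    # Scan jobs keep the historical Fact record (create-or-update by timestamp).
    if is_fact_scan is True:
        ret = do_scan_update()
    # store_facts jobs also persist the most recent facts directly on the Host.
    if job.store_facts is True:
        do_host_update()
    ack()
    return ret

# A plain 'run' job with store_facts enabled only takes the Host-update path.
job = StubJob(job_type='run', store_facts=True)
handle_fact_message(job, is_fact_scan=False,
                    do_scan_update=lambda: print('fact scan path'),
                    do_host_update=lambda: print('store_facts path'),
                    ack=lambda: print('message acked'))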

View File

@@ -50,13 +50,13 @@ class Migration(migrations.Migration):
         ),
         migrations.AddField(
             model_name='job',
-            name='gather_facts',
-            field=models.BooleanField(default=False),
+            name='store_facts',
+            field=models.BooleanField(default=False, help_text='During a Job run, collect, associate, and persist the most recent per-Host Ansible facts in the ansible_facts namespace.'),
         ),
         migrations.AddField(
             model_name='jobtemplate',
-            name='gather_facts',
-            field=models.BooleanField(default=False),
+            name='store_facts',
+            field=models.BooleanField(default=False, help_text='During a Job run, collect, associate, and persist the most recent per-Host Ansible facts in the ansible_facts namespace.'),
         ),
         migrations.RunSQL([("CREATE INDEX host_ansible_facts_default_gin ON %s USING gin"
                             "(ansible_facts jsonb_path_ops);", [AsIs(Host._meta.db_table)])],

View File

@@ -159,8 +159,9 @@ class JobOptions(BaseModel):
         blank=True,
         default=0,
     )
-    gather_facts = models.BooleanField(
+    store_facts = models.BooleanField(
         default=False,
+        help_text=_('During a Job run, collect, associate, and persist the most recent per-Host Ansible facts in the ansible_facts namespace.'),
     )

     extra_vars_dict = VarsDictProperty('extra_vars', True)
@@ -265,7 +266,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
         'limit', 'verbosity', 'job_tags', 'extra_vars', 'launch_type',
         'force_handlers', 'skip_tags', 'start_at_task', 'become_enabled',
         'labels', 'survey_passwords', 'allow_simultaneous', 'timeout',
-        'gather_facts',]
+        'store_facts',]

     def resource_validation_data(self):
         '''
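
The list above appears to name the JobOptions fields that get copied from a template onto the job it spawns, which is why the rename must land here too. A simplified sketch of that copy pattern (an assumption about how the list is used, not the actual UnifiedJobTemplate implementation):

from types import SimpleNamespace

# Field names copied from the hunk above.
LAUNCH_FIELDS = [
    'limit', 'verbosity', 'job_tags', 'extra_vars', 'launch_type',
    'force_handlers', 'skip_tags', 'start_at_task', 'become_enabled',
    'labels', 'survey_passwords', 'allow_simultaneous', 'timeout',
    'store_facts',
]

def copy_launch_fields(template, job, field_names=LAUNCH_FIELDS):
    """Copy whichever of the listed options the template actually defines."""
    for name in field_names:
        if hasattr(template, name):
            setattr(job, name, getattr(template, name))
    return job

template = SimpleNamespace(store_facts=True, timeout=0, limit='webservers')
job = copy_launch_fields(template, SimpleNamespace())
print(job.store_facts)  # True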

View File

@@ -944,7 +944,7 @@ class RunJob(BaseTask):
             env['ANSIBLE_NET_AUTH_PASS'] = decrypt_field(network_cred, 'authorize_password')

         # Set environment variables related to gathering facts from the cache
-        if job.job_type == PERM_INVENTORY_SCAN or job.gather_facts is True:
+        if job.job_type == PERM_INVENTORY_SCAN or job.store_facts is True:
             env['FACT_QUEUE'] = settings.FACT_QUEUE
             env['ANSIBLE_LIBRARY'] = self.get_path_to('..', 'plugins', 'library')
             env['ANSIBLE_CACHE_PLUGINS'] = self.get_path_to('..', 'plugins', 'fact_caching')
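
So regular run jobs with store_facts enabled now get the same fact-cache environment as scan jobs. A runnable restatement of that conditional, with PERM_INVENTORY_SCAN, the settings object, and the plugin path replaced by stand-ins for illustration:

import os
from types import SimpleNamespace

def fact_cache_env(job, settings, plugin_base):
    """Extra ansible-playbook env vars when fact caching should be active."""
    env = {}
    if job.job_type == 'scan' or job.store_facts is True:  # 'scan' stands in for PERM_INVENTORY_SCAN
        env['FACT_QUEUE'] = settings.FACT_QUEUE
        env['ANSIBLE_LIBRARY'] = os.path.join(plugin_base, 'library')
        env['ANSIBLE_CACHE_PLUGINS'] = os.path.join(plugin_base, 'fact_caching')
    return env

job = SimpleNamespace(job_type='run', store_facts=True)
settings = SimpleNamespace(FACT_QUEUE='fact_queue')  # stub value, not the real setting
print(fact_cache_env(job, settings, '/path/to/awx/plugins'))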

View File

@@ -6,9 +6,6 @@ import pytest
 from datetime import datetime
 import json

-# Mock
-import mock
-
 # Django
 from django.utils import timezone
@@ -16,12 +13,38 @@ from django.utils import timezone
 from awx.main.management.commands.run_fact_cache_receiver import FactBrokerWorker
 from awx.main.models.fact import Fact
 from awx.main.models.inventory import Host
+from awx.main.models.base import PERM_INVENTORY_SCAN
+
+
+@pytest.fixture
+def mock_message(mocker):
+    class Message():
+        def ack(self):
+            pass
+
+    msg = Message()
+    mocker.patch.object(msg, 'ack')
+    return msg
+
+
+@pytest.fixture
+def mock_job_generator(mocker):
+    def fn(store_facts=True, job_type=PERM_INVENTORY_SCAN):
+        class Job():
+            def __init__(self):
+                self.store_facts = store_facts
+                self.job_type = job_type
+
+        job = Job()
+        mocker.patch('awx.main.models.Job.objects.get', return_value=job)
+        return job
+    return fn


 # TODO: Check that timestamp and other attributes are as expected
-def check_process_fact_message_module(fact_returned, data, module_name):
+def check_process_fact_message_module(fact_returned, data, module_name, message):
     date_key = data['date_key']

+    message.ack.assert_called_with()
+
     # Ensure 1, and only 1, fact created
     timestamp = datetime.fromtimestamp(date_key, timezone.utc)
     assert 1 == Fact.objects.all().count()
@@ -42,28 +65,31 @@ def check_process_fact_message_module(fact_returned, data, module_name):
 @pytest.mark.django_db
-def test_process_fact_message_ansible(fact_msg_ansible, monkeypatch_jsonbfield_get_db_prep_save):
+def test_process_fact_message_ansible(fact_msg_ansible, monkeypatch_jsonbfield_get_db_prep_save, mock_message, mock_job_generator):
     receiver = FactBrokerWorker(None)
-    fact_returned = receiver.process_fact_message(fact_msg_ansible, mock.MagicMock())
-    check_process_fact_message_module(fact_returned, fact_msg_ansible, 'ansible')
+    mock_job_generator(store_facts=False, job_type=PERM_INVENTORY_SCAN)
+    fact_returned = receiver.process_fact_message(fact_msg_ansible, mock_message)
+    check_process_fact_message_module(fact_returned, fact_msg_ansible, 'ansible', mock_message)


 @pytest.mark.django_db
-def test_process_fact_message_packages(fact_msg_packages, monkeypatch_jsonbfield_get_db_prep_save):
+def test_process_fact_message_packages(fact_msg_packages, monkeypatch_jsonbfield_get_db_prep_save, mock_message, mock_job_generator):
     receiver = FactBrokerWorker(None)
-    fact_returned = receiver.process_fact_message(fact_msg_packages, mock.MagicMock())
-    check_process_fact_message_module(fact_returned, fact_msg_packages, 'packages')
+    mock_job_generator(store_facts=False, job_type=PERM_INVENTORY_SCAN)
+    fact_returned = receiver.process_fact_message(fact_msg_packages, mock_message)
+    check_process_fact_message_module(fact_returned, fact_msg_packages, 'packages', mock_message)


 @pytest.mark.django_db
-def test_process_fact_message_services(fact_msg_services, monkeypatch_jsonbfield_get_db_prep_save):
+def test_process_fact_message_services(fact_msg_services, monkeypatch_jsonbfield_get_db_prep_save, mock_message, mock_job_generator):
     receiver = FactBrokerWorker(None)
-    fact_returned = receiver.process_fact_message(fact_msg_services, mock.MagicMock())
-    check_process_fact_message_module(fact_returned, fact_msg_services, 'services')
+    mock_job_generator(store_facts=False, job_type=PERM_INVENTORY_SCAN)
+    fact_returned = receiver.process_fact_message(fact_msg_services, mock_message)
+    check_process_fact_message_module(fact_returned, fact_msg_services, 'services', mock_message)


 @pytest.mark.django_db
-def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible, monkeypatch_jsonbfield_get_db_prep_save):
+def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible, monkeypatch_jsonbfield_get_db_prep_save, mock_message, mock_job_generator):
     '''
     We pickypack our fact sending onto the Ansible fact interface.
     The interface is <hostname, facts>. Where facts is a json blob of all the facts.
@@ -72,17 +98,32 @@ def test_process_facts_message_ansible_overwrite(fact_scans, fact_msg_ansible, m
     and just keep the newest version.
     '''
     #epoch = timezone.now()
+    mock_job_generator(store_facts=False, job_type=PERM_INVENTORY_SCAN)
     epoch = datetime.fromtimestamp(fact_msg_ansible['date_key'])
     fact_scans(fact_scans=1, timestamp_epoch=epoch)

     key = 'ansible.overwrite'
     value = 'hello world'

     receiver = FactBrokerWorker(None)
-    receiver.process_fact_message(fact_msg_ansible, mock.MagicMock())
+    receiver.process_fact_message(fact_msg_ansible, mock_message)

     fact_msg_ansible['facts'][key] = value
-    fact_returned = receiver.process_fact_message(fact_msg_ansible, mock.MagicMock())
+    fact_returned = receiver.process_fact_message(fact_msg_ansible, mock_message)

     fact_obj = Fact.objects.get(id=fact_returned.id)
     assert key in fact_obj.facts
     assert fact_msg_ansible['facts'] == (json.loads(fact_obj.facts) if isinstance(fact_obj.facts, unicode) else fact_obj.facts) # TODO: Just make response.data['facts'] when we're only dealing with postgres, or if jsonfields ever fixes this bug
+
+
+@pytest.mark.django_db
+def test_process_fact_store_facts(fact_msg_services, monkeypatch_jsonbfield_get_db_prep_save, mock_message, mock_job_generator):
+    receiver = FactBrokerWorker(None)
+    mock_job_generator(store_facts=True, job_type='run')
+    receiver.process_fact_message(fact_msg_services, mock_message)
+
+    host_obj = Host.objects.get(name=fact_msg_services['host'], inventory__id=fact_msg_services['inventory_id'])
+    assert host_obj is not None
+    assert host_obj.ansible_facts == fact_msg_services['facts']

View File

@@ -83,7 +83,8 @@ def mock_job(mocker):
                       'inventory': mocker.MagicMock(spec=Inventory, id=2), 'force_handlers': False,
                       'limit': None, 'verbosity': None, 'job_tags': None, 'skip_tags': None,
                       'start_at_task': None, 'pk': 1, 'launch_type': 'normal', 'job_template':None,
-                      'created_by': None, 'extra_vars_dict': None, 'project':None, 'playbook': 'test.yml'}
+                      'created_by': None, 'extra_vars_dict': None, 'project':None, 'playbook': 'test.yml',
+                      'store_facts': False,}
     mock_job = mocker.MagicMock(spec=Job, **mock_job_attrs)
     return mock_job