mirror of https://github.com/ansible/awx.git, synced 2026-02-21 21:20:08 -03:30
per-jt most recent fact scan gathering
* Remove the most-recent-fact gathering that was piggybacked on system tracking
* Add explicit, per-job-template configurable fact gathering (usage sketch below)
* Continue to support system tracking
* Fix a bug where we weren't ack()ing system tracking RabbitMQ messages
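Below is a minimal usage sketch of the new flag, assuming the awx.main.models import path and a hypothetical template name; only the gather_facts field itself comes from this commit.

    from awx.main.models import JobTemplate

    # Opt a template in to most-recent-fact gathering (field added by this commit).
    jt = JobTemplate.objects.get(name='Deploy web tier')  # hypothetical template name
    jt.gather_facts = True
    jt.save()
    # Jobs launched from this template inherit the flag, so the fact broker
    # updates each host's ansible_facts as scan results arrive.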
@@ -2088,7 +2088,7 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
         fields = ('*', 'job_type', 'inventory', 'project', 'playbook',
                   'credential', 'cloud_credential', 'network_credential', 'forks', 'limit',
                   'verbosity', 'extra_vars', 'job_tags', 'force_handlers',
-                  'skip_tags', 'start_at_task', 'timeout')
+                  'skip_tags', 'start_at_task', 'timeout', 'gather_facts',)

     def get_related(self, obj):
         res = super(JobOptionsSerializer, self).get_related(obj)

@@ -111,7 +111,7 @@ class FactBrokerWorker(ConsumerMixin):
             analytics_logger.info('Received message with fact data', extra=dict(
                 module_name=module_name, facts_data=facts))
             ret = fact_obj
-        else:
+        if job.gather_facts is True:
             host_obj.update_ansible_facts(module=module_name, facts=facts, timestamp=self.timestamp)

         message.ack()

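The ack() above now runs unconditionally, which is the usual shape of a kombu ConsumerMixin callback. A minimal sketch under that assumption; the queue name, exchange, and broker URL are placeholders, not values from this commit:

    from kombu import Connection, Exchange, Queue
    from kombu.mixins import ConsumerMixin

    class FactConsumer(ConsumerMixin):
        # Placeholder queue/exchange names, not taken from this commit.
        queue = Queue('facts', Exchange('facts', type='direct'), routing_key='facts')

        def __init__(self, connection):
            self.connection = connection

        def get_consumers(self, Consumer, channel):
            return [Consumer(queues=[self.queue],
                             callbacks=[self.on_message],
                             accept=['json'])]

        def on_message(self, body, message):
            try:
                pass  # persist facts here, gated on job.gather_facts as above
            finally:
                message.ack()  # always ack so RabbitMQ does not redeliver the message

    # FactConsumer(Connection('amqp://guest:guest@localhost//')).run()  # placeholder URL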
@@ -48,10 +48,21 @@ class Migration(migrations.Migration):
             name='ansible_facts',
             field=awx.main.fields.JSONBField(default={}, help_text='Arbitrary JSON structure of most recent ansible_facts, per-host.', blank=True),
         ),
+        migrations.AddField(
+            model_name='job',
+            name='gather_facts',
+            field=models.BooleanField(default=False),
+        ),
+        migrations.AddField(
+            model_name='jobtemplate',
+            name='gather_facts',
+            field=models.BooleanField(default=False),
+        ),
         migrations.RunSQL([("CREATE INDEX host_ansible_facts_default_gin ON %s USING gin"
                             "(ansible_facts jsonb_path_ops);", [AsIs(Host._meta.db_table)])],
                           [('DROP INDEX host_ansible_facts_default_gin;', None)]),
+

         # SCM file-based inventories
         migrations.AddField(
             model_name='inventorysource',

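The jsonb_path_ops GIN index in the existing RunSQL step is there to serve JSONB containment queries against the per-host ansible_facts column. A hedged sketch of such a lookup, assuming the field supports Django's __contains lookup (which maps to the @> operator on PostgreSQL); the filter values are illustrative:

    from awx.main.models import Host

    # Containment query served by the host_ansible_facts_default_gin index;
    # the fact key/value below is illustrative.
    centos_hosts = Host.objects.filter(
        ansible_facts__contains={'ansible_distribution': 'CentOS'}
    )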
@@ -7,7 +7,6 @@ from django.db import models
 from django.utils.translation import ugettext_lazy as _

 from awx.main.fields import JSONBField
-from awx.main.models import Host

 __all__ = ('Fact',)

@@ -65,14 +64,6 @@ class Fact(models.Model):

     @staticmethod
     def add_fact(host_id, module, timestamp, facts):
-        try:
-            host = Host.objects.get(id=host_id)
-        except Host.DoesNotExist as e:
-            logger.warn("Host with id %s not found while trying to update latest fact set." % host_id)
-            raise e
-
-        host.update_ansible_facts(module=module, facts=facts, timestamp=timestamp)
-
         fact_obj = Fact.objects.create(host_id=host_id, module=module, timestamp=timestamp, facts=facts)
         fact_obj.save()
         return fact_obj

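With the piggybacked host update removed, add_fact only records the historical Fact row; refreshing the host's most-recent ansible_facts is now an explicit, separate call (gated on gather_facts in the broker hunk above). A minimal sketch of the split, with the imports and the host name as assumptions:

    from django.utils import timezone
    from awx.main.models import Fact, Host

    now = timezone.now()
    host = Host.objects.get(name='db01.example.com')  # hypothetical host
    facts = {'ansible_distribution': 'CentOS'}        # illustrative fact data

    # Historical record: all add_fact does after this commit.
    Fact.add_fact(host_id=host.id, module='ansible', timestamp=now, facts=facts)

    # Most-recent snapshot on the host itself: now a separate, explicit call.
    host.update_ansible_facts(module='ansible', facts=facts, timestamp=now)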
@@ -159,6 +159,9 @@ class JobOptions(BaseModel):
         blank=True,
         default=0,
     )
+    gather_facts = models.BooleanField(
+        default=False,
+    )

     extra_vars_dict = VarsDictProperty('extra_vars', True)

@@ -261,7 +264,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
                   'playbook', 'credential', 'cloud_credential', 'network_credential', 'forks', 'schedule',
                   'limit', 'verbosity', 'job_tags', 'extra_vars', 'launch_type',
                   'force_handlers', 'skip_tags', 'start_at_task', 'become_enabled',
-                  'labels', 'survey_passwords', 'allow_simultaneous', 'timeout']
+                  'labels', 'survey_passwords', 'allow_simultaneous', 'timeout',
+                  'gather_facts',]

     def resource_validation_data(self):
         '''

@@ -944,11 +944,12 @@ class RunJob(BaseTask):
             env['ANSIBLE_NET_AUTH_PASS'] = decrypt_field(network_cred, 'authorize_password')

         # Set environment variables related to gathering facts from the cache
-        env['FACT_QUEUE'] = settings.FACT_QUEUE
-        env['ANSIBLE_LIBRARY'] = self.get_path_to('..', 'plugins', 'library')
-        env['ANSIBLE_CACHE_PLUGINS'] = self.get_path_to('..', 'plugins', 'fact_caching')
-        env['ANSIBLE_CACHE_PLUGIN'] = "tower"
-        env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = "tcp://127.0.0.1:%s" % str(settings.FACT_CACHE_PORT)
+        if job.job_type == PERM_INVENTORY_SCAN or job.gather_facts is True:
+            env['FACT_QUEUE'] = settings.FACT_QUEUE
+            env['ANSIBLE_LIBRARY'] = self.get_path_to('..', 'plugins', 'library')
+            env['ANSIBLE_CACHE_PLUGINS'] = self.get_path_to('..', 'plugins', 'fact_caching')
+            env['ANSIBLE_CACHE_PLUGIN'] = "tower"
+            env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = "tcp://127.0.0.1:%s" % str(settings.FACT_CACHE_PORT)
         return env

     def build_args(self, job, **kwargs):
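The net effect of the last hunk is that an ordinary run job with gather_facts enabled now gets the same fact-cache environment that scan jobs always received. Roughly, the resulting variables look like the sketch below; the queue name, paths, and port are assumptions standing in for the corresponding settings values:

    # Illustrative environment for a gather_facts-enabled job; every value
    # below is an assumption standing in for the corresponding setting.
    env = {
        'FACT_QUEUE': 'tower_fact_queue',                            # settings.FACT_QUEUE
        'ANSIBLE_LIBRARY': '/var/lib/awx/plugins/library',           # self.get_path_to(...)
        'ANSIBLE_CACHE_PLUGINS': '/var/lib/awx/plugins/fact_caching',
        'ANSIBLE_CACHE_PLUGIN': 'tower',
        'ANSIBLE_CACHE_PLUGIN_CONNECTION': 'tcp://127.0.0.1:6564',   # settings.FACT_CACHE_PORT
    }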