Merge pull request #6691 from chrismeyersfsu/enhancement-migrate_scan_job

Migrate scan jobs to use the Tower fact cache instead of the dedicated scan job type
This commit is contained in:
Chris Meyers 2017-07-05 15:58:51 -04:00 committed by GitHub
commit 958aaffa62
16 changed files with 197 additions and 161 deletions

View File

@ -1147,7 +1147,6 @@ class InventorySerializer(BaseSerializerWithVariables):
update_inventory_sources = self.reverse('api:inventory_inventory_sources_update', kwargs={'pk': obj.pk}),
activity_stream = self.reverse('api:inventory_activity_stream_list', kwargs={'pk': obj.pk}),
job_templates = self.reverse('api:inventory_job_template_list', kwargs={'pk': obj.pk}),
scan_job_templates = self.reverse('api:inventory_scan_job_template_list', kwargs={'pk': obj.pk}),
ad_hoc_commands = self.reverse('api:inventory_ad_hoc_commands_list', kwargs={'pk': obj.pk}),
access_list = self.reverse('api:inventory_access_list', kwargs={'pk': obj.pk}),
object_roles = self.reverse('api:inventory_object_roles_list', kwargs={'pk': obj.pk}),
@ -2347,8 +2346,7 @@ class JobOptionsSerializer(LabelsListMixin, BaseSerializer):
if 'project' in self.fields and 'playbook' in self.fields:
project = attrs.get('project', self.instance and self.instance.project or None)
playbook = attrs.get('playbook', self.instance and self.instance.playbook or '')
job_type = attrs.get('job_type', self.instance and self.instance.job_type or None)
if not project and job_type != PERM_INVENTORY_SCAN:
if not project:
raise serializers.ValidationError({'project': _('This field is required.')})
if project and project.scm_type and playbook and force_text(playbook) not in project.playbook_files:
raise serializers.ValidationError({'playbook': _('Playbook not found for project.')})
@ -2419,26 +2417,18 @@ class JobTemplateSerializer(JobTemplateMixin, UnifiedJobTemplateSerializer, JobO
def get_field_from_model_or_attrs(fd):
return attrs.get(fd, self.instance and getattr(self.instance, fd) or None)
survey_enabled = get_field_from_model_or_attrs('survey_enabled')
job_type = get_field_from_model_or_attrs('job_type')
inventory = get_field_from_model_or_attrs('inventory')
credential = get_field_from_model_or_attrs('credential')
project = get_field_from_model_or_attrs('project')
prompting_error_message = _("Must either set a default value or ask to prompt on launch.")
if job_type == "scan":
if inventory is None or attrs.get('ask_inventory_on_launch', False):
raise serializers.ValidationError({'inventory': _('Scan jobs must be assigned a fixed inventory.')})
elif project is None:
if project is None:
raise serializers.ValidationError({'project': _("Job types 'run' and 'check' must have assigned a project.")})
elif credential is None and not get_field_from_model_or_attrs('ask_credential_on_launch'):
raise serializers.ValidationError({'credential': prompting_error_message})
elif inventory is None and not get_field_from_model_or_attrs('ask_inventory_on_launch'):
raise serializers.ValidationError({'inventory': prompting_error_message})
if survey_enabled and job_type == PERM_INVENTORY_SCAN:
raise serializers.ValidationError({'survey_enabled': _('Survey Enabled cannot be used with scan jobs.')})
return super(JobTemplateSerializer, self).validate(attrs)
def validate_extra_vars(self, value):
@ -2581,7 +2571,7 @@ class JobRelaunchSerializer(JobSerializer):
obj = self.context.get('obj')
if not obj.credential:
raise serializers.ValidationError(dict(credential=[_("Credential not found or deleted.")]))
if obj.job_type != PERM_INVENTORY_SCAN and obj.project is None:
if obj.project is None:
raise serializers.ValidationError(dict(errors=[_("Job Template Project is missing or undefined.")]))
if obj.inventory is None:
raise serializers.ValidationError(dict(errors=[_("Job Template Inventory is missing or undefined.")]))

View File

@ -97,7 +97,6 @@ inventory_urls = patterns('awx.api.views',
url(r'^(?P<pk>[0-9]+)/update_inventory_sources/$', 'inventory_inventory_sources_update'),
url(r'^(?P<pk>[0-9]+)/activity_stream/$', 'inventory_activity_stream_list'),
url(r'^(?P<pk>[0-9]+)/job_templates/$', 'inventory_job_template_list'),
url(r'^(?P<pk>[0-9]+)/scan_job_templates/$', 'inventory_scan_job_template_list'),
url(r'^(?P<pk>[0-9]+)/ad_hoc_commands/$', 'inventory_ad_hoc_commands_list'),
url(r'^(?P<pk>[0-9]+)/access_list/$', 'inventory_access_list'),
url(r'^(?P<pk>[0-9]+)/object_roles/$', 'inventory_object_roles_list'),

View File

@ -858,10 +858,8 @@ class OrganizationDetail(RetrieveUpdateDestroyAPIView):
organization__id=org_id).count()
org_counts['projects'] = Project.accessible_objects(**access_kwargs).filter(
organization__id=org_id).count()
org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).exclude(
job_type='scan').filter(project__organization__id=org_id).count()
org_counts['job_templates'] += JobTemplate.accessible_objects(**access_kwargs).filter(
job_type='scan').filter(inventory__organization__id=org_id).count()
org_counts['job_templates'] = JobTemplate.accessible_objects(**access_kwargs).filter(
project__organization__id=org_id).count()
full_context['related_field_counts'] = {}
full_context['related_field_counts'][org_id] = org_counts
@ -1907,21 +1905,6 @@ class InventoryJobTemplateList(SubListAPIView):
return qs.filter(inventory=parent)
class InventoryScanJobTemplateList(SubListAPIView):
model = JobTemplate
serializer_class = JobTemplateSerializer
parent_model = Inventory
relationship = 'jobtemplates'
new_in_220 = True
def get_queryset(self):
parent = self.get_parent_object()
self.check_parent_access(parent)
qs = self.request.user.get_queryset(self.model)
return qs.filter(job_type=PERM_INVENTORY_SCAN, inventory=parent)
class HostList(ListCreateAPIView):
always_allow_superuser = False

View File

@ -1141,9 +1141,6 @@ class JobTemplateAccess(BaseAccess):
# if reference_obj is provided, determine if it can be copied
reference_obj = data.get('reference_obj', None)
if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:
self.check_license(feature='system_tracking')
if 'survey_enabled' in data and data['survey_enabled']:
self.check_license(feature='surveys')
@ -1175,11 +1172,6 @@ class JobTemplateAccess(BaseAccess):
return False
project = get_value(Project, 'project')
if 'job_type' in data and data['job_type'] == PERM_INVENTORY_SCAN:
if not inventory:
return False
elif not project:
return True
# If the user has admin access to the project (as an org admin), should
# be able to proceed without additional checks.
if project:
@ -1194,8 +1186,6 @@ class JobTemplateAccess(BaseAccess):
# Check license.
if validate_license:
self.check_license()
if obj.job_type == PERM_INVENTORY_SCAN:
self.check_license(feature='system_tracking')
if obj.survey_enabled:
self.check_license(feature='surveys')
if Instance.objects.active_count() > 1:
@ -1205,12 +1195,6 @@ class JobTemplateAccess(BaseAccess):
if self.user.is_superuser:
return True
if obj.job_type == PERM_INVENTORY_SCAN:
# Scan job with default project, must have JT execute or be org admin
if obj.project is None and obj.inventory:
return (self.user in obj.execute_role or
self.user in obj.inventory.organization.admin_role)
return self.user in obj.execute_role
def can_change(self, obj, data):
@ -1221,9 +1205,6 @@ class JobTemplateAccess(BaseAccess):
data = dict(data)
if self.changes_are_non_sensitive(obj, data):
if 'job_type' in data and obj.job_type != data['job_type'] and data['job_type'] == PERM_INVENTORY_SCAN:
self.check_license(feature='system_tracking')
if 'survey_enabled' in data and obj.survey_enabled != data['survey_enabled'] and data['survey_enabled']:
self.check_license(feature='surveys')
return True

View File

@ -9,6 +9,7 @@ from django.db import migrations
from awx.main.migrations import _inventory_source as invsrc
from awx.main.migrations import _migration_utils as migration_utils
from awx.main.migrations import _reencrypt
from awx.main.migrations import _scan_jobs
class Migration(migrations.Migration):
@ -24,4 +25,5 @@ class Migration(migrations.Migration):
migrations.RunPython(invsrc.remove_inventory_source_with_no_inventory_link),
migrations.RunPython(invsrc.rename_inventory_sources),
migrations.RunPython(_reencrypt.replace_aesecb_fernet),
migrations.RunPython(_scan_jobs.migrate_scan_job_templates),
]

View File

@ -0,0 +1,68 @@
import logging
from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
logger = logging.getLogger('awx.main.migrations')
def _create_fact_scan_project(Project, org):
name = "Tower Fact Scan - {}".format(org.name if org else "No Organization")
proj = Project(name=name,
scm_url='https://github.com/ansible/tower-fact-modules',
scm_type='git',
scm_update_on_launch=True,
scm_update_cache_timeout=86400,
organization=org)
proj.save(skip_update=True)
return proj
def _create_fact_scan_projects(Project, orgs):
    """Return a dict mapping each organization's id to a newly created fact-scan project."""
    projects_by_org_id = {}
    for org in orgs:
        projects_by_org_id[org.id] = _create_fact_scan_project(Project, org)
    return projects_by_org_id
def _get_tower_scan_job_templates(JobTemplate):
    """Scan-type job templates with no project, i.e. those using Tower's built-in scan playbook.

    Prefetches the related inventory organization for the migration loop.
    """
    qs = JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN,
                                    project__isnull=True)
    return qs.prefetch_related('inventory__organization')
def _get_orgs(Organization, job_template_ids):
return Organization.objects.filter(inventories__jobtemplates__in=job_template_ids).distinct()
def _migrate_scan_job_templates(apps):
    """Convert legacy scan job templates into deploy-type templates with fact caching.

    Two cases are handled:

    * Scan JTs that already use a custom project keep that project and are
      flipped in bulk to the deploy job type with ``use_fact_cache`` enabled.
    * Scan JTs using Tower's built-in scan playbook are pointed at a new
      per-organization project cloning the public ``tower-fact-modules``
      repository, or at a shared "No Organization" project when no
      organization can be derived through the related inventory.
    """
    Organization = apps.get_model('main', 'Organization')
    Project = apps.get_model('main', 'Project')
    JobTemplate = apps.get_model('main', 'JobTemplate')

    project_no_org = None

    # A scan job template with a custom project will retain the custom project.
    JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN,
                               project__isnull=False).update(use_fact_cache=True,
                                                             job_type=PERM_INVENTORY_DEPLOY)

    # Scan job templates using Tower's default scan playbook will now point at
    # the same playbook but in a github repo.
    jts = _get_tower_scan_job_templates(JobTemplate)
    if jts.count() == 0:
        return

    # NOTE: do not bail out when no organizations are found. Job templates
    # whose inventory (and therefore organization) is missing still need to
    # be migrated via the shared "No Organization" project in the loop below;
    # an empty org->project map is handled by the else branch.
    orgs = _get_orgs(Organization, jts.values_list('id'))
    org_proj_map = _create_fact_scan_projects(Project, orgs)

    for jt in jts:
        if jt.inventory and jt.inventory.organization:
            jt.project = org_proj_map[jt.inventory.organization.id]
        else:
            # Job Templates without an Organization (via the related Inventory)
            if not project_no_org:
                project_no_org = _create_fact_scan_project(Project, None)
            jt.project = project_no_org
        jt.job_type = PERM_INVENTORY_DEPLOY
        jt.use_fact_cache = True
        jt.save()
def migrate_scan_job_templates(apps, schema_editor):
    """Entry point for ``migrations.RunPython``; ``schema_editor`` is unused."""
    _migrate_scan_job_templates(apps)

View File

@ -8,6 +8,7 @@ import hmac
import logging
import time
import json
import base64
from urlparse import urljoin
# Django
@ -308,10 +309,7 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
validation_errors['credential'] = [_("Job Template must provide 'credential' or allow prompting for it."),]
# Job type dependent checks
if self.job_type == PERM_INVENTORY_SCAN:
if self.inventory is None or self.ask_inventory_on_launch:
validation_errors['inventory'] = [_("Scan jobs must be assigned a fixed inventory."),]
elif self.project is None:
if self.project is None:
resources_needed_to_start.append('project')
validation_errors['project'] = [_("Job types 'run' and 'check' must have assigned a project."),]
@ -407,12 +405,8 @@ class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, Resour
"""
errors = {}
if 'job_type' in data and self.ask_job_type_on_launch:
if ((self.job_type == PERM_INVENTORY_SCAN and not data['job_type'] == PERM_INVENTORY_SCAN) or
(data['job_type'] == PERM_INVENTORY_SCAN and not self.job_type == PERM_INVENTORY_SCAN)):
if data['job_type'] == PERM_INVENTORY_SCAN and not self.job_type == PERM_INVENTORY_SCAN:
errors['job_type'] = _('Cannot override job_type to or from a scan job.')
if (self.job_type == PERM_INVENTORY_SCAN and ('inventory' in data) and self.ask_inventory_on_launch and
self.inventory != data['inventory']):
errors['inventory'] = _('Inventory cannot be changed at runtime for scan jobs.')
return errors
@property
@ -647,8 +641,6 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
return data
def _resources_sufficient_for_launch(self):
if self.job_type == PERM_INVENTORY_SCAN:
return self.inventory_id is not None
return not (self.inventory_id is None or self.project_id is None)
def display_artifacts(self):
@ -714,10 +706,10 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin):
return '{}'.format(self.inventory.id)
def memcached_fact_host_key(self, host_name):
return '{}-{}'.format(self.inventory.id, host_name)
return '{}-{}'.format(self.inventory.id, base64.b64encode(host_name))
def memcached_fact_modified_key(self, host_name):
return '{}-{}-modified'.format(self.inventory.id, host_name)
return '{}-{}-modified'.format(self.inventory.id, base64.b64encode(host_name))
def _get_inventory_hosts(self, only=['name', 'ansible_facts', 'modified',]):
return self.inventory.hosts.only(*only)

View File

@ -1060,18 +1060,13 @@ class RunJob(BaseTask):
args.extend(['-e', json.dumps(extra_vars)])
# Add path to playbook (relative to project.local_path).
if job.project is None and job.job_type == PERM_INVENTORY_SCAN:
args.append("scan_facts.yml")
else:
args.append(job.playbook)
args.append(job.playbook)
return args
def build_safe_args(self, job, **kwargs):
return self.build_args(job, display=True, **kwargs)
def build_cwd(self, job, **kwargs):
if job.project is None and job.job_type == PERM_INVENTORY_SCAN:
return self.get_path_to('..', 'playbooks')
cwd = job.project.get_project_path()
if not cwd:
root = settings.PROJECTS_ROOT

View File

@ -76,14 +76,6 @@ def job_template_prompts_null(project):
)
@pytest.fixture
def bad_scan_JT(job_template_prompts):
job_template = job_template_prompts(True)
job_template.job_type = 'scan'
job_template.save()
return job_template
# End of setup, tests start here
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
@ -259,18 +251,6 @@ def test_job_block_scan_job_type_change(job_template_prompts, post, admin_user):
assert 'job_type' in response.data
@pytest.mark.django_db
@pytest.mark.job_runtime_vars
def test_job_block_scan_job_inv_change(mocker, bad_scan_JT, runtime_data, post, admin_user):
# Assure that giving a new inventory for a scan job blocks the launch
with mocker.patch('awx.main.access.BaseAccess.check_license'):
response = post(reverse('api:job_template_launch', kwargs={'pk': bad_scan_JT.pk}),
dict(inventory=runtime_data['inventory']), admin_user,
expect=400)
assert 'inventory' in response.data
@pytest.mark.django_db
def test_job_launch_JT_with_validation(machine_credential, deploy_jobtemplate):
deploy_jobtemplate.extra_vars = '{"job_template_var": 3}'

View File

@ -1,7 +1,7 @@
import pytest
# AWX
from awx.api.serializers import JobTemplateSerializer, JobLaunchSerializer
from awx.api.serializers import JobTemplateSerializer
from awx.api.versioning import reverse
from awx.main.models.jobs import Job
from awx.main.migrations import _save_password_keys as save_password_keys
@ -387,7 +387,6 @@ def test_edit_nonsenstive(patch, job_template_factory, alice):
'ask_inventory_on_launch':True,
'ask_credential_on_launch': True,
}, alice, expect=200)
print(res.data)
assert res.data['name'] == 'updated'
@ -430,48 +429,6 @@ def test_jt_admin_copy_edit_functional(jt_copy_edit, rando, get, post):
assert post_response.status_code == 403
@pytest.mark.django_db
def test_scan_jt_no_inventory(job_template_factory):
# A user should be able to create a scan job without a project, but an inventory is required
objects = job_template_factory('jt',
credential='c',
job_type="scan",
project='p',
inventory='i',
organization='o')
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
"project": None, "inventory": objects.inventory.pk})
assert serializer.is_valid()
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
"project": None, "inventory": None})
assert not serializer.is_valid()
assert "inventory" in serializer.errors
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
"project": None, "inventory": None,
"ask_inventory_on_launch": True})
assert not serializer.is_valid()
assert "inventory" in serializer.errors
# A user shouldn't be able to launch a scan job template which is missing an inventory
obj_jt = objects.job_template
obj_jt.inventory = None
serializer = JobLaunchSerializer(instance=obj_jt,
context={'obj': obj_jt,
"data": {}},
data={})
assert not serializer.is_valid()
assert 'inventory' in serializer.errors
@pytest.mark.django_db
def test_scan_jt_surveys(inventory):
serializer = JobTemplateSerializer(data={"name": "Test", "job_type": "scan",
"project": None, "inventory": inventory.pk,
"survey_enabled": True})
assert not serializer.is_valid()
assert "survey_enabled" in serializer.errors
@pytest.mark.django_db
def test_launch_with_pending_deletion_inventory(get, post, organization_factory,
job_template_factory, machine_credential,
@ -641,9 +598,6 @@ def test_jt_without_project(inventory):
serializer = JobTemplateSerializer(data=data)
assert not serializer.is_valid()
assert "project" in serializer.errors
data["job_type"] = "scan"
serializer = JobTemplateSerializer(data=data)
assert serializer.is_valid()
@pytest.mark.django_db

View File

@ -163,12 +163,7 @@ def test_two_organizations(resourced_organization, organizations, user, get):
@pytest.mark.django_db
def test_scan_JT_counted(resourced_organization, user, get):
admin_user = user('admin', True)
# Add a scan job template to the org
resourced_organization.projects.all()[0].jobtemplates.create(
job_type='scan', inventory=resourced_organization.inventories.all()[0],
name='scan-job-template')
counts_dict = COUNTS_PRIMES
counts_dict['job_templates'] += 1
# Test list view
list_response = get(reverse('api:organization_list'), admin_user)
@ -184,7 +179,7 @@ def test_scan_JT_counted(resourced_organization, user, get):
@pytest.mark.django_db
def test_JT_not_double_counted(resourced_organization, user, get):
admin_user = user('admin', True)
# Add a scan job template to the org
# Add a run job template to the org
resourced_organization.projects.all()[0].jobtemplates.create(
job_type='run',
inventory=resourced_organization.inventories.all()[0],

View File

@ -0,0 +1,100 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Ansible, Inc.
# All Rights Reserved.
import pytest
from django.apps import apps
from awx.main.models.base import PERM_INVENTORY_SCAN, PERM_INVENTORY_DEPLOY
from awx.main.models import (
JobTemplate,
Project,
Inventory,
Organization,
)
from awx.main.migrations._scan_jobs import _migrate_scan_job_templates
@pytest.fixture
def organizations():
    """Three organizations named org-0 through org-2."""
    created = []
    for index in range(3):
        created.append(Organization.objects.create(name="org-{}".format(index)))
    return created
@pytest.fixture
def inventories(organizations):
    """One inventory per organization (inv-N belongs to org-N)."""
    created = []
    for index, org in enumerate(organizations):
        created.append(Inventory.objects.create(name="inv-{}".format(index),
                                                organization=org))
    return created
@pytest.fixture
def job_templates_scan(inventories):
    """Scan-type job templates without a project, one per inventory."""
    templates = []
    for index, inv in enumerate(inventories):
        templates.append(JobTemplate.objects.create(name="jt-scan-{}".format(index),
                                                    job_type=PERM_INVENTORY_SCAN,
                                                    inventory=inv))
    return templates
@pytest.fixture
def job_templates_deploy(inventories):
    """Deploy-type job templates, one per inventory; must be untouched by the migration."""
    templates = []
    for index, inv in enumerate(inventories):
        templates.append(JobTemplate.objects.create(name="jt-deploy-{}".format(index),
                                                    job_type=PERM_INVENTORY_DEPLOY,
                                                    inventory=inv))
    return templates
@pytest.fixture
def project_custom(organizations):
    """A user-defined project attached to the first organization."""
    custom = Project.objects.create(name="proj-scan_custom",
                                    scm_url='https://giggity.com',
                                    organization=organizations[0])
    return custom
@pytest.fixture
def job_templates_custom_scan_project(project_custom):
    """Scan job templates that already point at a custom project."""
    templates = []
    for index in range(3):
        templates.append(JobTemplate.objects.create(name="jt-scan-custom-{}".format(index),
                                                    project=project_custom,
                                                    job_type=PERM_INVENTORY_SCAN))
    return templates
@pytest.fixture
def job_template_scan_no_org():
    """A scan job template with no inventory, hence no derivable organization."""
    jt = JobTemplate.objects.create(name="jt-scan-no-org",
                                    job_type=PERM_INVENTORY_SCAN)
    return jt
@pytest.mark.django_db
def test_scan_jobs_migration(job_templates_scan, job_templates_deploy,
                             job_templates_custom_scan_project, project_custom,
                             job_template_scan_no_org):
    """End-to-end check of the scan-job -> fact-cache migration logic."""
    _migrate_scan_job_templates(apps)

    # Ensure there are no scan job templates after the migration
    assert 0 == JobTemplate.objects.filter(job_type=PERM_INVENTORY_SCAN).count()

    # Ensure the special No Organization project was created and is
    # associated with the org-less job template.
    proj = Project.objects.get(name="Tower Fact Scan - No Organization")
    assert proj.id == JobTemplate.objects.get(id=job_template_scan_no_org.id).project.id

    # Ensure per-org projects were created (3 orgs + the No Organization project)
    projs = Project.objects.filter(name__startswith="Tower Fact Scan")
    assert projs.count() == 4

    # Ensure scan job templates with Tower's default project are migrated.
    # Look each expected project up by name rather than indexing `projs`:
    # unordered queryset indexing is database-dependent and the
    # "No Organization" project would break any positional assumption.
    for jt_old in job_templates_scan:
        jt = JobTemplate.objects.get(id=jt_old.id)
        assert PERM_INVENTORY_DEPLOY == jt.job_type
        assert jt.use_fact_cache is True
        expected_name = "Tower Fact Scan - {}".format(jt.inventory.organization.name)
        assert jt.project == Project.objects.get(name=expected_name)

    # Ensure scan job templates with custom projects are migrated
    for jt_old in job_templates_custom_scan_project:
        jt = JobTemplate.objects.get(id=jt_old.id)
        assert PERM_INVENTORY_DEPLOY == jt.job_type
        assert jt.use_fact_cache is True
        assert project_custom == jt.project

    # Ensure other job templates aren't touched
    for jt_old in job_templates_deploy:
        jt = JobTemplate.objects.get(id=jt_old.id)
        assert PERM_INVENTORY_DEPLOY == jt.job_type
        assert jt.project is None

View File

@ -8,6 +8,7 @@ from awx.main.models import (
import datetime
import json
import base64
from dateutil.tz import tzutc
@ -89,8 +90,8 @@ def test_start_job_fact_cache(hosts, job, inventory, mocker):
job._get_memcache_connection().set.assert_any_call('5', [h.name for h in hosts])
for host in hosts:
job._get_memcache_connection().set.assert_any_call('{}-{}'.format(5, host.name), json.dumps(host.ansible_facts))
job._get_memcache_connection().set.assert_any_call('{}-{}-modified'.format(5, host.name), host.ansible_facts_modified.isoformat())
job._get_memcache_connection().set.assert_any_call('{}-{}'.format(5, base64.b64encode(host.name)), json.dumps(host.ansible_facts))
job._get_memcache_connection().set.assert_any_call('{}-{}-modified'.format(5, base64.b64encode(host.name)), host.ansible_facts_modified.isoformat())
def test_start_job_fact_cache_existing_host(hosts, hosts2, job, job2, inventory, mocker):
@ -98,15 +99,15 @@ def test_start_job_fact_cache_existing_host(hosts, hosts2, job, job2, inventory,
job.start_job_fact_cache()
for host in hosts:
job._get_memcache_connection().set.assert_any_call('{}-{}'.format(5, host.name), json.dumps(host.ansible_facts))
job._get_memcache_connection().set.assert_any_call('{}-{}-modified'.format(5, host.name), host.ansible_facts_modified.isoformat())
job._get_memcache_connection().set.assert_any_call('{}-{}'.format(5, base64.b64encode(host.name)), json.dumps(host.ansible_facts))
job._get_memcache_connection().set.assert_any_call('{}-{}-modified'.format(5, base64.b64encode(host.name)), host.ansible_facts_modified.isoformat())
job._get_memcache_connection().set.reset_mock()
job2.start_job_fact_cache()
# Ensure hosts2 ansible_facts didn't overwrite hosts ansible_facts
ansible_facts_cached = job._get_memcache_connection().get('{}-{}'.format(5, hosts2[0].name))
ansible_facts_cached = job._get_memcache_connection().get('{}-{}'.format(5, base64.b64encode(hosts2[0].name)))
assert ansible_facts_cached == json.dumps(hosts[1].ansible_facts)

View File

@ -242,11 +242,3 @@ class TestWorkflowWarnings:
assert 'credential' in job_node_with_prompts.get_prompts_warnings()['ignored']
assert len(job_node_with_prompts.get_prompts_warnings()['ignored']) == 2
def test_warn_scan_errors_node_prompts(self, job_node_with_prompts):
job_node_with_prompts.unified_job_template.job_type = 'scan'
job_node_with_prompts.char_prompts['job_type'] = 'run'
job_node_with_prompts.inventory = Inventory(name='different-inventory', pk=23)
assert 'ignored' in job_node_with_prompts.get_prompts_warnings()
assert 'job_type' in job_node_with_prompts.get_prompts_warnings()['ignored']
assert 'inventory' in job_node_with_prompts.get_prompts_warnings()['ignored']
assert len(job_node_with_prompts.get_prompts_warnings()['ignored']) == 2

View File

@ -33,6 +33,7 @@ import os
import memcache
import json
import datetime
import base64
from dateutil import parser
from dateutil.tz import tzutc
@ -56,10 +57,10 @@ class CacheModule(BaseCacheModule):
return '{}'.format(self._inventory_id)
def translate_host_key(self, host_name):
return '{}-{}'.format(self._inventory_id, host_name)
return '{}-{}'.format(self._inventory_id, base64.b64encode(host_name))
def translate_modified_key(self, host_name):
return '{}-{}-modified'.format(self._inventory_id, host_name)
return '{}-{}-modified'.format(self._inventory_id, base64.b64encode(host_name))
def get(self, key):
host_key = self.translate_host_key(key)
@ -104,8 +105,7 @@ class CacheModule(BaseCacheModule):
return False
def delete(self, key):
self.mc.delete(self.translate_host_key(key))
self.mc.delete(self.translate_modified_key(key))
self.set(key, {})
def flush(self):
host_names = self.mc.get(self.host_names_key)

View File

@ -17,3 +17,7 @@ Tower will always inject the host `ansible_facts` into memcached. The Ansible To
## Tower Fact Logging
New and changed facts will be logged via Tower's logging facility. Specifically, to the `system_tracking` namespace or logger. The logging payload will include the fields: `host_name`, `inventory_id`, and `ansible_facts`. Where `ansible_facts` is a dictionary of all ansible facts for `host_name` in Tower Inventory `inventory_id`.
## Integration Testing
* ensure `clear_facts` sets `hosts/<id>/ansible_facts` to `{}`
* ensure that `gather_facts: False` does NOT result in clearing existing facts
* ensure that when a host fact timeout is reached, the facts are not used from the cache