Mirror of https://github.com/ansible/awx.git (synced 2026-01-12 18:40:01 -03:30)
Initial Scan Job Template type implementation
commit 4d271bef35 (parent f6faff5b14)
@@ -1304,11 +1304,19 @@ class JobOptionsSerializer(BaseSerializer):
            ret['cloud_credential'] = None
        return ret

    def validate_project(self, attrs, source):
        project = attrs.get('project', None)
        if not project and attrs.get('job_type') != PERM_INVENTORY_SCAN:
            raise serializers.ValidationError("This field is required")
        return attrs

    def validate_playbook(self, attrs, source):
        project = attrs.get('project', None)
        playbook = attrs.get('playbook', '')
        if project and playbook and smart_str(playbook) not in project.playbooks:
            raise serializers.ValidationError('Playbook not found for project')
        if project and not playbook:
            raise serializers.ValidationError('Must select playbook for project')
        return attrs
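
For context on what the new validate_project hook permits, here is a rough client-side sketch: a scan-type job template can now be created without a project, while run/check templates still require one. The host, token, and object IDs below are placeholders, and the snippet is illustrative rather than part of this commit.

# Hypothetical client-side illustration: creating a scan job template with no
# project, which validate_project() now allows. Host, token, and IDs are placeholders.
import requests

TOWER_URL = 'https://tower.example.com'                 # placeholder host
HEADERS = {'Authorization': 'Token <redacted>'}         # placeholder auth token

payload = {
    'name': 'Nightly fact scan',
    'job_type': 'scan',      # PERM_INVENTORY_SCAN; 'project' may now be omitted
    'inventory': 1,          # placeholder inventory id
    'credential': 2,         # placeholder credential id
}
resp = requests.post(TOWER_URL + '/api/v1/job_templates/', json=payload, headers=HEADERS)
print(resp.status_code, resp.json())
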
@@ -1470,14 +1478,12 @@ class SystemJobSerializer(UnifiedJobSerializer):
                               args=(obj.system_job_template.pk,))
        return res


class JobListSerializer(JobSerializer, UnifiedJobListSerializer):
    pass

class SystemJobListSerializer(SystemJobSerializer, UnifiedJobListSerializer):
    pass


class JobHostSummarySerializer(BaseSerializer):

    class Meta:

@@ -1451,7 +1451,7 @@ class JobTemplateLaunch(GenericAPIView):
                            status=status.HTTP_400_BAD_REQUEST)
        if obj.credential is None and ('credential' not in request.DATA and 'credential_id' not in request.DATA):
            return Response(dict(errors="Credential not provided"), status=status.HTTP_400_BAD_REQUEST)
        if obj.project is None or not obj.project.active:
        if obj.job_type != PERM_INVENTORY_SCAN and (obj.project is None or not obj.project.active):
            return Response(dict(errors="Job Template Project is missing or undefined"), status=status.HTTP_400_BAD_REQUEST)
        if obj.inventory is None or not obj.inventory.active:
            return Response(dict(errors="Job Template Inventory is missing or undefined"), status=status.HTTP_400_BAD_REQUEST)
@@ -27,7 +27,7 @@ __all__ = ['VarsDictProperty', 'BaseModel', 'CreatedModifiedModel',
           'PasswordFieldsModel', 'PrimordialModel', 'CommonModel',
           'CommonModelNameNotUnique',
           'PERM_INVENTORY_ADMIN', 'PERM_INVENTORY_READ',
           'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY',
           'PERM_INVENTORY_WRITE', 'PERM_INVENTORY_DEPLOY', 'PERM_INVENTORY_SCAN',
           'PERM_INVENTORY_CHECK', 'PERM_JOBTEMPLATE_CREATE', 'JOB_TYPE_CHOICES',
           'PERMISSION_TYPE_CHOICES', 'CLOUD_INVENTORY_SOURCES']

@@ -36,11 +36,13 @@ PERM_INVENTORY_READ = 'read'
PERM_INVENTORY_WRITE = 'write'
PERM_INVENTORY_DEPLOY = 'run'
PERM_INVENTORY_CHECK = 'check'
PERM_INVENTORY_SCAN = 'scan'
PERM_JOBTEMPLATE_CREATE = 'create'

JOB_TYPE_CHOICES = [
    (PERM_INVENTORY_DEPLOY, _('Run')),
    (PERM_INVENTORY_CHECK, _('Check')),
    (PERM_INVENTORY_SCAN, _('Scan')),
]

PERMISSION_TYPE_CHOICES = [

@@ -49,6 +51,7 @@ PERMISSION_TYPE_CHOICES = [
    (PERM_INVENTORY_ADMIN, _('Administrate Inventory')),
    (PERM_INVENTORY_DEPLOY, _('Deploy To Inventory')),
    (PERM_INVENTORY_CHECK, _('Deploy To Inventory (Dry Run)')),
    (PERM_INVENTORY_SCAN, _('Scan an Inventory')),
    (PERM_JOBTEMPLATE_CREATE, _('Create a Job Template')),
]
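
As a quick illustration of how the new 'scan' entry behaves in Django-style choices lookups (translation wrappers dropped and values hard-coded; a sketch, not code from this commit):

# Sketch: the new scan type as it appears in choices-based lookups.
PERM_INVENTORY_SCAN = 'scan'
JOB_TYPE_CHOICES = [('run', 'Run'), ('check', 'Check'), (PERM_INVENTORY_SCAN, 'Scan')]

labels = dict(JOB_TYPE_CHOICES)
assert labels[PERM_INVENTORY_SCAN] == 'Scan'   # what get_job_type_display() would show
assert PERM_INVENTORY_SCAN in labels           # 'scan' is now a valid job_type value
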
@@ -52,11 +52,14 @@ class JobOptions(BaseModel):
        'Project',
        related_name='%(class)ss',
        null=True,
        default=None,
        blank=True,
        on_delete=models.SET_NULL,
    )
    playbook = models.CharField(
        max_length=1024,
        default='',
        blank=True,
    )
    credential = models.ForeignKey(
        'Credential',

@@ -142,7 +145,6 @@ class JobOptions(BaseModel):
                needed.append(pw)
        return needed


class JobTemplate(UnifiedJobTemplate, JobOptions):
    '''
    A job template is a reusable job definition for applying a project (with

@@ -1016,7 +1018,6 @@ class SystemJob(UnifiedJob, SystemJobOptions):
    def is_blocked_by(self, obj):
        return True


    def handle_extra_data(self, extra_data):
        extra_vars = {}
        if type(extra_data) == dict:

@@ -1037,3 +1038,4 @@ class SystemJob(UnifiedJob, SystemJobOptions):
    @property
    def task_impact(self):
        return 150

@@ -550,10 +550,6 @@ class RunJob(BaseTask):
        env['JOB_ID'] = str(job.pk)
        env['INVENTORY_ID'] = str(job.inventory.pk)
        env['ANSIBLE_CALLBACK_PLUGINS'] = plugin_dir
        # TODO: env['ANSIBLE_LIBRARY'] # plugins/library
        # TODO: env['ANSIBLE_CACHE_PLUGINS'] # plugins/fact_caching
        # TODD: env['ANSIBLE_CACHE_PLUGIN'] # tower
        # TODO: env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] # connection to tower service
        env['REST_API_URL'] = settings.INTERNAL_API_URL
        env['REST_API_TOKEN'] = job.task_auth_token or ''
        env['CALLBACK_CONSUMER_PORT'] = str(settings.CALLBACK_CONSUMER_PORT)

@@ -598,6 +594,12 @@
            env['VMWARE_PASSWORD'] = decrypt_field(cloud_cred, 'password')
            env['VMWARE_HOST'] = cloud_cred.host

        # Set environment variables related to scan jobs
        if job.job_type == PERM_INVENTORY_SCAN:
            env['ANSIBLE_LIBRARY'] = self.get_path_to('..', 'plugins', 'library')
            env['ANSIBLE_CACHE_PLUGINS'] = self.get_path_to('..', 'plugins', 'fact_caching')
            env['ANSIBLE_CACHE_PLUGIN'] = "tower"
            env['ANSIBLE_CACHE_PLUGIN_CONNECTION'] = "tcp://127.0.0.1:%s" % str(settings.FACT_CACHE_PORT)
        return env

    def build_args(self, job, **kwargs):
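
The scan-job branch above wires Ansible to the bundled fact-caching plugin purely through environment variables. Below is a stand-alone sketch of an equivalent manual invocation; the install paths and port are assumptions, since the real values come from get_path_to() and settings.

# Hypothetical stand-alone illustration (not code from this commit): the kind of
# environment a scan job run hands to ansible-playbook so gathered facts flow
# through the bundled "tower" cache plugin. Paths and the port are placeholders.
import os
import subprocess

env = dict(os.environ)
env.update({
    'ANSIBLE_LIBRARY': '/opt/awx/plugins/library',                 # assumed install path
    'ANSIBLE_CACHE_PLUGINS': '/opt/awx/plugins/fact_caching',      # assumed install path
    'ANSIBLE_CACHE_PLUGIN': 'tower',
    'ANSIBLE_CACHE_PLUGIN_CONNECTION': 'tcp://127.0.0.1:6564',     # assumed FACT_CACHE_PORT
})
subprocess.check_call(['ansible-playbook', '-i', 'hosts', 'scan_facts.yml'], env=env)
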
@@ -678,11 +680,16 @@ class RunJob(BaseTask):
            args.extend(['-e', json.dumps(extra_vars)])

        # Add path to playbook (relative to project.local_path).
        args.append(job.playbook)
        if job.project is None and job.job_type == PERM_INVENTORY_SCAN:
            args.append("scan_facts.yml")
        else:
            args.append(job.playbook)

        return args

    def build_cwd(self, job, **kwargs):
        if job.project is None and job.job_type == PERM_INVENTORY_SCAN:
            return self.get_path_to('..', 'playbooks')
        cwd = job.project.get_project_path()
        if not cwd:
            root = settings.PROJECTS_ROOT
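
Taken together, build_args and build_cwd give project-less scan jobs a fallback: run the bundled scan_facts.yml from the Tower playbooks directory instead of a project checkout. A minimal sketch of that decision follows, with an assumed playbooks path and a hypothetical helper name.

# Minimal sketch (hypothetical helper, not code from this commit) of the playbook
# and working-directory fallback the two methods above implement for scan jobs.
TOWER_PLAYBOOK_DIR = '/opt/awx/playbooks'   # assumed location of the bundled playbooks

def resolve_playbook(job_type, project_path, playbook):
    """Return (cwd, playbook) the way build_cwd/build_args combine for a job."""
    if project_path is None and job_type == 'scan':
        # Project-less scan jobs fall back to the bundled fact-scan playbook.
        return TOWER_PLAYBOOK_DIR, 'scan_facts.yml'
    return project_path, playbook

print(resolve_playbook('scan', None, ''))                                 # ('/opt/awx/playbooks', 'scan_facts.yml')
print(resolve_playbook('run', '/var/lib/awx/projects/demo', 'site.yml'))  # project checkout as usual
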
@@ -45,6 +45,9 @@ class CacheModule(BaseCacheModule):

    def __init__(self, *args, **kwargs):

        # Basic in-memory caching for typical runs
        self._cache = {}

        # This is the local tower zmq connection
        self._tower_connection = C.CACHE_PLUGIN_CONNECTION
        self.date_key = time.mktime(datetime.datetime.utcnow().timetuple())

@@ -58,23 +61,26 @@ class CacheModule(BaseCacheModule):
            sys.exit(1)

    def get(self, key):
        return {} # Temporary until we have some tower retrieval endpoints
        return self._cache.get(key)

    def set(self, key, value):
        self._cache[key] = value

        # Emit fact data to tower for processing
        self.socket.send_json(dict(host=key, facts=value, date_key=self.date_key))
        self.socket.recv()

    def keys(self):
        return []
        return self._cache.keys()

    def contains(self, key):
        return False
        return key in self._cache

    def delete(self, key):
        pass
        del self._cache[key]

    def flush(self):
        pass
        self._cache = {}

    def copy(self):
        return dict()
        return self._cache.copy()
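
The set() path above is the interesting one: each host's gathered facts are pushed to Tower over ZeroMQ as a single JSON payload, then the plugin waits for an acknowledgement. A stand-alone sketch of that transport, with an assumed endpoint, is below (not code from this commit).

# Stand-alone sketch of the transport set() relies on: a REQ socket that sends one
# JSON payload per host to the Tower fact-cache receiver and blocks for an ack.
# The endpoint below is a placeholder for the configured FACT_CACHE_PORT.
import datetime
import time
import zmq

connection = 'tcp://127.0.0.1:6564'          # assumed fact cache endpoint

context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect(connection)

date_key = time.mktime(datetime.datetime.utcnow().timetuple())
facts = {'ansible_hostname': 'web01', 'ansible_memtotal_mb': 2048}

# Same payload shape the plugin's set() emits: host name, gathered facts, run key.
socket.send_json(dict(host='web01', facts=facts, date_key=date_key))
socket.recv()                                # block until the receiver acks
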