Mirror of https://github.com/ansible/awx.git
Merge pull request #459 from ryanpetrello/simplified-inventory-building
remove support for job-scoped auth tokens
Commit: 764356bf47
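Background on the title: while a job or ad hoc command was running, Job.task_auth_token and AdHocCommand.task_auth_token derived a throwaway credential from the job's primary key and creation timestamp, and TaskAuthentication accepted it on the inventory script and callback endpoints. The standalone sketch below (illustrative helper names, not code from this repository) shows roughly how that scheme fit together; everything it models is deleted by the hunks that follow, and the REST_API_URL / REST_API_TOKEN environment variables removed further down were how the token reached the inventory and callback scripts.

    import hashlib
    import hmac

    def derive_task_token(secret_key, job_pk, created_isoformat):
        # Same recipe as the removed Job/AdHocCommand.task_auth_token property:
        # HMAC-SHA1 over the job's creation timestamp, keyed with SECRET_KEY,
        # prefixed with the job's primary key.
        digest = hmac.new(secret_key, created_isoformat, digestmod=hashlib.sha1)
        return '%d-%s' % (job_pk, digest.hexdigest())

    def verify_task_token(secret_key, presented, get_running_job):
        # Same shape as the removed TaskAuthentication.authenticate(): split off
        # the pk, look up a *running* job, re-derive the token and compare.
        # get_running_job stands in for UnifiedJob.objects.get(pk=pk, status='running').
        pk, _ = presented.split('-', 1)
        job = get_running_job(int(pk))
        if job is None:
            return False
        return presented == derive_task_token(secret_key, job.pk, job.created.isoformat())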
@@ -17,7 +17,7 @@ from rest_framework import exceptions
 from rest_framework import HTTP_HEADER_ENCODING
 
 # AWX
-from awx.main.models import UnifiedJob, AuthToken
+from awx.main.models import AuthToken
 
 logger = logging.getLogger('awx.api.authentication')
 
@@ -137,29 +137,3 @@ class LoggedBasicAuthentication(authentication.BasicAuthentication):
         if not settings.AUTH_BASIC_ENABLED:
             return
         return super(LoggedBasicAuthentication, self).authenticate_header(request)
-
-
-class TaskAuthentication(authentication.BaseAuthentication):
-    '''
-    Custom authentication used for views accessed by the inventory and callback
-    scripts when running a task.
-    '''
-
-    model = None
-
-    def authenticate(self, request):
-        auth = authentication.get_authorization_header(request).split()
-        if len(auth) != 2 or auth[0].lower() != 'token' or '-' not in auth[1]:
-            return None
-        pk, key = auth[1].split('-', 1)
-        try:
-            unified_job = UnifiedJob.objects.get(pk=pk, status='running')
-        except UnifiedJob.DoesNotExist:
-            return None
-        token = unified_job.task_auth_token
-        if auth[1] != token:
-            raise exceptions.AuthenticationFailed(_('Invalid task token'))
-        return (None, token)
-
-    def authenticate_header(self, request):
-        return 'Token'
@@ -59,7 +59,7 @@ from social.backends.utils import load_backends
 from awx.main.tasks import send_notifications
 from awx.main.access import get_user_queryset
 from awx.main.ha import is_ha_environment
-from awx.api.authentication import TaskAuthentication, TokenGetAuthentication
+from awx.api.authentication import TokenGetAuthentication
 from awx.api.filters import V1CredentialFilterBackend
 from awx.api.generics import get_view_name
 from awx.api.generics import * # noqa
@@ -2371,80 +2371,23 @@ class InventoryScriptView(RetrieveAPIView):
 
     model = Inventory
     serializer_class = InventoryScriptSerializer
-    authentication_classes = [TaskAuthentication] + api_settings.DEFAULT_AUTHENTICATION_CLASSES
     permission_classes = (TaskPermission,)
     filter_backends = ()
 
     def retrieve(self, request, *args, **kwargs):
         obj = self.get_object()
         hostname = request.query_params.get('host', '')
-        hostvars = bool(request.query_params.get('hostvars', ''))
         show_all = bool(request.query_params.get('all', ''))
-        if show_all:
-            hosts_q = dict()
-        else:
-            hosts_q = dict(enabled=True)
         if hostname:
-            host = get_object_or_404(obj.hosts, name=hostname, **hosts_q)
-            data = host.variables_dict
-        else:
-            data = dict()
-            if obj.variables_dict:
-                all_group = data.setdefault('all', dict())
-                all_group['vars'] = obj.variables_dict
-            if obj.kind == 'smart':
-                if len(obj.hosts.all()) == 0:
-                    return Response({})
-                else:
-                    all_group = data.setdefault('all', dict())
-                    smart_hosts_qs = obj.hosts.all()
-                    smart_hosts = list(smart_hosts_qs.values_list('name', flat=True))
-                    all_group['hosts'] = smart_hosts
-            else:
-                # Add hosts without a group to the all group.
-                groupless_hosts_qs = obj.hosts.filter(groups__isnull=True, **hosts_q)
-                groupless_hosts = list(groupless_hosts_qs.values_list('name', flat=True))
-                if groupless_hosts:
-                    all_group = data.setdefault('all', dict())
-                    all_group['hosts'] = groupless_hosts
-
-                # Build in-memory mapping of groups and their hosts.
-                group_hosts_kw = dict(group__inventory_id=obj.id, host__inventory_id=obj.id)
-                if 'enabled' in hosts_q:
-                    group_hosts_kw['host__enabled'] = hosts_q['enabled']
-                group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
-                group_hosts_qs = group_hosts_qs.values_list('group_id', 'host_id', 'host__name')
-                group_hosts_map = {}
-                for group_id, host_id, host_name in group_hosts_qs:
-                    group_hostnames = group_hosts_map.setdefault(group_id, [])
-                    group_hostnames.append(host_name)
-
-                # Build in-memory mapping of groups and their children.
-                group_parents_qs = Group.parents.through.objects.filter(
-                    from_group__inventory_id=obj.id,
-                    to_group__inventory_id=obj.id,
-                )
-                group_parents_qs = group_parents_qs.values_list('from_group_id', 'from_group__name', 'to_group_id')
-                group_children_map = {}
-                for from_group_id, from_group_name, to_group_id in group_parents_qs:
-                    group_children = group_children_map.setdefault(to_group_id, [])
-                    group_children.append(from_group_name)
-
-                # Now use in-memory maps to build up group info.
-                for group in obj.groups.all():
-                    group_info = dict()
-                    group_info['hosts'] = group_hosts_map.get(group.id, [])
-                    group_info['children'] = group_children_map.get(group.id, [])
-                    group_info['vars'] = group.variables_dict
-                    data[group.name] = group_info
-
-            if hostvars:
-                data.setdefault('_meta', dict())
-                data['_meta'].setdefault('hostvars', dict())
-                for host in obj.hosts.filter(**hosts_q):
-                    data['_meta']['hostvars'][host.name] = host.variables_dict
-
-        return Response(data)
+            hosts_q = dict(name=hostname)
+            if not show_all:
+                hosts_q['enabled'] = True
+            host = get_object_or_404(obj.hosts, **hosts_q)
+            return Response(host.variables_dict)
+        return Response(obj.get_script_data(
+            hostvars=bool(request.query_params.get('hostvars', '')),
+            show_all=show_all
+        ))
 
 
 class InventoryTreeView(RetrieveAPIView):
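Illustrative note (not part of the diff): the endpoint keeps serving the same Ansible dynamic-inventory "script" format, it just delegates the heavy lifting to Inventory.get_script_data below. A hypothetical response for GET /api/v1/inventories/1/script/?hostvars=1 (group and host names invented; the shape follows the code above) looks like:

    {
        'all': {
            'vars': {'environment': 'dev'},        # inventory-level variables, when present
            'hosts': ['standalone-host'],          # hosts that belong to no group
        },
        'webservers': {                            # one key per group
            'hosts': ['web1.example.org'],
            'children': ['east-webservers'],
            'vars': {'http_port': 80},
        },
        '_meta': {                                 # only included when hostvars are requested
            'hostvars': {
                'web1.example.org': {'ansible_host': '10.0.0.5'},
                'standalone-host': {},
            },
        },
    }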
@@ -3,8 +3,6 @@
 
 # Python
 import datetime
-import hashlib
-import hmac
 import logging
 from urlparse import urljoin
 
@@ -156,13 +154,6 @@ class AdHocCommand(UnifiedJob, JobNotificationMixin):
     def get_ui_url(self):
         return urljoin(settings.TOWER_URL_BASE, "/#/ad_hoc_commands/{}".format(self.pk))
 
-    @property
-    def task_auth_token(self):
-        '''Return temporary auth token used for task requests via API.'''
-        if self.status == 'running':
-            h = hmac.new(settings.SECRET_KEY, self.created.isoformat(), digestmod=hashlib.sha1)
-            return '%d-%s' % (self.pk, h.hexdigest())
-
     @property
     def notification_templates(self):
         all_orgs = set()
@@ -209,6 +209,71 @@ class Inventory(CommonModelNameNotUnique, ResourceMixin):
             group_children.add(from_group_id)
         return group_children_map
 
+    def get_script_data(self, hostvars=False, show_all=False):
+        if show_all:
+            hosts_q = dict()
+        else:
+            hosts_q = dict(enabled=True)
+        data = dict()
+
+        if self.variables_dict:
+            all_group = data.setdefault('all', dict())
+            all_group['vars'] = self.variables_dict
+        if self.kind == 'smart':
+            if len(self.hosts.all()) == 0:
+                return {}
+            else:
+                all_group = data.setdefault('all', dict())
+                smart_hosts_qs = self.hosts.all()
+                smart_hosts = list(smart_hosts_qs.values_list('name', flat=True))
+                all_group['hosts'] = smart_hosts
+        else:
+            # Add hosts without a group to the all group.
+            groupless_hosts_qs = self.hosts.filter(groups__isnull=True, **hosts_q)
+            groupless_hosts = list(groupless_hosts_qs.values_list('name', flat=True))
+            if groupless_hosts:
+                all_group = data.setdefault('all', dict())
+                all_group['hosts'] = groupless_hosts
+
+            # Build in-memory mapping of groups and their hosts.
+            group_hosts_kw = dict(group__inventory_id=self.id, host__inventory_id=self.id)
+            if 'enabled' in hosts_q:
+                group_hosts_kw['host__enabled'] = hosts_q['enabled']
+            group_hosts_qs = Group.hosts.through.objects.filter(**group_hosts_kw)
+            group_hosts_qs = group_hosts_qs.values_list('group_id', 'host_id', 'host__name')
+            group_hosts_map = {}
+            for group_id, host_id, host_name in group_hosts_qs:
+                group_hostnames = group_hosts_map.setdefault(group_id, [])
+                group_hostnames.append(host_name)
+
+            # Build in-memory mapping of groups and their children.
+            group_parents_qs = Group.parents.through.objects.filter(
+                from_group__inventory_id=self.id,
+                to_group__inventory_id=self.id,
+            )
+            group_parents_qs = group_parents_qs.values_list('from_group_id', 'from_group__name',
+                                                            'to_group_id')
+            group_children_map = {}
+            for from_group_id, from_group_name, to_group_id in group_parents_qs:
+                group_children = group_children_map.setdefault(to_group_id, [])
+                group_children.append(from_group_name)
+
+            # Now use in-memory maps to build up group info.
+            for group in self.groups.all():
+                group_info = dict()
+                group_info['hosts'] = group_hosts_map.get(group.id, [])
+                group_info['children'] = group_children_map.get(group.id, [])
+                group_info['vars'] = group.variables_dict
+                data[group.name] = group_info
+
+        if hostvars:
+            data.setdefault('_meta', dict())
+            data['_meta'].setdefault('hostvars', dict())
+            for host in self.hosts.filter(**hosts_q):
+                data['_meta']['hostvars'][host.name] = host.variables_dict
+
+        return data
+
     def update_host_computed_fields(self):
         '''
         Update computed fields for all hosts in this inventory.
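A minimal usage sketch of the new method (assumes a Django shell against an existing inventory row; the keyword arguments match how InventoryScriptView and BaseTask.build_inventory call it):

    import json
    from awx.main.models import Inventory

    inventory = Inventory.objects.first()           # any existing inventory
    data = inventory.get_script_data(hostvars=True, show_all=False)
    print json.dumps(data, indent=2)                # same structure the /script/ endpoint returns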
@@ -3,8 +3,6 @@
 
 # Python
 import datetime
-import hashlib
-import hmac
 import logging
 import time
 import json
@@ -532,13 +530,6 @@ class Job(UnifiedJob, JobOptions, SurveyJobMixin, JobNotificationMixin, TaskMana
     def get_ui_url(self):
         return urljoin(settings.TOWER_URL_BASE, "/#/jobs/{}".format(self.pk))
 
-    @property
-    def task_auth_token(self):
-        '''Return temporary auth token used for task requests via API.'''
-        if self.status == 'running':
-            h = hmac.new(settings.SECRET_KEY, self.created.isoformat(), digestmod=hashlib.sha1)
-            return '%d-%s' % (self.pk, h.hexdigest())
-
     @property
     def ask_diff_mode_on_launch(self):
         if self.job_template is not None:
@@ -7,7 +7,6 @@ from collections import OrderedDict
 import ConfigParser
 import cStringIO
 import functools
-import imp
 import json
 import logging
 import os
@@ -661,7 +660,7 @@ class BaseTask(LogErrorsTask):
         urlpass_re = re.compile(r'^.*?://[^:]+:(.*?)@.*?$')
         safe_env = dict(env)
         for k,v in safe_env.items():
-            if k in ('REST_API_URL', 'AWS_ACCESS_KEY_ID'):
+            if k == 'AWS_ACCESS_KEY_ID':
                 continue
             elif k.startswith('ANSIBLE_') and not k.startswith('ANSIBLE_NET'):
                 continue
@@ -678,31 +677,13 @@ class BaseTask(LogErrorsTask):
         return False
 
     def build_inventory(self, instance, **kwargs):
-        plugin = self.get_path_to('..', 'plugins', 'inventory', 'awxrest.py')
-        if kwargs.get('isolated') is True:
-            # For isolated jobs, we have to interact w/ the REST API from the
-            # controlling node and ship the static JSON inventory to the
-            # isolated host (because the isolated host itself can't reach the
-            # REST API to fetch the inventory).
-            path = os.path.join(kwargs['private_data_dir'], 'inventory')
-            if os.path.exists(path):
-                return path
-            awxrest = imp.load_source('awxrest', plugin)
+        path = os.path.join(kwargs['private_data_dir'], 'inventory')
+        if not os.path.exists(path):
             with open(path, 'w') as f:
-                buff = cStringIO.StringIO()
-                awxrest.InventoryScript(**{
-                    'base_url': settings.INTERNAL_API_URL,
-                    'authtoken': instance.task_auth_token or '',
-                    'inventory_id': str(instance.inventory.pk),
-                    'list': True,
-                    'hostvars': True,
-                }).run(buff)
-                json_data = buff.getvalue().strip()
-                f.write("#! /usr/bin/env python\nprint '''%s'''\n" % json_data)
+                json_data = json.dumps(instance.inventory.get_script_data(hostvars=True))
+                f.write('#! /usr/bin/env python\n# -*- coding: utf-8 -*-\nprint """%s"""\n' % json_data)
             os.chmod(path, stat.S_IRUSR | stat.S_IXUSR)
-            return path
-        else:
-            return plugin
+        return path
 
     def build_args(self, instance, **kwargs):
         raise NotImplementedError
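Illustrative note (not part of the diff): every job now gets the inventory rendered in-process on the controlling node instead of loading awxrest.py and calling back into the REST API. The file written to <private_data_dir>/inventory is a tiny executable script that just echoes pre-rendered JSON; a hypothetical instance (payload shortened) would look like:

    #! /usr/bin/env python
    # -*- coding: utf-8 -*-
    print """{"all": {"hosts": ["web1.example.org"]}, "_meta": {"hostvars": {"web1.example.org": {}}}}"""

Ansible treats an executable inventory source as a dynamic inventory script and parses its stdout as JSON, so the isolated host never needs network access back to the API; that is also why REST_API_URL and REST_API_TOKEN disappear from the job environment in the next two hunks.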
@@ -1020,8 +1001,6 @@ class RunJob(BaseTask):
         if not kwargs.get('isolated'):
             env['ANSIBLE_CALLBACK_PLUGINS'] = plugin_path
             env['ANSIBLE_STDOUT_CALLBACK'] = 'awx_display'
-            env['REST_API_URL'] = settings.INTERNAL_API_URL
-            env['REST_API_TOKEN'] = job.task_auth_token or ''
             env['TOWER_HOST'] = settings.TOWER_URL_BASE
             env['AWX_HOST'] = settings.TOWER_URL_BASE
             env['CALLBACK_QUEUE'] = settings.CALLBACK_QUEUE
@@ -2070,8 +2049,6 @@ class RunAdHocCommand(BaseTask):
         env['ANSIBLE_CALLBACK_PLUGINS'] = plugin_dir
         env['ANSIBLE_LOAD_CALLBACK_PLUGINS'] = '1'
         env['ANSIBLE_STDOUT_CALLBACK'] = 'minimal'  # Hardcoded by Ansible for ad-hoc commands (either minimal or oneline).
-        env['REST_API_URL'] = settings.INTERNAL_API_URL
-        env['REST_API_TOKEN'] = ad_hoc_command.task_auth_token or ''
         env['CALLBACK_QUEUE'] = settings.CALLBACK_QUEUE
         env['CALLBACK_CONNECTION'] = settings.BROKER_URL
         env['ANSIBLE_SFTP_BATCH_MODE'] = 'False'
@@ -1,5 +1,7 @@
-import pytest
+import json
+import tempfile
 
+import pytest
 
 from awx.main.tasks import RunJob
 from awx.main.models import (
@@ -16,7 +18,8 @@ def job(mocker):
         'extra_vars_dict': {"secret_key": "my_password"},
         'pk': 1, 'job_template.pk': 1, 'job_template.name': '',
         'created_by.pk': 1, 'created_by.username': 'admin',
-        'launch_type': 'manual'})
+        'launch_type': 'manual',
+        'inventory.get_script_data.return_value': {}})
     ret.project = mocker.MagicMock(scm_revision='asdf1234')
     return ret
 
@@ -62,7 +65,7 @@ def test_survey_passwords_not_in_extra_vars():
 
 def test_job_safe_args_redacted_passwords(job):
     """Verify that safe_args hides passwords in the job extra_vars"""
-    kwargs = {'ansible_version': '2.1'}
+    kwargs = {'ansible_version': '2.1', 'private_data_dir': tempfile.mkdtemp()}
     run_job = RunJob()
     safe_args = run_job.build_safe_args(job, **kwargs)
     ev_index = safe_args.index('-e') + 1
@@ -71,7 +74,7 @@ def test_job_safe_args_redacted_passwords(job):
 
 
 def test_job_args_unredacted_passwords(job):
-    kwargs = {'ansible_version': '2.1'}
+    kwargs = {'ansible_version': '2.1', 'private_data_dir': tempfile.mkdtemp()}
     run_job = RunJob()
     args = run_job.build_args(job, **kwargs)
     ev_index = args.index('-e') + 1
@@ -179,6 +179,10 @@ class TestJobExecution:
 
     TASK_CLS = tasks.RunJob
     EXAMPLE_PRIVATE_KEY = '-----BEGIN PRIVATE KEY-----\nxyz==\n-----END PRIVATE KEY-----'
+    INVENTORY_DATA = {
+        "all": {"hosts": ["localhost"]},
+        "_meta": {"localhost": {"ansible_connection": "local"}}
+    }
 
     def setup_method(self, method):
         if not os.path.exists(settings.PROJECTS_ROOT):
@@ -196,7 +200,11 @@ class TestJobExecution:
             mock.patch.object(Project, 'get_project_path', lambda *a, **kw: self.project_path),
             # don't emit websocket statuses; they use the DB and complicate testing
             mock.patch.object(UnifiedJob, 'websocket_emit_status', mock.Mock()),
-            mock.patch.object(Job, 'inventory', mock.Mock(pk=1, spec_set=['pk'])),
+            mock.patch.object(Job, 'inventory', mock.Mock(
+                pk=1,
+                get_script_data=lambda *args, **kw: self.INVENTORY_DATA,
+                spec_set=['pk', 'get_script_data']
+            )),
             mock.patch('awx.main.expect.run.run_pexpect', self.run_pexpect)
         ]
         for p in self.patches:
@@ -309,7 +317,6 @@ class TestIsolatedExecution(TestJobExecution):
     REMOTE_HOST = 'some-isolated-host'
 
     def test_with_ssh_credentials(self):
-        mock_get = mock.Mock()
        ssh = CredentialType.defaults['ssh']()
         credential = Credential(
             pk=1,
@@ -325,7 +332,6 @@ class TestIsolatedExecution(TestJobExecution):
 
         private_data = tempfile.mkdtemp(prefix='awx_')
         self.task.build_private_data_dir = mock.Mock(return_value=private_data)
-        inventory = json.dumps({"all": {"hosts": ["localhost"]}})
 
         def _mock_job_artifacts(*args, **kw):
             artifacts = os.path.join(private_data, 'artifacts')
@@ -341,16 +347,7 @@ class TestIsolatedExecution(TestJobExecution):
                     f.write(data)
             return ('successful', 0)
         self.run_pexpect.side_effect = _mock_job_artifacts
-
-        with mock.patch('time.sleep'):
-            with mock.patch('requests.get') as mock_get:
-                mock_get.return_value = mock.Mock(content=inventory)
-                self.task.run(self.pk, self.REMOTE_HOST)
-                assert mock_get.call_count == 1
-                assert mock.call(
-                    'http://127.0.0.1:8013/api/v1/inventories/1/script/?hostvars=1',
-                    auth=mock.ANY
-                ) in mock_get.call_args_list
+        self.task.run(self.pk, self.REMOTE_HOST)
 
         playbook_run = self.run_pexpect.call_args_list[0][0]
         assert ' '.join(playbook_run[0]).startswith(' '.join([
@@ -1,183 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2015 Ansible, Inc.
-# This file is a utility script that is not part of the AWX or Ansible
-# packages. It does not import any code from either package, nor does its
-# license apply to Ansible or AWX.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are met:
-#
-# Redistributions of source code must retain the above copyright notice, this
-# list of conditions and the following disclaimer.
-#
-# Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-#
-# Neither the name of the <ORGANIZATION> nor the names of its contributors
-# may be used to endorse or promote products derived from this software
-# without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
-# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-# POSSIBILITY OF SUCH DAMAGE.
-
-# Python
-import json
-import optparse
-import os
-import sys
-import traceback
-import urllib
-import urlparse
-
-# Requests
-try:
-    import requests
-except ImportError:
-    # If running from an AWX installation, use the local version of requests if
-    # if cannot be found globally.
-    local_site_packages = os.path.join(os.path.dirname(__file__), '..', '..',
-                                       'lib', 'site-packages')
-    if os.path.exists(local_site_packages):
-        sys.path.insert(0, local_site_packages)
-    import requests
-
-class TokenAuth(requests.auth.AuthBase):
-    def __init__(self, token):
-        self.token = token
-
-    def __call__(self, request):
-        request.headers['Authorization'] = 'Token %s' % self.token
-        return request
-
-class InventoryScript(object):
-
-    def __init__(self, **options):
-        self.options = options
-
-    def get_data(self, output):
-        parts = urlparse.urlsplit(self.base_url)
-        if parts.username and parts.password:
-            auth = (parts.username, parts.password)
-        elif self.auth_token:
-            auth = TokenAuth(self.auth_token)
-        else:
-            auth = None
-        port = parts.port or (443 if parts.scheme == 'https' else 80)
-        url = urlparse.urlunsplit([parts.scheme,
-                                   '%s:%d' % (parts.hostname, port),
-                                   parts.path, parts.query, parts.fragment])
-        url_path = '/api/v1/inventories/%d/script/' % self.inventory_id
-        q = {}
-        if self.show_all:
-            q['all'] = 1
-        if self.hostname:
-            q['host'] = self.hostname
-        elif self.hostvars:
-            q['hostvars'] = 1
-        url_path += '?%s' % urllib.urlencode(q)
-        url = urlparse.urljoin(url, url_path)
-        response = requests.get(url, auth=auth)
-        response.raise_for_status()
-        output.write(json.dumps(json.loads(response.content),
-                                indent=self.indent) + '\n')
-
-    def run(self, output=sys.stdout):
-        try:
-            self.base_url = self.options.get('base_url', '') or \
-                os.getenv('REST_API_URL', '')
-            if not self.base_url:
-                raise ValueError('No REST API URL specified')
-            self.auth_token = self.options.get('authtoken', '') or \
-                os.getenv('REST_API_TOKEN', '')
-            parts = urlparse.urlsplit(self.base_url)
-            if not (parts.username and parts.password) and not self.auth_token:
-                raise ValueError('No username/password specified in REST API '
-                                 'URL, and no REST API token provided')
-            try:
-                # Command line argument takes precedence over environment
-                # variable.
-                self.inventory_id = int(self.options.get('inventory_id', 0) or
-                                        os.getenv('INVENTORY_ID', 0))
-            except ValueError:
-                raise ValueError('Inventory ID must be an integer')
-            if not self.inventory_id:
-                raise ValueError('No inventory ID specified')
-            self.hostname = self.options.get('hostname', '')
-            self.list_ = self.options.get('list', False)
-            self.hostvars = bool(self.options.get('hostvars', False) or
-                                 os.getenv('INVENTORY_HOSTVARS', ''))
-            self.show_all = bool(self.options.get('show_all', False) or
-                                 os.getenv('INVENTORY_ALL', ''))
-            self.indent = self.options.get('indent', None)
-            if self.list_ and self.hostname:
-                raise RuntimeError('Only --list or --host may be specified')
-            elif self.list_ or self.hostname:
-                self.get_data(output)
-            else:
-                raise RuntimeError('Either --list or --host must be specified')
-        except Exception, e:
-            output.write('%s\n' % json.dumps(dict(failed=True)))
-            if self.options.get('traceback', False):
-                sys.stderr.write(traceback.format_exc())
-            else:
-                sys.stderr.write('%s\n' % str(e))
-            if hasattr(e, 'response'):
-                if hasattr(e.response, 'content'):
-                    sys.stderr.write('%s\n' % e.response.content)
-                else:
-                    sys.stderr.write('%s\n' % e.response)
-            raise
-
-def main():
-    parser = optparse.OptionParser()
-    parser.add_option('-v', '--verbosity', action='store', dest='verbosity',
-                      default='1', type='choice', choices=['0', '1', '2', '3'],
-                      help='Verbosity level; 0=minimal output, 1=normal output'
-                           ', 2=verbose output, 3=very verbose output')
-    parser.add_option('--traceback', action='store_true',
-                      help='Raise on exception on error')
-    parser.add_option('-u', '--url', dest='base_url', default='',
-                      help='Base URL to access REST API, including username '
-                           'and password for authentication (can also be specified'
-                           ' using REST_API_URL environment variable)')
-    parser.add_option('--authtoken', dest='authtoken', default='',
-                      help='Authentication token used to access REST API (can '
-                           'also be specified using REST_API_TOKEN environment '
-                           'variable)')
-    parser.add_option('-i', '--inventory', dest='inventory_id', type='int',
-                      default=0, help='Inventory ID (can also be specified '
-                      'using INVENTORY_ID environment variable)')
-    parser.add_option('--list', action='store_true', dest='list',
-                      default=False, help='Return JSON hash of host groups.')
-    parser.add_option('--hostvars', action='store_true', dest='hostvars',
-                      default=False, help='Return hostvars inline with --list,'
-                      ' under ["_meta"]["hostvars"]. Can also be specified '
-                      'using INVENTORY_HOSTVARS environment variable.')
-    parser.add_option('--all', action='store_true', dest='show_all',
-                      default=False, help='Return all hosts, including those '
-                      'marked as offline/disabled. Can also be specified '
-                      'using INVENTORY_ALL environment variable.')
-    parser.add_option('--host', dest='hostname', default='',
-                      help='Return JSON hash of host vars.')
-    parser.add_option('--indent', dest='indent', type='int', default=None,
-                      help='Indentation level for pretty printing output')
-    options, args = parser.parse_args()
-    try:
-        InventoryScript(**vars(options)).run()
-    except Exception:
-        sys.exit(1)
-
-
-if __name__ == '__main__':
-    main()