Merge pull request #1411 from wwitzel3/more-rht-integration

RedHat Inventory Integrations
Akita Noek 2016-04-07 12:56:12 -05:00
commit 2d31021296
9 changed files with 517 additions and 18 deletions

View File

@@ -1,5 +1,5 @@
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
-CLOUD_PROVIDERS = ('azure', 'ec2', 'gce', 'rax', 'vmware', 'openstack')
+CLOUD_PROVIDERS = ('azure', 'ec2', 'gce', 'rax', 'vmware', 'openstack', 'foreman', 'cloudforms')
SCHEDULEABLE_PROVIDERS = CLOUD_PROVIDERS + ('custom',)

View File

@@ -61,7 +61,7 @@ PERMISSION_TYPE_CHOICES = [
(PERM_JOBTEMPLATE_CREATE, _('Create a Job Template')),
]
-CLOUD_INVENTORY_SOURCES = ['ec2', 'rax', 'vmware', 'gce', 'azure', 'openstack', 'custom']
+CLOUD_INVENTORY_SOURCES = ['ec2', 'rax', 'vmware', 'gce', 'azure', 'openstack', 'custom', 'foreman', 'cloudforms']
VERBOSITY_CHOICES = [
(0, '0 (Normal)'),

View File

@@ -32,21 +32,23 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
'''
KIND_CHOICES = [
('ssh', _('Machine')),
('scm', _('Source Control')),
('aws', _('Amazon Web Services')),
('rax', _('Rackspace')),
('vmware', _('VMware vCenter')),
+('foreman', _('Satellite 6')),
+('cloudforms', _('CloudForms')),
('gce', _('Google Compute Engine')),
('azure', _('Microsoft Azure')),
('openstack', _('OpenStack')),
]
BECOME_METHOD_CHOICES = [
('', _('None')),
('sudo', _('Sudo')),
('su', _('Su')),
('pbrun', _('Pbrun')),
('pfexec', _('Pfexec')),
#('runas', _('Runas')),
]

View File

@@ -725,13 +725,15 @@ class InventorySourceOptions(BaseModel):
'''
SOURCE_CHOICES = [
('', _('Manual')),
('file', _('Local File, Directory or Script')),
('rax', _('Rackspace Cloud Servers')),
('ec2', _('Amazon EC2')),
('gce', _('Google Compute Engine')),
('azure', _('Microsoft Azure')),
('vmware', _('VMware vCenter')),
+('foreman', _('Satellite 6')),
+('cloudforms', _('CloudForms')),
('openstack', _('OpenStack')),
('custom', _('Custom Script')),
]

View File

@@ -1223,6 +1223,38 @@ class RunInventoryUpdate(BaseTask):
for k,v in vmware_opts.items():
cp.set(section, k, unicode(v))
elif inventory_update.source == 'foreman':
section = 'foreman'
cp.add_section(section)
foreman_opts = dict(inventory_update.source_vars_dict.items())
foreman_opts.setdefault('ssl_verify', 'False')
for k, v in foreman_opts.items():
cp.set(section, k, unicode(v))
credential = inventory_update.credential
if credential:
cp.set(section, 'url', credential.host)
cp.set(section, 'user', credential.username)
cp.set(section, 'password', decrypt_field(credential, 'password'))
section = 'ansible'
cp.add_section(section)
cp.set(section, 'group_patterns', '["{app}-{tier}-{color}", "{app}-{color}", "{app}", "{tier}"]')
section = 'cache'
cp.add_section(section)
cp.set(section, 'path', '/tmp')
cp.set(section, 'max_age', '0')
elif inventory_update.source == 'cloudforms':
section = 'cloudforms'
cp.add_section(section)
credential = inventory_update.credential
if credential:
cp.set(section, 'hostname', credential.host)
cp.set(section, 'username', credential.username)
cp.set(section, 'password', decrypt_field(credential, 'password'))
# Return INI content.
if cp.sections():
f = cStringIO.StringIO()
@@ -1305,6 +1337,10 @@ class RunInventoryUpdate(BaseTask):
env['GCE_ZONE'] = inventory_update.source_regions
elif inventory_update.source == 'openstack':
env['OS_CLIENT_CONFIG_FILE'] = cloud_credential
elif inventory_update.source == 'foreman':
env['FOREMAN_INI_PATH'] = cloud_credential
elif inventory_update.source == 'cloudforms':
env['CLOUDFORMS_INI_PATH'] = cloud_credential
elif inventory_update.source == 'file':
# FIXME: Parse source_env to dict, update env.
pass
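
For context, the two hunks above fit together like this: RunInventoryUpdate renders an INI file from the inventory source's credential and source_vars, writes it to a temporary path, and hands that path to the inventory script through FOREMAN_INI_PATH (or CLOUDFORMS_INI_PATH). The sketch below is illustrative only and is not part of the commit; the Satellite URL, username, and password are made up, and it assumes a copy of foreman.py in the working directory.

# Illustrative sketch (not part of the commit): wiring a rendered foreman.ini
# to the inventory script via FOREMAN_INI_PATH, the way the task above does.
import os
import subprocess
import tempfile

ini_text = (
    "[foreman]\n"
    "url = https://satellite.example.com\n"   # credential.host (hypothetical)
    "user = admin\n"                          # credential.username (hypothetical)
    "password = secret\n"                     # decrypted credential password (hypothetical)
    "ssl_verify = False\n"
    "\n"
    "[ansible]\n"
    "group_patterns = [\"{app}-{tier}-{color}\", \"{app}-{color}\", \"{app}\", \"{tier}\"]\n"
    "\n"
    "[cache]\n"
    "path = /tmp\n"
    "max_age = 0\n"
)

fd, path = tempfile.mkstemp(suffix='.ini')
os.write(fd, ini_text.encode('utf-8'))
os.close(fd)

env = dict(os.environ, FOREMAN_INI_PATH=path)
# Assumes foreman.py (added later in this commit) is in the working directory.
subprocess.call(['python', 'foreman.py', '--list'], env=env)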

View File

@@ -0,0 +1,16 @@
# Ansible CloudForms external inventory script settings
#
[cloudforms]
# The version of CloudForms (this is not used yet)
version = 3.1
# The hostname of the CloudForms server
hostname = #insert your hostname here
# Username for CloudForms
username = #insert your cloudforms user here
# Password for CloudForms user
password = #password

View File

@@ -0,0 +1,126 @@
#!/usr/bin/python
'''
CloudForms external inventory script
==================================================
Generates inventory that Ansible can understand by making an API request to CloudForms.
Modeled after https://raw.githubusercontent.com/ansible/ansible/stable-1.9/plugins/inventory/ec2.py
jlabocki <at> redhat.com or @jameslabocki on twitter
'''
import os
import argparse
import ConfigParser
import requests
import json
# This disables warnings and is not a good idea, but hey, this is a demo
# http://urllib3.readthedocs.org/en/latest/security.html#disabling-warnings
requests.packages.urllib3.disable_warnings()
class CloudFormsInventory(object):
def _empty_inventory(self):
return {"_meta" : {"hostvars" : {}}}
def __init__(self):
''' Main execution path '''
# Inventory grouped by instance IDs, tags, security groups, regions,
# and availability zones
self.inventory = self._empty_inventory()
# Index of hostname (address) to instance ID
self.index = {}
# Read CLI arguments
self.read_settings()
self.parse_cli_args()
# Get Hosts
if self.args.list:
self.get_hosts()
# This doesn't exist yet and needs to be added
if self.args.host:
data2 = { }
print json.dumps(data2, indent=2)
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on CloudForms')
parser.add_argument('--list', action='store_true', default=False,
help='List instances (default: False)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
self.args = parser.parse_args()
def read_settings(self):
''' Reads the settings from the cloudforms.ini file '''
config = ConfigParser.SafeConfigParser()
config_paths = [
os.path.join(os.path.dirname(os.path.realpath(__file__)), 'cloudforms.ini'),
"/opt/rh/cloudforms.ini",
]
env_value = os.environ.get('CLOUDFORMS_INI_PATH')
if env_value is not None:
config_paths.append(os.path.expanduser(os.path.expandvars(env_value)))
config.read(config_paths)
# Version
if config.has_option('cloudforms', 'version'):
self.cloudforms_version = config.get('cloudforms', 'version')
else:
self.cloudforms_version = "none"
# CloudForms Endpoint
if config.has_option('cloudforms', 'hostname'):
self.cloudforms_hostname = config.get('cloudforms', 'hostname')
else:
self.cloudforms_hostname = None
# CloudForms Username
if config.has_option('cloudforms', 'username'):
self.cloudforms_username = config.get('cloudforms', 'username')
else:
self.cloudforms_username = "none"
# CloudForms Password
if config.has_option('cloudforms', 'password'):
self.cloudforms_password = config.get('cloudforms', 'password')
else:
self.cloudforms_password = "none"
def get_hosts(self):
''' Gets hosts from CloudForms '''
r = requests.get("https://" + self.cloudforms_hostname + "/api/vms?expand=resources&attributes=name,power_state", auth=(self.cloudforms_username,self.cloudforms_password), verify=False)
obj = r.json()
#Remove objects that don't matter
del obj["count"]
del obj["subcount"]
del obj["name"]
#Create a new list to grab VMs with power_state on to add to a new list
#I'm sure there is a cleaner way to do this
newlist = []
getnext = False
for x in obj.items():
for y in x[1]:
for z in y.items():
if getnext == True:
newlist.append(z[1])
getnext = False
if ( z[0] == "power_state" and z[1] == "on" ):
getnext = True
newdict = {'hosts': newlist}
newdict2 = {'Dynamic_CloudForms': newdict}
print json.dumps(newdict2, indent=2)
# Run the script
CloudFormsInventory()
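
For orientation, this is roughly the --list payload the script above emits: a single Dynamic_CloudForms group holding the names of VMs whose power_state is "on", and nothing else (the --host branch still prints an empty dict, as the comment in __init__ notes). The sketch below is illustrative; the host names are invented.

# Illustrative only: approximate shape of `cloudforms.py --list` output.
# Host names are invented; the single group and absence of hostvars match
# what get_hosts() above assembles.
import json

example_list_output = {
    "Dynamic_CloudForms": {
        "hosts": ["cfme-vm-01", "cfme-vm-02"],   # VMs reported with power_state == "on"
    }
}
print(json.dumps(example_list_output, indent=2))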

View File

@@ -0,0 +1,15 @@
[foreman]
url = http://localhost:3000/
user = foreman
password = secret
ssl_verify = True
[ansible]
group_patterns = ["{app}-{tier}-{color}",
"{app}-{color}",
"{app}",
"{tier}"]
[cache]
path = .
max_age = 60
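
The group_patterns entries above are plain str.format templates. A small illustrative sketch (parameter values invented) of how foreman.py, further below, turns them into Ansible group names for one host:

# Illustrative only: how foreman.py expands group_patterns for a single host.
# It formats each pattern with the host's resolved parameters and skips any
# pattern that references a parameter the host does not define.
group_patterns = ["{app}-{tier}-{color}", "{app}-{color}", "{app}", "{tier}"]
params = {"app": "web", "tier": "prod", "color": "blue"}   # hypothetical host parameters

groups = []
for pattern in group_patterns:
    try:
        groups.append(pattern.format(**params))
    except KeyError:
        pass   # host is not part of this group

print(groups)   # ['web-prod-blue', 'web-blue', 'web', 'prod']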

View File

@@ -0,0 +1,302 @@
#!/usr/bin/python
# vim: set fileencoding=utf-8 :
#
# Copyright (C) 2016 Guido Günther <agx@sigxcpu.org>
#
# This script is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with it. If not, see <http://www.gnu.org/licenses/>.
#
# This is somewhat based on cobbler inventory
import argparse
import ConfigParser
import copy
import os
import re
from time import time
import requests
from requests.auth import HTTPBasicAuth
try:
import json
except ImportError:
import simplejson as json
class ForemanInventory(object):
def __init__(self):
""" Main execution path """
self.inventory = dict() # A list of groups and the hosts in that group
self.cache = dict() # Details about hosts in the inventory
self.params = dict() # Params of each host
self.hostgroups = dict() # host groups
# Read settings and parse CLI arguments
self.read_settings()
self.parse_cli_args()
# Cache
if self.args.refresh_cache:
self.update_cache()
elif not self.is_cache_valid():
self.update_cache()
else:
self.load_inventory_from_cache()
self.load_params_from_cache()
self.load_cache_from_cache()
data_to_print = ""
# Data to print
if self.args.host:
data_to_print += self.get_host_info()
else:
self.inventory['_meta'] = {'hostvars': {}}
for hostname in self.cache:
self.inventory['_meta']['hostvars'][hostname] = {
'foreman': self.cache[hostname],
'foreman_params': self.params[hostname],
}
data_to_print += self.json_format_dict(self.inventory, True)
print(data_to_print)
def is_cache_valid(self):
""" Determines if the cache files have expired, or if it is still valid """
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if (os.path.isfile(self.cache_path_inventory) and
os.path.isfile(self.cache_path_params)):
return True
return False
def read_settings(self):
""" Reads the settings from the foreman.ini file """
config = ConfigParser.SafeConfigParser()
config_paths = [
"/etc/ansible/foreman.ini",
os.path.dirname(os.path.realpath(__file__)) + '/foreman.ini',
]
env_value = os.environ.get('FOREMAN_INI_PATH')
if env_value is not None:
config_paths.append(os.path.expanduser(os.path.expandvars(env_value)))
config.read(config_paths)
# Foreman API related
self.foreman_url = config.get('foreman', 'url')
self.foreman_user = config.get('foreman', 'user')
self.foreman_pw = config.get('foreman', 'password')
self.foreman_ssl_verify = config.getboolean('foreman', 'ssl_verify')
# Ansible related
try:
group_patterns = config.get('ansible', 'group_patterns')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
group_patterns = "[]"
self.group_patterns = eval(group_patterns)
# Cache related
try:
cache_path = os.path.expanduser(config.get('cache', 'path'))
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
cache_path = '.'
(script, ext) = os.path.splitext(os.path.basename(__file__))
self.cache_path_cache = cache_path + "/%s.cache" % script
self.cache_path_inventory = cache_path + "/%s.index" % script
self.cache_path_params = cache_path + "/%s.params" % script
self.cache_max_age = config.getint('cache', 'max_age')
def parse_cli_args(self):
""" Command line argument processing """
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on foreman')
parser.add_argument('--list', action='store_true', default=True, help='List instances (default: True)')
parser.add_argument('--host', action='store', help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to foreman (default: False - use cache files)')
self.args = parser.parse_args()
def _get_json(self, url):
page = 1
results = []
while True:
ret = requests.get(url,
auth=HTTPBasicAuth(self.foreman_user, self.foreman_pw),
verify=self.foreman_ssl_verify,
params={'page': page, 'per_page': 250})
ret.raise_for_status()
json = ret.json()
if not json.has_key('results'):
return json
results = results + json['results']
if len(results) >= json['total']:
break
page += 1
return results
def _get_hosts(self):
return self._get_json("%s/api/v2/hosts" % self.foreman_url)
def _get_hostgroup_by_id(self, hid):
if hid not in self.hostgroups:
url = "%s/api/v2/hostgroups/%s" % (self.foreman_url, hid)
self.hostgroups[hid] = self._get_json(url)
return self.hostgroups[hid]
def _get_params_by_id(self, hid):
url = "%s/api/v2/hosts/%s/parameters" % (self.foreman_url, hid)
return self._get_json(url)
def _resolve_params(self, host):
"""
Resolve all host group params of the host using the top level
hostgroup and the ancestry.
"""
hostgroup_id = host['hostgroup_id']
paramgroups = []
params = {}
if hostgroup_id:
hostgroup = self._get_hostgroup_by_id(hostgroup_id)
ancestry_path = hostgroup.get('ancestry', '')
ancestry = ancestry_path.split('/') if ancestry_path is not None else []
# Append top level hostgroup last to overwrite lower levels
# values
ancestry.append(hostgroup_id)
paramgroups = [self._get_hostgroup_by_id(hostgroup_id)['parameters']
for hostgroup_id in ancestry]
paramgroups += [self._get_params_by_id(host['id'])]
for paramgroup in paramgroups:
for param in paramgroup:
name = param['name']
params[name] = param['value']
return params
def update_cache(self):
"""Make calls to foreman and save the output in a cache"""
self.groups = dict()
self.hosts = dict()
for host in self._get_hosts():
dns_name = host['name']
# Create ansible groups for hostgroup, location and organization
for group in ['hostgroup', 'location', 'organization']:
val = host.get('%s_name' % group)
if val:
safe_key = self.to_safe('foreman_%s_%s' % (group, val.lower()))
self.push(self.inventory, safe_key, dns_name)
params = self._resolve_params(host)
# Ansible groups by parameters in host groups and Foreman host
# attributes.
groupby = copy.copy(params)
for k, v in host.items():
if isinstance(v, basestring):
groupby[k] = self.to_safe(v)
elif isinstance(v, int):
groupby[k] = v
# The name of the ansible groups is given by group_patterns:
for pattern in self.group_patterns:
try:
key = pattern.format(**groupby)
self.push(self.inventory, key, dns_name)
except KeyError:
pass # Host not part of this group
self.cache[dns_name] = host
self.params[dns_name] = params
self.push(self.inventory, 'all', dns_name)
self.write_to_cache(self.cache, self.cache_path_cache)
self.write_to_cache(self.inventory, self.cache_path_inventory)
self.write_to_cache(self.params, self.cache_path_params)
def get_host_info(self):
""" Get variables about a specific host """
if not self.cache or len(self.cache) == 0:
# Need to load index from cache
self.load_cache_from_cache()
if self.args.host not in self.cache:
# try updating the cache
self.update_cache()
if self.args.host not in self.cache:
# host might not exist anymore
return self.json_format_dict({}, True)
return self.json_format_dict(self.cache[self.args.host], True)
def push(self, d, k, v):
if k in d:
d[k].append(v)
else:
d[k] = [v]
def load_inventory_from_cache(self):
""" Reads the index from the cache file sets self.index """
cache = open(self.cache_path_inventory, 'r')
json_inventory = cache.read()
self.inventory = json.loads(json_inventory)
def load_params_from_cache(self):
""" Reads the index from the cache file sets self.index """
cache = open(self.cache_path_params, 'r')
json_params = cache.read()
self.params = json.loads(json_params)
def load_cache_from_cache(self):
""" Reads the cache from the cache file sets self.cache """
cache = open(self.cache_path_cache, 'r')
json_cache = cache.read()
self.cache = json.loads(json_cache)
def write_to_cache(self, data, filename):
""" Writes data in JSON format to a file """
json_data = self.json_format_dict(data, True)
cache = open(filename, 'w')
cache.write(json_data)
cache.close()
def to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
regex = "[^A-Za-z0-9\_]"
return re.sub(regex, "_", word.replace(" ", ""))
def json_format_dict(self, data, pretty=False):
""" Converts a dict to a JSON object and dumps it as a formatted string """
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
ForemanInventory()
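
For comparison with the CloudForms script, this is roughly what foreman.py prints for --list: each group (all, the foreman_hostgroup/location/organization groups, and any group_patterns matches) maps directly to a list of host names, and _meta.hostvars carries the raw Foreman host record plus the resolved parameters. The sketch below is illustrative; host and group names are invented and the host record is trimmed.

# Illustrative only: approximate shape of `foreman.py --list` output.
# Names are invented; the structure mirrors what update_cache() and the
# _meta block in __init__ assemble above.
import json

example_inventory = {
    "all": ["web01.example.com"],
    "foreman_hostgroup_webservers": ["web01.example.com"],   # from hostgroup_name
    "web-prod-blue": ["web01.example.com"],                  # from a group_patterns entry
    "_meta": {
        "hostvars": {
            "web01.example.com": {
                "foreman": {"id": 42, "name": "web01.example.com"},   # raw host record (trimmed)
                "foreman_params": {"app": "web", "tier": "prod", "color": "blue"},
            },
        },
    },
}
print(json.dumps(example_inventory, indent=2, sort_keys=True))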