import awxkit

Co-authored-by: Christopher Wang <cwang@ansible.com>
Co-authored-by: Jake McDermott <jmcdermott@ansible.com>
Co-authored-by: Jim Ladd <jladd@redhat.com>
Co-authored-by: Elijah DeLee <kdelee@redhat.com>
Co-authored-by: Alan Rominger <arominge@redhat.com>
Co-authored-by: Yanis Guenane <yanis@guenane.org>
This commit is contained in:
Ryan Petrello 2019-08-08 22:12:31 -04:00
parent 9b836abf1f
commit 9616cc6f78
No known key found for this signature in database
GPG Key ID: F2AA5F2122351777
101 changed files with 10479 additions and 0 deletions

100
awxkit/.gitignore vendored Normal file
View File

@@ -0,0 +1,100 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
report.xml
report.pylama
*,cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# IPython Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# dotenv
.env
# virtualenv
venv/
ENV/
# Spyder project settings
.spyderproject
# Rope project settings
.ropeproject
# vim
*.swp
# mac OS
*.DS_Store
# pytest
*.pytest_cache

4
awxkit/MANIFEST.in Normal file
View File

@@ -0,0 +1,4 @@
include requirements.txt
include setup.py
recursive-include awxkit *.py *.yml *.md
recursive-include test *.py *.yml *.md

4
awxkit/README.md Normal file
View File

@ -0,0 +1,4 @@
awxkit
======
Python library that backs the provided `awx` command line client.

View File

@ -0,0 +1,4 @@
from .api import pages, client, resources # NOQA
from .config import config # NOQA
from . import awx # NOQA
from .ws import WSClient # NOQA

View File

@ -0,0 +1,2 @@
from .pages import * # NOQA
from .client import * # NOQA

121
awxkit/awxkit/api/client.py Normal file
View File

@ -0,0 +1,121 @@
import logging
import requests
from awxkit import exceptions as exc
from awxkit.config import config
log = logging.getLogger(__name__)
class ConnectionException(exc.Common):
    """Raised by Connection.request() when an unknown HTTP method is requested."""
    pass
class Token_Auth(requests.auth.AuthBase):
    """requests auth helper that injects an ``Authorization: <type> <token>`` header."""

    def __init__(self, token, auth_type='Token'):
        self.auth_type = auth_type
        self.token = token

    def __call__(self, request):
        header_value = '{0} {1}'.format(self.auth_type, self.token)
        request.headers['Authorization'] = header_value
        return request
def log_elapsed(r, *args, **kwargs):  # requests response hook
    """Debug-log the method, URL, and elapsed time of a completed API request."""
    message = '"{0} {1}" elapsed: {2}'.format(r.request.method, r.url, r.elapsed)
    log.debug(message)
class Connection(object):
    """A requests.Session wrapper for establishing connection w/ AWX instance"""
    def __init__(self, server, verify=False):
        # server: base URL of the AWX instance (a trailing slash is rejected later
        # by request()); verify: TLS certificate verification, warnings silenced
        # when disabled.
        self.server = server
        self.verify = verify
        if not self.verify:
            requests.packages.urllib3.disable_warnings()
        self.session = requests.Session()
        self.uses_session_cookie = False  # set True by a session-based login()
    def get_session_requirements(self, next='/api/'):
        """Prime the session with a CSRF cookie and return the kwargs login() needs."""
        self.get('/api/')  # this causes a cookie w/ the CSRF token to be set
        return dict(next=next)
    def login(self, username=None, password=None, token=None, **kwargs):
        """Configure authentication for subsequent requests.

        With username/password AND a 'next' kwarg: performs a session (cookie)
        login by POSTing to /api/login/. With username/password alone: HTTP basic
        auth. With token: Token_Auth ('auth_type' kwarg selects the scheme,
        default 'Token'). With no credentials: clears any existing auth.
        """
        if username and password:
            _next = kwargs.get('next')
            if _next:
                headers = self.session.headers.copy()
                self.post('/api/login/', headers=headers,
                          data=dict(username=username, password=password, next=_next))
                self.session_id = self.session.cookies.get('sessionid')
                self.uses_session_cookie = True
            else:
                self.session.auth = (username, password)
        elif token:
            self.session.auth = Token_Auth(token, auth_type=kwargs.get('auth_type', 'Token'))
        else:
            self.session.auth = None
    def logout(self):
        """Drop the session cookie or header-based credentials, whichever is active."""
        if self.uses_session_cookie:
            self.session.cookies.pop('sessionid', None)
        else:
            self.session.auth = None
    def request(self, relative_endpoint, method='get', json=None, data=None, query_parameters=None, headers=None):
        """Core requests.Session wrapper that returns requests.Response objects"""
        session_request_method = getattr(self.session, method, None)
        if not session_request_method:
            raise ConnectionException(message="Unknown request method: {0}".format(method))
        use_endpoint = relative_endpoint
        if self.server.endswith('/') and use_endpoint.startswith('/'):
            # joining would produce '//' in the URL; fail loudly instead
            raise RuntimeError('AWX URL given with trailing slash, remove slash.')
        url = '{0.server}{1}'.format(self, use_endpoint)
        kwargs = dict(verify=self.verify, params=query_parameters, json=json, data=data,
                      hooks=dict(response=log_elapsed))
        if headers is not None:
            kwargs['headers'] = headers
        if method in ('post', 'put', 'patch', 'delete'):
            # mutating requests carry the CSRF token and a Referer header
            kwargs.setdefault('headers', {})['X-CSRFToken'] = self.session.cookies.get('csrftoken')
            kwargs['headers']['Referer'] = url
        # retry connection errors up to config.client_connection_attempts times
        for attempt in range(1, config.client_connection_attempts + 1):
            try:
                response = session_request_method(url, **kwargs)
                break
            except requests.exceptions.ConnectionError as err:
                if attempt == config.client_connection_attempts:
                    raise err
                log.exception('Failed to reach url: {0}. Retrying.'.format(url))
        return response
    def delete(self, relative_endpoint):
        return self.request(relative_endpoint, method='delete')
    def get(self, relative_endpoint, query_parameters=None, headers=None):
        return self.request(relative_endpoint, method='get', query_parameters=query_parameters, headers=headers)
    def head(self, relative_endpoint):
        return self.request(relative_endpoint, method='head')
    def options(self, relative_endpoint):
        return self.request(relative_endpoint, method='options')
    def patch(self, relative_endpoint, json):
        return self.request(relative_endpoint, method='patch', json=json)
    def post(self, relative_endpoint, json=None, data=None, headers=None):
        return self.request(relative_endpoint, method='post', json=json, data=data, headers=headers)
    def put(self, relative_endpoint, json):
        return self.request(relative_endpoint, method='put', json=json)

View File

@ -0,0 +1,7 @@
from .has_create import * # NOQA
from .has_instance_groups import HasInstanceGroups # NOQA
from .has_notifications import HasNotifications # NOQA
from .has_status import HasStatus # NOQA
from .has_survey import HasSurvey # NOQA
from .has_variables import HasVariables # NOQA
from .has_copy import HasCopy # NOQA

View File

@ -0,0 +1,15 @@
from awxkit.api.pages import Page
from awxkit.utils import random_title
class HasCopy(object):
    """Mixin for pages exposing a related 'copy' endpoint."""

    def can_copy(self):
        """Return whether the current user is allowed to copy this resource."""
        return self.get_related('copy').can_copy

    def copy(self, name=''):
        """Return a copy of current page"""
        copy_name = name if name else "Copy - " + random_title()
        copy_endpoint = self.json.related['copy']
        copy_page = Page(self.connection, endpoint=copy_endpoint)
        return copy_page.post({"name": copy_name})

View File

@@ -0,0 +1,395 @@
from collections import defaultdict
import inspect
from toposort import toposort
from awxkit.utils import get_class_if_instance, class_name_to_kw_arg, is_proper_subclass, super_dir_set
# HasCreate dependency resolution and creation utilities
def dependency_graph(page, *provided_dependencies):
    """Build a mapping of each page class to the set of its declared dependencies.

    The graph is expanded recursively, so every transitive dependency of ``page``
    becomes a key as well.  Any ``provided_dependencies`` are traversed as if they
    were dependencies of ``page``, but they do not alter the set stored for
    ``page`` itself.
    """
    # Some HasCreate's can claim generic Base's w/o dependencies
    direct = set(getattr(page, 'dependencies', []))
    graph = {page: direct}
    for dep in direct.union(provided_dependencies):
        graph.update(dependency_graph(dep))
    return graph
def optional_dependency_graph(page, *provided_dependencies):
    """Build a dependency mapping that treats optional_dependencies as required.

    Any ``provided_dependencies`` are traversed as if they were dependencies of
    ``page``, without affecting the set stored for ``page`` itself.
    """
    combined = set(getattr(page, 'dependencies', []) + getattr(page, 'optional_dependencies', []))
    graph = {page: combined}
    for dep in combined.union(provided_dependencies):
        graph.update(optional_dependency_graph(dep))
    return graph
def creation_order(graph):
    """Return a list of sets of HasCreate subclasses; creating pages in this order
    resolves the dependencies of every later set, for any acyclic dependency graph.

    ex:
       [set(Organization), set(Inventory), set(Group)]

    **The result is based entirely on the passed dependency graph and should be blind
    to node attributes.**
    """
    return [batch for batch in toposort(graph)]
def separate_async_optionals(creation_order):
    """Reorder each creation group so that pages which create a shared resource as a
    hard dependency come before pages that only source it as an optional dependency.

    (Groups sharing dependencies as asymmetric optionals would otherwise be created
    in an unhelpful order.)
    """
    result = []
    for group in creation_order:
        if len(group) <= 1:
            result.append(group)
            continue
        candidates = [item for item in group if hasattr(item, 'dependencies')]
        scores = dict.fromkeys(candidates, 0)
        for candidate in candidates:
            others = [other for other in candidates if other != candidate]
            for dependency in candidate.dependencies:
                scores[candidate] += sum(1 for other in others
                                         if dependency in other.optional_dependencies)
        buckets = defaultdict(set)
        for item in group:
            buckets[scores.get(item, 0)].add(item)
        for score in sorted(buckets, reverse=True):
            result.append(buckets[score])
    return result
def page_creation_order(page=None, *provided_dependencies):
    """Return creation_order() batches restricted to pages that may actually need
    creating, with producers of shared optionals split out of mixed batches.
    All provided_dependencies and their dependencies are included.
    """
    if not page:
        return []
    # dependency graphs operate on classes; unwrap (cls, kwargs) tuples and instances
    normalized = [item[0] if isinstance(item, tuple) else item for item in provided_dependencies]
    normalized = [get_class_if_instance(item) for item in normalized]
    # every page that might require creation
    needed = set(dependency_graph(page, *normalized))
    # order batches using the most complete (optional-inclusive) graph
    ordered = []
    for batch in creation_order(optional_dependency_graph(page, *normalized)):
        relevant = batch & needed  # ignore pages we will never create
        if relevant:
            ordered.append(relevant)
    return separate_async_optionals(ordered)
def all_instantiated_dependencies(*potential_parents):
    """returns a list of all instantiated dependencies including parents themselves.
    Will be in page_creation_order
    """
    scope_provided_dependencies = []
    # only instances count: classes and (cls, kwargs) tuples are filtered out
    instantiated = set([x for x in potential_parents
                        if not isinstance(x, type) and not isinstance(x, tuple)])
    # recursively gather each parent's stored dependency instances
    for potential_parent in [x for x in instantiated if hasattr(x, '_dependency_store')]:
        for dependency in potential_parent._dependency_store.values():
            if dependency and dependency not in scope_provided_dependencies:
                scope_provided_dependencies.extend(all_instantiated_dependencies(dependency))
    scope_provided_dependencies.extend(instantiated)
    # de-duplicate; set() loses order, page_creation_order below re-establishes it
    scope_provided_dependencies = list(set(scope_provided_dependencies))
    # index every instance by its class so batches can be expanded in order
    class_to_provided = {}
    for provided in scope_provided_dependencies:
        if provided.__class__ in class_to_provided:
            class_to_provided[provided.__class__].append(provided)
        else:
            class_to_provided[provided.__class__] = [provided]
    all_instantiated = []
    for group in page_creation_order(*scope_provided_dependencies):
        for item in group:
            # group members may be instances or classes; try both lookups
            if item in class_to_provided:
                all_instantiated.extend(class_to_provided[item])
                del class_to_provided[item]
            elif item.__class__ in class_to_provided:
                all_instantiated.extend(class_to_provided[item.__class__])
                del class_to_provided[item.__class__]
    return all_instantiated
class DSAdapter(object):
    """Access HasCreate._dependency_store dependencies by attribute instead of class.

    ex:
        base_sc = HasCreate().create(inventory=awxkit.api.Inventory)
        base_sc._dependency_store[Inventory] == base.ds.inventory
    """

    def __init__(self, owner, dependency_store):
        self.owner = owner
        self.dependency_store = dependency_store
        # map kwarg-style names (e.g. 'job_template') back to their page classes
        self._lookup = {class_name_to_kw_arg(cls.__name__): cls for cls in dependency_store}

    def __repr__(self):
        return str(self)

    def __str__(self):
        return str(list(self._lookup))

    def __getattr__(self, attr):
        cls = self._lookup.get(attr)
        if cls is not None:
            dep = self.dependency_store[cls]
            if dep:
                return dep
        raise AttributeError('{0.owner} has no dependency "{1}"'.format(self, attr))

    def __getitem__(self, item):
        return getattr(self, item)

    def __iter__(self):
        return iter(self._lookup)

    def __dir__(self):
        names = super_dir_set(self.__class__)
        if '_lookup' in self.__dict__ and hasattr(self._lookup, 'keys'):
            names.update(self._lookup.keys())
        return sorted(names)
# Hijack json.dumps and simplejson.dumps (used by requests)
# to allow HasCreate.create_payload() serialization without impacting payload.ds access
def filter_ds_from_payload(dumps):
    """Wrap a ``dumps`` callable so that a payload's 'ds' DSAdapter attachment is
    stripped before serialization (the adapter exists only for attribute access)."""
    def _filter_ds_from_payload(obj, *a, **kw):
        needs_filtering = hasattr(obj, 'get') and isinstance(obj.get('ds'), DSAdapter)
        if needs_filtering:
            cleaned = obj.copy()
            del cleaned['ds']
            return dumps(cleaned, *a, **kw)
        return dumps(obj, *a, **kw)
    return _filter_ds_from_payload
# Patch stdlib json.dumps (and simplejson.dumps when installed, since requests
# prefers simplejson) so serializing a create_payload() result transparently
# drops its 'ds' DSAdapter attachment.
import json # noqa
json.dumps = filter_ds_from_payload(json.dumps)
try:
    import simplejson # noqa
    simplejson.dumps = filter_ds_from_payload(simplejson.dumps)
except ImportError:
    pass
class HasCreate(object):
    """Mixin providing recursive creation and linking of page dependencies.

    Subclasses declare class-level ``dependencies`` / ``optional_dependencies``
    lists of page classes; create_and_update_dependencies() instantiates them and
    records the instances in self._dependency_store (attribute access via self.ds).
    """
    # For reference only. Use self.ds, or self._dependency_store if mutating.
    dependencies = []
    optional_dependencies = []
    # Provides introspection capability in recursive create_and_update_dependencies calls
    _scoped_dependencies_by_frame = dict()
    def __init__(self, *a, **kw):
        # When a DSAdapter is passed as 'ds', share its backing store; otherwise
        # start an empty store keyed by every declared dependency class.
        dependency_store = kw.get('ds')
        if dependency_store is None:
            deps = self.dependencies + self.optional_dependencies
            self._dependency_store = {base_subclass: None for base_subclass in deps}
            self.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        else:
            self._dependency_store = dependency_store.dependency_store
            self.ds = dependency_store
        super(HasCreate, self).__init__(*a, **kw)
    def _update_dependencies(self, dependency_candidates):
        """updates self._dependency_store to reflect instantiated dependencies, if any.

        ``dependency_candidates`` is an iterable of (instance, claimed) pairs;
        claimed instances always take a store slot, unclaimed ones only fill
        slots that are still empty.
        """
        if self._dependency_store:
            potentials = []
            # in case the candidate is an instance of a desired base class
            # (e.g. Project for self._dependency_store = {'UnifiedJobTemplate': None})
            # we try each of its base classes until a match is found
            base_lookup = {}
            for candidate in dependency_candidates:
                for cls_type in inspect.getmro(candidate[0].__class__):
                    if cls_type in self._dependency_store:
                        base_lookup[candidate[0]] = cls_type
                        potentials.append(candidate)
                        break
            second_pass = []
            for candidate, claimed in potentials:
                if claimed:
                    self._dependency_store[base_lookup[candidate]] = candidate
                else:
                    second_pass.append(candidate)
            # Technical Debt: We need to iron out the expected behavior of multiple instances
            # of unclaimed types. Right now the last one in potentials is marked as a dependency.
            second_pass.reverse()  # for the last one in the list to be marked we need to reverse.
            for candidate in second_pass:
                if not self._dependency_store[base_lookup[candidate]]:
                    self._dependency_store[base_lookup[candidate]] = candidate
    def create_and_update_dependencies(self, *provided_and_desired_dependencies):
        """in order creation of dependencies and updating of self._dependency_store
        to include instances, indexed by page class. If a (HasCreate, dict()) tuple is
        provided as a desired dependency, the dict() will be unpacked as kwargs for the
        `HasCreate.create(**dict())` call.

        ***
        Providing (HasCreate, dict()) tuples for dependency args to this method
        removes the assurance that all shared dependencies types will be the same instance
        and only one instance of each type is created
        (Tech Debt: can create orphans if default dependency isn't claimed).
        The provided args are only in scope of the desired page, override any previously created
        instance of the same class, and replace said instances in the continuing chain.
        ***

        ```
        ex:
           self.dependencies = [awxkit.api.pages.Inventory]
           self.create_and_update_dependencies()
           inventory = self._dependency_store[awxkit.api.pages.Inventory]

        ex:
           self.dependencies = [awxkit.api.pages.Inventory]
           self.create_and_update_dependencies((awxkit.api.pages.Inventory, dict(attr_one=1, attr_two=2)))
           inventory = self._dependency_store[awxkit.api.pages.Inventory]
           # assume kwargs are set as attributes by Inventory.create()
           inventory.attr_one == 1
           > True
           inventory.attr_two == 2
           > True

        ex:
           self.dependencies = []
           self.optional_dependencies = [awxkit.api.pages.Organization]
           self.create_and_update_dependencies(awxkit.api.pages.Organization)
           organization = self._dependency_store[awxkit.api.pages.Organization]

        ex:
           self.dependencies = [awxkit.api.pages.Inventory]
           inventory = v2.inventories.create()
           self.create_and_update_dependencies(inventory)
           inventory == self._dependency_store[awxkit.api.pages.Inventory]
           > True
        ```
        """
        if not any((self.dependencies, self.optional_dependencies)):
            return
        # remove falsy values
        provided_and_desired_dependencies = [x for x in provided_and_desired_dependencies if x]
        # (HasCreate(), True) tells HasCreate._update_dependencies to link
        provided_dependencies = [(x, True) for x in provided_and_desired_dependencies
                                 if not isinstance(x, type) and not isinstance(x, tuple)]
        # Since dependencies are often declared at runtime, we need to use some introspection
        # to determine previously created ones for proper dependency store linking.
        # This is done by keeping an updated dependency record by the root caller's frame
        caller_frame = inspect.currentframe()
        self.parent_frame = None
        for frame in inspect.stack()[1:]:
            # NOTE(review): no break here, so the outermost matching frame wins —
            # presumably intentional (root caller's record is the shared one); confirm.
            if frame[3] == 'create_and_update_dependencies':
                self.parent_frame = frame[0]
        if not self.parent_frame:
            # a maintained dict of instantiated resources keyed by lowercase class name to be
            # expanded as keyword args during `create()` calls
            all_instantiated = all_instantiated_dependencies(*[d[0] for d in provided_dependencies])
            scoped_dependencies = {class_name_to_kw_arg(d.__class__.__name__): d for d in all_instantiated}
            self._scoped_dependencies_by_frame[caller_frame] = [self, scoped_dependencies]
        else:
            scoped_dependencies = self._scoped_dependencies_by_frame[self.parent_frame][1]
        # anything not already satisfied by a provided instance is "desired": it
        # still needs to be created (classes or (cls, kwargs) tuples)
        desired_dependencies = []
        desired_dependency_classes = []
        for item in provided_and_desired_dependencies:
            if isinstance(item, tuple):
                item_cls = item[0]
            elif inspect.isclass(item):
                item_cls = item
            else:
                item_cls = item.__class__
            if item_cls not in [x[0].__class__ for x in provided_dependencies]:
                desired_dependency_classes.append(item_cls)
                desired_dependencies.append(item)
        if desired_dependencies:
            # order the desired classes by page creation order (matching subclasses too)
            ordered_desired_dependencies = []
            creation_order = [item for s in page_creation_order(*desired_dependency_classes) for item in s]
            for item in creation_order:
                for desired in desired_dependency_classes:
                    if desired == item or is_proper_subclass(desired, item):
                        ordered_desired_dependencies.append(desired)
                        desired_dependency_classes.remove(desired)
                        break
            # keep track of (HasCreate, kwarg_dict)
            provided_with_kwargs = dict()
            for page_cls, provided_kwargs in [x for x in desired_dependencies if isinstance(x, tuple)]:
                provided_with_kwargs[page_cls] = provided_kwargs
            for to_create in ordered_desired_dependencies:
                scoped_args = dict(scoped_dependencies)
                if to_create in provided_with_kwargs:
                    scoped_args.pop(to_create, None)  # remove any conflicts in favor of explicit kwargs
                    scoped_args.update(provided_with_kwargs.pop(to_create))
                scoped_args.pop(class_name_to_kw_arg(to_create.__name__), None)
                created = to_create(self.connection).create(**scoped_args)
                provided_dependencies.append((created, True))
                # refresh the scoped record so later creations can reference this one
                for dependency, _ in provided_dependencies:
                    if dependency not in scoped_dependencies:
                        scoped_dependencies[class_name_to_kw_arg(dependency.__class__.__name__)] = dependency
        self._update_dependencies(provided_dependencies)
        if not self.parent_frame:
            # root caller: drop the per-frame record to avoid leaking frames
            del self._scoped_dependencies_by_frame[caller_frame]
    def teardown(self):
        """Calls `silent_cleanup()` on all dependencies and self in reverse page creation order."""
        to_teardown = all_instantiated_dependencies(self)
        to_teardown_types = set(map(get_class_if_instance, to_teardown))
        order = [
            set(
                [
                    potential for potential in (
                        get_class_if_instance(x) for x in group) if potential in to_teardown_types
                ]
            )
            for group in page_creation_order(self, *to_teardown)
        ]
        order.reverse()
        for teardown_group in order:
            for teardown_class in teardown_group:
                instance = [x for x in to_teardown if isinstance(x, teardown_class)].pop()
                instance.silent_cleanup()
        # unlink destroyed dependencies from every store so they aren't reused
        for item in to_teardown:
            for dep_type, dep in item._dependency_store.items():
                if dep and dep_type in to_teardown_types:
                    item._dependency_store[dep_type] = None  # Note that we don't call del

View File

@ -0,0 +1,17 @@
from awxkit.utils import suppress
import awxkit.exceptions as exc
class HasInstanceGroups(object):
    """Mixin for pages with a related 'instance_groups' endpoint."""

    def add_instance_group(self, instance_group):
        """Associate ``instance_group`` with this resource."""
        payload = dict(id=instance_group.id)
        with suppress(exc.NoContent):
            self.related['instance_groups'].post(payload)

    def remove_instance_group(self, instance_group):
        """Disassociate ``instance_group`` from this resource."""
        payload = dict(id=instance_group.id, disassociate=instance_group.id)
        with suppress(exc.NoContent):
            self.related['instance_groups'].post(payload)

    def remove_all_instance_groups(self):
        """Disassociate every currently-related instance group."""
        for group in self.related.instance_groups.get().results:
            self.remove_instance_group(group)

View File

@ -0,0 +1,23 @@
from awxkit.utils import suppress
import awxkit.exceptions as exc
# Endpoints on which notification templates may be (dis)associated.
notification_endpoints = ("notification_templates", "notification_templates_started", "notification_templates_error",
                          "notification_templates_success")


class HasNotifications(object):
    """Mixin for pages exposing notification-template association endpoints."""

    def _validate_endpoint(self, endpoint):
        """Raise ValueError unless ``endpoint`` is one of notification_endpoints."""
        # Shared by add/remove below; previously this check was duplicated verbatim.
        if endpoint not in notification_endpoints:
            raise ValueError('Unsupported notification endpoint "{0}". Please use one of {1}.'
                             .format(endpoint, notification_endpoints))

    def add_notification_template(self, notification_template, endpoint="notification_templates_success"):
        """Associate ``notification_template`` with this resource on ``endpoint``.

        :raises ValueError: if ``endpoint`` is not a supported notification endpoint.
        """
        self._validate_endpoint(endpoint)
        with suppress(exc.NoContent):
            self.related[endpoint].post(dict(id=notification_template.id))

    def remove_notification_template(self, notification_template, endpoint="notification_templates_success"):
        """Disassociate ``notification_template`` from this resource on ``endpoint``.

        :raises ValueError: if ``endpoint`` is not a supported notification endpoint.
        """
        self._validate_endpoint(endpoint)
        with suppress(exc.NoContent):
            self.related[endpoint].post(dict(id=notification_template.id, disassociate=notification_template.id))

View File

@ -0,0 +1,85 @@
from datetime import datetime
import json
from awxkit.utils import poll_until
def bytes_to_str(obj):
    """Return ``obj`` decoded to text, or its ``str()`` form when not bytes-like."""
    try:
        decoded = obj.decode()
    except AttributeError:
        decoded = str(obj)
    return decoded
class HasStatus(object):
    """Mixin for pages that expose a 'status' field, with wait/assert helpers."""
    # statuses considered terminal, and those indicating the job has started
    completed_statuses = ['successful', 'failed', 'error', 'canceled']
    started_statuses = ['pending', 'running'] + completed_statuses
    @property
    def is_completed(self):
        # case-insensitive comparison against the terminal statuses
        return self.status.lower() in self.completed_statuses
    @property
    def is_successful(self):
        return self.status == 'successful'
    def wait_until_status(self, status, interval=1, timeout=60, **kwargs):
        """Poll (re-GETting this page) until its status is in ``status``; returns self."""
        status = [status] if not isinstance(status, (list, tuple)) else status
        poll_until(lambda: getattr(self.get(), 'status') in status, interval=interval, timeout=timeout, **kwargs)
        return self
    def wait_until_completed(self, interval=5, timeout=60, **kwargs):
        """Poll until a terminal status is reached, then until event processing
        finishes (within whatever remains of the original timeout); returns self."""
        start_time = datetime.utcnow()
        HasStatus.wait_until_status(self, self.completed_statuses, interval=interval, timeout=timeout, **kwargs)
        if not getattr(self, 'event_processing_finished', True):
            # only spend the leftover portion of the timeout on event processing
            elapsed = datetime.utcnow() - start_time
            time_left = timeout - elapsed.total_seconds()
            poll_until(lambda: getattr(self.get(), 'event_processing_finished', True),
                       interval=interval, timeout=time_left, **kwargs)
        return self
    def wait_until_started(self, interval=1, timeout=60):
        """Poll until the job has at least started (or already finished); returns self."""
        return self.wait_until_status(self.started_statuses, interval=interval, timeout=timeout)
    def assert_status(self, status_list, msg=None):
        """Raise AssertionError with collected diagnostics unless this page's status
        is in ``status_list`` (and, when 'successful' is expected, is_successful)."""
        if isinstance(status_list, str):
            status_list = [status_list]
        if self.status in status_list:
            # include corner cases in is_successful logic
            if 'successful' not in status_list or self.is_successful:
                return
        # build the failure message, appending whichever diagnostics exist
        if msg is None:
            msg = ''
        else:
            msg += '\n'
        msg += '{0}-{1} has status of {2}, which is not in {3}.'.format(
            self.type.title(), self.id, self.status, status_list
        )
        if getattr(self, 'job_explanation', ''):
            msg += '\njob_explanation: {}'.format(bytes_to_str(self.job_explanation))
        if getattr(self, 'result_traceback', ''):
            msg += '\nresult_traceback:\n{}'.format(bytes_to_str(self.result_traceback))
        if getattr(self, 'result_stdout', ''):
            output = bytes_to_str(self.result_stdout)
            if output:
                msg = msg + '\nstdout:\n{}'.format(output)
        if getattr(self, 'job_explanation', '').startswith('Previous Task Failed'):
            # a prior dependent task failed; best-effort fetch of its stdout for context
            try:
                data = json.loads(self.job_explanation.replace('Previous Task Failed: ', ''))
                dep_output = self.connection.get(
                    '{0}/api/v2/{1}s/{2}/stdout/'.format(
                        self.endpoint.split('/api')[0], data['job_type'], data['job_id']
                    ),
                    query_parameters=dict(format='txt_download')
                ).content
                msg += '\nDependency output:\n{}'.format(bytes_to_str(dep_output))
            except Exception as e:
                msg += '\nFailed to obtain dependency stdout: {}'.format(e)
        msg += '\nTIME WHEN STATUS WAS FOUND: {} (UTC)\n'.format(datetime.utcnow())
        raise AssertionError(msg)
    def assert_successful(self, msg=None):
        """Assert the status is exactly 'successful'."""
        return self.assert_status('successful', msg=msg)

View File

@ -0,0 +1,15 @@
from awxkit.utils import random_title
class HasSurvey(object):
    """Mixin for pages that support survey specs."""

    def add_survey(self, spec=None, name=None, description=None, required=False, enabled=True):
        """POST a survey spec (default: a single password question) and return it.

        Also patches survey_enabled when it differs from ``enabled``.
        """
        default_spec = [dict(required=required,
                             question_name="What's the password?",
                             variable="secret",
                             type="password",
                             default="foo")]
        payload = dict(name=name or 'Survey - {}'.format(random_title()),
                       description=description or random_title(10),
                       spec=spec or default_spec)
        if enabled != self.survey_enabled:
            self.patch(survey_enabled=enabled)
        return self.related.survey_spec.post(payload).get()

View File

@ -0,0 +1,10 @@
import yaml
from awxkit.utils import PseudoNamespace
class HasVariables(object):
    """Mixin for pages whose json carries a raw 'variables' text field."""
    @property
    def variables(self):
        # Parse the raw variables text (YAML, of which JSON is a subset) into an
        # attribute-accessible namespace.
        return PseudoNamespace(yaml.load(self.json.variables, Loader=yaml.FullLoader))

View File

@ -0,0 +1,39 @@
# Order matters
from .page import * # NOQA
from .base import * # NOQA
from .access_list import * # NOQA
from .api import * # NOQA
from .authtoken import * # NOQA
from .roles import * # NOQA
from .organizations import * # NOQA
from .notifications import * # NOQA
from .notification_templates import * # NOQA
from .users import * # NOQA
from .applications import * # NOQA
from .teams import * # NOQA
from .credentials import * # NOQA
from .unified_jobs import * # NOQA
from .unified_job_templates import * # NOQA
from .projects import * # NOQA
from .inventory import * # NOQA
from .system_job_templates import * # NOQA
from .job_templates import * # NOQA
from .jobs import * # NOQA
from .survey_spec import * # NOQA
from .system_jobs import * # NOQA
from .config import * # NOQA
from .ping import * # NOQA
from .dashboard import * # NOQA
from .activity_stream import * # NOQA
from .schedules import * # NOQA
from .ad_hoc_commands import * # NOQA
from .labels import * # NOQA
from .workflow_job_templates import * # NOQA
from .workflow_job_template_nodes import * # NOQA
from .workflow_jobs import * # NOQA
from .workflow_job_nodes import * # NOQA
from .settings import * # NOQA
from .instances import * # NOQA
from .instance_groups import * # NOQA
from .credential_input_sources import * # NOQA
from .metrics import * # NOQA

View File

@ -0,0 +1,18 @@
from awxkit.api.resources import resources
from . import users
from . import page
class AccessList(page.PageList, users.User):
    """Paginated list of User pages served by the *_access_list endpoints."""
    pass


# One page class handles every resource's access_list endpoint.
page.register_page([resources.organization_access_list,
                    resources.user_access_list,
                    resources.inventory_access_list,
                    resources.group_access_list,
                    resources.credential_access_list,
                    resources.project_access_list,
                    resources.job_template_access_list,
                    resources.team_access_list], AccessList)

View File

@ -0,0 +1,20 @@
from awxkit.api.resources import resources
from . import base
from . import page
class ActivityStream(base.Base):
    """A single activity stream entry."""
    pass


page.register_page(resources.activity, ActivityStream)


class ActivityStreams(page.PageList, ActivityStream):
    """Paginated list of activity stream entries."""
    pass


page.register_page([resources.activity_stream,
                    resources.object_activity_stream], ActivityStreams)

View File

@ -0,0 +1,66 @@
from awxkit.utils import update_payload, PseudoNamespace
from awxkit.api.pages import Inventory, Credential
from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.utils import not_provided as np
from awxkit.api.resources import resources
from .jobs import UnifiedJob
from . import page
class AdHocCommand(HasCreate, UnifiedJob):
    """Ad hoc command page, creatable from its Inventory and Credential dependencies."""

    dependencies = [Inventory, Credential]

    def relaunch(self, payload=None):
        """Relaunch the command using the related->relaunch endpoint.

        :param payload: optional dict POSTed to the relaunch endpoint.
        """
        # Default was previously a shared mutable dict ({}); use the None sentinel
        # to avoid cross-call aliasing. Passing {} explicitly still works.
        # navigate to relaunch_pg
        relaunch_pg = self.get_related('relaunch')
        # relaunch the command
        result = relaunch_pg.post({} if payload is None else payload)
        # return the corresponding command_pg
        return self.walk(result.url)

    def payload(self, inventory, credential, module_name='ping', **kwargs):
        """Build a POST payload; optional fields are merged in from kwargs."""
        payload = PseudoNamespace(inventory=inventory.id,
                                  credential=credential.id,
                                  module_name=module_name)
        optional_fields = ('diff_mode', 'extra_vars', 'module_args', 'job_type', 'limit', 'forks',
                           'verbosity')
        return update_payload(payload, optional_fields, kwargs)

    def create_payload(self, module_name='ping', module_args=np, job_type=np, limit=np, verbosity=np,
                       inventory=Inventory, credential=Credential, **kwargs):
        """Create/link the dependencies, then build a payload carrying a .ds adapter."""
        self.create_and_update_dependencies(inventory, credential)
        payload = self.payload(module_name=module_name, module_args=module_args, job_type=job_type, limit=limit,
                               verbosity=verbosity, inventory=self.ds.inventory, credential=self.ds.credential,
                               **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, module_name='ping', module_args=np, job_type=np, limit=np, verbosity=np,
               inventory=Inventory, credential=Credential, **kwargs):
        """POST a new ad hoc command and update this page's identity to the result."""
        payload = self.create_payload(module_name=module_name, module_args=module_args,
                                      job_type=job_type, limit=limit, verbosity=verbosity,
                                      inventory=inventory, credential=credential, **kwargs)
        return self.update_identity(AdHocCommands(self.connection).post(payload))


page.register_page([resources.ad_hoc_command], AdHocCommand)
class AdHocCommands(page.PageList, AdHocCommand):
    """Paginated list of AdHocCommand pages."""
    pass


# Registered for the top-level list plus the inventory/group/host related endpoints.
page.register_page([resources.ad_hoc_commands,
                    resources.inventory_related_ad_hoc_commands,
                    resources.group_related_ad_hoc_commands,
                    resources.host_related_ad_hoc_commands], AdHocCommands)

View File

@ -0,0 +1,19 @@
from awxkit.api.resources import resources
from . import base
from . import page
class Api(base.Base):
    """Page for the API root resource."""
    pass


page.register_page(resources.api, Api)


class ApiV2(base.Base):
    """Page for the v2 API root resource."""
    pass


page.register_page(resources.v2, ApiV2)

View File

@ -0,0 +1,84 @@
from awxkit.utils import random_title, update_payload, filter_by_class, PseudoNamespace
from awxkit.api.resources import resources
from awxkit.api.pages import Organization
from awxkit.api.mixins import HasCreate, DSAdapter
from . import page
from . import base
class OAuth2Application(HasCreate, base.Base):
    """OAuth2 application page, creatable with an Organization dependency."""

    dependencies = [Organization]

    def payload(self, **kwargs):
        """Build a POST payload, generating defaults for any absent fields."""
        app_name = kwargs.get('name') or 'OAuth2Application - {}'.format(random_title())
        payload = PseudoNamespace(name=app_name,
                                  description=kwargs.get('description') or random_title(10),
                                  client_type=kwargs.get('client_type', 'public'),
                                  authorization_grant_type=kwargs.get('authorization_grant_type', 'password'))
        organization = kwargs.get('organization')
        if organization:
            payload.organization = organization.id
        update_payload(payload, ('redirect_uris', 'skip_authorization'), kwargs)
        return payload

    def create_payload(self, organization=Organization, **kwargs):
        """Resolve the Organization dependency, then build a payload with a .ds adapter."""
        self.create_and_update_dependencies(*filter_by_class((organization, Organization)))
        organization = self.ds.organization if organization else None
        payload = self.payload(organization=organization, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, organization=Organization, **kwargs):
        """POST a new OAuth2 application and assume its identity."""
        payload = self.create_payload(organization=organization, **kwargs)
        return self.update_identity(OAuth2Applications(self.connection).post(payload))
# Detail page, plus POSTs against the collection, resolve to the item class.
page.register_page((resources.application,
                    (resources.applications, 'post')), OAuth2Application)


class OAuth2Applications(page.PageList, OAuth2Application):
    """List page for OAuth2 applications."""

    pass


page.register_page(resources.applications, OAuth2Applications)
class OAuth2AccessToken(HasCreate, base.Base):
    """Page object for a single OAuth2 access token."""

    optional_dependencies = [OAuth2Application]

    def payload(self, **kwargs):
        """Build the POST body for an access token from kwargs."""
        body = PseudoNamespace(
            description=kwargs.get('description') or random_title(10),
            scope=kwargs.get('scope', 'write'))
        app = kwargs.get('oauth_2_application')
        if app:
            body.application = app.id
        update_payload(body, ('expires',), kwargs)
        return body

    def create_payload(self, oauth_2_application=None, **kwargs):
        """Resolve the optional application dependency, then build a payload
        with a dependency-store adapter attached."""
        self.create_and_update_dependencies(*filter_by_class((oauth_2_application, OAuth2Application)))
        app_page = self.ds.oauth_2_application if oauth_2_application else None
        body = self.payload(oauth_2_application=app_page, **kwargs)
        body.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return body

    def create(self, oauth_2_application=None, **kwargs):
        """POST a new access token and update this page's identity from it."""
        return self.update_identity(
            OAuth2AccessTokens(self.connection).post(
                self.create_payload(oauth_2_application=oauth_2_application, **kwargs)))
# Detail page, plus POSTs against the collection, resolve to the item class.
page.register_page((resources.token,
                    (resources.tokens, 'post')), OAuth2AccessToken)


class OAuth2AccessTokens(page.PageList, OAuth2AccessToken):
    """List page for OAuth2 access tokens."""

    pass


page.register_page(resources.tokens, OAuth2AccessTokens)

View File

@ -0,0 +1,11 @@
from awxkit.api.resources import resources
from . import base
from . import page
class AuthToken(base.Base):
    """Page object for the legacy authtoken endpoint."""

    pass


page.register_page(resources.authtoken, AuthToken)

View File

@ -0,0 +1,234 @@
import collections
import logging
from requests.auth import HTTPBasicAuth
from awxkit.api.pages import (
Page,
get_registered_page,
exception_from_status_code
)
from awxkit.config import config
from awxkit.api.resources import resources
import awxkit.exceptions as exc
log = logging.getLogger(__name__)
class Base(Page):
    """Shared base for all API page objects.

    Layers authentication helpers (authtoken, OAuth2, session login), RBAC
    object-role helpers, and cleanup/teardown behavior on top of the raw
    ``Page`` HTTP wrapper.
    """

    def silent_delete(self):
        """Delete the object. If it's already deleted, ignore the error"""
        try:
            # When teardown is globally disabled, skip the DELETE entirely.
            if not config.prevent_teardown:
                return self.delete()
        except (exc.NoContent, exc.NotFound, exc.Forbidden):
            pass

    def get_object_role(self, role, by_name=False):
        """Lookup and return a related object role by its role field or name.

        Args:
        ----
        role (str): The role's `role_field` or name
        by_name (bool): Whether to retrieve the role by its name field (default: False)

        Examples:
        --------
        >>> # get the description of the Use role for an inventory
        >>> inventory = v2.inventory.create()
        >>> use_role_1 = inventory.get_object_role('use_role')
        >>> use_role_2 = inventory.get_object_role('use', True)
        >>> use_role_1.description
        u'Can use the inventory in a job template'
        >>> use_role_1.json == use_role_2.json
        True
        """
        if by_name:
            # Case-insensitive scan of the related object_roles by display name.
            for obj_role in self.related.object_roles.get().results:
                if obj_role.name.lower() == role.lower():
                    return obj_role
            raise Exception("Role '{0}' not found for {1.endpoint}".format(role, self))
        # Otherwise filter server-side on role_field; expect exactly one match.
        object_roles = self.get_related('object_roles', role_field=role)
        if not object_roles.count == 1:
            raise Exception("No role with role_field '{0}' found.".format(role))
        return object_roles.results[0]

    def set_object_roles(self, agent, *role_names, **kw):
        """Associate related object roles to a User or Team by role names

        Args:
        ----
        agent (User or Team): The agent the role is to be (dis)associated with.
        *role_names (str): an arbitrary number of role names ('Admin', 'Execute', 'Read', etc.)
        **kw:
            endpoint (str): The endpoint to use when making the object role association
                - 'related_users': use the related users endpoint of the role (default)
                - 'related_roles': use the related roles endpoint of the user
            disassociate (bool): Indicates whether to disassociate the role with the user (default: False)

        Examples:
        --------
        # create a user that is an organization admin with use and
        # update roles on an inventory
        >>> organization = v2.organization.create()
        >>> inventory = v2.inventory.create()
        >>> user = v2.user.create()
        >>> organization.set_object_roles(user, 'admin')
        >>> inventory.set_object_roles(user, 'use', 'update')
        """
        # Imported here to avoid a circular import at module load time.
        from awxkit.api.pages import User, Team
        endpoint = kw.get('endpoint', 'related_users')
        disassociate = kw.get('disassociate', False)
        if not any([isinstance(agent, agent_type) for agent_type in (User, Team)]):
            raise ValueError('Invalid agent type {0.__class__.__name__}'.format(agent))
        if endpoint not in ('related_users', 'related_roles'):
            raise ValueError('Invalid role association endpoint: {0}'.format(endpoint))
        object_roles = [self.get_object_role(name, by_name=True) for name in role_names]
        payload = {}
        for role in object_roles:
            if endpoint == 'related_users':
                # POST the agent's id to the role's users/teams sub-endpoint.
                payload['id'] = agent.id
                if isinstance(agent, User):
                    endpoint_model = role.related.users
                elif isinstance(agent, Team):
                    endpoint_model = role.related.teams
                else:
                    raise RuntimeError("Unhandled type for agent: {0.__class__.__name__}.".format(agent))
            elif endpoint == 'related_roles':
                # POST the role's id to the agent's roles sub-endpoint.
                payload['id'] = role.id
                endpoint_model = agent.related.roles
            else:
                raise RuntimeError('Invalid role association endpoint')
            if disassociate:
                payload['disassociate'] = True
            try:
                endpoint_model.post(payload)
            except exc.NoContent:  # desired exception on successful (dis)association
                pass
        return True

    @property
    def object_roles(self):
        """Yield each related object role as a fully-fetched Role page."""
        # Imported here to avoid a circular import at module load time.
        from awxkit.api.pages import Roles, Role
        url = self.get().json.related.object_roles
        for obj_role in Roles(self.connection, endpoint=url).get().json.results:
            yield Role(self.connection, endpoint=obj_role.url).get()

    def get_authtoken(self, username='', password=''):
        """POST credentials to the authtoken endpoint and return the token.

        Empty username/password fall back to the configured default credential.
        """
        default_cred = config.credentials.default
        payload = dict(username=username or default_cred.username,
                       password=password or default_cred.password)
        auth_url = resources.authtoken
        return get_registered_page(auth_url)(self.connection, endpoint=auth_url).post(payload).token

    def load_authtoken(self, username='', password=''):
        """Log the underlying connection in with a freshly-acquired authtoken."""
        self.connection.login(token=self.get_authtoken(username, password))
        return self

    # Backwards-compatible alias.
    load_default_authtoken = load_authtoken

    def get_oauth2_token(self, username='', password='', client_id=None,
                         client_secret=None, scope='write'):
        """Obtain an OAuth2 token, choosing the flow from the supplied args.

        - client_id + client_secret: password grant with HTTP basic app auth
        - client_id only: password grant with the client_id in the form body
        - neither: create a personal access token via the user's endpoint

        Empty username/password fall back to the configured default credential.
        Raises the awxkit exception matching the HTTP status on failure.
        """
        default_cred = config.credentials.default
        username = username or default_cred.username
        password = password or default_cred.password
        # Minimal stand-in request object so HTTPBasicAuth can inject its
        # Authorization header into a plain dict.
        req = collections.namedtuple('req', 'headers')({})
        if client_id and client_secret:
            HTTPBasicAuth(client_id, client_secret)(req)
            req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
            resp = self.connection.post(
                '/api/o/token/',
                data={
                    "grant_type": "password",
                    "username": username,
                    "password": password,
                    "scope": scope
                },
                headers=req.headers
            )
        elif client_id:
            req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
            resp = self.connection.post(
                '/api/o/token/',
                data={
                    "grant_type": "password",
                    "username": username,
                    "password": password,
                    "client_id": client_id,
                    "scope": scope
                },
                headers=req.headers
            )
        else:
            # No application: create a personal access token as the user.
            HTTPBasicAuth(username, password)(req)
            resp = self.connection.post(
                '/api/v2/users/{}/personal_tokens/'.format(username),
                json={
                    "description": "Tower CLI",
                    "application": None,
                    "scope": scope
                },
                headers=req.headers
            )
        if resp.ok:
            result = resp.json()
            # OAuth2 responses use 'access_token'; PAT responses use 'token'.
            if client_id:
                return result.pop('access_token', None)
            else:
                return result.pop('token', None)
        else:
            raise exception_from_status_code(resp.status_code)

    def load_session(self, username='', password=''):
        """Log in with a session cookie instead of a token."""
        default_cred = config.credentials.default
        self.connection.login(username=username or default_cred.username,
                              password=password or default_cred.password,
                              **self.connection.get_session_requirements())
        return self

    def cleanup(self):
        """Delete this resource, retrying around active-job conflicts."""
        log.debug('{0.endpoint} cleaning up.'.format(self))
        return self._cleanup(self.delete)

    def silent_cleanup(self):
        """Like cleanup(), but swallows already-deleted/forbidden errors."""
        log.debug('{0.endpoint} silently cleaning up.'.format(self))
        return self._cleanup(self.silent_delete)

    def _cleanup(self, delete_method):
        """Invoke delete_method, cancelling and waiting out blocking jobs.

        Two retry paths: a Forbidden 'running job resource' error cancels this
        resource itself; a Conflict with 'running jobs' cancels each listed
        active job before retrying the delete.
        """
        try:
            delete_method()
        except exc.Forbidden as e:
            if e.msg == {'detail': 'Cannot delete running job resource.'}:
                self.cancel()
                self.wait_until_completed(interval=1, timeout=30, since_job_created=False)
                delete_method()
            else:
                raise
        except exc.Conflict as e:
            conflict = e.msg.get('conflict', e.msg.get('error', ''))
            if "running jobs" in conflict:
                active_jobs = e.msg.get('active_jobs', [])  # [{type: id},], not page containing
                jobs = []
                for active_job in active_jobs:
                    job_type = active_job['type']
                    endpoint = '/api/v2/{}s/{}/'.format(job_type, active_job['id'])
                    job = self.walk(endpoint)
                    jobs.append(job)
                    job.cancel()
                # Cancel all first, then wait, so cancellations overlap.
                for job in jobs:
                    job.wait_until_completed(interval=1, timeout=30, since_job_created=False)
                delete_method()
            else:
                raise

View File

@ -0,0 +1,55 @@
from awxkit.api.resources import resources
from . import base
from . import page
class Config(base.Base):
    """Page object for the config endpoint, with license-introspection helpers."""

    @property
    def is_aws_license(self):
        """Whether the license info marks this as an AWS-marketplace install."""
        info = self.license_info
        return info.get('is_aws', False) or \
            'ami-id' in info or \
            'instance-id' in info

    @property
    def is_demo_license(self):
        """Whether the license info marks this as a demo install."""
        info = self.license_info
        return info.get('demo', False) or info.get('key_present', False)

    @property
    def is_valid_license(self):
        """Whether a valid license key with the expected fields is present."""
        info = self.license_info
        return info.get('valid_key', False) and \
            'license_key' in info and \
            'instance_count' in info

    @property
    def is_trial_license(self):
        """Whether a valid trial license is installed."""
        return self.is_valid_license and self.license_info.get('trial', False)

    @property
    def is_awx_license(self):
        """Whether this is an open (AWX) license."""
        return self.license_info.get('license_type', None) == 'open'

    @property
    def is_legacy_license(self):
        """Whether a valid legacy license is installed."""
        return self.is_valid_license and self.license_info.get('license_type', None) == 'legacy'

    @property
    def is_basic_license(self):
        """Whether a valid basic license is installed."""
        return self.is_valid_license and self.license_info.get('license_type', None) == 'basic'

    @property
    def is_enterprise_license(self):
        """Whether a valid enterprise license is installed."""
        return self.is_valid_license and self.license_info.get('license_type', None) == 'enterprise'

    @property
    def features(self):
        """returns a list of enabled license features"""
        enabled = self.license_info.get('features', {})
        return [name for name, on in enabled.items() if on]


page.register_page(resources.config, Config)

View File

@ -0,0 +1,20 @@
from awxkit.api.resources import resources
from . import base
from . import page
class CredentialInputSource(base.Base):
    """Page object for a single credential input source."""

    pass


page.register_page(resources.credential_input_source, CredentialInputSource)


class CredentialInputSources(page.PageList, CredentialInputSource):
    """List page for credential input sources."""

    pass


page.register_page([resources.credential_input_sources,
                    resources.related_input_sources], CredentialInputSources)

View File

@ -0,0 +1,336 @@
import logging
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from awxkit.utils import (
cloud_types,
filter_by_class,
not_provided,
random_title,
update_payload,
PseudoNamespace)
from awxkit.api.pages import Organization, User, Team
from awxkit.api.mixins import HasCreate, HasCopy, DSAdapter
from awxkit.api.resources import resources
from awxkit.config import config
from . import base
from . import page
log = logging.getLogger(__name__)
# Flat union of every input field used across the Tower-managed credential
# types; Credential.payload() probes each of these against the caller's
# kwargs/inputs and the configured credential for the type's kind.
credential_input_fields = (
    'authorize_password',
    'become_method',
    'become_password',
    'become_username',
    'client',
    'cloud_environment',
    'domain',
    'host',
    'password',
    'project_id',
    'project_name',
    'secret',
    'ssh_key_data',
    'ssh_key_unlock',
    'subscription',
    'tenant',
    'username',
    'vault_password',
    'vault_id')
def generate_private_key():
    """Generate a fresh 4096-bit RSA private key and return it as a
    PEM-encoded (unencrypted, traditional OpenSSL format) string."""
    private_key = rsa.generate_private_key(
        public_exponent=65537, key_size=4096, backend=default_backend())
    pem_bytes = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption())
    return pem_bytes.decode('utf-8')
def config_cred_from_kind(kind):
    """Return the configured credential entry for *kind*.

    Falls back to an empty PseudoNamespace when the config file has no
    matching entry.
    """
    try:
        if kind == 'net':
            return config.credentials.network
        if kind in cloud_types:
            # The config file stores Azure RM creds under the 'azure' key.
            if kind == 'azure_rm':
                return config.credentials.cloud.azure
            return config.credentials.cloud[kind]
        return config.credentials[kind]
    except (KeyError, AttributeError):
        return PseudoNamespace()
# Maps a lowercase managed credential-type display name to the key used in
# the awxkit config file for its credentials.
# NOTE(review): 'openstack' maps to mixed-case 'OpenStack' unlike the other
# all-lowercase kinds; openstack is special-cased by callers before map
# lookups — confirm before normalizing.
credential_type_name_to_config_kind_map = {
    'amazon web services': 'aws',
    'ansible tower': 'tower',
    'google compute engine': 'gce',
    'insights': 'insights',
    'microsoft azure classic (deprecated)': 'azure_classic',
    'microsoft azure resource manager': 'azure_rm',
    'network': 'net',
    'openstack': 'OpenStack',
    'red hat virtualization': 'rhv',
    'red hat cloudforms': 'cloudforms',
    'red hat satellite 6': 'satellite6',
    'source control': 'scm',
    'machine': 'ssh',
    'vault': 'vault',
    'vmware vcenter': 'vmware'}

# Inverse mapping: config-file kind back to credential-type display name.
config_kind_to_credential_type_name_map = {
    kind: name
    for name, kind in credential_type_name_to_config_kind_map.items()}
def kind_and_config_cred_from_credential_type(credential_type):
    """Map a CredentialType page to ``(config kind, configured credential)``.

    Non-managed (custom) types, and managed types with no matching config
    entry, yield an empty PseudoNamespace for the credential.
    """
    kind = ''
    if not credential_type.managed_by_tower:
        return kind, PseudoNamespace()
    try:
        if credential_type.kind == 'net':
            # Lookup first so a missing config entry leaves kind as ''.
            cred = config.credentials.network
            kind = 'net'
        elif credential_type.kind == 'cloud':
            kind = credential_type_name_to_config_kind_map[credential_type.name.lower()]
            # Azure RM creds live under the 'azure' config key.
            cred = config.credentials.cloud['azure' if kind == 'azure_rm' else kind]
        else:
            kind = credential_type.kind.lower()
            cred = config.credentials[kind]
    except (KeyError, AttributeError):
        return kind, PseudoNamespace()
    return kind, cred
def get_payload_field_and_value_from_kwargs_or_config_cred(
        field, kind, kwargs, config_cred):
    """Resolve one credential input field.

    Translates *field* to the matching config-file key, takes the value from
    ``kwargs`` first and ``config_cred`` second, and returns
    ``(payload_field, value)`` — value is ``not_provided`` when neither
    source supplies one.
    """
    # Renames that do not depend on the credential kind.
    simple_renames = {'client': 'client_id', 'authorize_password': 'authorize'}
    if field in ('project_id', 'project_name'):  # Needed to prevent Project kwarg collision
        config_field = 'project'
    elif field == 'subscription' and 'azure' in kind:
        config_field = 'subscription_id'
    elif field == 'username' and kind == 'azure_ad':
        config_field = 'ad_user'
    else:
        config_field = simple_renames.get(field, field)
    value = kwargs.get(field, config_cred.get(config_field, not_provided))
    # Both project_* aliases land in the payload as 'project'.
    payload_field = 'project' if field in ('project_id', 'project_name') else field
    return payload_field, value
class CredentialType(HasCreate, base.Base):
    """Page object for a single credential type."""

    def silent_delete(self):
        """Only custom (non-Tower-managed) credential types may be deleted."""
        if not self.managed_by_tower:
            return super(CredentialType, self).silent_delete()

    def payload(self, kind='cloud', **kwargs):
        """Build the POST body for a credential type from kwargs."""
        body = PseudoNamespace(
            name=kwargs.get('name') or 'CredentialType - {}'.format(random_title()),
            description=kwargs.get('description') or random_title(10),
            kind=kind)
        update_payload(body, ('inputs', 'injectors'), kwargs)
        return body

    def create_payload(self, kind='cloud', **kwargs):
        """Build a payload with a dependency-store adapter attached."""
        body = self.payload(kind=kind, **kwargs)
        body.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return body

    def create(self, kind='cloud', **kwargs):
        """POST a new credential type and update this page's identity from it."""
        return self.update_identity(
            CredentialTypes(self.connection).post(
                self.create_payload(kind=kind, **kwargs)))
# Detail page, plus POSTs against the collection, resolve to the item class.
page.register_page([resources.credential_type,
                    (resources.credential_types, 'post')], CredentialType)


class CredentialTypes(page.PageList, CredentialType):
    """List page for credential types."""

    pass


page.register_page(resources.credential_types, CredentialTypes)
class Credential(HasCopy, HasCreate, base.Base):
    """Page object for a credential.

    Builds credential payloads by merging caller-supplied inputs/kwargs with
    the awxkit config-file credential for the type's kind.
    """

    dependencies = [CredentialType]
    optional_dependencies = [Organization, User, Team]

    def payload(
            self,
            credential_type,
            user=None,
            team=None,
            organization=None,
            inputs=None,
            **kwargs):
        """Build the POST body for a credential.

        At least one of user/team/organization must be provided as the owner
        (raises TypeError otherwise). Input fields are sourced from
        ``inputs``/``kwargs`` first, then the configured credential for the
        type's kind; ssh/net types get a generated private key if none given.
        """
        if not any((user, team, organization)):
            raise TypeError(
                '{0.__class__.__name__} requires user, team, and/or organization instances.'.format(self))
        if inputs is None:
            inputs = {}
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Credential - {}'.format(
                random_title()),
            description=kwargs.get('description') or random_title(10),
            credential_type=credential_type.id,
            inputs=inputs)
        if user:
            payload.user = user.id
        if team:
            payload.team = team.id
        if organization:
            payload.organization = organization.id
        kind, config_cred = kind_and_config_cred_from_credential_type(
            credential_type)
        for field in credential_input_fields:
            # NOTE(review): values are written into the same dict object
            # passed as `inputs`, so the caller's dict is mutated — confirm
            # callers rely on this before copying.
            field, value = get_payload_field_and_value_from_kwargs_or_config_cred(
                field, kind, inputs or kwargs, config_cred)
            if value != not_provided:
                payload.inputs[field] = value
        if kind == 'net':
            # Network creds default authorization to on whenever an
            # authorize password was supplied.
            payload.inputs.authorize = inputs.get(
                'authorize', bool(inputs.get('authorize_password')))
        if kind in ('ssh', 'net') and 'ssh_key_data' not in payload.inputs:
            payload.inputs.ssh_key_data = inputs.get(
                'ssh_key_data', generate_private_key())
        return payload

    def create_payload(
            self,
            credential_type=CredentialType,
            user=None,
            team=None,
            organization=Organization,
            inputs=None,
            **kwargs):
        """Resolve the credential type (by pk, class, or page) and any owner
        dependencies, then build a payload with a DS adapter attached.

        When no owner is given, an Organization dependency is created so the
        resulting credential always has an owner.
        """
        if isinstance(credential_type, int):
            # if an int was passed, it is assumed to be the pk id of a
            # credential type
            credential_type = CredentialTypes(
                self.connection).get(id=credential_type).results.pop()
        if credential_type == CredentialType:
            # The bare class means "pick a managed type from the kind kwarg".
            kind = kwargs.pop('kind', 'ssh')
            if kind in ('openstack', 'openstack_v3'):
                credential_type_name = 'OpenStack'
                if inputs is None:
                    if kind == 'openstack_v3':
                        inputs = config.credentials.cloud['openstack_v3']
                    else:
                        inputs = config.credentials.cloud['openstack']
            else:
                credential_type_name = config_kind_to_credential_type_name_map[kind]
            credential_type = CredentialTypes(
                self.connection).get(
                managed_by_tower=True,
                name__icontains=credential_type_name).results.pop()
        credential_type, organization, user, team = filter_by_class(
            (credential_type, CredentialType), (organization, Organization), (user, User), (team, Team))
        if not any((user, team, organization)):
            organization = Organization
        self.create_and_update_dependencies(
            credential_type, organization, user, team)
        user = self.ds.user if user else None
        team = self.ds.team if team else None
        organization = self.ds.organization if organization else None
        payload = self.payload(
            self.ds.credential_type,
            user=user,
            team=team,
            organization=organization,
            inputs=inputs,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(
            self,
            credential_type=CredentialType,
            user=None,
            team=None,
            organization=Organization,
            inputs=None,
            **kwargs):
        """POST a new credential and update this page's identity from it."""
        payload = self.create_payload(
            credential_type=credential_type,
            user=user,
            team=team,
            organization=organization,
            inputs=inputs,
            **kwargs)
        # Fix: update_identity must wrap the page returned by the POST, not
        # the bare Credentials list page — the previous form
        # `update_identity(Credentials(...)).post(payload)` clobbered this
        # page's identity with an empty list page (every sibling class uses
        # the post-inside-update_identity form).
        return self.update_identity(
            Credentials(self.connection).post(payload))

    @property
    def expected_passwords_needed_to_start(self):
        """Return a list of expected passwords needed to start a job using this credential."""
        passwords = []
        for field in (
                'password',
                'become_password',
                'ssh_key_unlock',
                'vault_password'):
            if getattr(self.inputs, field, None) == 'ASK':
                # The launch API expects 'ssh_password' for the ssh password field.
                if field == 'password':
                    passwords.append('ssh_password')
                else:
                    passwords.append(field)
        return passwords
# Detail page, collection POSTs, and the copy endpoint all resolve to the
# item class.
page.register_page([resources.credential,
                    (resources.credentials, 'post'),
                    (resources.credential_copy, 'post')], Credential)


class Credentials(page.PageList, Credential):
    """List page for credentials."""

    pass


page.register_page([resources.credentials,
                    resources.related_credentials,
                    resources.job_extra_credentials,
                    resources.job_template_extra_credentials],
                   Credentials)

View File

@ -0,0 +1,11 @@
from awxkit.api.resources import resources
from . import base
from . import page
class Dashboard(base.Base):
    """Page object for the dashboard summary endpoint."""

    pass


page.register_page(resources.dashboard, Dashboard)

View File

@ -0,0 +1,45 @@
from awxkit.utils import PseudoNamespace, random_title, suppress, update_payload
from awxkit.api.resources import resources
from awxkit.api.mixins import HasCreate
import awxkit.exceptions as exc
from . import base
from . import page
class InstanceGroup(HasCreate, base.Base):
    """Page object for a single instance group."""

    def add_instance(self, instance):
        """Associate *instance* with this group; 204 means success."""
        with suppress(exc.NoContent):
            self.related.instances.post(dict(id=instance.id))

    def remove_instance(self, instance):
        """Disassociate *instance* from this group; 204 means success."""
        with suppress(exc.NoContent):
            self.related.instances.post(dict(id=instance.id, disassociate=True))

    def payload(self, **kwargs):
        """Build the POST body for an instance group from kwargs."""
        body = PseudoNamespace(
            name=kwargs.get('name') or 'Instance Group - {}'.format(random_title()))
        update_payload(
            body,
            ('policy_instance_percentage', 'policy_instance_minimum', 'policy_instance_list'),
            kwargs)
        return body

    def create_payload(self, name='', **kwargs):
        """Build the creation payload (no dependencies to resolve)."""
        return self.payload(name=name, **kwargs)

    def create(self, name='', **kwargs):
        """POST a new instance group and update this page's identity from it."""
        return self.update_identity(
            InstanceGroups(self.connection).post(
                self.create_payload(name=name, **kwargs)))
# Detail page, plus POSTs against the collection, resolve to the item class.
page.register_page([resources.instance_group,
                    (resources.instance_groups, 'post')], InstanceGroup)


class InstanceGroups(page.PageList, InstanceGroup):
    """List page for instance groups."""

    pass


page.register_page([resources.instance_groups,
                    resources.related_instance_groups], InstanceGroups)

View File

@ -0,0 +1,20 @@
from awxkit.api.resources import resources
from . import base
from . import page
class Instance(base.Base):
    """Page object for a single cluster instance."""

    pass


page.register_page(resources.instance, Instance)


class Instances(page.PageList, Instance):
    """List page for cluster instances."""

    pass


page.register_page([resources.instances,
                    resources.related_instances], Instances)

View File

@ -0,0 +1,684 @@
import logging
import json
import re
from awxkit.api.pages import (
Credential,
Organization,
Project,
UnifiedJob,
UnifiedJobTemplate
)
from awxkit.utils import (
filter_by_class,
random_title,
update_payload,
suppress,
not_provided,
PseudoNamespace,
poll_until,
random_utf8
)
from awxkit.api.mixins import DSAdapter, HasCreate, HasInstanceGroups, HasNotifications, HasVariables, HasCopy
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page
log = logging.getLogger(__name__)
class Inventory(HasCopy, HasCreate, HasInstanceGroups, HasVariables, base.Base):
    """Page object for an inventory, with helpers for hosts, ini rendering,
    and triggering inventory-source updates."""

    dependencies = [Organization]

    def print_ini(self):
        """Print an ini version of the inventory"""
        output = list()
        inv_dict = self.related.script.get(hostvars=1).json
        for group in inv_dict.keys():
            if group == '_meta':
                continue
            # output host groups
            output.append('[%s]' % group)
            for host in inv_dict[group].get('hosts', []):
                # FIXME ... include hostvars
                output.append(host)
            output.append('')  # newline
            # output child groups
            if inv_dict[group].get('children', []):
                output.append('[%s:children]' % group)
                for child in inv_dict[group].get('children', []):
                    output.append(child)
                output.append('')  # newline
            # output group vars
            if inv_dict[group].get('vars', {}).items():
                output.append('[%s:vars]' % group)
                for k, v in inv_dict[group].get('vars', {}).items():
                    output.append('%s=%s' % (k, v))
                output.append('')  # newline
        print('\n'.join(output))

    def payload(self, organization, **kwargs):
        """Build the POST body for an inventory in *organization*.

        Dict ``variables`` are serialized to JSON; an ``insights_credential``
        page is reduced to its id.
        """
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Inventory - {}'.format(
                random_title()),
            description=kwargs.get('description') or random_title(10),
            organization=organization.id)
        optional_fields = (
            'host_filter',
            'insights_credential',
            'kind',
            'variables')
        update_payload(payload, optional_fields, kwargs)
        if 'variables' in payload and isinstance(payload.variables, dict):
            payload.variables = json.dumps(payload.variables)
        if 'insights_credential' in payload and isinstance(
                payload.insights_credential, Credential):
            payload.insights_credential = payload.insights_credential.id
        return payload

    def create_payload(
            self,
            name='',
            description='',
            organization=Organization,
            **kwargs):
        """Resolve the organization dependency and build a payload with a
        dependency-store adapter attached."""
        self.create_and_update_dependencies(organization)
        payload = self.payload(
            name=name,
            description=description,
            organization=self.ds.organization,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(
            self,
            name='',
            description='',
            organization=Organization,
            **kwargs):
        """POST a new inventory and update this page's identity from it."""
        payload = self.create_payload(
            name=name,
            description=description,
            organization=organization,
            **kwargs)
        return self.update_identity(
            Inventories(
                self.connection).post(payload))

    def add_host(self, host=None):
        """Add a host to this inventory.

        With no argument a new host is created in the inventory; otherwise
        the given host (page or json) is associated (204 means success).
        """
        if host is None:
            return self.related.hosts.create(inventory=self)
        if isinstance(host, base.Base):
            host = host.json
        with suppress(exc.NoContent):
            self.related.hosts.post(host)
        return host

    def wait_until_deleted(self):
        """Poll until GETting this inventory returns 404 (async delete done)."""
        def _wait():
            try:
                self.get()
            except exc.NotFound:
                return True
        poll_until(_wait, interval=1, timeout=60)

    def update_inventory_sources(self, wait=False):
        """Kick off updates for all updatable inventory sources.

        Returns the list of started update job pages; when ``wait`` is true,
        blocks until each update completes.
        """
        response = self.related.update_inventory_sources.post()
        # Only sources whose update actually started have a job to track.
        source_ids = [entry['inventory_source']
                      for entry in response if entry['status'] == 'started']
        inv_updates = []
        for source_id in source_ids:
            inv_source = self.related.inventory_sources.get(
                id=source_id).results.pop()
            inv_updates.append(inv_source.related.current_job.get())
        if wait:
            for update in inv_updates:
                update.wait_until_completed()
        return inv_updates
# Detail page, collection POSTs, and the copy endpoint all resolve to the
# item class.
page.register_page([resources.inventory,
                    (resources.inventories, 'post'),
                    (resources.inventory_copy, 'post')], Inventory)


class Inventories(page.PageList, Inventory):
    """List page for inventories."""

    pass


page.register_page([resources.inventories,
                    resources.related_inventories], Inventories)
class InventoryScript(HasCopy, HasCreate, base.Base):
    """Page object for a custom inventory script."""

    dependencies = [Organization]

    def payload(self, organization, **kwargs):
        """Build the POST body; a script is generated when none is given."""
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Inventory Script - {}'.format(
                random_title()),
            description=kwargs.get('description') or random_title(10),
            organization=organization.id,
            script=kwargs.get('script') or self._generate_script())
        return payload

    def create_payload(
            self,
            name='',
            description='',
            organization=Organization,
            script='',
            **kwargs):
        """Resolve the organization dependency and build a payload with a
        dependency-store adapter attached."""
        self.create_and_update_dependencies(organization)
        payload = self.payload(
            name=name,
            description=description,
            organization=self.ds.organization,
            script=script,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(
            self,
            name='',
            description='',
            organization=Organization,
            script='',
            **kwargs):
        """POST a new inventory script and update this page's identity from it."""
        payload = self.create_payload(
            name=name,
            description=description,
            organization=organization,
            script=script,
            **kwargs)
        return self.update_identity(
            InventoryScripts(
                self.connection).post(payload))

    def _generate_script(self):
        """Build a small executable inventory script source.

        The script emits one randomly-named group containing five
        randomly-named hosts (quotes/colons stripped so the names survive
        the generated Python string literals).
        """
        script = '\n'.join([
            '#!/usr/bin/env python',
            '# -*- coding: utf-8 -*-',
            'import json',
            'inventory = dict()',
            'inventory["{0}"] = dict()',
            'inventory["{0}"]["hosts"] = list()',
            'inventory["{0}"]["hosts"].append("{1}")',
            'inventory["{0}"]["hosts"].append("{2}")',
            'inventory["{0}"]["hosts"].append("{3}")',
            'inventory["{0}"]["hosts"].append("{4}")',
            'inventory["{0}"]["hosts"].append("{5}")',
            'inventory["{0}"]["vars"] = dict(ansible_host="127.0.0.1", ansible_connection="local")',
            'print(json.dumps(inventory))'
        ])
        group_name = re.sub(r"[\']", "", "group-{}".format(random_utf8()))
        host_names = [
            re.sub(
                r"[\':]",
                "",
                "host_{}".format(
                    random_utf8())) for _ in range(5)]
        return script.format(group_name, *host_names)
# Detail page, collection POSTs, and the copy endpoint all resolve to the
# item class.
page.register_page([resources.inventory_script,
                    (resources.inventory_scripts, 'post'),
                    (resources.inventory_script_copy, 'post')], InventoryScript)


class InventoryScripts(page.PageList, InventoryScript):
    """List page for custom inventory scripts."""

    pass


page.register_page([resources.inventory_scripts], InventoryScripts)
class Group(HasCreate, HasVariables, base.Base):
    """Page object for an inventory group, with parent/child and host
    association helpers."""

    dependencies = [Inventory]
    optional_dependencies = [Credential, InventoryScript]

    @property
    def is_root_group(self):
        """Returns whether the current group is a top-level root group in the inventory"""
        return self.related.inventory.get().related.root_groups.get(id=self.id).count == 1

    def get_parents(self):
        """Inspects the API and returns all groups that include the current group as a child."""
        return Groups(self.connection).get(children=self.id).results

    def payload(self, inventory, credential=None, **kwargs):
        """Build the POST body for a group in *inventory*.

        Dict ``variables`` are serialized to JSON.
        """
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Group{}'.format(
                random_title(
                    non_ascii=False)),
            description=kwargs.get('description') or random_title(10),
            inventory=inventory.id)
        if credential:
            payload.credential = credential.id
        update_payload(payload, ('variables',), kwargs)
        if 'variables' in payload and isinstance(payload.variables, dict):
            payload.variables = json.dumps(payload.variables)
        return payload

    def create_payload(
            self,
            name='',
            description='',
            inventory=Inventory,
            credential=None,
            source_script=None,
            **kwargs):
        """Resolve inventory and optional credential/script dependencies,
        then build a payload with a dependency-store adapter attached."""
        credential, source_script = filter_by_class(
            (credential, Credential), (source_script, InventoryScript))
        self.create_and_update_dependencies(
            inventory, credential, source_script)
        credential = self.ds.credential if credential else None
        payload = self.payload(
            inventory=self.ds.inventory,
            credential=credential,
            name=name,
            description=description,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', inventory=Inventory, **kwargs):
        """POST a new group; with a ``parent`` kwarg it is created as a child
        of that group instead of at the top level."""
        payload = self.create_payload(
            name=name,
            description=description,
            inventory=inventory,
            **kwargs)
        parent = kwargs.get('parent', None)  # parent must be a Group instance
        resource = parent.related.children if parent else Groups(
            self.connection)
        return self.update_identity(resource.post(payload))

    def add_host(self, host=None):
        """Add a host to this group.

        With no argument, a new host is created in the group's inventory and
        then associated; otherwise the given host (page or json) is
        associated directly (204 means success).
        """
        if host is None:
            host = self.related.hosts.create(inventory=self.ds.inventory)
            with suppress(exc.NoContent):
                host.related.groups.post(dict(id=self.id))
            return host
        if isinstance(host, base.Base):
            host = host.json
        with suppress(exc.NoContent):
            self.related.hosts.post(host)
        return host

    def add_group(self, group):
        """Associate *group* as a child of this group (204 means success)."""
        if isinstance(group, page.Page):
            group = group.json
        with suppress(exc.NoContent):
            self.related.children.post(group)

    def remove_group(self, group):
        """Disassociate the child *group* (204 means success)."""
        if isinstance(group, page.Page):
            group = group.json
        # NOTE(review): after the json conversion, `group.id` assumes the
        # json object exposes attribute access — confirm.
        with suppress(exc.NoContent):
            self.related.children.post(dict(id=group.id, disassociate=True))
# Detail page, plus POSTs against the collection, resolve to the item class.
page.register_page([resources.group,
                    (resources.groups, 'post')], Group)


class Groups(page.PageList, Group):
    """List page for groups."""

    pass


page.register_page([resources.groups,
                    resources.host_groups,
                    resources.inventory_related_groups,
                    resources.inventory_related_root_groups,
                    resources.group_children,
                    resources.group_potential_children], Groups)
class Host(HasCreate, HasVariables, base.Base):
    """Page object for a single inventory host."""

    dependencies = [Inventory]

    def payload(self, inventory, **kwargs):
        """Build the POST body for a host in *inventory*.

        Passing ``variables=None`` explicitly selects local-connection
        defaults; dict variables are serialized to JSON.
        """
        body = PseudoNamespace(
            name=kwargs.get('name') or 'Host{}'.format(random_title(non_ascii=False)),
            description=kwargs.get('description') or random_title(10),
            inventory=inventory.id)
        update_payload(body, ('enabled', 'instance_id'), kwargs)
        variables = kwargs.get('variables', not_provided)
        if variables is None:
            # None is a request for localhost connection defaults.
            variables = dict(
                ansible_host='127.0.0.1',
                ansible_connection='local')
        if variables != not_provided:
            body.variables = json.dumps(variables) if isinstance(variables, dict) else variables
        return body

    def create_payload(self, name='', description='', variables=None,
                       inventory=Inventory, **kwargs):
        """Resolve the inventory dependency and build a payload with a
        dependency-store adapter attached."""
        self.create_and_update_dependencies(*filter_by_class((inventory, Inventory)))
        body = self.payload(
            inventory=self.ds.inventory,
            name=name,
            description=description,
            variables=variables,
            **kwargs)
        body.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return body

    def create(self, name='', description='', variables=None,
               inventory=Inventory, **kwargs):
        """POST a new host and update this page's identity from it."""
        return self.update_identity(
            Hosts(self.connection).post(
                self.create_payload(
                    name=name,
                    description=description,
                    variables=variables,
                    inventory=inventory,
                    **kwargs)))
# Detail page, plus POSTs against the collection, resolve to the item class.
page.register_page([resources.host,
                    (resources.hosts, 'post')], Host)


class Hosts(page.PageList, Host):
    """List page for hosts."""

    pass


page.register_page([resources.hosts,
                    resources.group_related_hosts,
                    resources.inventory_related_hosts,
                    resources.inventory_sources_related_hosts], Hosts)
class FactVersion(base.Base):
    """Page object for a single host fact version."""

    pass


page.register_page(resources.host_related_fact_version, FactVersion)


class FactVersions(page.PageList, FactVersion):
    """List page for host fact versions."""

    @property
    def count(self):
        # The endpoint returns a bare list, so derive count from results.
        return len(self.results)


page.register_page(resources.host_related_fact_versions, FactVersions)


class FactView(base.Base):
    """Page object for the host fact view endpoint."""

    pass


page.register_page(resources.fact_view, FactView)
class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):
    """Page object for an AWX inventory source.

    Handles payload construction (including optional Credential,
    InventoryScript and Project dependencies), creation, and triggering
    inventory updates via the related->update endpoint.
    """

    # Inventory sources are not scheduled with extra fields.
    optional_schedule_fields = tuple()
    dependencies = [Inventory]
    optional_dependencies = [Credential, InventoryScript, Project]

    def payload(
            self,
            inventory,
            source='custom',
            credential=None,
            source_script=None,
            project=None,
            **kwargs):
        """Build a POST-able inventory source payload from page objects."""
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'InventorySource - {}'.format(
                random_title()),
            description=kwargs.get('description') or random_title(10),
            inventory=inventory.id,
            source=source)
        # Optional related resources are referenced by id when supplied.
        if credential:
            payload.credential = credential.id
        if source_script:
            payload.source_script = source_script.id
        if project:
            payload.source_project = project.id
        optional_fields = (
            'group_by',
            'instance_filters',
            'source_path',
            'source_regions',
            'source_vars',
            'timeout',
            'overwrite',
            'overwrite_vars',
            'update_cache_timeout',
            'update_on_launch',
            'update_on_project_update',
            'verbosity')
        # Copy only the optional fields actually present in kwargs.
        update_payload(payload, optional_fields, kwargs)
        return payload

    def create_payload(
            self,
            name='',
            description='',
            source='custom',
            inventory=Inventory,
            credential=None,
            source_script=InventoryScript,
            project=None,
            **kwargs):
        """Resolve/create dependencies, then build the creation payload.

        Class-valued arguments (e.g. `inventory=Inventory`) request that a
        dependency be created; instances are used as-is; None skips them.
        """
        # A source script only makes sense for 'custom' sources.
        if source != 'custom' and source_script == InventoryScript:
            source_script = None
        if source == 'scm':
            kwargs.setdefault('overwrite_vars', True)
            # scm sources require a backing project.
            if project is None:
                project = Project
        inventory, credential, source_script, project = filter_by_class((inventory, Inventory),
                                                                        (credential, Credential),
                                                                        (source_script, InventoryScript),
                                                                        (project, Project))
        self.create_and_update_dependencies(
            inventory, credential, source_script, project)
        # Swap the class placeholders for the concrete pages that were made.
        if credential:
            credential = self.ds.credential
        if source_script:
            source_script = self.ds.inventory_script
        if project:
            project = self.ds.project
        payload = self.payload(
            inventory=self.ds.inventory,
            source=source,
            credential=credential,
            source_script=source_script,
            project=project,
            name=name,
            description=description,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(
            self,
            name='',
            description='',
            source='custom',
            inventory=Inventory,
            credential=None,
            source_script=InventoryScript,
            project=None,
            **kwargs):
        """Create an InventorySource via POST and update this page to it."""
        payload = self.create_payload(
            name=name,
            description=description,
            source=source,
            inventory=inventory,
            credential=credential,
            source_script=source_script,
            project=project,
            **kwargs)
        return self.update_identity(
            InventorySources(
                self.connection).post(payload))

    def update(self):
        """Update the inventory_source using related->update endpoint"""
        # get related->launch
        update_pg = self.get_related('update')
        # assert can_update == True
        assert update_pg.can_update, \
            "The specified inventory_source (id:%s) is not able to update (can_update:%s)" % \
            (self.id, update_pg.can_update)
        # start the inventory_update
        result = update_pg.post()
        # assert JSON response
        assert 'inventory_update' in result.json, \
            "Unexpected JSON response when starting an inventory_update.\n%s" % \
            json.dumps(result.json, indent=2)
        # locate and return the inventory_update
        jobs_pg = self.related.inventory_updates.get(
            id=result.json['inventory_update'])
        assert jobs_pg.count == 1, \
            "An inventory_update started (id:%s) but job not found in response at %s/inventory_updates/" % \
            (result.json['inventory_update'], self.url)
        return jobs_pg.results[0]

    @property
    def is_successful(self):
        """An inventory_source is considered successful when source != "" and super().is_successful ."""
        return self.source != "" and super(
            InventorySource, self).is_successful

    def add_credential(self, credential):
        # NoContent (204) is the expected success response for associations.
        with suppress(exc.NoContent):
            self.related.credentials.post(
                dict(id=credential.id, associate=True))

    def remove_credential(self, credential):
        with suppress(exc.NoContent):
            self.related.credentials.post(
                dict(id=credential.id, disassociate=True))
page.register_page([resources.inventory_source,
                    (resources.inventory_sources, 'post')], InventorySource)

class InventorySources(page.PageList, InventorySource):
    """List page for InventorySource resources."""
    pass

page.register_page([resources.inventory_sources,
                    resources.related_inventory_sources],
                   InventorySources)
class InventorySourceGroups(page.PageList, Group):
    """List page for groups created by an inventory source."""
    pass

page.register_page(
    resources.inventory_sources_related_groups,
    InventorySourceGroups)
class InventorySourceUpdate(base.Base):
    """Page for the related->update launcher endpoint of an inventory source."""
    pass

page.register_page([resources.inventory_sources_related_update,
                    resources.inventory_related_update_inventory_sources],
                   InventorySourceUpdate)
class InventoryUpdate(UnifiedJob):
    """Page for a single inventory update job."""
    pass

page.register_page(resources.inventory_update, InventoryUpdate)
class InventoryUpdates(page.PageList, InventoryUpdate):
    """List page for inventory update jobs."""
    pass

page.register_page([resources.inventory_updates,
                    resources.inventory_source_updates,
                    resources.project_update_scm_inventory_updates],
                   InventoryUpdates)
class InventoryUpdateCancel(base.Base):
    """Page for the cancel endpoint of an inventory update."""
    pass

page.register_page(resources.inventory_update_cancel, InventoryUpdateCancel)

View File

@ -0,0 +1,235 @@
import json
from awxkit.utils import (
filter_by_class,
not_provided,
random_title,
suppress,
update_payload,
PseudoNamespace)
from awxkit.api.pages import Credential, Inventory, Project, UnifiedJobTemplate
from awxkit.api.mixins import HasCreate, HasInstanceGroups, HasNotifications, HasSurvey, HasCopy, DSAdapter
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page
class JobTemplate(
        HasCopy,
        HasCreate,
        HasInstanceGroups,
        HasNotifications,
        HasSurvey,
        UnifiedJobTemplate):
    """Page object for an AWX job template.

    Builds creation payloads (resolving Inventory/Credential/Project
    dependencies), launches jobs, and manages credential and label
    associations via the template's related endpoints.
    """

    optional_dependencies = [Inventory, Credential, Project]

    def launch(self, payload=None):
        """Launch the job_template using related->launch endpoint.

        Returns the spawned job page (or the sliced workflow job page when
        the template has job_slice_count > 1). Raises RuntimeError for any
        other spawned-job type.

        Note: the default is a None sentinel rather than `{}` to avoid the
        shared-mutable-default-argument pitfall; behavior is unchanged for
        all callers.
        """
        if payload is None:
            payload = {}
        # get related->launch
        launch_pg = self.get_related('launch')
        # launch the job_template
        result = launch_pg.post(payload)
        # return job
        if result.json['type'] == 'job':
            jobs_pg = self.get_related('jobs', id=result.json['job'])
            assert jobs_pg.count == 1, \
                "job_template launched (id:%s) but job not found in response at %s/jobs/" % \
                (result.json['job'], self.url)
            return jobs_pg.results[0]
        elif result.json['type'] == 'workflow_job':
            # Sliced templates spawn a workflow job wrapping the slices.
            slice_workflow_jobs = self.get_related(
                'slice_workflow_jobs', id=result.json['id'])
            assert slice_workflow_jobs.count == 1, (
                "job_template launched sliced job (id:%s) but not found in related %s/slice_workflow_jobs/" %
                (result.json['id'], self.url)
            )
            return slice_workflow_jobs.results[0]
        else:
            raise RuntimeError('Unexpected type of job template spawned job.')

    def payload(self, job_type='run', playbook='ping.yml', **kwargs):
        """Build a POST-able job template payload.

        Optional fields present in kwargs are copied through; `extra_vars`
        dicts are JSON-serialized; project/inventory/credential page
        objects are referenced by id.
        """
        name = kwargs.get('name') or 'JobTemplate - {}'.format(random_title())
        description = kwargs.get('description') or random_title(10)
        payload = PseudoNamespace(
            name=name,
            description=description,
            job_type=job_type)
        optional_fields = (
            'ask_scm_branch_on_launch',
            'ask_credential_on_launch',
            'ask_diff_mode_on_launch',
            'ask_inventory_on_launch',
            'ask_job_type_on_launch',
            'ask_limit_on_launch',
            'ask_skip_tags_on_launch',
            'ask_tags_on_launch',
            'ask_variables_on_launch',
            'ask_verbosity_on_launch',
            'allow_simultaneous',
            'become_enabled',
            'diff_mode',
            'force_handlers',
            'forks',
            'host_config_key',
            'job_tags',
            'limit',
            'skip_tags',
            'start_at_task',
            'survey_enabled',
            'timeout',
            'use_fact_cache',
            'vault_credential',
            'verbosity',
            'job_slice_count',
            'scm_branch')
        update_payload(payload, optional_fields, kwargs)
        extra_vars = kwargs.get('extra_vars', not_provided)
        if extra_vars != not_provided:
            # dicts are serialized; strings are passed through untouched
            if isinstance(extra_vars, dict):
                extra_vars = json.dumps(extra_vars)
            payload.update(extra_vars=extra_vars)
        if kwargs.get('project'):
            payload.update(project=kwargs.get('project').id, playbook=playbook)
        if kwargs.get('inventory'):
            payload.update(inventory=kwargs.get('inventory').id)
        if kwargs.get('credential'):
            payload.update(credential=kwargs.get('credential').id)
        return payload

    def add_label(self, label):
        """Associate a label; accepts a Label page object or a raw dict."""
        if isinstance(label, page.Page):
            label = label.json
        with suppress(exc.NoContent):
            self.related.labels.post(label)

    def create_payload(
            self,
            name='',
            description='',
            job_type='run',
            playbook='ping.yml',
            credential=Credential,
            inventory=Inventory,
            project=None,
            **kwargs):
        """Resolve/create dependencies, then build the creation payload.

        Returns a (payload, credential) tuple: credentials are not part of
        the POST body — they are associated after creation via the related
        credentials endpoint (see create()).
        """
        if not project and job_type != 'scan':
            project = Project
        # Skip the inventory dependency when it will be prompted at launch.
        if not inventory and not kwargs.get('ask_inventory_on_launch', False):
            inventory = Inventory
        self.create_and_update_dependencies(
            *
            filter_by_class(
                (credential,
                 Credential),
                (inventory,
                 Inventory),
                (project,
                 Project)))
        # Swap class placeholders for the concrete pages that were created.
        project = self.ds.project if project else None
        inventory = self.ds.inventory if inventory else None
        credential = self.ds.credential if credential else None
        payload = self.payload(
            name=name,
            description=description,
            job_type=job_type,
            playbook=playbook,
            credential=credential,
            inventory=inventory,
            project=project,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload, credential

    def create(
            self,
            name='',
            description='',
            job_type='run',
            playbook='ping.yml',
            credential=Credential,
            inventory=Inventory,
            project=None,
            **kwargs):
        """Create a JobTemplate via POST, then associate its credential(s)."""
        payload, credential = self.create_payload(name=name, description=description, job_type=job_type,
                                                  playbook=playbook, credential=credential, inventory=inventory,
                                                  project=project, **kwargs)
        ret = self.update_identity(
            JobTemplates(
                self.connection).post(payload))
        if credential:
            with suppress(exc.NoContent):
                self.related.credentials.post(dict(id=credential.id))
        if 'vault_credential' in kwargs:
            with suppress(exc.NoContent):
                if not isinstance(kwargs['vault_credential'], int):
                    raise ValueError(
                        "Expected 'vault_credential' value to be an integer, the id of the desired vault credential")
                self.related.credentials.post(
                    dict(id=kwargs['vault_credential']))
        return ret

    def add_extra_credential(self, credential):
        with suppress(exc.NoContent):
            self.related.extra_credentials.post(
                dict(id=credential.id, associate=True))

    def remove_extra_credential(self, credential):
        with suppress(exc.NoContent):
            self.related.extra_credentials.post(
                dict(id=credential.id, disassociate=True))

    def add_credential(self, credential):
        with suppress(exc.NoContent):
            self.related.credentials.post(
                dict(id=credential.id, associate=True))

    def remove_credential(self, credential):
        with suppress(exc.NoContent):
            self.related.credentials.post(
                dict(id=credential.id, disassociate=True))

    def remove_all_credentials(self):
        """Disassociate every credential currently attached to the template."""
        for cred in self.related.credentials.get().results:
            with suppress(exc.NoContent):
                self.related.credentials.post(
                    dict(id=cred.id, disassociate=True))
page.register_page([resources.job_template,
                    (resources.job_templates, 'post'),
                    (resources.job_template_copy, 'post')], JobTemplate)

class JobTemplates(page.PageList, JobTemplate):
    """List page for JobTemplate resources."""
    pass

page.register_page([resources.job_templates,
                    resources.related_job_templates], JobTemplates)
class JobTemplateCallback(base.Base):
    """Page for a job template's provisioning callback endpoint."""
    pass

page.register_page(resources.job_template_callback, JobTemplateCallback)
class JobTemplateLaunch(base.Base):
    """Page for a job template's launch endpoint."""
    pass

page.register_page(resources.job_template_launch, JobTemplateLaunch)

View File

@ -0,0 +1,117 @@
from awxkit.api.pages import UnifiedJob
from awxkit.api.resources import resources
from . import base
from . import page
class Job(UnifiedJob):
    """Page object for a single AWX job."""

    def relaunch(self, payload=None):
        """POST to related->relaunch and return the page for the new job.

        Note: the default is a None sentinel rather than `{}` to avoid the
        shared-mutable-default-argument pitfall; behavior is unchanged for
        all callers.
        """
        if payload is None:
            payload = {}
        result = self.related.relaunch.post(payload)
        return self.walk(result.endpoint)

page.register_page(resources.job, Job)
class Jobs(page.PageList, Job):
    """List page for Job resources."""
    pass

page.register_page([resources.jobs,
                    resources.job_template_jobs,
                    resources.system_job_template_jobs], Jobs)
class JobCancel(UnifiedJob):
    """Page for a job's cancel endpoint."""
    pass

page.register_page(resources.job_cancel, JobCancel)
class JobEvent(base.Base):
    """Page for a single job event."""
    pass

page.register_page([resources.job_event,
                    resources.job_job_event], JobEvent)
class JobEvents(page.PageList, JobEvent):
    """List page for job events."""
    pass

page.register_page([resources.job_events,
                    resources.job_job_events,
                    resources.job_event_children,
                    resources.group_related_job_events], JobEvents)
class JobPlay(base.Base):
    """Page for a single job play."""
    pass

page.register_page(resources.job_play, JobPlay)

class JobPlays(page.PageList, JobPlay):
    """List page for job plays."""
    pass

page.register_page(resources.job_plays, JobPlays)
class JobTask(base.Base):
    """Page for a single job task."""
    pass

page.register_page(resources.job_task, JobTask)

class JobTasks(page.PageList, JobTask):
    """List page for job tasks."""
    pass

page.register_page(resources.job_tasks, JobTasks)
class JobHostSummary(base.Base):
    """Page for a single job host summary."""
    pass

page.register_page(resources.job_host_summary, JobHostSummary)

class JobHostSummaries(page.PageList, JobHostSummary):
    """List page for job host summaries."""
    pass

page.register_page([resources.job_host_summaries,
                    resources.group_related_job_host_summaries], JobHostSummaries)
class JobRelaunch(base.Base):
    """Page for a job's relaunch endpoint."""
    pass

page.register_page(resources.job_relaunch, JobRelaunch)

class JobStdout(base.Base):
    """Page for a job's stdout endpoint."""
    pass

page.register_page(resources.related_stdout, JobStdout)

View File

@ -0,0 +1,67 @@
from awxkit.utils import random_title, PseudoNamespace
from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.api.resources import resources
from awxkit.api.pages import Organization
from . import base
from . import page
class Label(HasCreate, base.Base):
    """Page object for an AWX label, which belongs to an Organization."""

    dependencies = [Organization]

    def silent_delete(self):
        """Label pages do not support DELETE requests. Here, we override the base page object
        silent_delete method to account for this.
        """
        pass

    def payload(self, organization, **kwargs):
        """Build a POST-able label payload referencing `organization` by id."""
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Label - {}'.format(
                random_title()),
            description=kwargs.get('description') or random_title(10),
            organization=organization.id)
        return payload

    def create_payload(
            self,
            name='',
            description='',
            organization=Organization,
            **kwargs):
        """Create/update the Organization dependency and build the payload."""
        self.create_and_update_dependencies(organization)
        payload = self.payload(
            organization=self.ds.organization,
            name=name,
            description=description,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(
            self,
            name='',
            description='',
            organization=Organization,
            **kwargs):
        """Create a Label via POST and update this page's identity to it."""
        payload = self.create_payload(
            name=name,
            description=description,
            organization=organization,
            **kwargs)
        return self.update_identity(Labels(self.connection).post(payload))

page.register_page([resources.label,
                    (resources.labels, 'post')], Label)
class Labels(page.PageList, Label):
    """List page for Label resources."""
    pass

page.register_page([resources.labels,
                    resources.job_labels,
                    resources.job_template_labels], Labels)

View File

@ -0,0 +1,23 @@
from prometheus_client.parser import text_string_to_metric_families
from awxkit.api.resources import resources
from . import base
from . import page
class Metrics(base.Base):
    """Page for the Prometheus-format /metrics endpoint."""

    def get(self, **query_parameters):
        """GET the metrics endpoint and expose samples as a dict.

        The endpoint returns Prometheus text format rather than JSON, so
        the response is parsed and a synthetic `.json` callable returning
        {sample_name: {"labels": ..., "value": ...}} is grafted onto the
        response before building the page.
        """
        request = self.connection.get(self.endpoint, query_parameters)
        # First pass validates the HTTP status without requiring JSON.
        self.page_identity(request, ignore_json_errors=True)
        parsed_metrics = text_string_to_metric_families(request.text)
        data = {}
        for family in parsed_metrics:
            for sample in family.samples:
                # sample is (name, labels, value, ...)
                data[sample[0]] = {"labels": sample[1], "value": sample[2]}
        request.json = lambda: data
        return self.page_identity(request)

page.register_page([resources.metrics,
                    (resources.metrics, 'get')], Metrics)

View File

@ -0,0 +1,219 @@
from awxkit.api.mixins import HasCreate, HasCopy, DSAdapter
from awxkit.api.pages import Organization
from awxkit.api.resources import resources
from awxkit.config import config
import awxkit.exceptions as exc
from awxkit.utils import not_provided, random_title, suppress, PseudoNamespace
from . import base
from . import page
# Job-outcome buckets a notification template may be associated with
# (maps to the notification_templates_{any,error,success} related fields).
job_results = ('any', 'error', 'success')
# Notification backends supported by NotificationTemplate payloads.
notification_types = (
    'email',
    'irc',
    'pagerduty',
    'slack',
    'twilio',
    'webhook',
    'mattermost')
class NotificationTemplate(HasCopy, HasCreate, base.Base):
    """Page object for an AWX notification template.

    Builds per-backend configuration payloads (credentials pulled from
    config.credentials.notification_services when not supplied), creates
    templates, fires test notifications, and (dis)associates templates
    with a resource's notification endpoints.
    """

    dependencies = [Organization]

    def test(self):
        """Create test notification"""
        assert 'test' in self.related, \
            "No such related attribute 'test'"
        # trigger test notification
        notification_id = self.related.test.post().notification
        # return notification page
        notifications_pg = self.get_related(
            'notifications', id=notification_id).wait_until_count(1)
        assert notifications_pg.count == 1, \
            "test notification triggered (id:%s) but notification not found in response at %s/notifications/" % \
            (notification_id, self.url)
        return notifications_pg.results[0]

    def silent_delete(self):
        """Delete the Notification Template, ignoring the exception that is raised
        if there are notifications pending.
        """
        try:
            super(NotificationTemplate, self).silent_delete()
        except (exc.MethodNotAllowed):
            # only this one exception type is intentionally swallowed
            pass

    def payload(self, organization, notification_type='slack', **kwargs):
        """Build a POST-able payload for the given notification backend.

        If no explicit notification_configuration is supplied, backend
        credentials are read from config.credentials.notification_services;
        explicit kwargs override individual credential fields.
        """
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'NotificationTemplate ({0}) - {1}' .format(
                notification_type,
                random_title()),
            description=kwargs.get('description') or random_title(10),
            organization=organization.id,
            notification_type=notification_type)
        notification_configuration = kwargs.get(
            'notification_configuration', {})
        payload.notification_configuration = notification_configuration
        if payload.notification_configuration == {}:
            services = config.credentials.notification_services
            if notification_type == 'email':
                fields = (
                    'host',
                    'username',
                    'password',
                    'port',
                    'use_ssl',
                    'use_tls',
                    'sender',
                    'recipients')
                cred = services.email
            elif notification_type == 'irc':
                fields = (
                    'server',
                    'port',
                    'use_ssl',
                    'password',
                    'nickname',
                    'targets')
                cred = services.irc
            elif notification_type == 'pagerduty':
                fields = ('client_name', 'service_key', 'subdomain', 'token')
                cred = services.pagerduty
            elif notification_type == 'slack':
                fields = ('channels', 'token')
                cred = services.slack
            elif notification_type == 'twilio':
                fields = (
                    'account_sid',
                    'account_token',
                    'from_number',
                    'to_numbers')
                cred = services.twilio
            elif notification_type == 'webhook':
                fields = ('url', 'headers')
                cred = services.webhook
            elif notification_type == 'mattermost':
                fields = (
                    'mattermost_url',
                    'mattermost_username',
                    'mattermost_channel',
                    'mattermost_icon_url',
                    'mattermost_no_verify_ssl')
                cred = services.mattermost
            else:
                raise ValueError(
                    'Unknown notification_type {0}'.format(notification_type))
            for field in fields:
                # NOTE(review): no branch above includes 'bot_token' in its
                # fields tuple, so this remapping appears unreachable —
                # confirm before removing.
                if field == 'bot_token':
                    payload_field = 'token'
                else:
                    payload_field = field
                # kwargs override credential-file values; missing fields
                # are simply omitted from the configuration.
                value = kwargs.get(field, cred.get(field, not_provided))
                if value != not_provided:
                    payload.notification_configuration[payload_field] = value
        return payload

    def create_payload(
            self,
            name='',
            description='',
            notification_type='slack',
            organization=Organization,
            **kwargs):
        """Validate the backend type, resolve the Organization, build payload."""
        if notification_type not in notification_types:
            raise ValueError(
                'Unsupported notification type "{0}". Please use one of {1}.' .format(
                    notification_type, notification_types))
        self.create_and_update_dependencies(organization)
        payload = self.payload(
            organization=self.ds.organization,
            notification_type=notification_type,
            name=name,
            description=description,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(
            self,
            name='',
            description='',
            notification_type='slack',
            organization=Organization,
            **kwargs):
        """Create a NotificationTemplate via POST and update this page to it."""
        payload = self.create_payload(
            name=name,
            description=description,
            notification_type=notification_type,
            organization=organization,
            **kwargs)
        return self.update_identity(
            NotificationTemplates(
                self.connection).post(payload))

    def associate(self, resource, job_result='any'):
        """Associates a NotificationTemplate with the provided resource"""
        return self._associate(resource, job_result)

    def disassociate(self, resource, job_result='any'):
        """Disassociates a NotificationTemplate with the provided resource"""
        return self._associate(resource, job_result, disassociate=True)

    def _associate(self, resource, job_result='any', disassociate=False):
        """POST to resource.related.notification_templates_<job_result>."""
        if job_result not in job_results:
            raise ValueError(
                'Unsupported job_result type "{0}". Please use one of {1}.' .format(
                    job_result, job_results))
        result_attr = 'notification_templates_{0}'.format(job_result)
        if result_attr not in resource.related:
            raise ValueError(
                'Unsupported resource "{0}". Does not have a related {1} field.' .format(
                    resource, result_attr))
        payload = dict(id=self.id)
        if disassociate:
            payload['disassociate'] = True
        # NoContent (204) is the expected success response.
        with suppress(exc.NoContent):
            getattr(resource.related, result_attr).post(payload)

page.register_page([resources.notification_template,
                    (resources.notification_templates, 'post'),
                    (resources.notification_template_copy, 'post'),
                    resources.notification_template_any,
                    resources.notification_template_error,
                    resources.notification_template_success], NotificationTemplate)
class NotificationTemplates(page.PageList, NotificationTemplate):
    """List page for NotificationTemplate resources."""
    pass

page.register_page([resources.notification_templates,
                    resources.notification_templates_any,
                    resources.notification_templates_error,
                    resources.notification_templates_success],
                   NotificationTemplates)

class NotificationTemplateTest(base.Base):
    """Page for a notification template's test endpoint."""
    pass

page.register_page(
    resources.notification_template_test,
    NotificationTemplateTest)

View File

@ -0,0 +1,52 @@
from awxkit.api.mixins import HasStatus
from awxkit.api.resources import resources
from awxkit.utils import poll_until, seconds_since_date_string
from . import base
from . import page
class Notification(HasStatus, base.Base):
    """Page object for a single sent/pending notification."""

    def __str__(self):
        # Include only the attributes actually present on this page.
        items = ['id', 'notification_type', 'status', 'error', 'notifications_sent',
                 'subject', 'recipients']
        info = []
        for item in [x for x in items if hasattr(self, x)]:
            info.append('{0}:{1}'.format(item, getattr(self, item)))
        output = '<{0.__class__.__name__} {1}>'.format(self, ', '.join(info))
        # Escape '%' so the result is safe for %-style log formatting.
        return output.replace('%', '%%')

    @property
    def is_successful(self):
        """Return whether the notification was created successfully. This means that:
        * self.status == 'successful'
        * self.error == False
        """
        return super(Notification, self).is_successful and not self.error

    def wait_until_status(self, status, interval=5, timeout=30, **kwargs):
        # Budget the timeout from the notification's creation time, not
        # from when this call happens.
        adjusted_timeout = timeout - seconds_since_date_string(self.created)
        return super(Notification, self).wait_until_status(status, interval, adjusted_timeout, **kwargs)

    def wait_until_completed(self, interval=5, timeout=240):
        """Notifications need a longer timeout, since the backend often has
        to wait for the request (sending the notification) to timeout itself
        """
        adjusted_timeout = timeout - seconds_since_date_string(self.created)
        return super(Notification, self).wait_until_completed(interval, adjusted_timeout)

page.register_page(resources.notification, Notification)
class Notifications(page.PageList, Notification):
    """List page for notifications."""

    def wait_until_count(self, count, interval=10, timeout=60, **kw):
        """Poll notifications page until it is populated with `count` number of notifications."""
        poll_until(lambda: getattr(self.get(), 'count') == count,
                   interval=interval, timeout=timeout, **kw)
        return self

page.register_page([resources.notifications,
                    resources.related_notifications], Notifications)

View File

@ -0,0 +1,49 @@
from awxkit.api.mixins import HasCreate, HasInstanceGroups, HasNotifications, DSAdapter
from awxkit.utils import random_title, suppress, PseudoNamespace
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page
class Organization(HasCreate, HasInstanceGroups, HasNotifications, base.Base):
    """Page object for an AWX organization."""

    def add_admin(self, user):
        """Grant admin membership; accepts a User page object or raw dict."""
        if isinstance(user, page.Page):
            user = user.json
        # NoContent (204) is the expected success response for associations.
        with suppress(exc.NoContent):
            self.related.admins.post(user)

    def add_user(self, user):
        """Add a member; accepts a User page object or raw dict."""
        if isinstance(user, page.Page):
            user = user.json
        with suppress(exc.NoContent):
            self.related.users.post(user)

    def payload(self, **kwargs):
        """Build a POST-able organization payload with generated defaults."""
        payload = PseudoNamespace(name=kwargs.get('name') or 'Organization - {}'.format(random_title()),
                                  description=kwargs.get('description') or random_title(10))
        return payload

    def create_payload(self, name='', description='', **kwargs):
        """Build the creation payload and attach the dependency-store adapter."""
        payload = self.payload(name=name, description=description, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', **kwargs):
        """Create an Organization via POST and update this page to it."""
        payload = self.create_payload(name=name, description=description, **kwargs)
        return self.update_identity(Organizations(self.connection).post(payload))

page.register_page([resources.organization,
                    (resources.organizations, 'post')], Organization)
class Organizations(page.PageList, Organization):
    """List page for Organization resources."""
    pass

page.register_page([resources.organizations,
                    resources.user_organizations,
                    resources.project_organizations], Organizations)

View File

@ -0,0 +1,500 @@
import http.client
import inspect
import logging
import json
import re
from requests import Response
from awxkit.utils import (
PseudoNamespace,
is_relative_endpoint,
are_same_endpoint,
super_dir_set,
suppress,
is_list_or_tuple
)
from awxkit.api.client import Connection
from awxkit.api.registry import URLRegistry
from awxkit.config import config
import awxkit.exceptions as exc
# Module-level logger for request/response diagnostics.
log = logging.getLogger(__name__)
# Global endpoint -> page-class registry; page classes self-register
# through register_page() below.
_page_registry = URLRegistry()
# Public lookup: resolve an endpoint (and optional HTTP method) to its
# registered page class.
get_registered_page = _page_registry.get
def is_license_invalid(response):
    """Return True if *response* body describes an invalid-license error.

    Uses substring containment rather than the previous
    ``re.match(".*phrase.*", ...)`` form: ``.`` does not match newlines,
    so the old regexes could not find a phrase past the first line of a
    multi-line body. Also returns an explicit bool instead of True/None.
    """
    phrases = (
        "Invalid license",
        "Missing 'eula_accepted' property",
        "'eula_accepted' must be True",
        "Invalid license data",
    )
    return any(phrase in response.text for phrase in phrases)
def is_license_exceeded(response):
    """Return True if *response* body describes an exceeded/expired license.

    Uses ``re.search`` rather than the previous ``re.match(".*pat.*", ...)``
    form: ``.`` does not match newlines, so the old regexes could not find
    a phrase past the first line of a multi-line body. Also returns an
    explicit bool instead of True/None.
    """
    patterns = (
        r"license range of.*instances has been exceeded",
        r"License count of.*instances has been reached",
        r"License count of.*instances has been exceeded",
        r"License has expired",
        r"License is missing",
    )
    text = response.text
    return any(re.search(pattern, text) for pattern in patterns)
def is_duplicate_error(response):
    """Return True if *response* body reports a duplicate-object error.

    Substring containment replaces ``re.match(".*already exists.*", ...)``,
    which could not see past the first line of a multi-line body (``.``
    does not match newlines). Returns an explicit bool instead of True/None.
    """
    return "already exists" in response.text
def register_page(urls, page_cls):
    """Register `page_cls` for one or more endpoint URLs.

    `urls` may be a single URL, a (url, method) pair, or a list mixing
    both forms. The first registered URL becomes the class's default
    `endpoint`. Mutates the module-level `_page_registry`.
    """
    # Lazily install the generic Base page as the registry's fallback.
    if not _page_registry.default:
        from awxkit.api.pages import Base
        _page_registry.setdefault(Base)
    if not is_list_or_tuple(urls):
        urls = [urls]
    # Register every methodless page with wildcard method
    # until more granular page objects exist (options, head, etc.)
    updated_urls = []
    for url_method_pair in urls:
        if isinstance(url_method_pair, str):
            url = url_method_pair
            method = '.*'
        else:
            url, method = url_method_pair
        updated_urls.append((url, method))
    page_cls.endpoint = updated_urls[0][0]
    return _page_registry.register(updated_urls, page_cls)
def objectify_response_json(response):
    """return a PseudoNamespace() from requests.Response.json()."""
    try:
        body = response.json()
    except ValueError:
        # Unparseable/empty bodies degrade to an empty mapping.
        body = dict()
    # PseudoNamespace arg must be a dict, and json can be an array.
    # TODO: Assess if list elements should be PseudoNamespace
    if not isinstance(body, dict):
        return body
    return PseudoNamespace(body)
class Page(object):
    """Base wrapper around one API endpoint and its most recent response.

    Attribute access falls through to the underlying `json` mapping, with
    relative-endpoint values wrapped in lazily-navigable TentativePage
    objects. HTTP verbs return page objects resolved via the registry.
    """

    endpoint = ''

    def __init__(self, connection=None, *a, **kw):
        if 'endpoint' in kw:
            self.endpoint = kw['endpoint']
        # Fall back to a fresh Connection against the configured base_url.
        self.connection = connection or Connection(
            config.base_url, kw.get(
                'verify', not config.assume_untrusted))
        self.r = kw.get('r', None)
        self.json = kw.get(
            'json', objectify_response_json(
                self.r) if self.r else {})
        self.last_elapsed = kw.get('last_elapsed', None)

    def __getattr__(self, name):
        # Only consulted when normal attribute lookup fails; proxies into
        # the page's json, wrapping relative endpoints as TentativePages.
        if 'json' in self.__dict__ and name in self.json:
            value = self.json[name]
            if not isinstance(
                    value,
                    TentativePage) and is_relative_endpoint(value):
                value = TentativePage(value, self.connection)
            elif isinstance(value, dict):
                # e.g. the 'related' mapping: wrap each endpoint value.
                for key, item in value.items():
                    if not isinstance(
                            item, TentativePage) and is_relative_endpoint(item):
                        value[key] = TentativePage(item, self.connection)
            return value
        raise AttributeError(
            "{!r} object has no attribute {!r}".format(
                self.__class__.__name__, name))

    def __setattr__(self, name, value):
        # Assigning to a field that exists in json issues a PATCH.
        if 'json' in self.__dict__ and name in self.json:
            # Update field only. For new field use explicit patch
            self.patch(**{name: value})
        else:
            self.__dict__[name] = value

    def __str__(self):
        if hasattr(self, 'json'):
            return json.dumps(self.json, indent=4)
        return str(super(Page, self).__repr__())
    __repr__ = __str__

    def __dir__(self):
        attrs = super_dir_set(self.__class__)
        # Surface json keys in dir() for interactive exploration.
        if 'json' in self.__dict__ and hasattr(self.json, 'keys'):
            attrs.update(self.json.keys())
        return sorted(attrs)

    def __getitem__(self, key):
        return getattr(self, key)

    def __iter__(self):
        return iter(self.json)

    @property
    def __item_class__(self):
        """Returns the class representing a single 'Page' item"""
        return self.__class__

    @classmethod
    def from_json(cls, raw):
        # Build a synthetic 200 Response around `raw` so a Page can be
        # constructed without hitting the API.
        resp = Response()
        resp._content = bytes(json.dumps(raw), 'utf-8')
        resp.encoding = 'utf-8'
        resp.status_code = 200
        return cls(r=resp)

    def page_identity(self, response, request_json=None, ignore_json_errors=False):
        """Takes a `requests.Response` and
        returns a new __item_class__ instance if the request method is not a get, or returns
        a __class__ instance if the request path is different than the caller's `endpoint`.
        """
        request_path = response.request.path_url
        request_method = response.request.method.lower()
        self.last_elapsed = response.elapsed
        # NOTE(review): attribute access on a plain dict would raise here;
        # this relies on request_json being a PseudoNamespace when it
        # carries 'ds' — confirm callers never pass a bare dict with 'ds'.
        if isinstance(request_json, dict) and 'ds' in request_json:
            ds = request_json.ds
        else:
            ds = None
        try:
            data = response.json()
        except ValueError as e:  # If there was no json to parse
            data = dict()
            if (response.text and not ignore_json_errors) or response.status_code not in (200, 202, 204):
                text = response.text
                if len(text) > 1024:
                    text = text[:1024] + '... <<< Truncated >>> ...'
                log.warning(
                    "Unable to parse JSON response ({0.status_code}): {1} - '{2}'".format(response, e, text))
        exc_str = "%s (%s) received" % (
            http.client.responses[response.status_code], response.status_code)
        # Statuses with a direct exception mapping raise immediately.
        exception = exception_from_status_code(response.status_code)
        if exception:
            raise exception(exc_str, data)
        if response.status_code in (
                http.client.OK,
                http.client.CREATED,
                http.client.ACCEPTED):
            # Not all JSON responses include a URL. Grab it from the request
            # object, if needed.
            if 'url' in data:
                endpoint = data['url']
            else:
                endpoint = request_path
            data = objectify_response_json(response)
            if request_method in ('get', 'patch', 'put'):
                # Update existing resource and return it
                if are_same_endpoint(self.endpoint, request_path):
                    self.json = data
                    self.r = response
                    return self
            registered_type = get_registered_page(request_path, request_method)
            return registered_type(
                self.connection,
                endpoint=endpoint,
                json=data,
                last_elapsed=response.elapsed,
                r=response,
                ds=ds)
        elif response.status_code == http.client.FORBIDDEN:
            # License problems masquerade as 403s; distinguish them.
            if is_license_invalid(response):
                raise exc.LicenseInvalid(exc_str, data)
            elif is_license_exceeded(response):
                raise exc.LicenseExceeded(exc_str, data)
            else:
                raise exc.Forbidden(exc_str, data)
        elif response.status_code == http.client.BAD_REQUEST:
            if is_license_invalid(response):
                raise exc.LicenseInvalid(exc_str, data)
            if is_duplicate_error(response):
                raise exc.Duplicate(exc_str, data)
            else:
                raise exc.BadRequest(exc_str, data)
        else:
            raise exc.Unknown(exc_str, data)

    def update_identity(self, obj):
        """Takes a `Page` and updates attributes to reflect its content"""
        self.endpoint = obj.endpoint
        self.json = obj.json
        self.last_elapsed = obj.last_elapsed
        self.r = obj.r
        return self

    def delete(self):
        # DELETE typically yields 204, which the exception map turns into
        # NoContent — suppressed here as the success case.
        r = self.connection.delete(self.endpoint)
        with suppress(exc.NoContent):
            return self.page_identity(r)

    def get(self, all_pages=False, **query_parameters):
        r = self.connection.get(self.endpoint, query_parameters)
        page = self.page_identity(r)
        if all_pages and page.next:
            paged_results = [r.json()['results']]
            while page.next:
                # NOTE(review): this follows self.next rather than
                # page.next; the two agree only when page_identity
                # returned self (same endpoint) — confirm intent.
                r = self.connection.get(self.next, query_parameters)
                page = self.page_identity(r)
                paged_results.append(r.json()['results'])
            # local name shadows the module-level json import (scoped to
            # this method only)
            json = r.json()
            json['results'] = []
            for page in paged_results:
                json['results'].extend(page)
            page = self.__class__.from_json(json)
        return page

    def head(self):
        r = self.connection.head(self.endpoint)
        return self.page_identity(r)

    def options(self):
        r = self.connection.options(self.endpoint)
        return self.page_identity(r)

    def patch(self, **json):
        r = self.connection.patch(self.endpoint, json)
        return self.page_identity(r, request_json=json)

    def post(self, json={}):
        r = self.connection.post(self.endpoint, json)
        return self.page_identity(r, request_json=json)

    def put(self, json=None):
        """If a payload is supplied, PUT the payload. If not, submit our existing page JSON as our payload."""
        json = self.json if json is None else json
        r = self.connection.put(self.endpoint, json=json)
        return self.page_identity(r, request_json=json)

    def get_related(self, related_name, **kwargs):
        """GET the named entry from this page's 'related' mapping."""
        assert related_name in self.json.get('related', [])
        endpoint = self.json['related'][related_name]
        return self.walk(endpoint, **kwargs)

    def walk(self, endpoint, **kw):
        """GET an arbitrary endpoint as its registered page type."""
        page_cls = get_registered_page(endpoint)
        return page_cls(self.connection, endpoint=endpoint).get(**kw)
_exception_map = {http.client.NO_CONTENT: exc.NoContent,
http.client.NOT_FOUND: exc.NotFound,
http.client.INTERNAL_SERVER_ERROR: exc.InternalServerError,
http.client.BAD_GATEWAY: exc.BadGateway,
http.client.METHOD_NOT_ALLOWED: exc.MethodNotAllowed,
http.client.UNAUTHORIZED: exc.Unauthorized,
http.client.PAYMENT_REQUIRED: exc.PaymentRequired,
http.client.CONFLICT: exc.Conflict}
def exception_from_status_code(status_code):
    """Return the awxkit exception class mapped to ``status_code``, or None."""
    return _exception_map.get(status_code)
class PageList(object):
    """Mixin giving list-endpoint pages typed access to their paginated items."""

    @property
    def __item_class__(self):
        """Return the class representing a single 'Page' item.

        With an inheritance of OrgListSubClass -> OrgList -> PageList -> Org
        -> Base -> Page, this yields the class immediately after PageList in
        the MRO (e.g. 'Org').  Obtaining a page type by registered endpoint
        is highly recommended over using this property.
        """
        lineage = inspect.getmro(type(self))
        return lineage[lineage.index(PageList) + 1]

    @property
    def results(self):
        """Instantiate a typed page object for each entry of json['results'].

        Entries without a 'url' fall back to this list's single-item class.
        """
        pages = []
        for entry in self.json['results']:
            url = entry.get('url')
            page_type = self.__item_class__ if url is None else get_registered_page(url)
            pages.append(page_type(self.connection, endpoint=url, json=entry, r=self.r))
        return pages

    def go_to_next(self):
        """GET and return the next page, or None when already on the last."""
        if self.next:
            return self.__class__(self.connection, endpoint=self.next).get()

    def go_to_previous(self):
        """GET and return the previous page, or None when on the first."""
        if self.previous:
            return self.__class__(self.connection, endpoint=self.previous).get()

    def create(self, *a, **kw):
        """Delegate creation to this list's single-item class."""
        return self.__item_class__(self.connection).create(*a, **kw)
class TentativePage(str):
    """A lazy endpoint reference: a ``str`` subclass that only resolves its
    registered page type (and issues requests) when an HTTP verb is invoked.
    """

    def __new__(cls, endpoint, connection):
        return super(TentativePage, cls).__new__(cls, endpoint)

    def __init__(self, endpoint, connection):
        self.endpoint = endpoint
        self.connection = connection

    def _create(self):
        # Resolve the endpoint to its registered page class on demand.
        return get_registered_page(
            self.endpoint)(
            self.connection,
            endpoint=self.endpoint)

    def get(self, **params):
        return self._create().get(**params)

    @staticmethod
    def _organization_id(organization):
        """Accept either an organization id or an organization page object."""
        return organization if isinstance(organization, int) else organization.id

    def create_or_replace(self, **query_parameters):
        """Create an object, and if any other item shares the name, delete that one first.

        Generally, requires 'name' of object.
        Exceptions:
        - Users are looked up by username
        - Teams need to be looked up by name + organization
        """
        page = None
        # look up users by username not name
        if 'users' in self:
            assert query_parameters.get(
                'username'), 'For this resource, you must call this method with a "username" to look up the object by'
            page = self.get(username=query_parameters['username'])
        else:
            assert query_parameters.get(
                'name'), 'For this resource, you must call this method with a "name" to look up the object by'
            if query_parameters.get('organization'):
                page = self.get(
                    name=query_parameters['name'],
                    organization=self._organization_id(query_parameters['organization']))
            else:
                page = self.get(name=query_parameters['name'])
        if page and page.results:
            for item in page.results:
                # We found a duplicate item, we will delete it.
                # Some things, like inventory scripts, allow multiple scripts
                # by same name as long as they have different organization.
                item.delete()
        # Now that we know that there is no duplicate, we create a new object
        return self.create(**query_parameters)

    def get_or_create(self, **query_parameters):
        """Get an object by this name or id if it exists, otherwise create it.

        Exceptions:
        - Users are looked up by username
        - Teams need to be looked up by name + organization
        """
        page = None
        # look up users by username not name
        if query_parameters.get('username') and 'users' in self:
            page = self.get(username=query_parameters['username'])
        if query_parameters.get('name'):
            if query_parameters.get('organization'):
                page = self.get(
                    name=query_parameters['name'],
                    organization=self._organization_id(query_parameters['organization']))
            else:
                page = self.get(name=query_parameters['name'])
        elif query_parameters.get('id'):
            page = self.get(id=query_parameters['id'])
        if page and page.results:
            item = page.results.pop()
            return item.url.get()
        else:
            # We did not find it given these params, we will create it instead
            return self.create(**query_parameters)

    def post(self, payload=None):
        # ``None`` sentinel instead of a shared mutable ``{}`` default.
        return self._create().post({} if payload is None else payload)

    def put(self):
        return self._create().put()

    def patch(self, **payload):
        return self._create().patch(**payload)

    def delete(self):
        return self._create().delete()

    def options(self):
        return self._create().options()

    def create(self, *a, **kw):
        return self._create().create(*a, **kw)

    def payload(self, *a, **kw):
        return self._create().payload(*a, **kw)

    def create_payload(self, *a, **kw):
        return self._create().create_payload(*a, **kw)

    def __str__(self):
        if hasattr(self, 'endpoint'):
            return self.endpoint
        return super(TentativePage, self).__str__()

    __repr__ = __str__

    def __eq__(self, other):
        return self.endpoint == other

    def __ne__(self, other):
        return self.endpoint != other

    # BUG FIX: defining __eq__ implicitly sets __hash__ to None, making
    # instances of this str subclass unhashable (unusable as dict keys or
    # set members).  Restore str hashing, which is consistent with __eq__
    # since ``endpoint`` equals the underlying string value.
    __hash__ = str.__hash__

View File

@ -0,0 +1,11 @@
from awxkit.api.resources import resources
from . import base
from . import page
class Ping(base.Base):
    """Page for the read-only /api/v2/ping/ liveness endpoint."""
    pass


page.register_page(resources.ping, Ping)

View File

@ -0,0 +1,209 @@
import json
from awxkit.api.pages import Credential, Organization, UnifiedJob, UnifiedJobTemplate
from awxkit.utils import filter_by_class, random_title, update_payload, PseudoNamespace
from awxkit.api.mixins import HasCreate, HasNotifications, HasCopy, DSAdapter
from awxkit.api.resources import resources
from awxkit.config import config
from . import base
from . import page
class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):
    """Page for a single project (an SCM-backed playbook source)."""

    # Credential/Organization may be created alongside a project but are
    # not mandatory dependencies.
    optional_dependencies = [Credential, Organization]
    # Projects accept no prompt-style schedule fields.
    optional_schedule_fields = tuple()

    def payload(self, organization, scm_type='git', **kwargs):
        """Build a POST payload for creating a project.

        Name/description default to random titles; scm_url falls back to
        config.project_urls for the given scm_type.
        """
        payload = PseudoNamespace(
            name=kwargs.get('name') or 'Project - {}'.format(
                random_title()),
            description=kwargs.get('description') or random_title(10),
            scm_type=scm_type,
            scm_url=kwargs.get('scm_url') or config.project_urls.get(
                scm_type,
                ''))

        if organization is not None:
            payload.organization = organization.id

        if kwargs.get('credential'):
            payload.credential = kwargs.get('credential').id

        # Optional passthrough fields copied from kwargs when present.
        fields = (
            'scm_branch',
            'local_path',
            'scm_clean',
            'scm_delete_on_update',
            'scm_update_cache_timeout',
            'scm_update_on_launch',
            'scm_refspec',
            'allow_override')
        update_payload(payload, fields, kwargs)

        return payload

    def create_payload(
            self,
            name='',
            description='',
            scm_type='git',
            scm_url='',
            scm_branch='',
            organization=Organization,
            credential=None,
            **kwargs):
        """Resolve credential/organization dependencies, then build a payload
        wired to this page's dependency store.
        """
        if credential:
            if isinstance(credential, Credential):
                if credential.ds.credential_type.namespace not in (
                        'scm', 'insights'):
                    credential = None  # ignore incompatible credential from HasCreate dependency injection
            elif credential in (Credential,):
                # class reference: request an scm-kind credential dependency
                credential = (
                    Credential, dict(
                        credential_type=(
                            True, dict(
                                kind='scm'))))
            elif credential is True:
                # bare True: treated the same as the class reference above
                credential = (
                    Credential, dict(
                        credential_type=(
                            True, dict(
                                kind='scm'))))

        self.create_and_update_dependencies(
            *filter_by_class((credential, Credential), (organization, Organization)))
        credential = self.ds.credential if credential else None
        organization = self.ds.organization if organization else None

        payload = self.payload(
            organization=organization,
            scm_type=scm_type,
            name=name,
            description=description,
            scm_url=scm_url,
            scm_branch=scm_branch,
            credential=credential,
            **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(
            self,
            name='',
            description='',
            scm_type='git',
            scm_url='',
            scm_branch='',
            organization=Organization,
            credential=None,
            **kwargs):
        """POST a new project; by default (wait=True) block until the initial
        SCM update completes, then return the refreshed page.
        """
        payload = self.create_payload(
            name=name,
            description=description,
            scm_type=scm_type,
            scm_url=scm_url,
            scm_branch=scm_branch,
            organization=organization,
            credential=credential,
            **kwargs)
        self.update_identity(Projects(self.connection).post(payload))

        if kwargs.get('wait', True):
            self.related.current_update.get().wait_until_completed()
            return self.get()

        return self

    def update(self):
        """Update the project using related->update endpoint."""
        # get related->launch
        update_pg = self.get_related('update')

        # assert can_update == True
        assert update_pg.can_update, \
            "The specified project (id:%s) is not able to update (can_update:%s)" % \
            (self.id, update_pg.can_update)

        # start the update
        result = update_pg.post()

        # assert JSON response
        assert 'project_update' in result.json, \
            "Unexpected JSON response when starting an project_update.\n%s" % \
            json.dumps(result.json, indent=2)

        # locate and return the specific update
        jobs_pg = self.get_related(
            'project_updates',
            id=result.json['project_update'])
        assert jobs_pg.count == 1, \
            "An project_update started (id:%s) but job not found in response at %s/inventory_updates/" % \
            (result.json['project_update'], self.url)
        return jobs_pg.results[0]

    @property
    def is_successful(self):
        """An project is considered successful when:
        0) scm_type != ""
        1) unified_job_template.is_successful
        """
        return self.scm_type != "" and \
            super(Project, self).is_successful
page.register_page([resources.project,
                    (resources.projects, 'post'),
                    (resources.project_copy, 'post')], Project)


class Projects(page.PageList, Project):
    """Paginated list of Project pages."""
    pass


page.register_page([resources.projects,
                    resources.related_projects], Projects)


class ProjectUpdate(UnifiedJob):
    """A single SCM update run of a project."""
    pass


page.register_page(resources.project_update, ProjectUpdate)


class ProjectUpdates(page.PageList, ProjectUpdate):
    """Paginated list of project updates."""
    pass


page.register_page([resources.project_updates,
                    resources.project_project_updates], ProjectUpdates)


class ProjectUpdateLaunch(base.Base):
    """POST target for a project's related->update endpoint."""
    pass


page.register_page(resources.project_related_update, ProjectUpdateLaunch)


class ProjectUpdateCancel(base.Base):
    """POST target for cancelling a running project update."""
    pass


page.register_page(resources.project_update_cancel, ProjectUpdateCancel)


class Playbooks(base.Base):
    """Read-only list of playbook paths discovered in a project."""
    pass


page.register_page(resources.project_playbooks, Playbooks)

View File

@ -0,0 +1,21 @@
from awxkit.api.resources import resources
from . import base
from . import page
class Role(base.Base):
    """Page for a single RBAC role."""
    pass


page.register_page(resources.role, Role)


class Roles(page.PageList, Role):
    """Paginated list of RBAC roles."""
    pass


page.register_page([resources.roles,
                    resources.related_roles,
                    resources.related_object_roles], Roles)

View File

@ -0,0 +1,54 @@
from awxkit.api.pages import UnifiedJob
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from awxkit.utils import suppress
from . import page
from . import base
class Schedule(UnifiedJob):
    """Page for a single schedule (rrule-driven launch of a template)."""
    pass


page.register_page([resources.schedule,
                    resources.related_schedule], Schedule)


class Schedules(page.PageList, Schedule):
    """Paginated list of schedules, plus schedule-wide helpers."""

    def get_zoneinfo(self):
        # GET the list of valid timezone names for rrule DTSTART values.
        return SchedulesZoneInfo(self.connection).get()

    def preview(self, rrule=''):
        # POST an rrule to the preview endpoint to compute its occurrences.
        payload = dict(rrule=rrule)
        return SchedulesPreview(self.connection).post(payload)

    def add_credential(self, cred):
        # Associate a credential; a 204 No Content response is expected.
        with suppress(exc.NoContent):
            self.related.credentials.post(dict(id=cred.id))

    def remove_credential(self, cred):
        # Disassociate a credential; a 204 No Content response is expected.
        with suppress(exc.NoContent):
            self.related.credentials.post(dict(id=cred.id, disassociate=True))


page.register_page([resources.schedules,
                    resources.related_schedules], Schedules)
class SchedulesPreview(base.Base):
    """POST-only endpoint that previews occurrences for an rrule."""
    pass


page.register_page(((resources.schedules_preview, 'post'),), SchedulesPreview)


class SchedulesZoneInfo(base.Base):
    """GET-only endpoint listing valid timezone names."""

    def __getitem__(self, idx):
        # The response body is a JSON list; allow direct indexing.
        return self.json[idx]


page.register_page(((resources.schedules_zoneinfo, 'get'),), SchedulesZoneInfo)

View File

@ -0,0 +1,42 @@
from awxkit.api.resources import resources
from . import base
from . import page
class Setting(base.Base):
    """Page for a single settings category endpoint."""
    pass


# One page type covers every settings category endpoint.
page.register_page([resources.setting,
                    resources.settings_all,
                    resources.settings_authentication,
                    resources.settings_changed,
                    resources.settings_github,
                    resources.settings_github_org,
                    resources.settings_github_team,
                    resources.settings_google_oauth2,
                    resources.settings_jobs,
                    resources.settings_ldap,
                    resources.settings_radius,
                    resources.settings_saml,
                    resources.settings_system,
                    resources.settings_tacacsplus,
                    resources.settings_ui,
                    resources.settings_user,
                    resources.settings_user_defaults], Setting)
class Settings(page.PageList, Setting):
    """List page for /api/v2/settings/ with a helper for its sub-endpoints."""

    def get_endpoint(self, endpoint):
        """Navigate to a specific settings sub-endpoint.

        Example: ``settings_pg.get_endpoint('all')``
        """
        target = '%s%s/' % (self.endpoint, endpoint)
        return self.walk(target)

    # Historical alias; both names are part of the public interface.
    get_setting = get_endpoint


page.register_page(resources.settings, Settings)

View File

@ -0,0 +1,30 @@
from . import base
from . import page
from awxkit.api.resources import resources
class SurveySpec(base.Base):
    """Survey specification attached to a (workflow) job template."""

    def get_variable_default(self, var):
        """Return the default for survey variable ``var`` (None when absent)."""
        for entry in self.spec:
            if entry.get('variable') == var:
                return entry.get('default')

    def get_default_vars(self):
        """Map each variable carrying a truthy default onto that default."""
        return {entry.variable: entry.default
                for entry in self.spec if entry.get("default", None)}

    def get_required_vars(self):
        """List the variables flagged as required."""
        return [entry.variable for entry in self.spec if entry.get("required", None)]


page.register_page([resources.job_template_survey_spec,
                    resources.workflow_job_template_survey_spec], SurveySpec)

View File

@ -0,0 +1,29 @@
from awxkit.api.mixins import HasNotifications
from awxkit.api.pages import UnifiedJobTemplate
from awxkit.api.resources import resources
from . import page
class SystemJobTemplate(UnifiedJobTemplate, HasNotifications):
    """Page for a system job template (cleanup jobs, etc.)."""

    def launch(self, payload=None):
        """Launch the system_job_template using related->launch endpoint.

        :param payload: optional JSON body for the launch POST (``None``
            sentinel avoids a shared mutable default dict).
        :returns: the spawned SystemJob page.
        """
        result = self.related.launch.post({} if payload is None else payload)

        # look up and return the spawned job
        jobs_pg = self.get_related('jobs', id=result.json['system_job'])
        # BUG FIX: the launch response carries the job id under 'system_job';
        # the original message read result.json['job'], so building the
        # failure message itself raised KeyError whenever this assert fired.
        assert jobs_pg.count == 1, \
            "system_job_template launched (id:%s) but unable to find matching " \
            "job at %s/jobs/" % (result.json['system_job'], self.url)
        return jobs_pg.results[0]


page.register_page(resources.system_job_template, SystemJobTemplate)
class SystemJobTemplates(page.PageList, SystemJobTemplate):
    """Paginated list of system job templates."""
    pass


page.register_page(resources.system_job_templates, SystemJobTemplates)

View File

@ -0,0 +1,27 @@
from awxkit.api.pages import UnifiedJob
from awxkit.api.resources import resources
from . import page
class SystemJob(UnifiedJob):
    """Page for a single system job run."""
    pass


page.register_page(resources.system_job, SystemJob)


class SystemJobs(page.PageList, SystemJob):
    """Paginated list of system jobs."""
    pass


page.register_page(resources.system_jobs, SystemJobs)


class SystemJobCancel(UnifiedJob):
    """POST target for cancelling a running system job."""
    pass


page.register_page(resources.system_job_cancel, SystemJobCancel)

View File

@ -0,0 +1,48 @@
from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.utils import suppress, random_title, PseudoNamespace
from awxkit.api.resources import resources
from awxkit.api.pages import Organization
from awxkit.exceptions import NoContent
from . import base
from . import page
class Team(HasCreate, base.Base):
    """Page for a single team; teams always belong to an organization."""

    dependencies = [Organization]

    def add_user(self, user):
        """Associate ``user`` (page object or raw json) with this team.

        A 204 No Content response is expected on success.
        """
        if isinstance(user, page.Page):
            user = user.json
        with suppress(NoContent):
            self.related.users.post(user)

    def payload(self, organization, **kwargs):
        """Build a POST payload; name/description default to random titles."""
        payload = PseudoNamespace(name=kwargs.get('name') or 'Team - {}'.format(random_title()),
                                  description=kwargs.get('description') or random_title(10),
                                  organization=organization.id)
        return payload

    def create_payload(self, name='', description='', organization=Organization, **kwargs):
        """Resolve the organization dependency, then build a payload wired to
        this page's dependency store.
        """
        self.create_and_update_dependencies(organization)
        payload = self.payload(organization=self.ds.organization, name=name, description=description, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', organization=Organization, **kwargs):
        """POST a new team and adopt the response as this page's identity."""
        payload = self.create_payload(name=name, description=description, organization=organization, **kwargs)
        return self.update_identity(Teams(self.connection).post(payload))


page.register_page([resources.team,
                    (resources.teams, 'post')], Team)


class Teams(page.PageList, Team):
    """Paginated list of Team pages."""
    pass


page.register_page([resources.teams,
                    resources.related_teams], Teams)

View File

@ -0,0 +1,86 @@
from awxkit.api.resources import resources
from awxkit.utils import random_title, update_payload
from awxkit.api.mixins import HasStatus
from . import base
from . import page
class UnifiedJobTemplate(HasStatus, base.Base):
    """Base class for unified job template pages (e.g. project, inventory_source,
    and job_template).
    """

    # Prompt-style fields subclasses may allow when creating schedules.
    optional_schedule_fields = (
        'extra_data',
        'diff_mode',
        'limit',
        'job_tags',
        'skip_tags',
        'job_type',
        'verbosity',
        'inventory',
    )

    def __str__(self):
        # NOTE: I use .replace('%', '%%') to workaround an odd string
        # formatting issue where result_stdout contained '%s'. This later caused
        # a python traceback when attempting to display output from this
        # method.
        items = [
            'id',
            'name',
            'status',
            'source',
            'last_update_failed',
            'last_updated',
            'result_traceback',
            'job_explanation',
            'job_args']
        # Only include the attributes this particular subclass actually has.
        info = []
        for item in [x for x in items if hasattr(self, x)]:
            info.append('{0}:{1}'.format(item, getattr(self, item)))
        output = '<{0.__class__.__name__} {1}>'.format(self, ', '.join(info))
        return output.replace('%', '%%')

    def add_schedule(
            self,
            name='',
            description='',
            enabled=True,
            rrule=None,
            **kwargs):
        """POST a schedule for this template.

        The default rrule fires yearly starting in the year 3018 -- i.e.
        effectively never -- which suits tests that only need a schedule
        object to exist.
        """
        if rrule is None:
            rrule = "DTSTART:30180101T000000Z RRULE:FREQ=YEARLY;INTERVAL=1"
        payload = dict(
            name=name or "{0} Schedule {1}".format(
                self.name,
                random_title()),
            description=description or random_title(10),
            enabled=enabled,
            rrule=str(rrule))
        update_payload(payload, self.optional_schedule_fields, kwargs)
        return self.related.schedules.post(payload)

    @property
    def is_successful(self):
        """An unified_job_template is considered successful when:
        1) status == 'successful'
        2) not last_update_failed
        3) last_updated
        """
        return super(
            UnifiedJobTemplate,
            self).is_successful and not self.last_update_failed and self.last_updated is not None


page.register_page(resources.unified_job_template, UnifiedJobTemplate)


class UnifiedJobTemplates(page.PageList, UnifiedJobTemplate):
    """Paginated list of unified job templates."""
    pass


page.register_page(resources.unified_job_templates, UnifiedJobTemplates)

View File

@ -0,0 +1,150 @@
from pprint import pformat
import yaml.parser
import yaml.scanner
import yaml
from awxkit.utils import args_string_to_list, seconds_since_date_string
from awxkit.api.resources import resources
from awxkit.api.mixins import HasStatus
import awxkit.exceptions as exc
from . import base
from . import page
class UnifiedJob(HasStatus, base.Base):
    """Base class for unified job pages (e.g. project_updates, inventory_updates
    and jobs).
    """

    def __str__(self):
        # NOTE: I use .replace('%', '%%') to workaround an odd string
        # formatting issue where result_stdout contained '%s'. This later caused
        # a python traceback when attempting to display output from this method.
        items = ['id', 'name', 'status', 'failed', 'result_stdout', 'result_traceback',
                 'job_explanation', 'job_args']
        # Only include attributes this particular job type actually has.
        info = []
        for item in [x for x in items if hasattr(self, x)]:
            info.append('{0}:{1}'.format(item, getattr(self, item)))
        output = '<{0.__class__.__name__} {1}>'.format(self, ', '.join(info))
        return output.replace('%', '%%')

    @property
    def result_stdout(self):
        """Job stdout, fetched from the related stdout endpoint when it is
        absent from (or reported missing in) this page's JSON.
        """
        if 'result_stdout' not in self.json and 'stdout' in self.related:
            return self.connection.get(
                self.related.stdout, query_parameters=dict(format='txt_download')
            ).content.decode()
        if str(self.json.get('result_stdout')) == 'stdout capture is missing' and 'stdout' in self.related:
            # Stdout may live on another execution node; hitting the download
            # endpoint forces it to be gathered, then refresh this page.
            ping = self.walk(resources.ping)
            if self.execution_node != ping.active_node:
                self.connection.get(self.related.stdout, query_parameters=dict(format='txt_download'))
                self.get()
        # NOTE(review): the .decode() below assumes json result_stdout is a
        # bytes-like value; if the API returns str this raises
        # AttributeError -- confirm against live responses.
        return self.json.result_stdout.decode()

    def assert_text_in_stdout(self, expected_text, replace_spaces=None, replace_newlines=' '):
        """Assert text is found in stdout, and if not raise exception with entire stdout.

        Default behavior is to replace newline characters with a space, but this can be modified, including replacement
        with ''. Pass replace_newlines=None to disable.

        Additionally, you may replace any ' ' with another character (including ''). This is applied after the newline
        replacement. Default behavior is to not replace spaces.
        """
        stdout = self.result_stdout
        if replace_newlines is not None:
            stdout = stdout.replace('\n', replace_newlines)
        if replace_spaces is not None:
            stdout = stdout.replace(' ', replace_spaces)
        if expected_text not in stdout:
            pretty_stdout = pformat(stdout)
            raise AssertionError(
                'Expected "{}", but it was not found in stdout. Full stdout:\n {}'.format(expected_text, pretty_stdout)
            )

    @property
    def is_successful(self):
        """Return whether the current has completed successfully.

        This means that:
        * self.status == 'successful'
        * self.has_traceback == False
        * self.failed == False
        """
        return super(UnifiedJob, self).is_successful and not (self.has_traceback or self.failed)

    def wait_until_status(self, status, interval=1, timeout=60, since_job_created=True, **kwargs):
        # By default budget the timeout from job creation time, not from now.
        if since_job_created:
            timeout = timeout - seconds_since_date_string(self.created)
        return super(UnifiedJob, self).wait_until_status(status, interval, timeout, **kwargs)

    def wait_until_completed(self, interval=5, timeout=60 * 8, since_job_created=True, **kwargs):
        # By default budget the timeout from job creation time, not from now.
        if since_job_created:
            timeout = timeout - seconds_since_date_string(self.created)
        return super(UnifiedJob, self).wait_until_completed(interval, timeout, **kwargs)

    @property
    def has_traceback(self):
        """Return whether a traceback has been detected in result_traceback"""
        try:
            tb = str(self.result_traceback)
        except AttributeError:
            # If record obtained from list view, then traceback isn't given
            # and result_stdout is only given for some types
            # we must suppress AttributeError or else it will be mis-interpreted
            # by __getattr__
            tb = ''
        return 'Traceback' in tb

    def cancel(self):
        """POST to related->cancel when allowed and return the refreshed page."""
        cancel = self.get_related('cancel')
        if not cancel.can_cancel:
            return
        try:
            cancel.post()
        except exc.MethodNotAllowed as e:
            # Race condition where job finishes between can_cancel
            # check and post.
            if not any("not allowed" in field for field in e.msg.values()):
                raise(e)
        return self.get()

    @property
    def job_args(self):
        """Helper property to return flattened cmdline arg tokens in a list.

        Flattens arg strings for rough inclusion checks:
        ```assert "thing" in unified_job.job_args```
        ```assert dict(extra_var=extra_var_val) in unified_job.job_args```

        If you need to ensure the job_args are of awx-provided format use raw unified_job.json.job_args.
        """
        def attempt_yaml_load(arg):
            # fall back to the raw string when the token isn't valid YAML
            try:
                return yaml.load(arg, Loader=yaml.FullLoader)
            except (yaml.parser.ParserError, yaml.scanner.ScannerError):
                return str(arg)

        args = []
        if not self.json.job_args:
            return ""
        for arg in yaml.load(self.json.job_args, Loader=yaml.FullLoader):
            try:
                args.append(yaml.load(arg, Loader=yaml.FullLoader))
            except (yaml.parser.ParserError, yaml.scanner.ScannerError):
                if arg[0] == '@':  # extra var file reference
                    args.append(attempt_yaml_load(arg))
                elif args[-1] == '-c':  # this arg is likely sh arg string
                    args.extend([attempt_yaml_load(item) for item in args_string_to_list(arg)])
                else:
                    raise
        return args


class UnifiedJobs(page.PageList, UnifiedJob):
    """Paginated list of unified jobs."""
    pass


page.register_page([resources.unified_jobs,
                    resources.instance_related_jobs,
                    resources.instance_group_related_jobs,
                    resources.schedules_jobs], UnifiedJobs)

View File

@ -0,0 +1,74 @@
from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.utils import random_title, PseudoNamespace
from awxkit.api.resources import resources
from awxkit.config import config
from . import base
from . import page
class User(HasCreate, base.Base):
    """Page for a single user account."""

    def payload(self, **kwargs):
        """Build a POST payload; username defaults to a random ASCII title
        and password to the configured default credential password.
        """
        payload = PseudoNamespace(
            username=kwargs.get('username') or 'User-{}'.format(
                random_title(
                    non_ascii=False)),
            password=kwargs.get('password') or config.credentials.default.password,
            is_superuser=kwargs.get(
                'is_superuser',
                False),
            is_system_auditor=kwargs.get(
                'is_system_auditor',
                False),
            first_name=kwargs.get(
                'first_name',
                random_title()),
            last_name=kwargs.get(
                'last_name',
                random_title()),
            email=kwargs.get(
                'email',
                '{}@example.com'.format(random_title(5, non_ascii=False)))
        )
        return payload

    def create_payload(self, username='', password='', **kwargs):
        """Build a payload wired to this page's dependency store."""
        payload = self.payload(username=username, password=password, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, username='', password='', organization=None, **kwargs):
        """POST a new user, keep its password on this page object, and
        optionally add the user to ``organization``.
        """
        payload = self.create_payload(
            username=username, password=password, **kwargs)
        # remembered so callers can later authenticate as the created user
        self.password = payload.password

        self.update_identity(Users(self.connection).post(payload))

        if organization:
            organization.add_user(self)

        return self


page.register_page([resources.user,
                    (resources.users, 'post')], User)


class Users(page.PageList, User):
    """Paginated list of users."""
    pass


page.register_page([resources.users,
                    resources.organization_admins,
                    resources.related_users,
                    resources.user_admin_organizations], Users)


class Me(Users):
    """List endpoint returning only the authenticated user."""
    pass


page.register_page(resources.me, Me)

View File

@ -0,0 +1,39 @@
from awxkit.api.pages import base
from awxkit.api.resources import resources
from awxkit.utils import poll_until, seconds_since_date_string, suppress
from awxkit.exceptions import WaitUntilTimeout
from . import page
class WorkflowJobNode(base.Base):
    """A node within a running workflow job."""

    def wait_for_job(self, interval=5, timeout=60, **kw):
        """Waits until node's job exists"""
        # budget the timeout from node creation time, not from now
        adjusted_timeout = timeout - seconds_since_date_string(self.created)

        with suppress(WaitUntilTimeout):
            poll_until(self.job_exists, interval=interval, timeout=adjusted_timeout, **kw)

        return self

    def job_exists(self):
        """Refresh this node and report whether it has spawned a job yet."""
        self.get()
        try:
            return self.job
        except AttributeError:
            # 'job' is absent from the JSON until the node launches one
            return False


page.register_page(resources.workflow_job_node, WorkflowJobNode)


class WorkflowJobNodes(page.PageList, WorkflowJobNode):
    """Paginated list of workflow job nodes."""
    pass


page.register_page([resources.workflow_job_nodes,
                    resources.workflow_job_workflow_nodes,
                    resources.workflow_job_node_always_nodes,
                    resources.workflow_job_node_failure_nodes,
                    resources.workflow_job_node_success_nodes], WorkflowJobNodes)

View File

@ -0,0 +1,85 @@
import awxkit.exceptions as exc
from awxkit.api.pages import base, WorkflowJobTemplate, UnifiedJobTemplate, JobTemplate
from awxkit.api.mixins import HasCreate, DSAdapter
from awxkit.api.resources import resources
from awxkit.utils import update_payload, PseudoNamespace, suppress
from . import page
class WorkflowJobTemplateNode(HasCreate, base.Base):
    """A node in a workflow job template's graph, joining the workflow to a
    unified job template plus optional prompt-style overrides.
    """

    dependencies = [WorkflowJobTemplate, UnifiedJobTemplate]

    def payload(self, workflow_job_template, unified_job_template, **kwargs):
        """Build a POST payload linking the two templates; prompt fields and
        credential/inventory ids are copied from kwargs when present.
        """
        payload = PseudoNamespace(workflow_job_template=workflow_job_template.id,
                                  unified_job_template=unified_job_template.id)
        # FIX: 'extra_data' was listed twice in this tuple; deduplicated
        # (semantics of update_payload are unchanged).
        optional_fields = ('diff_mode', 'extra_data', 'limit', 'job_tags', 'job_type', 'skip_tags', 'verbosity')
        update_payload(payload, optional_fields, kwargs)
        for resource in ('credential', 'inventory'):
            if resource in kwargs:
                payload[resource] = kwargs[resource].id
        return payload

    def create_payload(self, workflow_job_template=WorkflowJobTemplate, unified_job_template=JobTemplate, **kwargs):
        """Resolve both template dependencies, then build a payload wired to
        this page's dependency store.
        """
        self.create_and_update_dependencies(workflow_job_template, unified_job_template)
        payload = self.payload(workflow_job_template=self.ds.workflow_job_template,
                               unified_job_template=self.ds.unified_job_template, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, workflow_job_template=WorkflowJobTemplate, unified_job_template=JobTemplate, **kwargs):
        """POST a new node and adopt the response as this page's identity."""
        payload = self.create_payload(workflow_job_template=workflow_job_template,
                                      unified_job_template=unified_job_template, **kwargs)
        return self.update_identity(WorkflowJobTemplateNodes(self.connection).post(payload))

    def _add_node(self, endpoint, unified_job_template):
        # POST a child node to the given relationship endpoint and seed its
        # dependency store with our workflow job template.
        node = endpoint.post(dict(unified_job_template=unified_job_template.id))
        node.create_and_update_dependencies(self.ds.workflow_job_template, unified_job_template)
        return node

    def add_always_node(self, unified_job_template):
        """Attach a child node that runs regardless of this node's outcome."""
        return self._add_node(self.related.always_nodes, unified_job_template)

    def add_failure_node(self, unified_job_template):
        """Attach a child node that runs when this node fails."""
        return self._add_node(self.related.failure_nodes, unified_job_template)

    def add_success_node(self, unified_job_template):
        """Attach a child node that runs when this node succeeds."""
        return self._add_node(self.related.success_nodes, unified_job_template)

    def add_credential(self, credential):
        """Associate a prompt credential; a 204 No Content is expected."""
        with suppress(exc.NoContent):
            self.related.credentials.post(
                dict(id=credential.id, associate=True))

    def remove_credential(self, credential):
        """Disassociate a prompt credential; a 204 No Content is expected."""
        with suppress(exc.NoContent):
            self.related.credentials.post(
                dict(id=credential.id, disassociate=True))

    def remove_all_credentials(self):
        """Disassociate every prompt credential currently on this node."""
        for cred in self.related.credentials.get().results:
            with suppress(exc.NoContent):
                self.related.credentials.post(
                    dict(id=cred.id, disassociate=True))
page.register_page([resources.workflow_job_template_node,
                    (resources.workflow_job_template_nodes, 'post')], WorkflowJobTemplateNode)


class WorkflowJobTemplateNodes(page.PageList, WorkflowJobTemplateNode):
    """Paginated list of workflow job template nodes."""
    pass


page.register_page([resources.workflow_job_template_nodes,
                    resources.workflow_job_template_workflow_nodes,
                    resources.workflow_job_template_node_always_nodes,
                    resources.workflow_job_template_node_failure_nodes,
                    resources.workflow_job_template_node_success_nodes], WorkflowJobTemplateNodes)

View File

@ -0,0 +1,100 @@
import json
from awxkit.api.mixins import HasCreate, HasNotifications, HasSurvey, HasCopy, DSAdapter
from awxkit.api.pages import Organization, UnifiedJobTemplate
from awxkit.utils import filter_by_class, not_provided, update_payload, random_title, suppress, PseudoNamespace
from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page
class WorkflowJobTemplate(HasCopy, HasCreate, HasNotifications, HasSurvey, UnifiedJobTemplate):
    """Page for a workflow job template."""

    optional_dependencies = [Organization]

    def launch(self, payload=None):
        """Launch using related->launch endpoint.

        :param payload: optional JSON body for the launch POST (``None``
            sentinel avoids a shared mutable default dict).
        :returns: the spawned WorkflowJob page.
        :raises exc.UnexpectedAWXState: when the spawned job can't be found.
        """
        # get related->launch
        launch_pg = self.get_related('launch')

        # launch the workflow_job_template
        result = launch_pg.post({} if payload is None else payload)

        # return job
        jobs_pg = self.related.workflow_jobs.get(id=result.workflow_job)
        if jobs_pg.count != 1:
            # BUG FIX: the original combined str.format-style '{}'
            # placeholders with the '%' operator, which raised TypeError
            # here instead of producing this message.
            msg = "workflow_job_template launched (id:{}) but job not found in response at {}/workflow_jobs/".format(
                result.json['workflow_job'], self.url)
            raise exc.UnexpectedAWXState(msg)
        return jobs_pg.results[0]

    def payload(self, **kwargs):
        """Build a POST payload; name/description default to random titles."""
        payload = PseudoNamespace(name=kwargs.get('name') or 'WorkflowJobTemplate - {}'.format(random_title()),
                                  description=kwargs.get('description') or random_title(10))

        optional_fields = ("allow_simultaneous", "ask_variables_on_launch", "survey_enabled")
        update_payload(payload, optional_fields, kwargs)

        extra_vars = kwargs.get('extra_vars', not_provided)
        if extra_vars != not_provided:
            # dicts are serialized; strings pass through untouched
            if isinstance(extra_vars, dict):
                extra_vars = json.dumps(extra_vars)
            payload.update(extra_vars=extra_vars)

        if kwargs.get('organization'):
            payload.organization = kwargs.get('organization').id
        if kwargs.get('inventory'):
            payload.inventory = kwargs.get('inventory').id
        if kwargs.get('ask_inventory_on_launch'):
            payload.ask_inventory_on_launch = kwargs.get('ask_inventory_on_launch')
        return payload

    def create_payload(self, name='', description='', organization=None, **kwargs):
        """Resolve the optional organization dependency, then build a payload
        wired to this page's dependency store.
        """
        self.create_and_update_dependencies(*filter_by_class((organization, Organization)))
        organization = self.ds.organization if organization else None
        payload = self.payload(name=name, description=description, organization=organization, **kwargs)
        payload.ds = DSAdapter(self.__class__.__name__, self._dependency_store)
        return payload

    def create(self, name='', description='', organization=None, **kwargs):
        """POST a new workflow job template and adopt the response identity."""
        payload = self.create_payload(name=name, description=description, organization=organization, **kwargs)
        return self.update_identity(WorkflowJobTemplates(self.connection).post(payload))

    def add_label(self, label):
        """Associate ``label`` (page or json); a 204 No Content is expected."""
        if isinstance(label, page.Page):
            label = label.json
        with suppress(exc.NoContent):
            self.related.labels.post(label)
page.register_page([resources.workflow_job_template,
                    (resources.workflow_job_templates, 'post'),
                    (resources.workflow_job_template_copy, 'post')], WorkflowJobTemplate)


class WorkflowJobTemplates(page.PageList, WorkflowJobTemplate):
    """Paginated list of workflow job templates."""
    pass


page.register_page([resources.workflow_job_templates], WorkflowJobTemplates)


class WorkflowJobTemplateLaunch(base.Base):
    """POST target for launching a workflow job template."""
    pass


page.register_page(resources.workflow_job_template_launch, WorkflowJobTemplateLaunch)


class WorkflowJobTemplateCopy(base.Base):
    """POST target for copying a workflow job template."""
    pass


page.register_page([resources.workflow_job_template_copy], WorkflowJobTemplateCopy)

View File

@ -0,0 +1,38 @@
from awxkit.api.pages import UnifiedJob
from awxkit.api.resources import resources
from . import page
class WorkflowJob(UnifiedJob):
    """Page for a single workflow job (``/api/v2/workflow_jobs/N/``)."""

    def __str__(self):
        # TODO: Update after endpoint's fields are finished filling out
        # Deliberately skips UnifiedJob.__str__ (note the super() target is
        # UnifiedJob itself, so its *parent's* __str__ runs).
        return super(UnifiedJob, self).__str__()

    def relaunch(self, payload=None):
        """Relaunch this workflow job and return the page of the new job.

        :param payload: optional POST body; defaults to an empty body.
            (Fixed: the old ``payload={}`` mutable default was shared
            across calls.)
        """
        result = self.related.relaunch.post(payload or {})
        return self.walk(result.url)

    @property
    def result_stdout(self):
        # workflow jobs do not have result_stdout
        # which is problematic for the UnifiedJob.is_successful reliance on
        # related stdout endpoint.
        if 'result_stdout' not in self.json:
            return 'Unprovided AWX field.'
        else:
            return super(WorkflowJob, self).result_stdout
page.register_page(resources.workflow_job, WorkflowJob)


class WorkflowJobs(page.PageList, WorkflowJob):
    # Paginated list of workflow jobs.
    pass


# One list page backs every workflow-job listing endpoint.
page.register_page([resources.workflow_jobs,
                    resources.workflow_job_template_jobs,
                    resources.job_template_slice_workflow_jobs],
                   WorkflowJobs)

View File

@ -0,0 +1,161 @@
from collections import defaultdict
import logging
import re
from awxkit.utils import is_list_or_tuple, not_provided
log = logging.getLogger(__name__)
class URLRegistry(object):
    """Maps relative url patterns (and optional HTTP methods) to arbitrary
    resources (usually page classes) for later lookup via ``get()``.
    """

    def __init__(self):
        # store: {compiled url pattern: {compiled method pattern: resource}}
        self.store = defaultdict(dict)
        # default: {compiled method pattern: resource} fallbacks for get()
        self.default = {}

    def url_pattern(self, pattern_str):
        """Converts some regex-friendly url pattern (Resources().resource string)
        to a compiled pattern.
        """
        # should account for any relative endpoint w/ query parameters
        pattern = r'^' + pattern_str + r'(\?.*)*$'
        return re.compile(pattern)

    def _generate_url_iterable(self, url_iterable):
        """Normalize (url, method(s)) entries into a list of
        (compiled url pattern, compiled method pattern) pairs.
        """
        parsed_urls = []
        for url in url_iterable:
            # assumes not_provided is a regex-safe sentinel string — it is
            # compiled below like any method pattern.
            method = not_provided
            if is_list_or_tuple(url):
                url, method = url
            if not is_list_or_tuple(method):
                methods = (method,)
            else:
                methods = method
            for method in methods:
                method_pattern = re.compile(method)
                url_pattern = self.url_pattern(url)
                parsed_urls.append((url_pattern, method_pattern))
        return parsed_urls

    def register(self, *args):
        """Registers a single resource (generic python type or object) to either
        1. a single url string (internally converted via URLRegistry.url_pattern)
           and optional method or method iterable
        2. a list or tuple of url string and optional method or method iterables
        for retrieval via get().

        reg.register('/some/path/', ResourceOne)
        reg.get('/some/path/')
        -> ResourceOne
        reg.register('/some/other/path/', 'method', ResourceTwo)
        reg.get('/some/other/path/', 'method')
        -> ResourceTwo
        reg.register('/some/additional/path/', ('method_one', 'method_two'), ResourceThree)
        reg.get('/some/additional/path/', 'method_one')
        -> ResourceThree
        reg.get('/some/additional/path/', 'method_two')
        -> ResourceThree
        reg.register(('/some/new/path/one/', '/some/new/path/two/',
                      ('/some/other/new/path', 'method'),
                      ('/some/other/additional/path/, ('method_one', 'method_two')), ResourceFour))
        reg.get('/some/other/new/path/', 'method')
        -> ResourceFour
        """
        if not args or len(args) == 1:
            raise TypeError('register needs at least a url and Resource.')
        elif len(args) not in (2, 3):
            raise TypeError('register takes at most 3 arguments ({} given).'.format(len(args)))
        if len(args) == 3:  # url, method (iterable), and Resource
            url_iterable = (args[:2],)
            resource = args[2]
        else:
            urls, resource = args
            if not is_list_or_tuple(urls):
                url_iterable = [(urls, not_provided)]
            else:
                url_iterable = urls
        url_iterable = self._generate_url_iterable(url_iterable)
        for url_pattern, method_pattern in url_iterable:
            # Reject duplicate registrations for the same url+method pair.
            if url_pattern in self.store and method_pattern in self.store[url_pattern]:
                if method_pattern.pattern == not_provided:
                    exc_msg = '"{0.pattern}" already has methodless registration.'.format(url_pattern, method_pattern)
                else:
                    exc_msg = ('"{0.pattern}" already has registered method "{1.pattern}"'
                               .format(url_pattern, method_pattern))
                raise TypeError(exc_msg)
            self.store[url_pattern][method_pattern] = resource

    def setdefault(self, *args):
        """Establishes a default return value for get() by optional method (iterable).

        reg.setdefault(ResourceOne)
        reg.get('/some/unregistered/path')
        -> ResourceOne
        reg.setdefault('method', ResourceTwo)
        reg.get('/some/registered/methodless/path/', 'method')
        -> ResourceTwo
        reg.setdefault(('method_one', 'method_two'), ResourceThree)
        reg.get('/some/unregistered/path', 'method_two')
        -> ResourceThree
        reg.setdefault('supports.*regex', ResourceFour)
        reg.get('supports123regex')
        -> ResourceFour
        """
        if not args:
            raise TypeError('setdefault needs at least a Resource.')
        if len(args) == 1:  # all methods
            self.default[re.compile('.*')] = args[0]
        elif len(args) == 2:
            if is_list_or_tuple(args[0]):
                methods = args[0]
            else:
                methods = (args[0],)
            for method in methods:
                method_pattern = re.compile(method)
                self.default[method_pattern] = args[1]
        else:
            raise TypeError('setdefault takes at most 2 arguments ({} given).'.format(len(args)))

    def get(self, url, method=not_provided):
        """Returns a single resource by previously registered path and optional method where
        1. If a registration was methodless and a method is provided to get() the return value will be
        None or, if applicable, a registry default (see setdefault()).
        2. If a registration included a method (excluding the method wildcard '.*') and no method is provided to get()
        the return value will be None or, if applicable, a registry default.

        reg.register('/some/path/', ResourceOne)
        reg.get('/some/path/')
        -> ResourceOne
        reg.get('/some/path/', 'method')
        -> None
        reg.register('/some/other/path/', 'method', ResourceTwo)
        reg.get('/some/other/path/', 'method')
        -> ResourceTwo
        reg.get('/some/other/path')
        -> None
        reg.register('/some/additional/path/', '.*', ResourceThree)
        reg.get('/some/additional/path/', 'method')
        -> ResourceThree
        reg.get('/some/additional/path/')
        -> ResourceThree
        """
        registered_type = None
        default_methods = list(self.default)
        # Make sure dot character evaluated last
        # (False sorts before True, so the '.*' wildcard lands at the end).
        default_methods.sort(key=lambda x: x.pattern == '.*')
        for method_key in default_methods:
            if method_key.match(method):
                registered_type = self.default[method_key]
                break
        # A specific url registration overrides any default found above.
        for re_key in self.store:
            if re_key.match(url):
                keys = list(self.store[re_key])
                keys.sort(key=lambda x: x.pattern == '.*')
                for method_key in keys:
                    if method_key.match(method):
                        registered_type = self.store[re_key][method_key]
                        break
        log.debug('Retrieved {} by url: {}'.format(registered_type, url))
        return registered_type

View File

@ -0,0 +1,285 @@
class Resources(object):
    """Catalog of relative AWX API endpoint patterns.

    Each ``_name`` attribute is a url fragment (often a regex with ``\\d+``
    placeholders) relative to the API version root.  Attribute access without
    the leading underscore (e.g. ``resources.job_template``) returns the
    pattern prefixed with the versioned API path — see ``__getattr__``.
    """

    _activity = r'activity_stream/\d+/'
    _activity_stream = 'activity_stream/'
    _ad_hoc_command = r'ad_hoc_commands/\d+/'
    _ad_hoc_command_relaunch = r'ad_hoc_commands/\d+/relaunch/'
    _ad_hoc_commands = 'ad_hoc_commands/'
    _ad_hoc_event = r'ad_hoc_command_events/\d+/'
    _ad_hoc_events = r'ad_hoc_commands/\d+/events/'
    _ad_hoc_related_cancel = r'ad_hoc_commands/\d+/cancel/'
    _ad_hoc_relaunch = r'ad_hoc_commands/\d+/relaunch/'
    _ansible_facts = r'hosts/\d+/ansible_facts/'
    _application = r'applications/\d+/'
    _applications = 'applications/'
    _auth = 'auth/'
    _authtoken = 'authtoken/'
    _config = 'config/'
    _credential = r'credentials/\d+/'
    _credential_access_list = r'credentials/\d+/access_list/'
    _credential_copy = r'credentials/\d+/copy/'
    _credential_input_source = r'credential_input_sources/\d+/'
    _credential_input_sources = 'credential_input_sources/'
    _credential_owner_teams = r'credentials/\d+/owner_teams/'
    _credential_owner_users = r'credentials/\d+/owner_users/'
    _credential_type = r'credential_types/\d+/'
    _credential_types = 'credential_types/'
    _credentials = 'credentials/'
    _dashboard = 'dashboard/'
    _fact_view = r'hosts/\d+/fact_view/'
    _group = r'groups/\d+/'
    _group_access_list = r'groups/\d+/access_list/'
    _group_children = r'groups/\d+/children/'
    _group_potential_children = r'groups/\d+/potential_children/'
    _group_related_ad_hoc_commands = r'groups/\d+/ad_hoc_commands/'
    _group_related_all_hosts = r'groups/\d+/all_hosts/'
    _group_related_hosts = r'groups/\d+/hosts/'
    _group_related_job_events = r'groups/\d+/job_events/'
    _group_related_job_host_summaries = r'groups/\d+/job_host_summaries/'
    _group_variable_data = r'groups/\d+/variable_data/'
    _groups = 'groups/'
    _host = r'hosts/\d+/'
    _host_groups = r'hosts/\d+/groups/'
    _host_insights = r'hosts/\d+/insights/'
    _host_related_ad_hoc_commands = r'hosts/\d+/ad_hoc_commands/'
    _host_related_fact_version = r'hosts/\d+/fact_versions/\d+/'
    _host_related_fact_versions = r'hosts/\d+/fact_versions/'
    _host_variable_data = r'hosts/\d+/variable_data/'
    _hosts = 'hosts/'
    _instance = r'instances/\d+/'
    _instance_group = r'instance_groups/\d+/'
    _instance_group_related_jobs = r'instance_groups/\d+/jobs/'
    _instance_groups = 'instance_groups/'
    _instance_related_jobs = r'instances/\d+/jobs/'
    _instances = 'instances/'
    _inventories = 'inventories/'
    _inventory = r'inventories/\d+/'
    _inventory_access_list = r'inventories/\d+/access_list/'
    _inventory_copy = r'inventories/\d+/copy/'
    _inventory_related_ad_hoc_commands = r'inventories/\d+/ad_hoc_commands/'
    _inventory_related_groups = r'inventories/\d+/groups/'
    _inventory_related_hosts = r'inventories/\d+/hosts/'
    _inventory_related_root_groups = r'inventories/\d+/root_groups/'
    _inventory_related_script = r'inventories/\d+/script/'
    _inventory_related_update_inventory_sources = r'inventories/\d+/update_inventory_sources/'
    _inventory_scan_job_templates = r'inventories/\d+/scan_job_templates/'
    _inventory_script = r'inventory_scripts/\d+/'
    _inventory_script_copy = r'inventory_scripts/\d+/copy/'
    _inventory_scripts = 'inventory_scripts/'
    _inventory_source = r'inventory_sources/\d+/'
    _inventory_source_schedule = r'inventory_sources/\d+/schedules/\d+/'
    _inventory_source_schedules = r'inventory_sources/\d+/schedules/'
    _inventory_source_updates = r'inventory_sources/\d+/inventory_updates/'
    _inventory_sources = 'inventory_sources/'
    _inventory_sources_related_groups = r'inventory_sources/\d+/groups/'
    _inventory_sources_related_hosts = r'inventory_sources/\d+/hosts/'
    _inventory_sources_related_update = r'inventory_sources/\d+/update/'
    _inventory_tree = r'inventories/\d+/tree/'
    _inventory_update = r'inventory_updates/\d+/'
    _inventory_update_cancel = r'inventory_updates/\d+/cancel/'
    _inventory_update_events = r'inventory_updates/\d+/events/'
    _inventory_updates = 'inventory_updates/'
    _inventory_variable_data = r'inventories/\d+/variable_data/'
    _job = r'jobs/\d+/'
    _job_cancel = r'jobs/\d+/cancel/'
    _job_create_schedule = r'jobs/\d+/create_schedule/'
    _job_event = r'job_events/\d+/'
    _job_event_children = r'job_events/\d+/children/'
    _job_events = 'job_events/'
    _job_extra_credentials = _job + 'extra_credentials/'
    _job_host_summaries = r'jobs/\d+/job_host_summaries/'
    _job_host_summary = r'job_host_summaries/\d+/'
    _job_job_event = r'jobs/\d+/job_events/\d+/'
    _job_job_events = r'jobs/\d+/job_events/'
    _job_labels = r'jobs/\d+/labels/'
    _job_notifications = r'jobs/\d+/notifications/'
    _job_play = r'jobs/\d+/job_plays/\d+/'
    _job_plays = r'jobs/\d+/job_plays/'
    _job_relaunch = r'jobs/\d+/relaunch/'
    _job_start = r'jobs/\d+/start/'
    _job_task = r'jobs/\d+/job_tasks/\d+/'
    _job_tasks = r'jobs/\d+/job_tasks/'
    _job_template = r'job_templates/\d+/'
    _job_template_access_list = r'job_templates/\d+/access_list/'
    _job_template_callback = r'job_templates/\d+/callback/'
    _job_template_copy = r'job_templates/\d+/copy/'
    _job_template_extra_credentials = _job_template + 'extra_credentials/'
    _job_template_jobs = r'job_templates/\d+/jobs/'
    _job_template_labels = r'job_templates/\d+/labels/'
    _job_template_launch = r'job_templates/\d+/launch/'
    _job_template_schedule = r'job_templates/\d+/schedules/\d+/'
    _job_template_schedules = r'job_templates/\d+/schedules/'
    _job_template_slice_workflow_jobs = r'job_templates/\d+/slice_workflow_jobs/'
    _job_template_survey_spec = r'job_templates/\d+/survey_spec/'
    _job_templates = 'job_templates/'
    _jobs = 'jobs/'
    _label = r'labels/\d+/'
    _labels = 'labels/'
    _me = 'me/'
    _metrics = 'metrics/'
    _notification = r'notifications/\d+/'
    _notification_template = r'notification_templates/\d+/'
    _notification_template_any = r'\w+/\d+/notification_templates_any/\d+/'
    _notification_template_copy = r'notification_templates/\d+/copy/'
    _notification_template_error = r'\w+/\d+/notification_templates_error/\d+/'
    _notification_template_success = r'\w+/\d+/notification_templates_success/\d+/'
    _notification_template_test = r'notification_templates/\d+/test/'
    _notification_templates = 'notification_templates/'
    _notification_templates_any = r'\w+/\d+/notification_templates_any/'
    _notification_templates_error = r'\w+/\d+/notification_templates_error/'
    _notification_templates_success = r'\w+/\d+/notification_templates_success/'
    _notifications = 'notifications/'
    _object_activity_stream = r'[^/]+/\d+/activity_stream/'
    _org_projects = r'organizations/\d+/projects/'
    _org_teams = r'organizations/\d+/teams/'
    _organization = r'organizations/\d+/'
    _organization_access_list = r'organizations/\d+/access_list/'
    _organization_admins = r'organizations/\d+/admins/'
    _organization_applications = r'organizations/\d+/applications/'
    _organization_inventories = r'organizations/\d+/inventories/'
    _organization_users = r'organizations/\d+/users/'
    _organizations = 'organizations/'
    _ping = 'ping/'
    _project = r'projects/\d+/'
    _project_access_list = r'projects/\d+/access_list/'
    _project_copy = r'projects/\d+/copy/'
    _project_inventories = r'projects/\d+/inventories/'
    _project_organizations = r'projects/\d+/organizations/'
    _project_playbooks = r'projects/\d+/playbooks/'
    _project_project_updates = r'projects/\d+/project_updates/'
    _project_related_update = r'projects/\d+/update/'
    _project_schedule = r'projects/\d+/schedules/\d+/'
    _project_schedules = r'projects/\d+/schedules/'
    _project_scm_inventory_sources = r'projects/\d+/scm_inventory_sources/'
    _project_teams = r'projects/\d+/teams/'
    _project_update = r'project_updates/\d+/'
    _project_update_cancel = r'project_updates/\d+/cancel/'
    _project_update_events = r'project_updates/\d+/events/'
    _project_update_scm_inventory_updates = r'project_updates/\d+/scm_inventory_updates/'
    _project_updates = 'project_updates/'
    _projects = 'projects/'
    _related_credentials = r'\w+/\d+/credentials/'
    _related_input_sources = r'\w+/\d+/input_sources/'
    _related_instance_groups = r'\w+/\d+/instance_groups/'
    _related_instances = r'\w+/\d+/instances/'
    _related_inventories = r'(?!projects)\w+/\d+/inventories/'  # project related inventories are inventory files (.ini)
    _related_inventory_sources = r'\w+/\d+/inventory_sources/'
    _related_job_templates = r'\w+/\d+/job_templates/'
    _related_notification_templates = r'\w+/\d+/notification_templates/'
    _related_notifications = r'\w+/\d+/notifications/'
    _related_object_roles = r'\w+/\d+/object_roles/'
    _related_projects = r'\w+/\d+/projects/'
    _related_roles = r'\w+/\d+/roles/'
    _related_schedule = r'\w+/\d+/schedules/\d+/'
    _related_schedules = r'\w+/\d+/schedules/'
    _related_stdout = r'\w+/\d+/stdout/'
    _related_teams = r'\w+/\d+/teams/'
    _related_users = r'\w+/\d+/users/'
    _related_workflow_job_templates = r'\w+/\d+/workflow_job_templates/'
    _role = r'roles/\d+/'
    _roles = 'roles/'
    _roles_related_teams = r'roles/\d+/teams/'
    _schedule = r'schedules/\d+/'
    _schedules = 'schedules/'
    _schedules_jobs = r'schedules/\d+/jobs/'
    _schedules_preview = 'schedules/preview/'
    _schedules_zoneinfo = 'schedules/zoneinfo/'
    _setting = r'settings/\w+/'
    _settings = 'settings/'
    _settings_all = 'settings/all/'
    _settings_authentication = 'settings/authentication/'
    _settings_azuread_oauth2 = 'settings/azuread-oauth2/'
    _settings_changed = 'settings/changed/'
    _settings_github = 'settings/github/'
    _settings_github_org = 'settings/github-org/'
    _settings_github_team = 'settings/github-team/'
    _settings_google_oauth2 = 'settings/google-oauth2/'
    _settings_jobs = 'settings/jobs/'
    _settings_ldap = 'settings/ldap/'
    _settings_logging = 'settings/logging/'
    _settings_named_url = 'settings/named-url/'
    _settings_radius = 'settings/radius/'
    _settings_saml = 'settings/saml/'
    _settings_system = 'settings/system/'
    _settings_tacacsplus = 'settings/tacacsplus/'
    _settings_ui = 'settings/ui/'
    _settings_user = 'settings/user/'
    _settings_user_defaults = 'settings/user-defaults/'
    _system_job = r'system_jobs/\d+/'
    _system_job_cancel = r'system_jobs/\d+/cancel/'
    _system_job_events = r'system_jobs/\d+/events/'
    _system_job_template = r'system_job_templates/\d+/'
    _system_job_template_jobs = r'system_job_templates/\d+/jobs/'
    _system_job_template_launch = r'system_job_templates/\d+/launch/'
    _system_job_template_schedule = r'system_job_templates/\d+/schedules/\d+/'
    _system_job_template_schedules = r'system_job_templates/\d+/schedules/'
    _system_job_templates = 'system_job_templates/'
    _system_jobs = 'system_jobs/'
    _team = r'teams/\d+/'
    _team_access_list = r'teams/\d+/access_list/'
    _team_credentials = r'teams/\d+/credentials/'
    _team_permission = r'teams/\d+/permissions/\d+/'
    _team_permissions = r'teams/\d+/permissions/'
    _team_users = r'teams/\d+/users/'
    _teams = 'teams/'
    _token = r'tokens/\d+/'
    _tokens = 'tokens/'
    _unified_job_template = r'unified_job_templates/\d+/'
    _unified_job_templates = 'unified_job_templates/'
    _unified_jobs = 'unified_jobs/'
    _user = r'users/\d+/'
    _user_access_list = r'users/\d+/access_list/'
    _user_admin_organizations = r'users/\d+/admin_of_organizations/'
    _user_credentials = r'users/\d+/credentials/'
    _user_organizations = r'users/\d+/organizations/'
    _user_permission = r'users/\d+/permissions/\d+/'
    _user_permissions = r'users/\d+/permissions/'
    _user_teams = r'users/\d+/teams/'
    _users = 'users/'
    _variable_data = r'.*\/variable_data/'
    _workflow_job = r'workflow_jobs/\d+/'
    _workflow_job_cancel = r'workflow_jobs/\d+/cancel/'
    _workflow_job_labels = r'workflow_jobs/\d+/labels/'
    _workflow_job_node = r'workflow_job_nodes/\d+/'
    _workflow_job_node_always_nodes = r'workflow_job_nodes/\d+/always_nodes/'
    _workflow_job_node_failure_nodes = r'workflow_job_nodes/\d+/failure_nodes/'
    _workflow_job_node_success_nodes = r'workflow_job_nodes/\d+/success_nodes/'
    _workflow_job_nodes = 'workflow_job_nodes/'
    _workflow_job_relaunch = r'workflow_jobs/\d+/relaunch/'
    _workflow_job_template = r'workflow_job_templates/\d+/'
    _workflow_job_template_copy = r'workflow_job_templates/\d+/copy/'
    _workflow_job_template_jobs = r'workflow_job_templates/\d+/workflow_jobs/'
    _workflow_job_template_labels = r'workflow_job_templates/\d+/labels/'
    _workflow_job_template_launch = r'workflow_job_templates/\d+/launch/'
    _workflow_job_template_node = r'workflow_job_template_nodes/\d+/'
    _workflow_job_template_node_always_nodes = r'workflow_job_template_nodes/\d+/always_nodes/'
    _workflow_job_template_node_failure_nodes = r'workflow_job_template_nodes/\d+/failure_nodes/'
    _workflow_job_template_node_success_nodes = r'workflow_job_template_nodes/\d+/success_nodes/'
    _workflow_job_template_nodes = 'workflow_job_template_nodes/'
    _workflow_job_template_schedule = r'workflow_job_templates/\d+/schedules/\d+/'
    _workflow_job_template_schedules = r'workflow_job_templates/\d+/schedules/'
    _workflow_job_template_survey_spec = r'workflow_job_templates/\d+/survey_spec/'
    _workflow_job_template_workflow_nodes = r'workflow_job_templates/\d+/workflow_nodes/'
    _workflow_job_templates = 'workflow_job_templates/'
    _workflow_job_workflow_nodes = r'workflow_jobs/\d+/workflow_nodes/'
    _workflow_jobs = 'workflow_jobs/'
    # API roots (not prefixed by __getattr__).
    api = '/api/'
    common = api + r'v\d+/'
    v2 = api + 'v2/'

    def __getattr__(self, resource):
        # Guard against names that would resolve to '___...' after the '_'
        # prefix below (i.e. requests for attributes starting with '__').
        if resource[:3] == '___':
            raise AttributeError('No existing resource: {}'.format(resource))
        # Currently we don't handle anything under:
        #     /api/o/
        #     /api/login/
        #     /api/logout/
        # If/when we do we will probably need to modify this __getattr__ method
        # Also, if we add another API version, this would be handled here
        prefix = 'v2'
        resource = '_' + resource
        # e.g. resources.job_template -> '/api/v2/' + _job_template pattern
        return '{0}{1}'.format(getattr(self, prefix), getattr(self, resource))


# Module-level singleton used throughout awxkit.
resources = Resources()

View File

@ -0,0 +1,5 @@
from distutils.version import LooseVersion
def version_cmp(x, y):
    """Compare two loose version strings.

    :param x: version string, e.g. '3.5.1'
    :param y: version string to compare against
    :returns: -1 if x < y, 0 if equal, 1 if x > y

    Uses LooseVersion's public rich comparisons instead of the private
    ``_cmp`` method the original relied on (an implementation detail that
    is absent in some Python versions).
    """
    vx, vy = LooseVersion(x), LooseVersion(y)
    # Standard cmp-style trick: True/False subtract to 1/0/-1.
    return (vx > vy) - (vx < vy)

View File

@ -0,0 +1,129 @@
import optparse
import json
from awxkit.utils import random_title
def upload_inventory(ansible_runner, nhosts=10, ini=False):
    """Helper to upload inventory script to target host.

    Copies either an executable JSON inventory script or a static .ini
    inventory to /tmp on the target and returns the remote path.
    """
    unique = random_title(non_ascii=False)
    if ini:
        copy_mode = '0644'
        copy_dest = '/tmp/inventory{}.ini'.format(unique)
        copy_content = ini_inventory(nhosts)
    else:
        # Executable wrapper that emits the JSON inventory on stdout.
        copy_mode = '0755'
        copy_dest = '/tmp/inventory{}.sh'.format(unique)
        copy_content = '''#!/bin/bash
cat <<EOF
%s
EOF''' % json_inventory(nhosts)
    # Copy script to test system and confirm every host succeeded.
    contacted = ansible_runner.copy(dest=copy_dest, force=True, mode=copy_mode, content=copy_content)
    for result in contacted.values():
        assert not result.get('failed', False), \
            "Failed to create inventory file: %s" % result
    return copy_dest
def generate_inventory(nhosts=100):
    """Generate a somewhat complex inventory with a configurable number of hosts.

    :param nhosts: number of hosts to generate
    :returns: an Ansible dynamic-inventory dict mapping group name to
        ``{'hosts': [...], 'children': [...], 'vars': {...}}`` plus a
        ``_meta.hostvars`` section with per-host variables.
    """
    inv_list = {
        '_meta': {
            'hostvars': {},
        },
    }
    for n in range(nhosts):
        hostname = 'host-%08d.example.com' % n
        # Divisibility-based groups; an empty string means "not a member".
        group_evens_odds = 'evens.example.com' if n % 2 == 0 else 'odds.example.com'
        group_threes = 'threes.example.com' if n % 3 == 0 else ''
        group_fours = 'fours.example.com' if n % 4 == 0 else ''
        group_fives = 'fives.example.com' if n % 5 == 0 else ''
        group_sixes = 'sixes.example.com' if n % 6 == 0 else ''
        group_sevens = 'sevens.example.com' if n % 7 == 0 else ''
        group_eights = 'eights.example.com' if n % 8 == 0 else ''
        group_nines = 'nines.example.com' if n % 9 == 0 else ''
        group_tens = 'tens.example.com' if n % 10 == 0 else ''
        # Bucket groups by decade/century/millennium.  Use floor division so
        # %d receives an int (on Python 3, `n / 10` is a float and the old
        # code silently relied on %d truncating it).
        group_by_10s = 'group-%07dX.example.com' % (n // 10)
        group_by_100s = 'group-%06dXX.example.com' % (n // 100)
        group_by_1000s = 'group-%05dXXX.example.com' % (n // 1000)
        for group in [group_evens_odds, group_threes, group_fours, group_fives, group_sixes, group_sevens,
                      group_eights, group_nines, group_tens, group_by_10s]:
            if not group:
                continue
            if group in inv_list:
                inv_list[group]['hosts'].append(hostname)
            else:
                inv_list[group] = {'hosts': [hostname], 'children': [], 'vars': {'group_prefix': group.split('.')[0]}}
        # Ensure the bucket groups exist and are chained 1000s -> 100s -> 10s.
        if group_by_1000s not in inv_list:
            inv_list[group_by_1000s] = {'hosts': [], 'children': [],
                                        'vars': {'group_prefix': group_by_1000s.split('.')[0]}}
        if group_by_100s not in inv_list:
            inv_list[group_by_100s] = {'hosts': [], 'children': [],
                                       'vars': {'group_prefix': group_by_100s.split('.')[0]}}
        if group_by_100s not in inv_list[group_by_1000s]['children']:
            inv_list[group_by_1000s]['children'].append(group_by_100s)
        if group_by_10s not in inv_list[group_by_100s]['children']:
            inv_list[group_by_100s]['children'].append(group_by_10s)
        inv_list['_meta']['hostvars'][hostname] = {
            'ansible_user': 'example',
            'ansible_connection': 'local',
            'host_prefix': hostname.split('.')[0],
            'host_id': n,
        }
    return inv_list
def json_inventory(nhosts=10):
    """Return a JSON representation of inventory"""
    inventory = generate_inventory(nhosts)
    return json.dumps(inventory, indent=4)
def ini_inventory(nhosts=10):
    """Return a .INI representation of inventory"""
    inv_list = generate_inventory(nhosts)
    lines = []
    for group, data in inv_list.items():
        if group == '_meta':
            continue
        # [group] host members
        lines.append('[%s]' % group)
        lines.extend(data.get('hosts', []))
        lines.append('')  # newline
        # [group:children] nested groups
        lines.append('[%s:children]' % group)
        lines.extend(data.get('children', []))
        lines.append('')  # newline
        # [group:vars] group variables
        lines.append('[%s:vars]' % group)
        lines.extend('%s=%s' % (k, v) for k, v in data.get('vars', {}).items())
        lines.append('')  # newline
    return '\n'.join(lines)
if __name__ == '__main__':
    # Small CLI for emitting the generated inventory in various formats.
    parser = optparse.OptionParser()
    parser.add_option('--json', action='store_true', dest='json')
    parser.add_option('--ini', action='store_true', dest='ini')
    parser.add_option('--host', dest='hostname', default='')
    parser.add_option('--nhosts', dest='nhosts', action='store', type='int', default=10)
    options, args = parser.parse_args()
    if options.json:
        print(json_inventory(nhosts=options.nhosts))
    elif options.ini:
        print(ini_inventory(nhosts=options.nhosts))
    elif options.hostname:
        # BUG FIX: json_inventory() returns a JSON *string*; subscripting it
        # with '_meta' raised TypeError.  Look up hostvars on the dict and
        # serialize just that host's variables.
        hostvars = generate_inventory(nhosts=options.nhosts)['_meta']['hostvars']
        print(json.dumps(hostvars[options.hostname], indent=4))
    else:
        print(json.dumps({}, indent=4))

114
awxkit/awxkit/awx/utils.py Normal file
View File

@ -0,0 +1,114 @@
import contextlib
from awxkit import api, utils, exceptions
from awxkit.config import config
__all__ = ('as_user', 'check_related', 'delete_all', 'uses_sessions')
def get_all(endpoint):
    """Walk every page reachable from ``endpoint`` and return the combined results."""
    collected = []
    while True:
        # Only ask for a larger page size when the URL doesn't already pin one.
        params = dict() if 'page_size' in endpoint else dict(page_size=200)
        resource = endpoint.get(**params)
        collected.extend(resource.results)
        if not resource.next:
            return collected
        # Follow the server-provided next-page link.
        endpoint = resource.next
def _delete_all(endpoint):
    """Best-effort deletion of every item listed at ``endpoint``."""
    more = True
    while more:
        listing = endpoint.get()
        for item in listing.results:
            try:
                item.delete()
            except Exception as error:
                # Deliberate best-effort: report the failure and keep going.
                print(error)
        more = bool(listing.next)
def delete_all(v):
    """Purge every major resource collection reachable from the v2 root ``v``."""
    endpoints = (v.unified_jobs, v.job_templates, v.workflow_job_templates,
                 v.notification_templates, v.projects, v.inventory, v.hosts,
                 v.inventory_scripts, v.labels, v.credentials, v.teams,
                 v.users, v.organizations, v.schedules)
    for endpoint in endpoints:
        _delete_all(endpoint)
def check_related(resource):
    """Print and GET every related endpoint reachable from ``resource``.

    Walks one level of ``related`` links; for list results, additionally
    walks the ``related`` links of the last result.  Already-visited pages
    are skipped and 404s are tolerated.
    """
    examined = []
    for related in resource.related.values():
        if related in examined:
            continue
        print(related)
        # NOTE(review): if this GET raises NotFound, child_related is left
        # unbound and the 'results' check below would raise NameError on the
        # first iteration — confirm intended behavior.
        with utils.suppress(exceptions.NotFound):
            child_related = related.get()
        examined.append(related)
        if 'results' in child_related and child_related.results:
            # Drill into the last list item to reach its own related links.
            child_related = child_related.results.pop()
        if 'related' in child_related:
            for _related in child_related.related.values():
                if not isinstance(_related, api.page.TentativePage) or _related in examined:
                    continue
                print(_related)
                with utils.suppress(exceptions.NotFound):
                    _related.get()
                examined.append(_related)
@contextlib.contextmanager
def as_user(v, username, password=None):
    """Context manager to allow running tests as an alternative login user.

    :param v: a page object (its ``.connection`` is used) or a Connection.
    :param username: a plain username, an ``api.User`` page (its stored
        password/username are used), or an ``api.OAuth2AccessToken``
        (bearer-token auth is used instead of credentials).
    :param password: password matching ``username`` when it is a plain string.

    On exit, the previous session cookie (or basic auth) is restored.
    """
    access_token = False
    if not isinstance(v, api.client.Connection):
        connection = v.connection
    else:
        connection = v
    if isinstance(username, api.User):
        password = username.password
        username = username.username
    if isinstance(username, api.OAuth2AccessToken):
        access_token = username.token
        username = None
        password = None
    try:
        if config.use_sessions:
            session_id = None
            domain = None
            # requests doesn't provide interface for retrieving
            # domain segregated cookies other than iterating.
            for cookie in connection.session.cookies:
                if cookie.name == 'sessionid':
                    session_id = cookie.value
                    domain = cookie.domain
                    break
            if session_id:
                # Drop the current session so login() establishes a new one.
                del connection.session.cookies['sessionid']
            if access_token:
                kwargs = dict(token=access_token, auth_type='Bearer')
            else:
                kwargs = connection.get_session_requirements()
        else:
            previous_auth = connection.session.auth
            kwargs = dict()
        connection.login(username, password, **kwargs)
        yield
    finally:
        if config.use_sessions:
            if access_token:
                connection.session.auth = None
            # NOTE(review): assumes a 'sessionid' cookie exists here after the
            # nested login — confirm this cannot raise KeyError.
            del connection.session.cookies['sessionid']
            if session_id:
                # Restore the original user's session cookie.
                connection.session.cookies.set('sessionid', session_id, domain=domain)
        else:
            connection.session.auth = previous_auth
def uses_sessions(connection):
    """Return True when /api/login/ answers 200 (session auth is supported)."""
    response = connection.get('/api/login/')
    return 200 == response.status_code

View File

@ -0,0 +1,55 @@
import json
import sys
import traceback
import yaml
from requests.exceptions import ConnectionError, SSLError
from .client import CLI
from awxkit.exceptions import Unauthorized, Common
from awxkit.cli.utils import cprint
def run(stdout=sys.stdout, stderr=sys.stderr, argv=None):
    """Entry point for the awx CLI: parse args, connect, dispatch the resource.

    :param stdout: stream for normal output
    :param stderr: stream for error output
    :param argv: argument vector; defaults to sys.argv.  (Fixed: the old
        ``argv=[]`` mutable default is an anti-pattern; behavior is the same
        since both falsy values fall through to sys.argv.)

    Exits with status 1 on connection, authentication, API, or unexpected
    errors, printing a diagnostic to ``stderr``.
    """
    cli = CLI(stdout=stdout, stderr=stderr)
    try:
        cli.parse_args(argv or sys.argv)
        cli.connect()
        cli.parse_resource()
    except ConnectionError as e:
        cli.parser.print_help()
        msg = (
            '\nThere was a network error of some kind trying to reach '
            '{}.\nYou might need to specify (or double-check) '
            '--conf.host'.format(cli.get_config('host'))
        )
        if isinstance(e, SSLError):
            msg = (
                '\nCould not establish a secure connection. '
                '\nPlease add your server to your certificate authority.'
                '\nYou can also run this command by specifying '
                '-k or --conf.insecure'
            )
        cprint(msg + '\n', 'red', file=stderr)
        cprint(e, 'red', file=stderr)
        sys.exit(1)
    except Unauthorized as e:
        cli.parser.print_help()
        msg = '\nValid credentials were not provided.\n$ awx login --help'
        cprint(msg + '\n', 'red', file=stderr)
        if cli.verbose:
            cprint(e.__class__, 'red', file=stderr)
        sys.exit(1)
    except Common as e:
        if cli.verbose:
            # BUG FIX: the traceback was printed with sys.stderr as a second
            # *positional* argument (sending its repr to stdout); route the
            # output to stderr instead.
            print(traceback.format_exc(), file=sys.stderr)
        # API errors are structured; emit them in the requested format.
        if cli.get_config('format') == 'json':
            json.dump(e.msg, sys.stdout)
        elif cli.get_config('format') == 'yaml':
            sys.stdout.write(yaml.dump(e.msg))
        sys.exit(1)
    except Exception as e:
        if cli.verbose:
            e = traceback.format_exc()
        cprint(e, 'red', file=stderr)
        sys.exit(1)

289
awxkit/awxkit/cli/client.py Executable file
View File

@ -0,0 +1,289 @@
import logging
import os
import pkg_resources
import sys
from .custom import handle_custom_actions
from .format import (add_authentication_arguments,
add_output_formatting_arguments,
FORMATTERS, format_response)
from .options import ResourceOptionsParser
from .resource import parse_resource, is_control_resource
from awxkit import api, config, utils, exceptions, WSClient # noqa
from awxkit.cli.utils import HelpfulArgumentParser, cprint, disable_color
from awxkit.awx.utils import uses_sessions # noqa
__version__ = pkg_resources.get_distribution('awxkit').version
class CLI(object):
"""A programmatic HTTP OPTIONS-based CLI for AWX/Ansible Tower.
This CLI works by:
- Configuring CLI options via Python's argparse (authentication, formatting
options, etc...)
- Discovering AWX API endpoints at /api/v2/ and mapping them to _resources_
- Discovering HTTP OPTIONS _actions_ on resources to determine how
resources can be interacted with (e.g., list, modify, delete, etc...)
- Parsing sys.argv to map CLI arguments and flags to
awxkit SDK calls
~ awx <resource> <action> --parameters
e.g.,
~ awx users list -v
GET /api/ HTTP/1.1" 200
GET /api/v2/ HTTP/1.1" 200
POST /api/login/ HTTP/1.1" 302
OPTIONS /api/v2/users/ HTTP/1.1" 200
GET /api/v2/users/
{
"count": 2,
"results": [
...
Interacting with this class generally involves a few critical methods:
1. parse_args() - this method is used to configure and parse global CLI
flags, such as formatting flags, and arguments which represent client
configuration (including authentication details)
2. connect() - once configuration is parsed, this method fetches /api/v2/
and itemizes the list of supported resources
3. parse_resource() - attempts to parse the <resource> specified on the
command line (e.g., users, organizations), including logic
for discovering available actions for endpoints using HTTP OPTIONS
requests
At multiple stages of this process, an internal argparse.ArgumentParser()
is progressively built and parsed based on sys.argv, (meaning, that if you
supply invalid or incomplete arguments, argparse will print the usage
message and an explanation of what you got wrong).
"""
subparsers = {}
original_action = None
def __init__(self, stdout=sys.stdout, stderr=sys.stderr):
    """Capture output streams so callers (and tests) can substitute their own."""
    self.stdout, self.stderr = stdout, stderr
def get_config(self, key):
    """Helper method for looking up the value of a --conf.xyz flag"""
    attr_name = 'conf.{}'.format(key)
    return getattr(self.args, attr_name)
@property
def help(self):
    """True when -h/--help appears anywhere on the command line."""
    return any(flag in self.argv for flag in ('--help', '-h'))
def authenticate(self):
    """Configure the current session (or OAuth2.0 token)"""
    token = self.get_config('token')
    if not token:
        # No token supplied: fall back to session-based authentication.
        config.use_sessions = True
        self.root.load_session().get()
    else:
        self.root.connection.login(
            None, None, token=token, auth_type='Bearer'
        )
def connect(self):
    """Fetch top-level resources from /api/v2"""
    config.base_url = self.get_config('host')
    config.client_connection_attempts = 1
    # Trust the server's certificate unless --conf.insecure was given.
    config.assume_untrusted = bool(self.get_config('insecure'))
    config.credentials = utils.PseudoNamespace({
        'default': {
            'username': self.get_config('username'),
            'password': self.get_config('password'),
        }
    })
    _, remainder = self.parser.parse_known_args()
    if remainder and remainder[0] == 'config':
        # the config command is special; it doesn't require
        # API connectivity
        return
    # ...otherwise, set up a awxkit connection because we're
    # likely about to do some requests to /api/v2/
    self.root = api.Api()
    self.fetch_version_root()
def fetch_version_root(self):
    """Cache the /api/v2/ endpoint page on ``self.v2``."""
    api_root = self.root.get()
    self.v2 = api_root.available_versions.v2.get()
def parse_resource(self, skip_deprecated=False):
    """Attempt to parse the <resource> (e.g., jobs) specified on the CLI

    If a valid resource is discovered, the user will be authenticated
    (either via an OAuth2.0 token or session-based auth) and the remaining
    CLI arguments will be processed (to determine the requested action
    e.g., list, create, delete)

    :param skip_deprecated: when False (the default), deprecated resource
                            names from the open source tower-cli project
                            will be allowed
    """
    self.resource = parse_resource(self, skip_deprecated=skip_deprecated)
    if self.resource:
        # a resource (or a custom command) was specified; log in and
        # dispatch the requested action against it
        self.authenticate()
        resource = getattr(self.v2, self.resource)
        if is_control_resource(self.resource):
            # control resources are special endpoints that you can only
            # do an HTTP GET to, and which return plain JSON metadata
            # examples are `/api/v2/ping/`, `/api/v2/config/`, etc...
            self.method = 'get'
            response = getattr(resource, self.method)()
        else:
            response = self.parse_action(resource)
        formatted = format_response(
            response,
            fmt=self.get_config('format'),
            filter=self.get_config('filter'),
            changed=self.original_action in ('modify', 'create')
        )
        if formatted:
            print(formatted, file=self.stdout)
    else:
        # no resource was given on the command line; print top-level usage
        self.parser.print_help()
def parse_action(self, page, from_sphinx=False):
    """Perform an HTTP OPTIONS request

    This method performs an HTTP OPTIONS request to build a list of valid
    actions, and (if provided) runs the code for the action specified on
    the CLI

    :param page: a awxkit.api.pages.TentativePage object representing the
                 top-level resource in question (e.g., /api/v2/jobs)
    :param from_sphinx: a flag specified by our sphinx plugin, which allows
                        us to walk API OPTIONS using this function
                        _without_ triggering a SystemExit (argparse's
                        behavior if required arguments are missing)
    """
    subparsers = self.subparsers[self.resource].add_subparsers(
        dest='action',
        metavar='action'
    )
    subparsers.required = True
    # parse the action from OPTIONS
    parser = ResourceOptionsParser(page, self.resource, subparsers)
    if from_sphinx:
        # Our Sphinx plugin runs `parse_action` for *every* available
        # resource + action in the API so that it can generate usage
        # strings for automatic doc generation.
        #
        # Because of this behavior, we want to silently ignore the
        # `SystemExit` argparse will raise when you're missing required
        # positional arguments (which some actions have).
        try:
            self.parser.parse_known_args(self.argv)[0]
        except SystemExit:
            pass
    else:
        self.parser.parse_known_args()[0]
    # parse any action arguments
    if self.resource != 'settings':
        # attach --<field> flags for the basic actions, discovered via
        # the OPTIONS metadata (settings gets custom handling instead)
        for method in ('list', 'modify', 'create'):
            parser.build_query_arguments(
                method,
                'GET' if method == 'list' else 'POST'
            )
    if from_sphinx:
        parsed, extra = self.parser.parse_known_args(self.argv)
    else:
        parsed, extra = self.parser.parse_known_args()
    if extra and self.verbose:
        # If extraneous arguments were provided, warn the user
        cprint('{}: unrecognized arguments: {}'.format(
            self.parser.prog,
            ' '.join(extra)
        ), 'yellow', file=self.stdout)
    # build a dictionary of all of the _valid_ flags specified on the
    # command line so we can pass them on to the underlying awxkit call
    # we ignore special global flags like `--help` and `--conf.xyz`, and
    # the positional resource argument (i.e., "jobs")
    # everything else is a flag used as a query argument for the HTTP
    # request we'll make (e.g., --username="Joe", --verbosity=3)
    parsed = parsed.__dict__
    parsed = dict(
        (k, v) for k, v in parsed.items()
        if (
            v is not None and
            k not in ('help', 'resource') and
            not k.startswith('conf.')
        )
    )
    # if `id` is one of the arguments, it's a detail view
    if 'id' in parsed:
        page.endpoint += '{}/'.format(str(parsed.pop('id')))
    # determine the awxkit method to call
    action = self.original_action = parsed.pop('action')
    # custom actions (e.g., job_templates launch) swap in their own page
    # wrapper and are invoked via a 'perform' pseudo-action
    page, action = handle_custom_actions(
        self.resource, action, page
    )
    self.method = {
        'list': 'get',
        'modify': 'patch',
    }.get(action, action)
    if self.method == 'patch' and not parsed:
        # If we're doing an HTTP PATCH with an empty payload,
        # just print the help message (it's a no-op anyways)
        parser.parser.choices['modify'].print_help()
        return
    if self.help:
        # If --help is specified on a subarg parser, bail out
        # and print its help text
        parser.parser.choices[self.original_action].print_help()
        return
    if self.original_action == 'create':
        # create maps to an HTTP POST of the collected flags
        return page.post(parsed)
    return getattr(page, self.method)(**parsed)
def parse_args(self, argv, env=None):
    """Configure the global parser.ArgumentParser object and apply
    global flags (such as --help, authentication, and formatting arguments)
    """
    env = env or os.environ
    self.argv = argv
    self.parser = HelpfulArgumentParser(add_help=False)
    self.parser.add_argument(
        '--help',
        action='store_true',
        help='prints usage information for the awx tool',
    )
    self.parser.add_argument(
        '--version',
        dest='conf.version',
        action='version',
        help='display awx CLI version',
        version=__version__
    )
    add_authentication_arguments(self.parser, env)
    add_output_formatting_arguments(self.parser, env)
    # parse_known_args: a <resource> and <action> may follow, and their
    # subparsers have not been registered yet at this point
    self.args = self.parser.parse_known_args(self.argv)[0]
    self.verbose = self.get_config('verbose')
    if self.verbose:
        logging.basicConfig(level='DEBUG')
    self.color = self.get_config('color')
    if not self.color:
        disable_color()
    fmt = self.get_config('format')
    if fmt not in FORMATTERS.keys():
        # fail fast on an unknown output format
        self.parser.error('No formatter %s available.' % (fmt))

216
awxkit/awxkit/cli/custom.py Normal file
View File

@ -0,0 +1,216 @@
from .stdout import monitor, monitor_workflow
from .utils import CustomRegistryMeta, color_enabled
def handle_custom_actions(resource, action, page):
    """Swap in a registered CustomAction for (resource, action), if any.

    Returns a (page, action) pair; when a custom action is registered
    for this combination, the page is wrapped by the CustomAction class
    and the action becomes 'perform'.
    """
    registry_key = ' '.join([resource, action])
    if registry_key in CustomAction.registry:
        return CustomAction.registry[registry_key](page), 'perform'
    return page, action
class CustomActionRegistryMeta(CustomRegistryMeta):
    """Metaclass that registers CustomAction subclasses.

    The registry key is "<resource> <action>" (e.g., "job_templates launch"),
    which is the key handle_custom_actions() uses for lookups.
    """

    @property
    def name(self):
        # combine the subclass' resource and action into the registry key
        return ' '.join([self.resource, self.action])
class CustomAction(object, metaclass=CustomActionRegistryMeta):
    """Base class for defining a custom action for a resource."""

    def __init__(self, page):
        # page: the awxkit page object for the target resource endpoint
        self.page = page

    @property
    def action(self):
        # the action verb (e.g., 'launch'); must be defined by subclasses
        raise NotImplementedError()

    @property
    def resource(self):
        # the resource name (e.g., 'job_templates'); defined by subclasses
        raise NotImplementedError()

    @property
    def perform(self):
        # the callable implementing the action; defined by subclasses
        raise NotImplementedError()

    def add_arguments(self, parser):
        # hook for subclasses to append argparse arguments; no-op here
        pass
class Launchable(object):
    """Mixin for custom actions that launch a job and can monitor it.

    Adds --monitor/--timeout/--wait flags and a default perform() that
    POSTs to the related endpoint named by self.action.
    """

    def add_arguments(self, parser, with_pk=True):
        # :param with_pk: include a positional `id` argument (False for
        #                 actions that POST to the list endpoint instead)
        if with_pk:
            parser.choices[self.action].add_argument('id', type=int, help='')
        parser.choices[self.action].add_argument(
            '--monitor', action='store_true',
            help='If set, prints stdout of the launched job until it finishes.'
        )
        parser.choices[self.action].add_argument(
            '--timeout', type=int,
            help='If set with --monitor or --wait, time out waiting on job completion.'  # noqa
        )
        parser.choices[self.action].add_argument(
            '--wait', action='store_true',
            help='If set, waits until the launched job finishes.'
        )

    def monitor(self, response, **kwargs):
        # workflow jobs are watched with a dedicated monitor implementation
        mon = monitor_workflow if response.type == 'workflow_job' else monitor
        if kwargs.get('monitor') or kwargs.get('wait'):
            # --wait blocks silently; only --monitor streams stdout
            status = mon(
                response,
                self.page.connection.session,
                print_stdout=not kwargs.get('wait'),
                timeout=kwargs.get('timeout'),
            )
            if status:
                # reflect the final job status in the returned JSON
                response.json['status'] = status
        return response

    def perform(self, **kwargs):
        # POST to the related action endpoint (e.g., .../N/launch/) and
        # optionally monitor the resulting job
        response = self.page.get().related.get(self.action).post()
        self.monitor(response, **kwargs)
        return response
class JobTemplateLaunch(Launchable, CustomAction):
    """`job_templates launch`: launch a job via the related endpoint."""
    action = 'launch'
    resource = 'job_templates'


class ProjectUpdate(Launchable, CustomAction):
    """`projects update`: start an SCM update via the related endpoint."""
    action = 'update'
    resource = 'projects'
class ProjectCreate(CustomAction):
    """Custom `projects create` that can wait on the initial SCM update."""
    action = 'create'
    resource = 'projects'

    def add_arguments(self, parser):
        parser.choices[self.action].add_argument(
            '--monitor', action='store_true',
            help=('If set, prints stdout of the project update until '
                  'it finishes.')
        )
        parser.choices[self.action].add_argument(
            '--wait', action='store_true',
            help='If set, waits until the new project has updated.'
        )

    def post(self, kwargs):
        # pop our flags so they are not sent as part of the POST payload
        should_monitor = kwargs.pop('monitor', False)
        wait = kwargs.pop('wait', False)
        response = self.page.post(kwargs)
        if should_monitor or wait:
            # fetch the newest project update for the new project and
            # watch it (creating a project kicks off an SCM update)
            update = response.related.project_updates.get(
                order_by='-created'
            ).results[0]
            monitor(
                update,
                self.page.connection.session,
                print_stdout=not wait,
            )
        return response
class InventoryUpdate(Launchable, CustomAction):
    """`inventory_sources update`: start a sync via the related endpoint."""
    action = 'update'
    resource = 'inventory_sources'
class AdhocCommandLaunch(Launchable, CustomAction):
    """Custom `ad_hoc_commands create`: the POST both creates and runs."""
    action = 'create'
    resource = 'ad_hoc_commands'

    def add_arguments(self, parser):
        # no positional pk: creation POSTs to the list endpoint
        Launchable.add_arguments(self, parser, with_pk=False)

    def perform(self, **kwargs):
        # separate the monitoring flags from the POST payload
        monitor_kwargs = {
            'monitor': kwargs.pop('monitor', False),
            'wait': kwargs.pop('wait', False),
        }
        response = self.page.post(kwargs)
        self.monitor(response, **monitor_kwargs)
        return response

    def post(self, kwargs):
        return self.perform(**kwargs)
class WorkflowLaunch(Launchable, CustomAction):
    """`workflow_job_templates launch`: launch via the related endpoint."""
    action = 'launch'
    resource = 'workflow_job_templates'
class HasStdout(object):
    """Mixin adding a `stdout` action that downloads raw job output."""
    action = 'stdout'

    def add_arguments(self, parser):
        parser.choices['stdout'].add_argument('id', type=int, help='')

    def perform(self):
        # download ANSI-colorized output only when CLI color is enabled
        fmt = 'txt_download'
        if color_enabled():
            fmt = 'ansi_download'
        return self.page.connection.get(
            self.page.get().related.stdout,
            query_parameters=dict(format=fmt)
        ).content.decode('utf-8')
class JobStdout(HasStdout, CustomAction):
    """`jobs stdout`: print the raw output of a job."""
    resource = 'jobs'


class ProjectUpdateStdout(HasStdout, CustomAction):
    """`project_updates stdout`: print the raw output of a project update."""
    resource = 'project_updates'


class InventoryUpdateStdout(HasStdout, CustomAction):
    """`inventory_updates stdout`: print the output of an inventory update."""
    resource = 'inventory_updates'


class AdhocCommandStdout(HasStdout, CustomAction):
    """`ad_hoc_commands stdout`: print the output of an ad hoc command."""
    resource = 'ad_hoc_commands'
class SettingsList(CustomAction):
    """Custom `settings list` that GETs a specific settings slug."""
    action = 'list'
    resource = 'settings'

    def add_arguments(self, parser):
        parser.choices['list'].add_argument(
            '--slug', help='optional setting category/slug', default='all'
        )

    def perform(self, slug):
        # append the slug, e.g., /api/v2/settings/ -> /api/v2/settings/all/
        self.page.endpoint = self.page.endpoint + '{}/'.format(slug)
        return self.page.get()
class SettingsModify(CustomAction):
    """Custom `settings modify` taking positional key and value arguments."""
    action = 'modify'
    resource = 'settings'

    def add_arguments(self, parser):
        # discover the valid setting names with an OPTIONS request against
        # the .../all/ endpoint so argparse can validate the key choice
        options = self.page.__class__(
            self.page.endpoint + 'all/', self.page.connection
        ).options()
        parser.choices['modify'].add_argument(
            'key',
            choices=sorted(options['actions']['PUT'].keys()),
            metavar='key',
            help=''
        )
        parser.choices['modify'].add_argument('value', help='')

    def perform(self, key, value):
        self.page.endpoint = self.page.endpoint + 'all/'
        resp = self.page.patch(**{key: value})
        # echo back only the modified key and its stored value
        return resp.from_json({'key': key, 'value': resp[key]})

View File

@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

View File

@ -0,0 +1,5 @@
To build the docs, spin up a real AWX/Tower server, `pip install sphinx sphinxcontrib-autoprogram`, and run:
~ TOWER_HOST=https://awx.example.org TOWER_USERNAME=example TOWER_PASSWORD=secret make clean html
~ cd build/html/ && python -m http.server
Serving HTTP on 0.0.0.0 port 8000 (http://0.0.0.0:8000/) ...

View File

@ -0,0 +1,35 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd

View File

@ -0,0 +1,69 @@
.. _authentication:
Authentication
==============
Generating a Personal Access Token
----------------------------------
The preferred mechanism for authenticating with AWX and |RHAT| is by generating and storing an OAuth2.0 token. Tokens can be scoped for read/write permissions, are easily revoked, and are more suited to third party tooling integration than session-based authentication.
|prog| provides a simple login command for generating a personal access token from your username and password.
.. code:: bash
TOWER_HOST=https://awx.example.org \
TOWER_USERNAME=alice \
TOWER_PASSWORD=secret \
awx login
As a convenience, the ``awx login`` command prints a shell-formatted token
value:
.. code:: bash
export TOWER_TOKEN=6E5SXhld7AMOhpRveZsLJQsfs9VS8U
By ingesting this token, you can run subsequent CLI commands without having to
specify your username and password each time:
.. code:: bash
export TOWER_HOST=https://awx.example.org
$(TOWER_USERNAME=alice TOWER_PASSWORD=secret awx login)
awx config
Working with OAuth2.0 Applications
----------------------------------
AWX and |RHAT| allow you to configure OAuth2.0 applications scoped to specific
organizations. To generate an application token (instead of a personal access
token), specify the **Client ID** and **Client Secret** generated when the
application was created.
.. code:: bash
TOWER_USERNAME=alice TOWER_PASSWORD=secret awx login \
--conf.client_id <value> --conf.client_secret <value>
OAuth2.0 Token Scoping
----------------------
By default, tokens created with ``awx login`` are write-scoped. To generate
a read-only token, specify ``--scope read``:
.. code:: bash
TOWER_USERNAME=alice TOWER_PASSWORD=secret \
awx login --conf.scope read
Session Authentication
----------------------
If you do not want or need to generate a long-lived token, |prog| allows you to
specify your username and password on every invocation:
.. code:: bash
TOWER_USERNAME=alice TOWER_PASSWORD=secret awx jobs list
awx --conf.username alice --conf.password secret jobs list

View File

@ -0,0 +1,59 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'AWX CLI'
copyright = '2019, Ansible by Red Hat'
author = 'Ansible by Red Hat'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'awxkit.cli.sphinx'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'classic'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
rst_epilog = '''
.. |prog| replace:: awx
.. |at| replace:: Ansible Tower
.. |RHAT| replace:: Red Hat Ansible Tower
'''

View File

@ -0,0 +1,60 @@
Usage Examples
==============
Verifying CLI Configuration
---------------------------
To confirm that you've properly configured ``awx`` to point at the correct
AWX/|RHAT| host, and that your authentication credentials are correct, run:
.. code:: bash
awx config
.. note:: For help configuring authentication settings with the awx CLI, see :ref:`authentication`.
Printing the History of a Particular Job
----------------------------------------
To print a table containing the recent history of any jobs named ``Example Job Template``:
.. code:: bash
awx jobs list --all --name 'Example Job Template' \
-f human --filter 'name,created,status'
Creating and Launching a Job Template
-------------------------------------
Assuming you have an existing Inventory named ``Demo Inventory``, here's how
you might set up a new project from a GitHub repository, and run (and monitor
the output of) a playbook from that repository:
.. code:: bash
export TOWER_COLOR=f
INVENTORY_ID=$(awx inventory list --name 'Demo Inventory' -f jq --filter '.results[0].id')
PROJECT_ID=$(awx projects create --wait \
--organization 1 --name='Example Project' \
--scm_type git --scm_url 'https://github.com/ansible/ansible-tower-samples' \
-f jq --filter '.id')
TEMPLATE_ID=$(awx job_templates create \
--name='Example Job Template' --project $PROJECT_ID \
--playbook hello_world.yml --inventory $INVENTORY_ID \
-f jq --filter '.id')
awx job_templates launch $TEMPLATE_ID --monitor
Importing an SSH Key
--------------------
DOCUMENT ME
Creating a Job Template with Extra Vars
---------------------------------------
DOCUMENT ME
Granting Membership to a Team or Organization
---------------------------------------------
DOCUMENT ME

View File

@ -0,0 +1,36 @@
.. AWX CLI documentation master file, created by
sphinx-quickstart on Mon Jul 22 11:39:10 2019.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
AWX Command Line Interface
==========================
|prog| is the official command-line client for AWX and |RHAT|. It:
* Uses naming and structure consistent with the AWX HTTP API
* Provides consistent output formats with optional machine-parsable formats
* To the extent possible, auto-detects API versions, available endpoints, and
feature support across multiple versions of AWX and |RHAT|.
Potential uses include:
* Configuring and launching jobs/playbooks
* Checking on the status and output of job runs
* Managing objects like organizations, users, teams, etc...
.. toctree::
:maxdepth: 3
usage
authentication
output
examples
reference
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

View File

@ -0,0 +1,59 @@
.. _formatting:
Output Formatting
=================
By default, awx prints valid JSON for successful commands. The ``-f`` (or
``--conf.format``) global flag can be used to specify alternative output
formats.
YAML Formatting
---------------
To print results in YAML, specify ``-f yaml``:
.. code:: bash
awx jobs list -f yaml
Human-Readable (Tabular) Formatting
-----------------------------------
|prog| provides *optional* support for printing results in a human-readable
tabular format, but it requires an additional Python software dependency,
``tabulate``.
To use ``-f human``, you must install the optional dependency via ``pip install tabulate``.
.. code:: bash
awx jobs list -f human
awx jobs list -f human --filter name,created,status
Custom Formatting with jq
-------------------------
|prog| provides *optional* support for filtering results using the ``jq`` JSON
processor, but it requires an additional Python software dependency,
``jq``.
To use ``-f jq``, you must install the optional dependency via ``pip
install jq``. Note that some platforms may require additional programs to
build ``jq`` from source (like ``libtool``). See https://pypi.org/project/jq/ for instructions.
.. code:: bash
awx jobs list \
-f jq --filter '.results[] | .name + " is " + .status'
For details on ``jq`` filtering usage, see the ``jq`` manual at https://stedolan.github.io/jq/
Colorized Output
----------------
By default, |prog| prints colorized output using ANSI color codes. To disable
this functionality, specify ``--conf.color f`` or set the environment variable
``TOWER_COLOR=f``.

View File

@ -0,0 +1,3 @@
.. autoprogram:: awxkit.cli.sphinx:parser
:prog: awx
:maxdepth: 3

View File

@ -0,0 +1,99 @@
Basic Usage
===========
Installation
------------
The awx CLI is available as part of the ``awxkit`` package on PyPI.
The preferred way to install is through pip:
.. code:: bash
pip install awxkit
Synopsis
--------
|prog| commands follow a simple format:
.. code:: bash
awx [<global-options>] <resource> <action> [<arguments>]
awx --help
The ``resource`` is a type of object within AWX (a noun), such as ``users`` or ``organizations``.
The ``action`` is the thing you want to do (a verb). Resources generally have a base set of actions (``get``, ``list``, ``create``, ``modify``, and ``delete``), and have options corresponding to fields on the object in AWX. Some resources have special actions, like ``job_templates launch``.
Getting Started
---------------
Using |prog| requires some initial configuration. Here is a simple example for interacting with an AWX or |RHAT| server:
.. code:: bash
awx --conf.host https://awx.example.org \
--conf.username joe --conf.password secret \
--conf.insecure \
users list
There are multiple ways to configure and authenticate with an AWX or |RHAT| server. For more details, see :ref:`authentication`.
By default, |prog| prints valid JSON for successful commands. Certain commands (such as those for printing job stdout) print raw text and do not allow for custom formatting. For details on customizing |prog|'s output format, see :ref:`formatting`.
Resources and Actions
---------------------
To get a list of available resources:
.. code:: bash
awx --conf.host https://awx.example.org --help
To get a description of a specific resource, and list its available actions (and their arguments):
.. code:: bash
awx --conf.host https://awx.example.org users --help
awx --conf.host https://awx.example.org users create --help
.. note:: The list of resources and actions may vary based on context. For
   example, certain resources may not be available based on role-based access
   control (e.g., if you do not have permission to launch certain Job Templates,
   `launch` may not show up as an action for certain `job_templates` objects).
Global Options
--------------
|prog| accepts global options that control overall behavior. In addition to CLI flags, most global options have a corresponding environment variable that may be used to set the value. If both are provided, the command line option takes priority.
A few of the most important ones are:
``-h, --help``
Prints usage information for the |prog| tool
``-v, --verbose``
prints debug-level logs, including HTTP(s) requests made
``-f, --conf.format``
used to specify a custom output format (the default is json)
``--conf.host, TOWER_HOST``
the full URL of the AWX/|RHAT| host (i.e., https://my.awx.example.org)
``-k, --conf.insecure, TOWER_VERIFY_SSL``
allows insecure server connections when using SSL
``--conf.username, TOWER_USERNAME``
the AWX username to use for authentication
``--conf.password, TOWER_PASSWORD``
the AWX password to use for authentication
``--conf.token, TOWER_TOKEN``
an OAuth2.0 token to use for authentication

166
awxkit/awxkit/cli/format.py Normal file
View File

@ -0,0 +1,166 @@
import json
from distutils.util import strtobool
import yaml
from awxkit.cli.utils import colored
def add_authentication_arguments(parser, env):
    """Append the global authentication flags to *parser*.

    Defaults are drawn from TOWER_* environment variables in *env* so the
    flags may be omitted on the command line.
    """
    auth = parser.add_argument_group('authentication')
    auth.add_argument(
        '--conf.host',
        default=env.get('TOWER_HOST', 'https://127.0.0.1:443'),
        metavar='https://example.awx.org',
    )
    auth.add_argument(
        '--conf.token',
        default=env.get('TOWER_TOKEN', ''),
        help='an OAuth2.0 token (get one by using `awx login`)',
        metavar='TEXT',
    )
    auth.add_argument(
        '--conf.username',
        default=env.get('TOWER_USERNAME', 'admin'),
        metavar='TEXT',
    )
    auth.add_argument(
        '--conf.password',
        default=env.get('TOWER_PASSWORD', 'password'),
        metavar='TEXT',
    )
    auth.add_argument(
        '-k',
        '--conf.insecure',
        help='Allow insecure server connections when using SSL',
        # TOWER_VERIFY_SSL=true means *secure*, hence the negation here
        default=not strtobool(env.get('TOWER_VERIFY_SSL', 'True')),
        action='store_true',
    )
def add_output_formatting_arguments(parser, env):
    """Append the global output-formatting flags to *parser*.

    Defaults are drawn from TOWER_* environment variables in *env*.
    """
    formatting = parser.add_argument_group('output formatting')
    formatting.add_argument(
        '-f',
        '--conf.format',
        dest='conf.format',
        # valid choices are the keys of the FORMATTERS registry below
        choices=FORMATTERS.keys(),
        default=env.get('TOWER_FORMAT', 'json'),
        help=(
            'specify an output format'
        ),
    )
    formatting.add_argument(
        '--filter',
        dest='conf.filter',
        default='.',
        metavar='TEXT',
        help=(
            'specify an output filter (only valid with jq or human format)'
        ),
    )
    formatting.add_argument(
        '--conf.color',
        metavar='BOOLEAN',
        help='Display colorized output. Defaults to True',
        default=env.get('TOWER_COLOR', 't'), type=strtobool,
    )
    formatting.add_argument(
        '-v',
        '--verbose',
        dest='conf.verbose',
        help='print debug-level logs, including requests made',
        default=strtobool(env.get('TOWER_VERBOSE', 'f')),
        action="store_true"
    )
def format_response(response, fmt='json', filter='.', changed=False):
    """Serialize an API response with the requested formatter.

    :param response: an awxkit page object (or a raw string) to render
    :param fmt: a key into the FORMATTERS registry (json/yaml/jq/human)
    :param filter: a formatter-specific output filter expression
    :param changed: when True, colorize the output green (create/modify)
    """
    if response is None:
        # an empty body (HTTP 204) renders as nothing at all
        return
    if isinstance(response, str):
        # raw text (e.g., job stdout) is passed through untouched
        return response
    # normalize detail views into a one-element list so the noisy
    # 'related' link dictionaries can be stripped uniformly
    pages = response.results if 'results' in response.__dict__ else [response]
    for page in pages:
        page.json.pop('related', None)
    rendered = FORMATTERS[fmt](response.json, filter)
    return colored(rendered, 'green') if changed else rendered
def format_jq(output, fmt):
try:
import jq
except ImportError:
if fmt == '.':
return output
raise ImportError(
'To use `-f jq`, you must install the optional jq dependency.\n'
'`pip install jq`\n',
'Note that some platforms may require additional programs to '
'build jq from source (like `libtool`).\n'
'See https://pypi.org/project/jq/ for instructions.'
)
results = []
for x in jq.jq(fmt).transform(output, multiple_output=True):
if x not in (None, ''):
if isinstance(x, str):
results.append(x)
else:
results.append(json.dumps(x))
return '\n'.join(results)
def format_json(output, fmt):
    # the fmt/filter argument is unused for JSON output; results are
    # always pretty-printed with a fixed indent
    return json.dumps(output, indent=5)
def format_yaml(output, fmt):
    # round-trip through JSON first to normalize the data into plain
    # dicts/lists before handing it to yaml.dump; fmt is unused here
    output = json.loads(json.dumps(output))
    return yaml.dump(
        output,
        default_flow_style=False
    )
def format_human(output, fmt):
    """Render results as a human-readable reStructuredText table.

    Requires the optional `tabulate` dependency.  *fmt* is a
    comma-separated list of column names; the default filter ('.')
    falls back to the 'id,name' columns.
    """
    if fmt == '.':
        fmt = 'id,name'
    column_names = fmt.split(',')
    try:
        from tabulate import tabulate
    except ImportError:
        raise ImportError(
            'To use `-f human`, you must install the optional tabulate '
            'dependency.\n`pip install tabulate`',
        )
    # list endpoints carry a 'count' key and nest records under
    # 'results'; detail views are wrapped into a one-element list
    records = output['results'] if 'count' in output else [output]
    rows = [
        {column: record.get(column, '') for column in column_names}
        for record in records
    ]
    return tabulate(rows, headers='keys', tablefmt='rst')
# Maps the value of the -f/--conf.format flag to its formatter callable;
# each formatter is called as formatter(output, filter) and returns a
# string (format_jq may return the raw object when jq is unavailable and
# the filter is '.').
FORMATTERS = {
    'json': format_json,
    'yaml': format_yaml,
    'jq': format_jq,
    'human': format_human
}

View File

@ -0,0 +1,112 @@
from distutils.util import strtobool
from .custom import CustomAction
from .format import add_output_formatting_arguments
class ResourceOptionsParser(object):

    def __init__(self, page, resource, parser):
        """Used to submit an OPTIONS request to the appropriate endpoint
        and apply the appropriate argparse arguments

        :param page: a awxkit.api.pages.page.TentativePage instance
        :param resource: a string containing the resource (e.g., jobs)
        :param parser: an argparse.ArgumentParser object to append new args to
        """
        self.page = page
        self.resource = resource
        self.parser = parser
        # the OPTIONS metadata for this endpoint; fall back to a GET-only
        # action map if the response carries no 'actions' key
        self.options = getattr(
            self.page.options().json, 'actions', {'GET': {}}
        )
        if self.resource != 'settings':
            # /api/v2/settings is a special resource that doesn't have
            # traditional list/detail endpoints
            self.build_list_actions()
            self.build_detail_actions()
        self.handle_custom_actions()

    def build_list_actions(self):
        # map HTTP verbs advertised by OPTIONS on the list endpoint to
        # CLI action names
        action_map = {
            'GET': 'list',
            'POST': 'create',
        }
        for method, action in self.options.items():
            # NOTE: only the verb is used here; the field metadata
            # (action) is consumed later by build_query_arguments()
            method = action_map[method]
            parser = self.parser.add_parser(method, help='')
            if method == 'list':
                parser.add_argument(
                    '--all', dest='all_pages', action='store_true',
                    help=(
                        'fetch all pages of content from the API when '
                        'returning results (instead of just the first page)'
                    )
                )
                add_output_formatting_arguments(parser, {})

    def build_detail_actions(self):
        # get/modify/delete all operate on a single object by primary key
        for method in ('get', 'modify', 'delete'):
            parser = self.parser.add_parser(method, help='')
            self.parser.choices[method].add_argument('id', type=int, help='')
            if method == 'get':
                add_output_formatting_arguments(parser, {})

    def build_query_arguments(self, method, http_method):
        """Add --<field> flags for *method* from the OPTIONS metadata of
        the corresponding HTTP verb (*http_method*)."""
        for k, param in self.options.get(http_method, {}).items():
            required = (
                method == 'create' and
                param.get('required', False) is True
            )
            help_text = param.get('help_text', '')
            if required:
                help_text = '[REQUIRED] {}'.format(help_text)
            if method == 'list':
                # for list views, each flag acts as a queryset filter
                help_text = 'only list {} with the specified {}'.format(
                    self.resource,
                    k
                )
            if method == 'list' and param.get('filterable') is False:
                # skip fields the API marks as non-filterable
                continue
            kwargs = {
                'help': help_text,
                'required': required,
                # translate the API's declared field type into a callable
                # argparse can use; anything unknown is treated as str
                'type': {
                    'string': str,
                    'field': int,
                    'integer': int,
                    'boolean': strtobool,
                }.get(param['type'], str),
            }
            meta_map = {
                'string': 'TEXT',
                'integer': 'INTEGER',
                'boolean': 'BOOLEAN',
            }
            if param.get('choices', []):
                kwargs['choices'] = [c[0] for c in param['choices']]
                # if there are choices, try to guess at the type (we can't
                # just assume it's a list of str, but the API doesn't actually
                # explicitly tell us in OPTIONS all the time)
                if isinstance(kwargs['choices'][0], int):
                    kwargs['type'] = int
                    kwargs['choices'] = [str(choice) for choice in kwargs['choices']]
            elif param['type'] in meta_map:
                kwargs['metavar'] = meta_map[param['type']]
            self.parser.choices[method].add_argument(
                '--{}'.format(k),
                **kwargs
            )

    def handle_custom_actions(self):
        # register any CustomAction subclasses targeting this resource,
        # creating their subparser first if one doesn't exist yet
        for _, action in CustomAction.registry.items():
            if action.resource != self.resource:
                continue
            if action.action not in self.parser.choices:
                self.parser.add_parser(action.action, help='')
            action(self.page).add_arguments(self.parser)

View File

@ -0,0 +1,158 @@
import os
from awxkit import api, config
from awxkit.api.pages import Page
from awxkit.cli.format import format_response, add_authentication_arguments
from awxkit.cli.utils import CustomRegistryMeta, cprint
# Resources that only support an HTTP GET and return plain JSON metadata
# (no traditional list/detail views).
CONTROL_RESOURCES = ['ping', 'config', 'me', 'metrics']

# Maps canonical (plural) resource names to the deprecated singular
# aliases used by the legacy open source tower-cli project; the aliases
# are accepted on the command line unless deprecated parsing is skipped.
DEPRECATED_RESOURCES = {
    'applications': 'application',
    'credentials': 'credential',
    'credential_types': 'credential_type',
    'groups': 'group',
    # bug fix: the deprecated alias for `hosts` is the singular `host`
    # (it previously mapped to itself, which duplicated the canonical
    # name and made the singular alias unusable)
    'hosts': 'host',
    'instances': 'instance',
    'instance_groups': 'instance_group',
    'inventory_scripts': 'inventory_script',
    'inventory_sources': 'inventory_source',
    'inventory_updates': 'inventory_update',
    'jobs': 'job',
    'job_templates': 'job_template',
    'labels': 'label',
    'workflow_job_template_nodes': 'node',
    'notification_templates': 'notification_template',
    'organizations': 'organization',
    'projects': 'project',
    'project_updates': 'project_update',
    'roles': 'role',
    'schedules': 'schedule',
    'settings': 'setting',
    'teams': 'team',
    'workflow_job_templates': 'workflow',
    'workflow_jobs': 'workflow_job',
    'users': 'user'
}

# Reverse lookup: deprecated alias -> canonical resource name.
DEPRECATED_RESOURCES_REVERSE = dict(
    (v, k) for k, v in DEPRECATED_RESOURCES.items()
)
class CustomCommand(object, metaclass=CustomRegistryMeta):
    """Base class for implementing custom commands.

    Custom commands represent static code which should run - they are
    responsible for returning and formatting their own output (which may or may
    not be JSON/YAML).
    """

    # short description shown alongside the command in the CLI usage text
    help_text = ''

    @property
    def name(self):
        # the command name as typed on the CLI; must be set by subclasses
        raise NotImplementedError()

    def handle(self, client, parser):
        """To be implemented by subclasses.

        Should return a dictionary that is JSON serializable
        """
        raise NotImplementedError()
class Login(CustomCommand):
    """Implements `awx login`: exchange credentials for an OAuth2 token."""
    name = 'login'
    help_text = 'authenticate and retrieve an OAuth2 token'

    def handle(self, client, parser):
        # optional OAuth2.0 application flags; when omitted, a personal
        # access token is requested instead
        auth = parser.add_argument_group('OAuth2.0 Options')
        auth.add_argument('--conf.client_id', metavar='TEXT')
        auth.add_argument('--conf.client_secret', metavar='TEXT')
        auth.add_argument(
            '--conf.scope', choices=['read', 'write'], default='write'
        )
        parsed = parser.parse_known_args()[0]
        kwargs = {
            'client_id': getattr(parsed, 'conf.client_id', None),
            'client_secret': getattr(parsed, 'conf.client_secret', None),
            'scope': getattr(parsed, 'conf.scope', None),
        }
        try:
            token = api.Api().get_oauth2_token(**kwargs)
        except Exception as e:
            # on failure, show the authentication usage text and report
            # the exception class that prevented the token grant
            add_authentication_arguments(parser, os.environ)
            parser.print_help()
            cprint(
                'Error retrieving an OAuth2.0 token ({}).'.format(e.__class__),
                'red'
            )
        else:
            # print a shell-compatible export line so callers can eval it
            print('export TOWER_TOKEN={}'.format(token))
class Config(CustomCommand):
    """Dump the CLI's effective configuration values."""

    name = 'config'
    help_text = 'print current configuration values'

    def handle(self, client, parser):
        conf = {}
        conf['base_url'] = config.base_url
        conf['token'] = client.get_config('token')
        conf['use_sessions'] = config.use_sessions
        conf['credentials'] = config.credentials
        return conf
def parse_resource(client, skip_deprecated=False):
    """Parse the `resource` positional argument from the CLI.

    Registers one subparser per custom command and per v2 API endpoint,
    rewrites deprecated (singular) aliases back to their canonical names
    in ``client.argv``, and dispatches custom commands immediately
    (which print their output and raise SystemExit).  Returns the
    canonical resource name otherwise.
    """
    subparsers = client.parser.add_subparsers(
        dest='resource',
        metavar='resource',
    )

    # check if the user is running a custom command (e.g., login, config)
    for command in CustomCommand.__subclasses__():
        client.subparsers[command.name] = subparsers.add_parser(
            command.name, help=command.help_text
        )

    if hasattr(client, 'v2'):
        for endpoint in client.v2.json.keys():
            if endpoint in ('dashboard',):
                # the Dashboard API is deprecated and not supported
                continue
            aliases = []
            if not skip_deprecated and endpoint in DEPRECATED_RESOURCES:
                aliases = [DEPRECATED_RESOURCES[endpoint]]
            client.subparsers[endpoint] = subparsers.add_parser(
                endpoint, help='', aliases=aliases
            )

    resource = client.parser.parse_known_args()[0].resource
    if resource in DEPRECATED_RESOURCES.values():
        # replace the deprecated alias with the canonical name in argv
        canonical = DEPRECATED_RESOURCES_REVERSE[resource]
        client.argv[client.argv.index(resource)] = canonical
        resource = canonical

    if resource not in CustomCommand.registry:
        return resource

    # custom commands run right away, print, and exit
    command = CustomCommand.registry[resource]()
    response = command.handle(client, client.subparsers[resource])
    if response:
        formatted = format_response(
            Page.from_json(response),
            fmt=client.get_config('format'),
            filter=client.get_config('filter'),
        )
        print(formatted)
    raise SystemExit()
def is_control_resource(resource):
    """Return True for special root level resources that don't represent
    database entities following the list/detail semantic."""
    return resource in CONTROL_RESOURCES

View File

@ -0,0 +1,84 @@
import os
from docutils.nodes import Text, paragraph
from sphinxcontrib.autoprogram import AutoprogramDirective
from .client import CLI
from .resource import is_control_resource, CustomCommand
class CustomAutoprogramDirective(AutoprogramDirective):
    """autoprogram directive variant with a friendlier page header.

    By default, the document generated by sphinxcontrib.autoprogram
    just has a page title which is the program name ("awx").
    The code here changes this slightly so the reference guide starts
    with a human-friendly title and preamble.
    """

    def run(self):
        nodes = super(CustomAutoprogramDirective, self).run()

        # configure a custom page heading (not `awx`)
        # nodes[0] is the top-level section; nodes[0][0] is its title node,
        # whose children are replaced with the new Text node below
        heading = Text('Reference Guide')
        # docutils text nodes must know their parent before insertion
        heading.parent = nodes[0][0]
        nodes[0][0].children = [heading]

        # add a descriptive top synopsis of the reference guide
        nodes[0].children.insert(1, paragraph(
            text=(
                'This is an exhaustive guide of every available command in '
                'the awx CLI tool.'
            )
        ))
        disclaimer = (
            'The commands and parameters documented here can (and will) '
            'vary based on a variety of factors, such as the AWX API '
            'version, AWX settings, and access level of the authenticated '
            'user. For the most accurate view of available commands, '
            'invoke the awx CLI using the --help flag.'
        )
        nodes[0].children.insert(2, paragraph(text=disclaimer))
        return nodes
def render():
    """Build the fully-populated awx argument parser for Sphinx.

    Called by Sphinx when making the docs.  Loops over every resource at
    `/api/v2/` and performs an HTTP OPTIONS request to determine all of
    the supported actions and their arguments.  Returns an
    argparse.ArgumentParser which the sphinxcontrib.autoprogram plugin
    crawls and generates an indexed Sphinx document from.
    """
    for env_var in ('TOWER_HOST', 'TOWER_USERNAME', 'TOWER_PASSWORD'):
        if not os.environ.get(env_var):
            raise SystemExit(
                'Please specify a valid {} for a real (running) Tower install.'.format(env_var)  # noqa
            )

    cli = CLI()
    cli.parse_args(['awx', '--help'])
    cli.connect()
    cli.authenticate()
    try:
        cli.parse_resource(skip_deprecated=True)
    except SystemExit:
        pass

    for resource in cli.subparsers.keys():
        cli.argv = [resource, '--help']
        cli.resource = resource
        # control resources and custom commands have no OPTIONS-derived
        # actions to introspect
        if resource in CustomCommand.registry or is_control_resource(resource):
            continue
        page = getattr(cli.v2, resource, None)
        if page:
            try:
                cli.parse_action(page, from_sphinx=True)
            except SystemExit:
                pass
    return cli.parser
def setup(app):
    """Sphinx extension entry point: install the custom directive."""
    app.add_directive('autoprogram', CustomAutoprogramDirective)


# Built once at import time; presumably referenced by the autoprogram
# directive configuration in the docs (TODO confirm against the .rst).
parser = render()

116
awxkit/awxkit/cli/stdout.py Normal file
View File

@ -0,0 +1,116 @@
import sys
import time
from .utils import cprint, color_enabled, STATUS_COLORS
def monitor_workflow(response, session, print_stdout=True, timeout=None,
                     interval=.25):
    """Poll a launched workflow job until it finishes.

    :param response: page object for the launched workflow job
    :param session: unused here; accepted for signature parity with `monitor`
    :param print_stdout: when True, print one status line per workflow node
    :param timeout: optional seconds after which polling is aborted
    :param interval: seconds to sleep between polls
    :returns: the workflow job's final status string
    """
    get = response.url.get
    payload = {
        'order_by': 'finished',
        'unified_job_node__workflow_job': response.id,
    }

    def fetch(seen):
        results = response.connection.get(
            '/api/v2/unified_jobs', payload
        ).json()['results']

        # erase lines we've previously printed
        if print_stdout and sys.stdout.isatty():
            for _ in seen:
                sys.stdout.write('\x1b[1A')
                sys.stdout.write('\x1b[2K')

        for result in results:
            if print_stdout:
                print('{id} - {name} '.format(**result), end='')
                status = result['status']
                if color_enabled():
                    color = STATUS_COLORS.get(status, 'white')
                    cprint(status, color)
                else:
                    print(status)
            seen.add(result['id'])

    if print_stdout:
        cprint('------Starting Standard Out Stream------', 'red')
    if print_stdout:
        print('Launching {}...'.format(get().json.name))

    started = time.time()
    seen = set()
    while True:
        if timeout and time.time() - started > timeout:
            if print_stdout:
                cprint('Monitoring aborted due to timeout.', 'red')
            break

        if sys.stdout.isatty():
            # if this is a tty-like device, we can send ANSI codes
            # to draw an auto-updating view
            # otherwise, just wait for the job to finish and print it *once*
            # all at the end
            fetch(seen)
        # BUGFIX: honor the `interval` argument (was a hard-coded .25)
        time.sleep(interval)
        json = get().json
        if json.finished:
            fetch(seen)
            break

    if print_stdout:
        cprint('------End of Standard Out Stream--------\n', 'red')

    return get().json.status
def monitor(response, session, print_stdout=True, timeout=None, interval=.25):
    """Poll a unified job, streaming its stdout until events are processed.

    :param response: page object for the launched job
    :param session: unused here; accepted for signature parity
    :param print_stdout: when True, print each new stdout chunk
    :param timeout: optional seconds after which polling is aborted
    :param interval: seconds to sleep between polls
    :returns: the job's final status string
    """
    get = response.url.get
    payload = {'order_by': 'start_line'}
    if response.type == 'job':
        events = response.related.job_events.get
    else:
        events = response.related.events.get
    next_line = 0

    def fetch(next_line):
        for result in events(**payload).json.results:
            if result['start_line'] != next_line:
                # If this event is a line from _later_ in the stdout,
                # it means that the events didn't arrive in order;
                # skip it for now and wait until the prior lines arrive and
                # are printed
                continue
            stdout = result.get('stdout')
            if stdout and print_stdout:
                print(stdout)
            next_line = result['end_line']
        return next_line

    if print_stdout:
        cprint('------Starting Standard Out Stream------', 'red')

    started = time.time()
    while True:
        if timeout and time.time() - started > timeout:
            if print_stdout:
                cprint('Monitoring aborted due to timeout.', 'red')
            break
        next_line = fetch(next_line)
        if next_line:
            payload['start_line__gte'] = next_line
        # BUGFIX: honor the `interval` argument (was a hard-coded .25)
        time.sleep(interval)
        json = get().json
        if (
            json.event_processing_finished is True or
            json.status in ('error', 'canceled')
        ):
            fetch(next_line)
            break

    if print_stdout:
        cprint('------End of Standard Out Stream--------\n', 'red')

    return get().json.status

View File

@ -0,0 +1,76 @@
from argparse import ArgumentParser
import threading
import sys
import termcolor
# Thread-local switch so colorized output can be disabled per-thread.
_color = threading.local()
_color.enabled = True

__all__ = ['CustomRegistryMeta', 'HelpfulArgumentParser', 'disable_color',
           'color_enabled', 'colored', 'cprint', 'STATUS_COLORS']

# Map of AWX job statuses to termcolor color names.
# BUGFIX: AWX spells the status 'canceled' (single 'l') — see the status
# check in stdout.py; the previous 'cancelled' key could never match, so
# canceled jobs fell through to the default color.
STATUS_COLORS = {
    'new': 'grey',
    'pending': 'grey',
    'running': 'yellow',
    'successful': 'green',
    'failed': 'red',
    'error': 'red',
    'canceled': 'grey',
}
class CustomRegistryMeta(type):
    """Metaclass exposing a registry of direct subclasses keyed by name.

    Any class created with this metaclass gains a `registry` property:
    a mapping of each direct subclass's `name` attribute to the subclass.
    """

    @property
    def registry(cls):
        return {
            subclass.name: subclass
            for subclass in cls.__subclasses__()
        }
class HelpfulArgumentParser(ArgumentParser):
    """ArgumentParser that prints full usage on error and drops -h/--help."""

    def error(self, message):  # pragma: nocover
        """Prints a usage message incorporating the message to stderr and
        exits.

        If you override this in a subclass, it should not return -- it
        should either exit or raise an exception.
        """
        self.print_help(sys.stderr)
        self._print_message('\n')
        self.exit(2, '%s: %s\n' % (self.prog, message))

    def _parse_known_args(self, args, ns):
        # the -h argument is extraneous; if you leave it off,
        # awx-cli will just print usage info
        for flag in ('-h', '--help'):
            if flag in args:
                args.remove(flag)
        return super(HelpfulArgumentParser, self)._parse_known_args(args, ns)
def color_enabled():
    """Return whether colorized output is enabled for this thread."""
    return _color.enabled
def disable_color():
    """Turn off colorized output for the current thread."""
    _color.enabled = False
def colored(value, color):
    """Return `value` wrapped in ANSI color codes when color is enabled."""
    if not _color.enabled:
        return value
    return termcolor.colored(value, color)
def cprint(value, color, **kwargs):
    """Print `value` in `color` when color is enabled, else print plainly."""
    if not _color.enabled:
        print(value, **kwargs)
    else:
        termcolor.cprint(value, color, **kwargs)

34
awxkit/awxkit/config.py Normal file
View File

@ -0,0 +1,34 @@
import types
import os
from .utils import (
PseudoNamespace,
load_credentials,
load_projects,
to_bool,
)
# Global, process-wide configuration namespace (mimics pytest.config).
config = PseudoNamespace()


def getvalue(self, name):
    """dict-style accessor bound onto `config` for pytest.config parity."""
    return self.__getitem__(name)


if os.getenv('AWXKIT_BASE_URL'):
    config.base_url = os.getenv('AWXKIT_BASE_URL')

if os.getenv('AWXKIT_CREDENTIAL_FILE'):
    config.credentials = load_credentials(os.getenv('AWXKIT_CREDENTIAL_FILE'))

if os.getenv('AWXKIT_PROJECT_FILE'):
    # BUGFIX: read the path from the environment; `config` is a fresh
    # namespace and never contains an 'AWXKIT_PROJECT_FILE' key, so the
    # previous config.get(...) lookup always passed None to load_projects.
    config.project_urls = load_projects(os.getenv('AWXKIT_PROJECT_FILE'))

# kludge to mimic pytest.config
config.getvalue = types.MethodType(getvalue, config)

config.assume_untrusted = config.get('assume_untrusted', True)
config.client_connection_attempts = int(
    os.getenv('AWXKIT_CLIENT_CONNECTION_ATTEMPTS', 5))
config.prevent_teardown = to_bool(os.getenv('AWXKIT_PREVENT_TEARDOWN', False))
config.use_sessions = to_bool(os.getenv('AWXKIT_SESSIONS', False))

View File

@ -0,0 +1,98 @@
class Common(Exception):
    """Base exception for awxkit API errors.

    May be constructed from another exception (whose args are adopted)
    or from a status string plus a human-readable message.
    """

    def __init__(self, status_string='', message=''):
        if isinstance(status_string, Exception):
            self.status_string = ''
            # BUGFIX: Exception instances are not iterable, so the old
            # `*status_string` raised TypeError; unpack .args instead.
            # NOTE(review): this path never sets self.msg, so str()/[1]
            # would raise AttributeError afterwards — preserved as-is.
            return super(Common, self).__init__(*status_string.args)
        self.status_string = status_string
        self.msg = message

    def __getitem__(self, val):
        # tuple-style access: exc[0] -> status string, exc[1] -> message
        return (self.status_string, self.msg)[val]

    def __repr__(self):
        return self.__str__()

    def __str__(self):
        return str(self.msg)
# One Common subclass per error condition; awxkit raises the subclass
# matching the failure so callers can catch the specific case they care
# about (mapping to HTTP responses happens elsewhere — not visible here).


class BadRequest(Common):
    pass


class Conflict(Common):
    pass


class Duplicate(Common):
    pass


class Forbidden(Common):
    pass


class InternalServerError(Common):
    pass


class BadGateway(Common):
    pass


class LicenseExceeded(Common):
    pass


class LicenseInvalid(Common):
    pass


class MethodNotAllowed(Common):
    pass


class NoContent(Common):
    # NOTE(review): class-level `message` is distinct from the instance
    # attribute `msg` set by Common.__init__ — confirm this is intentional.
    message = ''


class NotFound(Common):
    pass


class PaymentRequired(Common):
    pass


class Unauthorized(Common):
    pass


class Unknown(Common):
    pass


class WaitUntilTimeout(Common):
    pass


class UnexpectedAWXState(Common):
    pass

54
awxkit/awxkit/rrule.py Normal file
View File

@ -0,0 +1,54 @@
from datetime import datetime
from dateutil import rrule
from awxkit.utils import to_ical
class RRule(rrule.rrule):
    """dateutil rrule subclass that serializes itself to a
    'DTSTART:... RRULE:...' string and reports upcoming run times.
    """

    @property
    def next_run(self):
        # Next occurrence after utcnow as an ISO8601 string with a 'Z'
        # suffix, or None when the rule has no future occurrences.
        after = self.after(datetime.utcnow())
        if after is None:
            return after
        return after.isoformat() + 'Z'

    def next_runs(self, count=1):
        # Up to `count` future occurrences, formatted like next_run.
        return [a.isoformat() + 'Z' for a in self.xafter(datetime.utcnow(),
                count=count)]

    def __str__(self):
        # Rebuild an iCalendar-style recurrence string from dateutil's
        # private attributes (_freq, _interval, _wkst, ...).
        dstart = 'DTSTART:{}'.format(to_ical(self._dtstart))
        rules = []
        if self._freq not in range(len(rrule.FREQNAMES)):
            raise Exception('Invalid freq "{}"'.format(self._freq))
        rules.append('FREQ=' + rrule.FREQNAMES[self._freq])
        for name, value in [('INTERVAL', self._interval),
                            ('WKST', self._wkst),
                            ('COUNT', self._count)]:
            if value is not None:
                if name == 'WKST':
                    # dateutil stores WKST as an int index; map to day codes
                    value = ['MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU'][value]
                rules.append('{}={}'.format(name, value))
        if self._until:
            rules.append('UNTIL={}'.format(to_ical(self._until)))
        for name, value in [('BYSETPOS', self._bysetpos),
                            ('BYMONTH', self._bymonth),
                            ('BYMONTHDAY', self._bymonthday),
                            ('BYYEARDAY', self._byyearday),
                            ('BYWEEKNO', self._byweekno),
                            ('BYWEEKDAY', self._byweekday),
                            ('BYHOUR', self._byhour),
                            ('BYMINUTE', self._byminute),
                            ('BYSECOND', self._bysecond), ]:
            # BYWEEKDAY ints are converted to dateutil weekday constants
            # (MO, TU, ...) for rendering
            if name == "BYWEEKDAY" and value:
                value = (rrule.weekdays[num] for num in value)
            if value:
                rules.append(name + '=' + ','.join(str(v) for v in value))

        return '{0} RRULE:{1}'.format(dstart, ';'.join(rules))

    __repr__ = __str__

View File

View File

@ -0,0 +1,127 @@
from argparse import ArgumentParser
import traceback
import logging
import pdb # noqa
import sys
import os
from awxkit import api, config, utils, exceptions, WSClient # noqa
from awxkit.awx.utils import check_related, delete_all, get_all, uses_sessions # noqa
from awxkit.awx.utils import as_user as _as_user
# Enable verbose logging for the interactive session when AWXKIT_DEBUG
# is set to a truthy value ('true' or '1', case-insensitive).
if str(os.getenv('AWXKIT_DEBUG', 'false')).lower() in ['true', '1']:
    logging.basicConfig(level='DEBUG')
def parse_args():
    """Build and parse command-line arguments for the interactive session.

    Only known args are consumed (extra args may pass through to the
    embedded shell).  Returns the parsed namespace.
    """
    parser = ArgumentParser()
    parser.add_argument(
        '--base-url',
        dest='base_url',
        default=os.getenv(
            'AWXKIT_BASE_URL',
            'http://127.0.0.1:8013'),
        help='URL for AWX. Defaults to env var AWXKIT_BASE_URL or http://127.0.0.1:8013')
    parser.add_argument(
        '-c',
        '--credential-file',
        dest='credential_file',
        default=os.getenv(
            'AWXKIT_CREDENTIAL_FILE',
            utils.not_provided),
        help='Path for yml credential file. If not provided or set by AWXKIT_CREDENTIAL_FILE, set '
             'AWXKIT_USER and AWXKIT_USER_PASSWORD env vars for awx user credentials.')
    parser.add_argument(
        '-p',
        '--project-file',
        dest='project_file',
        default=os.getenv(
            'AWXKIT_PROJECT_FILE'),
        # BUGFIX: added the missing separator so the concatenated help
        # text no longer renders as "...config file.If not provided..."
        help='Path for yml project config file. '
             'If not provided or set by AWXKIT_PROJECT_FILE, projects will not have default SCM_URL')
    parser.add_argument('-f', '--file', dest='akit_script', default=False,
                        help='akit script file to run in interactive session.')
    parser.add_argument(
        '-x',
        '--non-interactive',
        action='store_true',
        dest='non_interactive',
        help='Do not run in interactive mode.')
    return parser.parse_known_args()[0]
def main():
    """Connect to AWX, populate the `root`/`v2` globals, and optionally
    run a user-supplied script.

    In non-interactive mode the process exits with a non-zero code when
    anything failed; otherwise the captured exception is re-raised so the
    interactive shell surfaces it.
    """
    exc = None
    try:
        global akit_args
        akit_args = parse_args()
        config.base_url = akit_args.base_url
        if akit_args.credential_file != utils.not_provided:
            config.credentials = utils.load_credentials(
                akit_args.credential_file)
            if akit_args.project_file != utils.not_provided:
                config.project_urls = utils.load_projects(
                    akit_args.project_file)
        else:
            # fall back to env-var (or default admin/password) credentials
            config.credentials = utils.PseudoNamespace({'default': {'username': os.getenv(
                'AWXKIT_USER', 'admin'), 'password': os.getenv('AWXKIT_USER_PASSWORD', 'password')}})

        global root
        root = api.Api()
        if uses_sessions(root.connection):
            config.use_sessions = True
            root.load_session().get()
        else:
            root.load_authtoken().get()
        if 'v2' in root.available_versions:
            global v2
            v2 = root.available_versions.v2.get()

        rc = 0
        if akit_args.akit_script:
            try:
                # use a context manager so the script file handle is
                # closed promptly (the old bare open() leaked it)
                with open(akit_args.akit_script) as akit_script:
                    exec(akit_script.read(), globals())
            except Exception as e:
                exc = e
                raise exc
    except Exception as e:
        exc = e
        rc = 1

    if akit_args.non_interactive:
        if exc:
            # BUGFIX: traceback.print_exc() takes a `limit` argument, not
            # an exception instance; print the captured exception properly
            traceback.print_exception(type(exc), exc, exc.__traceback__)
        os._exit(rc)

    if exc:
        raise exc
def as_user(username, password=None):
    """Impersonate `username` against the global `root` API connection
    (delegates to awxkit.awx.utils.as_user)."""
    return _as_user(root, username, password)
def load_interactive():
    """Launch the interactive awxkit session.

    Prefers IPython (re-launching this module's source under it); falls
    back to the stdlib `code.interact` REPL when IPython is unavailable.
    """
    if '--help' in sys.argv or '-h' in sys.argv:
        return parse_args()
    try:
        from IPython import start_ipython
        basic_session_path = os.path.abspath(__file__)
        # __file__ may point at the compiled .pyc; strip the trailing 'c'
        if basic_session_path[-1] == 'c':  # start_ipython doesn't work w/ .pyc
            basic_session_path = basic_session_path[:-1]
        sargs = ['-i', basic_session_path]
        if sys.argv[1:]:
            # pass any remaining CLI args through to the script after '--'
            sargs.extend(['--'] + sys.argv[1:])
        return start_ipython(argv=sargs)
    except ImportError:
        from code import interact
        main()
        interact('', local=dict(globals(), **locals()))
# Allow running this module directly (non-interactive usage).
if __name__ == '__main__':
    main()

448
awxkit/awxkit/utils.py Normal file
View File

@ -0,0 +1,448 @@
from contextlib import contextmanager
from datetime import datetime, timedelta, tzinfo
import inspect
import logging
import random
import shlex
import types
import time
import sys
import re
import os
import yaml
from awxkit.words import words
from awxkit.exceptions import WaitUntilTimeout
log = logging.getLogger(__name__)

# Inventory-source/credential kinds treated as "cloud" types elsewhere
# in awxkit (usage not visible in this file).
cloud_types = (
    'aws',
    'azure',
    'azure_ad',
    'azure_classic',
    'azure_rm',
    'cloudforms',
    'ec2',
    'gce',
    'openstack',
    'openstack_v2',
    'openstack_v3',
    'rhv',
    'rax',
    'satellite6',
    'tower',
    'vmware')

credential_type_kinds = ('cloud', 'net')

# Sentinel meaning "caller did not supply this value" (see update_payload);
# distinct from None, which is a legitimate payload value.
not_provided = 'xx__NOT_PROVIDED__xx'
def super_dir_set(cls):
    """Return the union of dir() results across `cls` and its whole MRO."""
    attributes = set()
    for klass in inspect.getmro(cls):
        attributes |= set(dir(klass))
    return attributes
class NoReloadError(Exception):
    # NOTE(review): raised elsewhere in awxkit; usage not visible here.
    pass
class PseudoNamespace(dict):
    """A dict whose items are also accessible as attributes.

    Nested dicts (including those found inside lists and tuples) are
    recursively converted to PseudoNamespace instances, both at
    construction time and on every subsequent assignment.
    """

    def __init__(self, _d=None, **loaded):
        # accept an optional positional dict plus keyword items
        if not isinstance(_d, dict):
            _d = {}
        _d.update(loaded)
        super(PseudoNamespace, self).__init__(_d)

        # Convert nested structures into PseudoNamespaces
        for k, v in _d.items():
            tuple_converted = False
            if isinstance(v, tuple):
                # tuples are immutable: convert to a list, fix up the
                # elements, then convert back below
                self[k] = v = list(v)
                tuple_converted = True

            if isinstance(v, list):
                for i, item in enumerate(v):
                    if isinstance(item, dict):
                        self[k][i] = PseudoNamespace(item)
                if tuple_converted:
                    self[k] = tuple(self[k])
            elif isinstance(v, dict):
                self[k] = PseudoNamespace(v)

    def __getattr__(self, attr):
        # attribute access falls through to item access
        try:
            return self.__getitem__(attr)
        except KeyError:
            raise AttributeError(
                "{!r} has no attribute {!r}".format(
                    self.__class__.__name__, attr))

    def __setattr__(self, attr, value):
        self.__setitem__(attr, value)

    def __setitem__(self, key, value):
        # mirror the conversion logic from __init__ on every assignment
        if not isinstance(value, PseudoNamespace):
            tuple_converted = False
            if isinstance(value, dict):
                value = PseudoNamespace(value)
            elif isinstance(value, tuple):
                value = list(value)
                tuple_converted = True

            if isinstance(value, list):
                for i, item in enumerate(value):
                    if isinstance(item, dict) and not isinstance(item, PseudoNamespace):
                        value[i] = PseudoNamespace(item)
                if tuple_converted:
                    value = tuple(value)

        super(PseudoNamespace, self).__setitem__(key, value)

    def __delattr__(self, attr):
        self.__delitem__(attr)

    def __dir__(self):
        # expose class attributes plus current keys (helps tab-completion)
        attrs = super_dir_set(self.__class__)
        attrs.update(self.keys())
        return sorted(attrs)

    # override builtin in order to have updated content become
    # PseudoNamespaces if applicable
    def update(self, iterable=None, **kw):
        if iterable:
            # duck-type mappings by the presence of a callable .keys
            if (hasattr(iterable,
                        'keys') and isinstance(iterable.keys,
                                               (types.FunctionType,
                                                types.BuiltinFunctionType,
                                                types.MethodType))):
                for key in iterable:
                    self[key] = iterable[key]
            else:
                # otherwise assume an iterable of (key, value) pairs
                for (k, v) in iterable:
                    self[k] = v
        for k in kw:
            self[k] = kw[k]
def is_relative_endpoint(candidate):
    """True when `candidate` is a string shaped like a relative API path."""
    return isinstance(candidate, str) and candidate.startswith('/api/')
def is_class_or_instance(obj, cls):
    """Return True when `obj` is `cls` itself or an instance of `cls`."""
    if obj is cls:
        return True
    return isinstance(obj, cls)
def filter_by_class(*item_class_tuples):
    """takes an arbitrary number of (item, class) tuples and returns a list
    consisting of each item if it's an instance of the class, the item if
    it's a (class, dict()) tuple, the class itself if item is truthy but
    not an instance of the class or (class, dict()) tuple, or None if item
    is falsy in the same order as the arguments

    ```
    _cred = Credential()
    inv, org, cred = filter_base_subclasses((True, Inventory), (None, Organization), (_cred, Credential))
    inv == Inventory
    org == None
    cred == _cred
    ```
    """
    results = []
    for item, cls in item_class_tuples:
        if not item:
            results.append(None)
            continue
        was_tuple = isinstance(item, tuple)
        examined_item = item[0] if was_tuple else item
        if (is_class_or_instance(examined_item, cls) or
                is_proper_subclass(examined_item, cls)):
            results.append(item)
        else:
            results.append((cls, item[1]) if was_tuple else cls)
    return results
def load_credentials(filename=None):
    """Load a YAML credentials mapping.

    Defaults to ./credentials.yaml when no filename is given; raises when
    the resolved path is not a file.
    """
    if filename is None:
        path = os.path.join(os.getcwd(), 'credentials.yaml')
    else:
        path = os.path.abspath(filename)

    if not os.path.isfile(path):
        raise Exception('Unable to load credentials file at %s' % path)

    with open(path) as credentials_fh:
        return yaml.load(credentials_fh, Loader=yaml.FullLoader)
def load_projects(filename=None):
    """Load a YAML projects mapping; an empty dict when no file is given."""
    if filename is None:
        return {}

    path = os.path.abspath(filename)
    if not os.path.isfile(path):
        raise Exception('Unable to load projects file at %s' % path)

    with open(path) as projects_fh:
        return yaml.load(projects_fh, Loader=yaml.FullLoader)
def logged_sleep(duration, level='DEBUG', stack_depth=1):
    """Sleep for `duration` seconds, logging against the caller's module
    logger (found by walking `stack_depth` frames up the stack)."""
    numeric_level = getattr(logging, level.upper())
    # based on
    # http://stackoverflow.com/questions/1095543/get-name-of-calling-functions-module-in-python
    try:
        caller = inspect.stack()[stack_depth]
        logger = logging.getLogger(inspect.getmodule(caller[0]).__name__)
    except AttributeError:  # module is None (interactive shell)
        logger = log  # fall back to utils logger
    logger.log(numeric_level, 'Sleeping for {0} seconds.'.format(duration))
    time.sleep(duration)
def poll_until(function, interval=5, timeout=0):
    """Polls `function` every `interval` seconds until it returns a
    non-falsey value, which is then returned.

    Raises WaitUntilTimeout when `timeout` seconds elapse first.  Each
    attempt logs the time elapsed since the original request.
    """
    start_time = time.time()
    while True:
        elapsed = time.time() - start_time
        log.debug('elapsed: {0:4.1f}'.format(elapsed))
        result = function()
        if result:
            return result
        if elapsed > timeout:
            break
        logged_sleep(interval, stack_depth=3)
    raise WaitUntilTimeout(None, 'Timeout after {0} seconds.'.format(elapsed))
def gen_utf_char():
    """Return one random printable unicode character.

    Retries until the chosen code point is printable (surrogates, control
    characters and unassigned points fail str.isprintable()).
    """
    while True:
        # BUGFIX: chr() accepts 0..0x10FFFF; the old randint(32, 0x110000)
        # could produce 0x110000 (randint is inclusive) and raise
        # ValueError.  randrange's upper bound is exclusive.
        code_point = random.randrange(32, 0x110000)
        character = chr(code_point)
        if character.isprintable():
            return character
def random_int(maxint=sys.maxsize):
    """Return a random integer in the inclusive range [0, maxint]."""
    # renamed local: previously shadowed the builtin `max`
    upper_bound = int(maxint)
    return random.randint(0, upper_bound)
def random_ipv4():
    """Generate a random ipv4 address; useful for testing."""
    octets = (str(random.randint(1, 255)) for _ in range(4))
    return ".".join(octets)
def random_ipv6():
    """Generate a random ipv6 address; useful for testing."""
    groups = [
        '{0:x}'.format(random.randint(0, 2 ** 16 - 1))
        for _ in range(8)
    ]
    return ':'.join(groups)
def random_loopback_ip():
    """Generate a random loopback ipv4 address; useful for testing."""
    tail = [random_int(255) for _ in range(3)]
    return "127.{}.{}.{}".format(*tail)
def random_utf8(*args, **kwargs):
    """Return a random printable unicode string restricted to the BMP.

    This function exists due to a bug in ChromeDriver that throws an
    exception when a character outside of the BMP is sent to `send_keys`.
    Code pulled from http://stackoverflow.com/a/3220210.
    """
    non_bmp = re.compile('[^\u0000-\uD7FF\uE000-\uFFFF]', re.UNICODE)
    length = args[0] if len(args) else kwargs.get('length', 10)
    raw = ''.join(gen_utf_char() for _ in range(length))
    # replace any non-BMP character with the unicode replacement char
    return non_bmp.sub('\uFFFD', raw)
def random_title(num_words=2, non_ascii=True):
    """Build a random title from `num_words` dictionary words, suffixed
    with either one random unicode char or three random digits."""
    base = ''.join(random.choice(words) for _ in range(num_words))
    if non_ascii:
        return base + random_utf8(1)
    return base + ''.join(str(random_int()) for _ in range(3))
def update_payload(payload, fields, kwargs):
    """Takes a list of fields and adds their kwargs value to payload if
    defined.

    If the payload has an existing value and not_provided is the kwarg
    value for that key, the existing key/value are stripped from the
    payload.
    """
    absent = 'xx_UPDATE_PAYLOAD_FIELD_NOT_PROVIDED_AS_KWARG_xx'
    for field in fields:
        value = kwargs.get(field, absent)
        if value not in (not_provided, absent):
            payload[field] = value
        elif value == not_provided and field in payload:
            payload.pop(field)
    return payload
def to_bool(obj):
    """Coerce to bool; strings like 'false', 'no', '0' and '' are False."""
    falsy_strings = ('false', 'off', 'no', 'n', '0', '')
    if isinstance(obj, str):
        return obj.lower() not in falsy_strings
    return bool(obj)
def load_json_or_yaml(obj):
    """Parse `obj` as YAML (a superset of JSON); TypeError when `obj` is
    not something yaml can read."""
    try:
        return yaml.load(obj, Loader=yaml.FullLoader)
    except AttributeError:
        raise TypeError("Provide valid YAML/JSON.")
def get_class_if_instance(obj):
    """Return `obj` unchanged when it is a class, otherwise its class."""
    return obj if inspect.isclass(obj) else obj.__class__
def class_name_to_kw_arg(class_name):
    """'ClassName' -> 'class_name' (with any 'v2_' prefix removed)."""
    snake = re.sub(r'([a-z])([A-Z0-9])', r'\1_\2', class_name)
    snake = re.sub(r'([0-9])([a-zA-Z])', r'\1_\2', snake).lower()
    return snake.replace('v2_', '')
def is_proper_subclass(obj, cls):
    """True when `obj` is a strict subclass of `cls` (not `cls` itself)."""
    if not inspect.isclass(obj):
        return False
    return obj is not cls and issubclass(obj, cls)
def are_same_endpoint(first, second):
    """Equivalence check of two urls, stripped of query parameters"""
    def normalize(url):
        # drop 'www.' and anything after the first '?'
        return url.replace('www.', '').split('?')[0]
    return normalize(first) == normalize(second)
@contextmanager
def suppress(*exceptions):
    """Context manager that swallows the provided exception types.

    :param exceptions: exception classes to suppress

    Usage::
        >>> with suppress(ZeroDivisionError):
        >>>     foo = 1/0
        >>>     # This code will not run

    Note: This is an intermediate framework and test refactoring tool;
    it's almost never a good idea to plan on using this.  Once a
    suppressed exception is raised, no further statements in the with
    block execute.
    """
    try:
        yield
    except exceptions:
        pass
def utcnow():
    """Provide a wrapped copy of the built-in utcnow that can be easily
    mocked."""
    now = datetime.utcnow()
    return now
class UTC(tzinfo):
    """Concrete implementation of tzinfo for UTC: zero offset, no DST.

    For more information, see:
    https://docs.python.org/2/library/datetime.html
    """

    def tzname(self, dt):
        return 'UTC'

    def dst(self, dt):
        return timedelta(0)

    def utcoffset(self, dt):
        return timedelta(0)
def seconds_since_date_string(
        date_str,
        fmt='%Y-%m-%dT%H:%M:%S.%fZ',
        default_tz=UTC()):
    """Return the number of seconds elapsed since the date and time
    described by `date_str` (parsed with `fmt`).

    :param date_str: string representing a date and time.
    :param fmt: Formatting string - by default, this value is set to parse
        date strings originating from awx API response data.
    :param default_tz: Assumed tzinfo if the parsed date_str has none.

    For python date string formatting directives, see
    https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior
    """
    parsed = datetime.strptime(date_str, fmt)
    if not parsed.tzinfo:
        parsed = parsed.replace(tzinfo=default_tz)
    delta = utcnow().replace(tzinfo=UTC()) - parsed
    return delta.total_seconds()
def to_ical(dt):
    """Render a datetime as an iCal UTC timestamp (YYYYMMDDTHHMMSSZ)."""
    stamp = dt.strftime("%Y%m%dT%H%M%SZ")
    # defensive scrub of ':'/'-' (the format above produces neither)
    return re.sub('[:-]', '', stamp)
def version_from_endpoint(endpoint):
    """Extract the API version segment from an endpoint path, or 'common'
    when no version segment is present."""
    segment = endpoint.split('/api/')[1].split('/')[0]
    return segment or 'common'
def args_string_to_list(args):
    """Converts cmdline arg string to list of args. The reverse of
    subprocess.list2cmdline(); heavily inspired by
    robot.utils.argumentparser.cmdline2list().
    """
    lexer = shlex.shlex(args, posix=True)
    lexer.escapedquotes = '"\''
    lexer.commenters = ''
    lexer.whitespace_split = True
    # BUGFIX: in Python 3 shlex yields str tokens, which have no
    # .decode(); the old `token.decode('utf-8')` raised AttributeError
    # on every call.
    return list(lexer)
def is_list_or_tuple(item):
    """True when `item` is a list or a tuple (no other sequence types)."""
    return isinstance(item, (list, tuple))

193
awxkit/awxkit/words.py Normal file
View File

@ -0,0 +1,193 @@
# list of random English words (mostly nouns, plus some verbs/adjectives)
# used for resource name utilities
words = ['People', 'History', 'Way', 'Art', 'World', 'Information', 'Map', 'Two',
'Family', 'Government', 'Health', 'System', 'Computer', 'Meat', 'Year', 'Thanks',
'Music', 'Person', 'Reading', 'Method', 'Data', 'Food', 'Understanding', 'Theory',
'Law', 'Bird', 'Literature', 'Problem', 'Software', 'Control', 'Knowledge', 'Power',
'Ability', 'Economics', 'Love', 'Internet', 'Television', 'Science', 'Library', 'Nature',
'Fact', 'Product', 'Idea', 'Temperature', 'Investment', 'Area', 'Society', 'Activity',
'Story', 'Industry', 'Media', 'Thing', 'Oven', 'Community', 'Definition', 'Safety',
'Quality', 'Development', 'Language', 'Management', 'Player', 'Variety', 'Video', 'Week',
'Security', 'Country', 'Exam', 'Movie', 'Organization', 'Equipment', 'Physics', 'Analysis',
'Policy', 'Series', 'Thought', 'Basis', 'Boyfriend', 'Direction', 'Strategy', 'Technology',
'Army', 'Camera', 'Freedom', 'Paper', 'Environment', 'Child', 'Instance', 'Month',
'Truth', 'Marketing', 'University', 'Writing', 'Article', 'Department', 'Difference', 'Goal',
'News', 'Audience', 'Fishing', 'Growth', 'Income', 'Marriage', 'User', 'Combination',
'Failure', 'Meaning', 'Medicine', 'Philosophy', 'Teacher', 'Communication', 'Night', 'Chemistry',
'Disease', 'Disk', 'Energy', 'Nation', 'Road', 'Role', 'Soup', 'Advertising',
'Location', 'Success', 'Addition', 'Apartment', 'Education', 'Math', 'Moment', 'Painting',
'Politics', 'Attention', 'Decision', 'Event', 'Property', 'Shopping', 'Student', 'Wood',
'Competition', 'Distribution', 'Entertainment', 'Office', 'Population', 'President', 'Unit', 'Category',
'Cigarette', 'Context', 'Introduction', 'Opportunity', 'Performance', 'Driver', 'Flight', 'Length',
'Magazine', 'Newspaper', 'Relationship', 'Teaching', 'Cell', 'Dealer', 'Debate', 'Finding',
'Lake', 'Member', 'Message', 'Phone', 'Scene', 'Appearance', 'Association', 'Concept',
'Customer', 'Death', 'Discussion', 'Housing', 'Inflation', 'Insurance', 'Mood', 'Woman',
'Advice', 'Blood', 'Effort', 'Expression', 'Importance', 'Opinion', 'Payment', 'Reality',
'Responsibility', 'Situation', 'Skill', 'Statement', 'Wealth', 'Application', 'City', 'County',
'Depth', 'Estate', 'Foundation', 'Grandmother', 'Heart', 'Perspective', 'Photo', 'Recipe',
'Studio', 'Topic', 'Collection', 'Depression', 'Imagination', 'Passion', 'Percentage', 'Resource',
'Setting', 'Ad', 'Agency', 'College', 'Connection', 'Criticism', 'Debt', 'Description',
'Memory', 'Patience', 'Secretary', 'Solution', 'Administration', 'Aspect', 'Attitude', 'Director',
'Personality', 'Psychology', 'Recommendation', 'Response', 'Selection', 'Storage', 'Version', 'Alcohol',
'Argument', 'Complaint', 'Contract', 'Emphasis', 'Highway', 'Loss', 'Membership', 'Possession',
'Preparation', 'Steak', 'Union', 'Agreement', 'Cancer', 'Currency', 'Employment', 'Engineering',
'Entry', 'Interaction', 'Limit', 'Mixture', 'Preference', 'Region', 'Republic', 'Seat',
'Tradition', 'Virus', 'Actor', 'Classroom', 'Delivery', 'Device', 'Difficulty', 'Drama',
'Election', 'Engine', 'Football', 'Guidance', 'Hotel', 'Match', 'Owner', 'Priority',
'Protection', 'Suggestion', 'Tension', 'Variation', 'Anxiety', 'Atmosphere', 'Awareness', 'Bread',
'Climate', 'Comparison', 'Confusion', 'Construction', 'Elevator', 'Emotion', 'Employee', 'Employer',
'Guest', 'Height', 'Leadership', 'Mall', 'Manager', 'Operation', 'Recording', 'Respect',
'Sample', 'Transportation', 'Boring', 'Charity', 'Cousin', 'Disaster', 'Editor', 'Efficiency',
'Excitement', 'Extent', 'Feedback', 'Guitar', 'Homework', 'Leader', 'Mom', 'Outcome',
'Permission', 'Presentation', 'Promotion', 'Reflection', 'Refrigerator', 'Resolution', 'Revenue', 'Session',
'Singer', 'Tennis', 'Basket', 'Bonus', 'Cabinet', 'Childhood', 'Church', 'Clothes',
'Coffee', 'Dinner', 'Drawing', 'Hair', 'Hearing', 'Initiative', 'Judgment', 'Lab',
'Measurement', 'Mode', 'Mud', 'Orange', 'Poetry', 'Police', 'Possibility', 'Procedure',
'Queen', 'Ratio', 'Relation', 'Restaurant', 'Satisfaction', 'Sector', 'Signature', 'Significance',
'Song', 'Tooth', 'Town', 'Vehicle', 'Volume', 'Wife', 'Accident', 'Airport',
'Appointment', 'Arrival', 'Assumption', 'Baseball', 'Chapter', 'Committee', 'Conversation', 'Database',
'Enthusiasm', 'Error', 'Explanation', 'Farmer', 'Gate', 'Girl', 'Hall', 'Historian',
'Hospital', 'Injury', 'Instruction', 'Maintenance', 'Manufacturer', 'Meal', 'Perception', 'Pie',
'Poem', 'Presence', 'Proposal', 'Reception', 'Replacement', 'Revolution', 'River', 'Son',
'Speech', 'Tea', 'Village', 'Warning', 'Winner', 'Worker', 'Writer', 'Assistance',
'Breath', 'Buyer', 'Chest', 'Chocolate', 'Conclusion', 'Contribution', 'Cookie', 'Courage',
'Dad', 'Desk', 'Drawer', 'Establishment', 'Examination', 'Garbage', 'Grocery', 'Honey',
'Impression', 'Improvement', 'Independence', 'Insect', 'Inspection', 'Inspector', 'King', 'Ladder',
'Menu', 'Penalty', 'Piano', 'Potato', 'Profession', 'Professor', 'Quantity', 'Reaction',
'Requirement', 'Salad', 'Sister', 'Supermarket', 'Tongue', 'Weakness', 'Wedding', 'Affair',
'Ambition', 'Analyst', 'Apple', 'Assignment', 'Assistant', 'Bathroom', 'Bedroom', 'Beer',
'Birthday', 'Celebration', 'Championship', 'Cheek', 'Client', 'Consequence', 'Departure', 'Diamond',
'Dirt', 'Ear', 'Fortune', 'Friendship', 'Snapewife', 'Funeral', 'Gene', 'Girlfriend', 'Hat',
'Indication', 'Intention', 'Lady', 'Midnight', 'Negotiation', 'Obligation', 'Passenger', 'Pizza',
'Platform', 'Poet', 'Pollution', 'Recognition', 'Reputation', 'Shirt', 'Sir', 'Speaker',
'Stranger', 'Surgery', 'Sympathy', 'Tale', 'Throat', 'Trainer', 'Uncle', 'Youth',
'Time', 'Work', 'Film', 'Water', 'Money', 'Example', 'While', 'Business',
'Study', 'Game', 'Life', 'Form', 'Air', 'Day', 'Place', 'Number',
'Part', 'Field', 'Fish', 'Back', 'Process', 'Heat', 'Hand', 'Experience',
'Job', 'Book', 'End', 'Point', 'Type', 'Home', 'Economy', 'Value',
'Body', 'Market', 'Guide', 'Interest', 'State', 'Radio', 'Course', 'Company',
'Price', 'Size', 'Card', 'List', 'Mind', 'Trade', 'Line', 'Care',
'Group', 'Risk', 'Word', 'Fat', 'Force', 'Key', 'Light', 'Training',
'Name', 'School', 'Top', 'Amount', 'Level', 'Order', 'Practice', 'Research',
'Sense', 'Service', 'Piece', 'Web', 'Boss', 'Sport', 'Fun', 'House',
'Page', 'Term', 'Test', 'Answer', 'Sound', 'Focus', 'Matter', 'Kind',
'Soil', 'Board', 'Oil', 'Picture', 'Access', 'Garden', 'Range', 'Rate',
'Reason', 'Future', 'Site', 'Demand', 'Exercise', 'Image', 'Case', 'Cause',
'Coast', 'Action', 'Age', 'Bad', 'Boat', 'Record', 'Result', 'Section',
'Building', 'Mouse', 'Cash', 'Class', 'Nothing', 'Period', 'Plan', 'Store',
'Tax', 'Side', 'Subject', 'Space', 'Rule', 'Stock', 'Weather', 'Chance',
'Figure', 'Man', 'Model', 'Source', 'Beginning', 'Earth', 'Program', 'Chicken',
'Design', 'Feature', 'Head', 'Material', 'Purpose', 'Question', 'Rock', 'Salt',
'Act', 'Birth', 'Car', 'Dog', 'Object', 'Scale', 'Sun', 'Note',
'Profit', 'Rent', 'Speed', 'Style', 'War', 'Bank', 'Craft', 'Half',
'Inside', 'Outside', 'Standard', 'Bus', 'Exchange', 'Eye', 'Fire', 'Position',
'Pressure', 'Stress', 'Advantage', 'Benefit', 'Box', 'Frame', 'Issue', 'Step',
'Cycle', 'Face', 'Item', 'Metal', 'Paint', 'Review', 'Room', 'Screen',
'Structure', 'View', 'Account', 'Ball', 'Discipline', 'Medium', 'Share', 'Balance',
'Bit', 'Black', 'Bottom', 'Choice', 'Gift', 'Impact', 'Machine', 'Shape',
'Tool', 'Wind', 'Address', 'Average', 'Career', 'Culture', 'Morning', 'Pot',
'Sign', 'Table', 'Task', 'Condition', 'Contact', 'Credit', 'Egg', 'Hope',
'Ice', 'Network', 'North', 'Square', 'Attempt', 'Date', 'Effect', 'Link',
'Post', 'Star', 'Voice', 'Capital', 'Challenge', 'Friend', 'Self', 'Shot',
'Brush', 'Couple', 'Exit', 'Front', 'Function', 'Lack', 'Living', 'Plant',
'Plastic', 'Spot', 'Summer', 'Taste', 'Theme', 'Track', 'Wing', 'Brain',
'Button', 'Click', 'Desire', 'Foot', 'Gas', 'Influence', 'Notice', 'Rain',
'Wall', 'Base', 'Damage', 'Distance', 'Feeling', 'Pair', 'Savings', 'Staff',
'Sugar', 'Target', 'Text', 'Animal', 'Author', 'Budget', 'Discount', 'File',
'Ground', 'Lesson', 'Minute', 'Officer', 'Phase', 'Reference', 'Register', 'Sky',
'Stage', 'Stick', 'Title', 'Trouble', 'Bowl', 'Bridge', 'Campaign', 'Character',
'Club', 'Edge', 'Evidence', 'Fan', 'Letter', 'Lock', 'Maximum', 'Novel',
'Option', 'Pack', 'Park', 'Plenty', 'Quarter', 'Skin', 'Sort', 'Weight',
'Baby', 'Background', 'Carry', 'Dish', 'Factor', 'Fruit', 'Glass', 'Joint',
'Master', 'Muscle', 'Red', 'Strength', 'Traffic', 'Trip', 'Vegetable', 'Appeal',
'Chart', 'Gear', 'Ideal', 'Kitchen', 'Land', 'Log', 'Mother', 'Net',
'Party', 'Principle', 'Relative', 'Sale', 'Season', 'Signal', 'Spirit', 'Street',
'Tree', 'Wave', 'Belt', 'Bench', 'Commission', 'Copy', 'Drop', 'Minimum',
'Path', 'Progress', 'Project', 'Sea', 'South', 'Status', 'Stuff', 'Ticket',
'Tour', 'Angle', 'Blue', 'Breakfast', 'Confidence', 'Daughter', 'Degree', 'Doctor',
'Dot', 'Dream', 'Duty', 'Essay', 'Father', 'Fee', 'Finance', 'Hour',
'Juice', 'Luck', 'Milk', 'Mouth', 'Peace', 'Pipe', 'Stable', 'Storm',
'Substance', 'Team', 'Trick', 'Afternoon', 'Bat', 'Beach', 'Blank', 'Catch',
'Chain', 'Consideration', 'Cream', 'Crew', 'Detail', 'Gold', 'Interview', 'Kid',
'Mark', 'Mission', 'Pain', 'Pleasure', 'Score', 'Screw', 'Gratitude', 'Shop',
'Shower', 'Suit', 'Tone', 'Window', 'Agent', 'Band', 'Bath', 'Block',
'Bone', 'Calendar', 'Candidate', 'Cap', 'Coat', 'Contest', 'Corner', 'Court',
'Cup', 'District', 'Door', 'East', 'Finger', 'Garage', 'Guarantee', 'Hole',
'Hook', 'Implement', 'Layer', 'Lecture', 'Lie', 'Manner', 'Meeting', 'Nose',
'Parking', 'Partner', 'Profile', 'Rice', 'Routine', 'Schedule', 'Swimming', 'Telephone',
'Tip', 'Winter', 'Airline', 'Bag', 'Battle', 'Bed', 'Bill', 'Bother',
'Cake', 'Code', 'Curve', 'Designer', 'Dimension', 'Dress', 'Ease', 'Emergency',
'Evening', 'Extension', 'Farm', 'Fight', 'Gap', 'Grade', 'Holiday', 'Horror',
'Horse', 'Host', 'Husband', 'Loan', 'Mistake', 'Mountain', 'Nail', 'Noise',
'Occasion', 'Package', 'Patient', 'Pause', 'Phrase', 'Proof', 'Race', 'Relief',
'Sand', 'Sentence', 'Shoulder', 'Smoke', 'Stomach', 'String', 'Tourist', 'Towel',
'Vacation', 'West', 'Wheel', 'Wine', 'Arm', 'Aside', 'Associate', 'Bet',
'Blow', 'Border', 'Branch', 'Breast', 'Brother', 'Buddy', 'Bunch', 'Chip',
'Coach', 'Cross', 'Document', 'Draft', 'Dust', 'Expert', 'Floor', 'God',
'Golf', 'Habit', 'Iron', 'Judge', 'Knife', 'Landscape', 'League', 'Mail',
'Mess', 'Native', 'Opening', 'Parent', 'Pattern', 'Pin', 'Pool', 'Pound',
'Request', 'Salary', 'Shame', 'Shelter', 'Shoe', 'Silver', 'Tackle', 'Tank',
'Trust', 'Assist', 'Bake', 'Bar', 'Bell', 'Bike', 'Blame', 'Boy',
'Brick', 'Chair', 'Closet', 'Clue', 'Collar', 'Comment', 'Conference', 'Devil',
'Diet', 'Fear', 'Fuel', 'Glove', 'Jacket', 'Lunch', 'Monitor', 'Mortgage',
'Nurse', 'Pace', 'Panic', 'Peak', 'Plane', 'Reward', 'Row', 'Sandwich',
'Shock', 'Spite', 'Spray', 'Surprise', 'Till', 'Transition', 'Weekend', 'Welcome',
'Yard', 'Alarm', 'Bend', 'Bicycle', 'Bite', 'Blind', 'Bottle', 'Cable',
'Candle', 'Clerk', 'Cloud', 'Concert', 'Counter', 'Flower', 'Grandfather', 'Harm',
'Knee', 'Lawyer', 'Leather', 'Load', 'Mirror', 'Neck', 'Pension', 'Plate',
'Purple', 'Ruin', 'Ship', 'Skirt', 'Slice', 'Snow', 'Specialist', 'Stroke',
'Switch', 'Trash', 'Tune', 'Zone', 'Anger', 'Award', 'Bid', 'Bitter',
'Boot', 'Bug', 'Camp', 'Candy', 'Carpet', 'Cat', 'Champion', 'Channel',
'Clock', 'Comfort', 'Cow', 'Crack', 'Engineer', 'Entrance', 'Fault', 'Grass',
'Guy', 'Hell', 'Highlight', 'Incident', 'Island', 'Joke', 'Jury', 'Leg',
'Lip', 'Mate', 'Motor', 'Nerve', 'Passage', 'Pen', 'Pride', 'Priest',
'Prize', 'Promise', 'Resident', 'Resort', 'Ring', 'Roof', 'Rope', 'Sail',
'Scheme', 'Script', 'Sock', 'Station', 'Toe', 'Tower', 'Truck', 'Witness',
'Asparagus', 'You', 'It', 'Can', 'Will', 'If', 'One', 'Many',
'Most', 'Other', 'Use', 'Make', 'Good', 'Look', 'Help', 'Go',
'Great', 'Being', 'Few', 'Might', 'Still', 'Public', 'Read', 'Keep',
'Start', 'Give', 'Human', 'Local', 'General', 'She', 'Specific', 'Long',
'Play', 'Feel', 'High', 'Tonight', 'Put', 'Common', 'Set', 'Change',
'Simple', 'Past', 'Big', 'Possible', 'Particular', 'Today', 'Major', 'Personal',
'Current', 'National', 'Cut', 'Natural', 'Physical', 'Show', 'Try', 'Check',
'Second', 'Call', 'Move', 'Pay', 'Let', 'Increase', 'Single', 'Individual',
'Turn', 'Ask', 'Buy', 'Guard', 'Hold', 'Main', 'Offer', 'Potential',
'Professional', 'International', 'Travel', 'Cook', 'Alternative', 'Following', 'Special', 'Working',
'Whole', 'Dance', 'Excuse', 'Cold', 'Commercial', 'Low', 'Purchase', 'Deal',
'Primary', 'Worth', 'Fall', 'Necessary', 'Positive', 'Produce', 'Search', 'Present',
'Spend', 'Talk', 'Creative', 'Tell', 'Cost', 'Drive', 'Green', 'Support',
'Glad', 'Remove', 'Return', 'Run', 'Complex', 'Due', 'Effective', 'Middle',
'Regular', 'Reserve', 'Independent', 'Leave', 'Original', 'Reach', 'Rest', 'Serve',
'Watch', 'Beautiful', 'Charge', 'Active', 'Break', 'Negative', 'Safe', 'Stay',
'Visit', 'Visual', 'Affect', 'Cover', 'Report', 'Rise', 'Walk', 'White',
'Beyond', 'Junior', 'Pick', 'Unique', 'Anything', 'Classic', 'Final', 'Lift',
'Mix', 'Private', 'Stop', 'Teach', 'Western', 'Concern', 'Familiar', 'Fly',
'Official', 'Broad', 'Comfortable', 'Gain', 'Maybe', 'Rich', 'Save', 'Stand',
'Young', 'Heavy', 'Hello', 'Lead', 'Listen', 'Valuable', 'Worry', 'Handle',
'Leading', 'Meet', 'Release', 'Sell', 'Finish', 'Normal', 'Press', 'Ride',
'Secret', 'Spread', 'Spring', 'Tough', 'Wait', 'Brown', 'Deep', 'Display',
'Flow', 'Hit', 'Objective', 'Shoot', 'Touch', 'Cancel', 'Chemical', 'Cry',
'Dump', 'Extreme', 'Push', 'Conflict', 'Eat', 'Fill', 'Formal', 'Jump',
'Kick', 'Opposite', 'Pass', 'Pitch', 'Remote', 'Total', 'Treat', 'Vast',
'Abuse', 'Beat', 'Burn', 'Deposit', 'Print', 'Raise', 'Sleep', 'Somewhere',
'Advance', 'Anywhere', 'Consist', 'Dark', 'Double', 'Draw', 'Equal', 'Fix',
'Hire', 'Internal', 'Join', 'Kill', 'Sensitive', 'Tap', 'Win', 'Attack',
'Claim', 'Constant', 'Drag', 'Drink', 'Guess', 'Minor', 'Pull', 'Raw',
'Soft', 'Solid', 'Wear', 'Weird', 'Wonder', 'Annual', 'Count', 'Dead',
'Doubt', 'Feed', 'Forever', 'Impress', 'Nobody', 'Repeat', 'Round', 'Sing',
'Slide', 'Strip', 'Whereas', 'Wish', 'Combine', 'Command', 'Dig', 'Divide',
'Equivalent', 'Hang', 'Hunt', 'Initial', 'March', 'Mention', 'Spiritual', 'Survey',
'Tie', 'Adult', 'Brief', 'Crazy', 'Escape', 'Gather', 'Hate', 'Prior',
'Repair', 'Rough', 'Sad', 'Scratch', 'Sick', 'Strike', 'Employ', 'External',
'Hurt', 'Illegal', 'Laugh', 'Lay', 'Mobile', 'Nasty', 'Ordinary', 'Respond',
'Royal', 'Senior', 'Split', 'Strain', 'Struggle', 'Swim', 'Train', 'Upper',
'Wash', 'Yellow', 'Convert', 'Crash', 'Dependent', 'Fold', 'Funny', 'Grab',
'Hide', 'Miss', 'Permit', 'Quote', 'Recover', 'Resolve', 'Roll', 'Sink',
'Slip', 'Spare', 'Suspect', 'Sweet', 'Swing', 'Twist', 'Upstairs', 'Usual',
'Abroad', 'Brave', 'Calm', 'Concentrate', 'Estimate', 'Grand', 'Male', 'Mine',
'Prompt', 'Quiet', 'Refuse', 'Regret', 'Reveal', 'Rush', 'Shake', 'Shift',
'Shine', 'Steal', 'Suck', 'Surround', 'Anybody', 'Bear', 'Brilliant', 'Dare',
'Dear', 'Delay', 'Drunk', 'Female', 'Hurry', 'Inevitable', 'Invite', 'Kiss',
'Neat', 'Pop', 'Punch', 'Quit', 'Reply', 'Representative', 'Resist', 'Rip',
'Rub', 'Silly', 'Smile', 'Spell', 'Stretch', 'Stupid', 'Tear', 'Temporary',
'Tomorrow', 'Wake', 'Wrap', 'Yesterday']

242
awxkit/awxkit/ws.py Normal file
View File

@ -0,0 +1,242 @@
from queue import Queue, Empty
import time
import threading
import logging
import atexit
import json
import ssl
import urllib.parse
import websocket
from awxkit.config import config
log = logging.getLogger(__name__)
class WSClientException(Exception):
    """Raised by WSClient for websocket-level failures (e.g. connect timeout)."""
    pass
# Event names used in websocket subscription payloads (see WSClient below).
changed = 'changed'
limit_reached = 'limit_reached'
status_changed = 'status_changed'
summary = 'summary'
class WSClient(object):
    """Provides a basic means of testing pub/sub notifications with payloads similar to
    'groups': {'jobs': ['status_changed', 'summary'],
               'schedules': ['changed'],
               'ad_hoc_command_events': [ids...],
               'job_events': [ids...],
               'workflow_events': [ids...],
               'project_update_events': [ids...],
               'inventory_update_events': [ids...],
               'system_job_events': [ids...],
               'control': ['limit_reached']}
    e.x:
    ```
    ws = WSClient(token, port=8013, secure=False).connect()
    ws.job_details()
    ... # launch job
    job_messages = [msg for msg in ws]
    ws.ad_hoc_stdout()
    ... # launch ad hoc command
    ad_hoc_messages = [msg for msg in ws]
    ws.close()
    ```
    """
    # Subscription group types

    def __init__(self, token=None, hostname='', port=443, secure=True, session_id=None, csrftoken=None):
        # If no hostname supplied, derive scheme/host/port from config.base_url.
        if not hostname:
            result = urllib.parse.urlparse(config.base_url)
            secure = result.scheme == 'https'
            port = result.port
            if port is None:
                # Default port based on the scheme when the URL omits one.
                port = 80
                if secure:
                    port = 443
            # should we be adding result.path here?
            hostname = result.hostname

        self.port = port
        self._use_ssl = secure
        self.hostname = hostname
        self.token = token
        self.session_id = session_id
        self.csrftoken = csrftoken
        # Messages received by the background thread are handed over via this queue.
        self._recv_queue = Queue()
        self._ws_closed = False
        self._ws_connected_flag = threading.Event()
        # Prefer token auth; fall back to session id (+ optional CSRF token).
        if self.token is not None:
            auth_cookie = 'token="{0.token}";'.format(self)
        elif self.session_id is not None:
            auth_cookie = 'sessionid="{0.session_id}"'.format(self)
            if self.csrftoken:
                auth_cookie += ';csrftoken={0.csrftoken}'.format(self)
        else:
            auth_cookie = ''
        pref = 'wss://' if self._use_ssl else 'ws://'
        url = '{0}{1.hostname}:{1.port}/websocket/'.format(pref, self)
        self.ws = websocket.WebSocketApp(url,
                                         on_open=self._on_open,
                                         on_message=self._on_message,
                                         on_error=self._on_error,
                                         on_close=self._on_close,
                                         cookie=auth_cookie)
        self._message_cache = []
        # Either False, or a dict(jobs=..., events=...) describing a deferred
        # per-job event subscription (see subscribe_to_pending_events).
        self._should_subscribe_to_pending_job = False

    def connect(self):
        """Start the websocket loop in a daemon thread and wait (up to 20s)
        for the connection to open.  Certificate verification is disabled.
        """
        wst = threading.Thread(target=self._ws_run_forever, args=(self.ws, {"cert_reqs": ssl.CERT_NONE}))
        wst.daemon = True
        wst.start()
        atexit.register(self.close)
        if not self._ws_connected_flag.wait(20):
            raise WSClientException('Failed to establish channel connection w/ AWX.')
        return self

    def close(self):
        # Registered with atexit; safe to call repeatedly (no-op once closed).
        log.info('close method was called, but ignoring')
        if not self._ws_closed:
            log.info('Closing websocket connection.')
            self.ws.close()

    def job_details(self, *job_ids):
        """subscribes to job status, summary, and, for the specified ids, job events"""
        self.subscribe(jobs=[status_changed, summary], job_events=list(job_ids))

    def pending_job_details(self):
        """subscribes to job status and summary, with responsive
        job event subscription for an id provided by AWX
        """
        self.subscribe_to_pending_events('job_events', [status_changed, summary])

    def status_changes(self):
        self.subscribe(jobs=[status_changed])

    def job_stdout(self, *job_ids):
        self.subscribe(jobs=[status_changed], job_events=list(job_ids))

    def pending_job_stdout(self):
        self.subscribe_to_pending_events('job_events')

    # mirror page behavior
    def ad_hoc_stdout(self, *ahc_ids):
        self.subscribe(jobs=[status_changed], ad_hoc_command_events=list(ahc_ids))

    def pending_ad_hoc_stdout(self):
        self.subscribe_to_pending_events('ad_hoc_command_events')

    def project_update_stdout(self, *project_update_ids):
        self.subscribe(jobs=[status_changed], project_update_events=list(project_update_ids))

    def pending_project_update_stdout(self):
        self.subscribe_to_pending_events('project_update_events')

    def inventory_update_stdout(self, *inventory_update_ids):
        self.subscribe(jobs=[status_changed], inventory_update_events=list(inventory_update_ids))

    def pending_inventory_update_stdout(self):
        self.subscribe_to_pending_events('inventory_update_events')

    def workflow_events(self, *wfjt_ids):
        self.subscribe(jobs=[status_changed], workflow_events=list(wfjt_ids))

    def pending_workflow_events(self):
        self.subscribe_to_pending_events('workflow_events')

    def system_job_events(self, *system_job_ids):
        self.subscribe(jobs=[status_changed], system_job_events=list(system_job_ids))

    def pending_system_job_events(self):
        self.subscribe_to_pending_events('system_job_events')

    def subscribe_to_pending_events(self, events, jobs=[status_changed]):
        # NOTE(review): mutable default argument; appears safe here because the
        # list is never mutated, only referenced.
        self._should_subscribe_to_pending_job = dict(jobs=jobs, events=events)
        self.subscribe(jobs=jobs)

    # mirror page behavior
    def jobs_list(self):
        self.subscribe(jobs=[status_changed, summary], schedules=[changed])

    # mirror page behavior
    def dashboard(self):
        self.subscribe(jobs=[status_changed])

    def subscribe(self, **groups):
        """Sends a subscription request for the specified channel groups.
        ```
        ws.subscribe(jobs=[ws.status_changed, ws.summary],
                     job_events=[1,2,3])
        ```
        """
        self._subscribe(groups=groups)

    def _subscribe(self, **payload):
        payload['xrftoken'] = self.csrftoken
        self._send(json.dumps(payload))

    def unsubscribe(self):
        self._send(json.dumps(dict(groups={}, xrftoken=self.csrftoken)))
        # it takes time for the unsubscribe event to be recieved and consumed and for
        # messages to stop being put on the queue for daphne to send to us
        time.sleep(5)

    def _on_message(self, message):
        # websocket-client callback: parse, optionally upgrade a pending-job
        # subscription with the concrete job id, then enqueue for consumers.
        message = json.loads(message)
        log.debug('received message: {}'.format(message))

        if all([message.get('group_name') == 'jobs',
                message.get('status') == 'pending',
                message.get('unified_job_id'),
                self._should_subscribe_to_pending_job]):
            # Only pair project updates with a pending 'project_update_events'
            # subscription (and non-project jobs with any other event type).
            if bool(message.get('project_id')) == (
                    self._should_subscribe_to_pending_job['events'] == 'project_update_events'):
                self._update_subscription(message['unified_job_id'])

        return self._recv_queue.put(message)

    def _update_subscription(self, job_id):
        # Re-subscribe, now including the per-job event group for job_id.
        subscription = dict(jobs=self._should_subscribe_to_pending_job['jobs'])
        events = self._should_subscribe_to_pending_job['events']
        subscription[events] = [job_id]
        self.subscribe(**subscription)
        self._should_subscribe_to_pending_job = False

    def _on_open(self):
        self._ws_connected_flag.set()

    def _on_error(self, error):
        log.info('Error received: {}'.format(error))

    def _on_close(self):
        log.info('Successfully closed ws.')
        self._ws_closed = True

    def _ws_run_forever(self, sockopt=None, sslopt=None):
        # NOTE(review): connect() passes (self.ws, ssl-dict) positionally, so
        # `sockopt` actually receives the WebSocketApp (unused) and `sslopt`
        # the ssl options -- confirm the intended signature.
        self.ws.run_forever(sslopt=sslopt)
        log.debug('ws.run_forever finished')

    def _recv(self, wait=False, timeout=10):
        try:
            # NOTE(review): positional args mean block=`wait`; with the default
            # wait=False, Queue.get ignores the timeout entirely -- confirm.
            msg = self._recv_queue.get(wait, timeout)
        except Empty:
            return None
        return msg

    def _send(self, data):
        self.ws.send(data)
        log.debug('successfully sent {}'.format(data))

    def __iter__(self):
        # Drain currently queued messages; stops at the first empty/falsy read.
        while True:
            val = self._recv()
            if not val:
                return
            yield val

View File

@ -0,0 +1,97 @@
import os
import yaml
import glob
import logging
from py.path import local
log = logging.getLogger(__name__)

# Module-level caches so that repeated !include/!import of the same glob
# pattern (or the same resolved file) is only parsed from disk once.
file_pattern_cache = {}
file_path_cache = {}
class Loader(yaml.FullLoader):
    """yaml.FullLoader subclass that adds ``!include``/``!import`` constructors.

    Included paths are resolved relative to the directory of the file being
    loaded and may be glob patterns; parsed results are cached module-wide.
    """

    def __init__(self, stream):
        # Remember the including file's directory so relative includes resolve.
        self._root = os.path.split(stream.name)[0]
        super(Loader, self).__init__(stream)

    def include(self, node):
        """Constructor for ``!include``/``!import``.

        Accepts a scalar filename, a sequence of filenames (results are
        concatenated), or a mapping of key -> filename (each included file is
        expected to contain that key at its top level).
        """
        if isinstance(node, yaml.ScalarNode):
            return self.extractFile(self.construct_scalar(node))
        elif isinstance(node, yaml.SequenceNode):
            result = []
            for filename in self.construct_sequence(node):
                result += self.extractFile(filename)
            return result
        elif isinstance(node, yaml.MappingNode):
            result = {}
            for k, v in self.construct_mapping(node).items():
                result[k] = self.extractFile(v)[k]
            return result
        else:
            log.error("unrecognised node type in !include statement")
            raise yaml.constructor.ConstructorError

    def extractFile(self, filename):
        """Load (and cache) YAML data from every file matching ``filename``,
        merged into a single dict.

        NOTE(review): assumes each matched file parses to a mapping; a file
        whose top level is a list would break ``data.update`` below.
        """
        file_pattern = os.path.join(self._root, filename)
        log.debug('Will attempt to extract schema from: {0}'.format(file_pattern))
        if file_pattern in file_pattern_cache:
            log.debug('File pattern cache hit: {0}'.format(file_pattern))
            return file_pattern_cache[file_pattern]

        data = dict()
        for file_path in glob.glob(file_pattern):
            file_path = os.path.abspath(file_path)
            if file_path in file_path_cache:
                log.debug('Schema cache hit: {0}'.format(file_path))
                path_data = file_path_cache[file_path]
            else:
                log.debug('Loading schema from {0}'.format(file_path))
                with open(file_path, 'r') as f:
                    path_data = yaml.load(f, Loader)
                file_path_cache[file_path] = path_data
            data.update(path_data)

        file_pattern_cache[file_pattern] = data
        return data


# BUG FIX: register the custom constructors *after* the class body completes.
# In the original layout these calls sat inside the class body, where the name
# `Loader` is not yet bound (the class statement has not finished executing),
# which would raise NameError at import time.
Loader.add_constructor('!include', Loader.include)
Loader.add_constructor('!import', Loader.include)
def load_file(filename):
    """Load and return the parsed contents of a YAML file.

    If `filename` is omitted or None, fall back to ``data.yaml`` in the
    parent of the utils directory.

    The awx_data dict loaded with this method supports value randomization,
    thanks to the RandomizeValues class. See that class for possible options.

    Example usage in data.yaml (quotes are important!):

    top_level:
      list:
      - "{random_str}"
      - "{random_int}"
      - "{random_uuid}"
      random_thing: "{random_string:24}"
    """
    if filename is None:
        here = os.path.abspath(__file__)
        path = local(here).new(basename='../data.yaml')
    else:
        path = local(filename)

    if not path.check():
        raise Exception('Unable to load data file at %s' % path)

    # FIXME - support load_all()
    return yaml.load(path.open(), Loader=Loader)

9
awxkit/requirements.txt Normal file
View File

@ -0,0 +1,9 @@
PyYAML>=5.1
cryptography
flake8
prometheus-client
python-dateutil
requests
termcolor
toposort
websocket-client>=0.54.0

70
awxkit/setup.py Normal file
View File

@ -0,0 +1,70 @@
import os
import glob
import shutil
from setuptools import setup, find_packages, Command
try:  # for pip >= 10
    from pip._internal.req import parse_requirements
except ImportError:  # for pip <= 9.0.3
    from pip.req import parse_requirements

# NOTE(review): this relies on pip internals; `r.req` was removed in pip 20.x
# (newer pip exposes `r.requirement` instead) -- confirm the supported pip range.
requirements = [str(r.req) for r in parse_requirements('requirements.txt', session=False)]
class CleanCommand(Command):
    """setup.py `clean` subcommand that forcefully removes build artifacts."""

    description = "Custom clean command that forcefully removes dist/build directories"
    user_options = []

    def initialize_options(self):
        self.cwd = None

    def finalize_options(self):
        self.cwd = os.getcwd()

    def run(self):
        assert os.getcwd() == self.cwd, 'Must be in package root: %s' % self.cwd
        doomed = []
        # Collect *.pyc files and __pycache__ directories anywhere in the tree
        # (bottom-up so directories are removable after their contents).
        for root, dirs, files in os.walk(self.cwd, topdown=False):
            doomed.extend(
                os.path.join(root, fname)
                for fname in files
                if fname.endswith('.pyc') and os.path.isfile(os.path.join(root, fname))
            )
            if root.endswith('__pycache__'):
                doomed.append(root)
        # Collect egg artifacts in the package root.
        doomed.extend(glob.glob('*.egg') + glob.glob('*egg-info'))
        # Zap!
        for target in doomed:
            if self.verbose:
                print("Removing '%s'" % target)
            if self.dry_run:
                continue
            if os.path.isdir(target):
                shutil.rmtree(target)
            else:
                os.remove(target)
# Package version, single-sourced for the setup() call below.
version = '0.1.0'

setup(name='awxkit',
      version=version,
      description='awx cli client',
      packages=find_packages(exclude=['test']),
      cmdclass={
          # Wire in the custom `clean` command defined above.
          'clean': CleanCommand,
      },
      include_package_data=True,
      install_requires=requirements,
      python_requires=">= 3.5",
      entry_points={
          'console_scripts': [
              'akit=awxkit.scripts.basic_session:load_interactive',
              'awx=awxkit.cli:run'
          ]
      }
      )

0
awxkit/test/__init__.py Normal file
View File

View File

@ -0,0 +1,60 @@
from io import StringIO
import pytest
from requests.exceptions import ConnectionError
from awxkit.cli import run, CLI
class MockedCLI(CLI):
    """CLI test double that avoids any real HTTP interaction."""

    def fetch_version_root(self):
        # Network call in the real CLI; intentionally a no-op here.
        pass

    @property
    def v2(self):
        # Stand in for the API v2 root with another mocked instance.
        return MockedCLI()

    @property
    def json(self):
        # Minimal API payload: a single 'users' resource with no page class.
        return {
            'users': None
        }
@pytest.mark.parametrize('help_param', ['-h', '--help'])
def test_help(capfd, help_param):
    """Both -h and --help should exit and print the usage text."""
    with pytest.raises(SystemExit):
        run(['awx {}'.format(help_param)])
    stdout, _ = capfd.readouterr()
    assert "usage:" in stdout
    expected_snippets = (
        '--conf.host https://example.awx.org]',
        '-v, --verbose',
    )
    for snippet in expected_snippets:
        assert snippet in stdout
def test_connection_error(capfd):
    # With the default config there is no reachable server, so connect()
    # is expected to raise a requests ConnectionError.
    cli = CLI()
    cli.parse_args(['awx'])
    with pytest.raises(ConnectionError):
        cli.connect()
@pytest.mark.parametrize('resource', ['', 'invalid'])
def test_list_resources(capfd, resource):
    """If a valid resource isn't specified, the CLI should print --help."""
    cli = MockedCLI()
    cli.parse_args(['awx {}'.format(resource)])
    cli.connect()
    cli.parse_resource()
    stdout, _ = capfd.readouterr()
    assert "usage:" in stdout
    expected_snippets = (
        '--conf.host https://example.awx.org]',
        '-v, --verbose',
    )
    for snippet in expected_snippets:
        assert snippet in stdout

View File

@ -0,0 +1,65 @@
import pytest
from requests.exceptions import ConnectionError
from awxkit.cli import CLI
from awxkit import config
def test_host_from_environment():
    """TOWER_HOST in the environment should populate config.base_url."""
    env = {'TOWER_HOST': 'https://xyz.local'}
    cli = CLI()
    cli.parse_args(['awx'], env=env)
    # connect() failing is expected; only the config side effect matters here.
    with pytest.raises(ConnectionError):
        cli.connect()
    assert config.base_url == 'https://xyz.local'
def test_host_from_argv():
    # --conf.host on the command line should populate config.base_url;
    # connect() failing is expected (no server), only the config matters.
    cli = CLI()
    cli.parse_args(['awx', '--conf.host', 'https://xyz.local'])
    with pytest.raises(ConnectionError):
        cli.connect()
    assert config.base_url == 'https://xyz.local'
def test_username_and_password_from_environment():
    """TOWER_USERNAME/TOWER_PASSWORD should populate the default credentials."""
    env = {
        'TOWER_USERNAME': 'mary',
        'TOWER_PASSWORD': 'secret',
    }
    cli = CLI()
    cli.parse_args(['awx'], env=env)
    with pytest.raises(ConnectionError):
        cli.connect()
    credentials = config.credentials.default
    assert credentials.username == 'mary'
    assert credentials.password == 'secret'
def test_username_and_password_argv():
    """--conf.username/--conf.password should populate the default credentials."""
    argv = ['awx', '--conf.username', 'mary', '--conf.password', 'secret']
    cli = CLI()
    cli.parse_args(argv)
    with pytest.raises(ConnectionError):
        cli.connect()
    credentials = config.credentials.default
    assert credentials.username == 'mary'
    assert credentials.password == 'secret'
def test_config_precedence():
    """Command-line credentials take precedence over environment variables."""
    argv = ['awx', '--conf.username', 'mary', '--conf.password', 'secret']
    env = {
        'TOWER_USERNAME': 'IGNORE',
        'TOWER_PASSWORD': 'IGNORE',
    }
    cli = CLI()
    cli.parse_args(argv, env=env)
    with pytest.raises(ConnectionError):
        cli.connect()
    credentials = config.credentials.default
    assert credentials.username == 'mary'
    assert credentials.password == 'secret'

View File

@ -0,0 +1,46 @@
import json
import yaml
from awxkit.api.pages import Page
from awxkit.api.pages.users import Users, User
from awxkit.cli.format import format_response
def test_json_empty_list():
    """An empty result set should round-trip through the JSON formatter."""
    empty = {'results': []}
    page = Page.from_json(empty)
    assert json.loads(format_response(page)) == empty
def test_yaml_empty_list():
    """An empty result set should round-trip through the YAML formatter."""
    empty = {'results': []}
    page = Page.from_json(empty)
    assert yaml.safe_load(format_response(page, fmt='yaml')) == empty
def test_json_list():
    """A list of users should round-trip through the JSON formatter."""
    users = {
        'results': [{'username': name} for name in ('betty', 'tom', 'anne')]
    }
    page = Users.from_json(users)
    assert json.loads(format_response(page)) == users
def test_yaml_list():
    """A list of users should round-trip through the YAML formatter."""
    users = {
        'results': [{'username': name} for name in ('betty', 'tom', 'anne')]
    }
    page = Users.from_json(users)
    assert yaml.safe_load(format_response(page, fmt='yaml')) == users

View File

@ -0,0 +1,229 @@
import argparse
import json
import unittest
from io import StringIO
import pytest
from requests import Response
from awxkit.api.pages import Page
from awxkit.cli.options import ResourceOptionsParser
class OptionsPage(Page):
    """Page test double whose OPTIONS metadata is injected via from_json."""

    def options(self):
        # The real Page issues an OPTIONS request; here the page is its own response.
        return self

    def endswith(self, v):
        # Convenience so the parser can treat the page like its endpoint string.
        return self.endpoint.endswith(v)

    def __getitem__(self, k):
        # NOTE(review): ignores `k` and always returns empty GET/POST/PUT
        # actions -- presumably enough for ResourceOptionsParser; confirm
        # against its usage.
        return {
            'GET': {},
            'POST': {},
            'PUT': {},
        }
class TestOptions(unittest.TestCase):
    def setUp(self):
        # Each test gets a fresh argparse subparser registry to build actions into.
        _parser = argparse.ArgumentParser()
        self.parser = _parser.add_subparsers(help='action')
def test_list(self):
page = OptionsPage.from_json({
'actions': {
'GET': {},
'POST': {},
}
})
ResourceOptionsParser(page, 'users', self.parser)
assert 'list' in self.parser.choices
def test_list_filtering(self):
page = OptionsPage.from_json({
'actions': {
'GET': {},
'POST': {
'first_name': {'type': 'string'}
},
}
})
options = ResourceOptionsParser(page, 'users', self.parser)
options.build_query_arguments('list', 'POST')
assert 'list' in self.parser.choices
out = StringIO()
self.parser.choices['list'].print_help(out)
assert '--first_name TEXT' in out.getvalue()
def test_list_not_filterable(self):
page = OptionsPage.from_json({
'actions': {
'GET': {},
'POST': {
'middle_name': {'type': 'string', 'filterable': False}
},
}
})
options = ResourceOptionsParser(page, 'users', self.parser)
options.build_query_arguments('list', 'POST')
assert 'list' in self.parser.choices
out = StringIO()
self.parser.choices['list'].print_help(out)
assert '--middle_name' not in out.getvalue()
def test_creation_optional_argument(self):
page = OptionsPage.from_json({
'actions': {
'POST': {
'first_name': {
'type': 'string',
'help_text': 'Please specify your first name',
}
},
}
})
options = ResourceOptionsParser(page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
out = StringIO()
self.parser.choices['create'].print_help(out)
assert '--first_name TEXT Please specify your first name' in out.getvalue()
def test_creation_required_argument(self):
page = OptionsPage.from_json({
'actions': {
'POST': {
'username': {
'type': 'string',
'help_text': 'Please specify a username',
'required': True
}
},
}
})
options = ResourceOptionsParser(page, self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
out = StringIO()
self.parser.choices['create'].print_help(out)
assert '--username TEXT [REQUIRED] Please specify a username' in out.getvalue()
def test_creation_required_argument(self):
page = OptionsPage.from_json({
'actions': {
'POST': {
'username': {
'type': 'string',
'help_text': 'Please specify a username',
'required': True
}
},
}
})
options = ResourceOptionsParser(page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
out = StringIO()
self.parser.choices['create'].print_help(out)
assert '--username TEXT [REQUIRED] Please specify a username' in out.getvalue()
def test_integer_argument(self):
page = OptionsPage.from_json({
'actions': {
'POST': {
'limit': {'type': 'integer'}
},
}
})
options = ResourceOptionsParser(page, 'job_templates', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
out = StringIO()
self.parser.choices['create'].print_help(out)
assert '--limit INTEGER' in out.getvalue()
def test_boolean_argument(self):
page = OptionsPage.from_json({
'actions': {
'POST': {
'diff_mode': {'type': 'boolean'}
},
}
})
options = ResourceOptionsParser(page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
out = StringIO()
self.parser.choices['create'].print_help(out)
assert '--diff_mode BOOLEAN' in out.getvalue()
def test_choices(self):
page = OptionsPage.from_json({
'actions': {
'POST': {
'verbosity': {
'type': 'integer',
'choices': [
(0, '0 (Normal)'),
(1, '1 (Verbose)'),
(2, '2 (More Verbose)'),
(3, '3 (Debug)'),
(4, '4 (Connection Debug)'),
(5, '5 (WinRM Debug)'),
]
}
},
}
})
options = ResourceOptionsParser(page, 'users', self.parser)
options.build_query_arguments('create', 'POST')
assert 'create' in self.parser.choices
out = StringIO()
self.parser.choices['create'].print_help(out)
assert '--verbosity {0,1,2,3,4,5}' in out.getvalue()
def test_actions_with_primary_key(self):
for method in ('get', 'modify', 'delete'):
page = OptionsPage.from_json({
'actions': {'GET': {}, 'POST': {}}
})
ResourceOptionsParser(page, 'users', self.parser)
assert method in self.parser.choices
out = StringIO()
self.parser.choices[method].print_help(out)
assert 'positional arguments:\n id' in out.getvalue()
class TestSettingsOptions(unittest.TestCase):
    """Verify the special-cased parsing for /settings/ endpoints."""

    def setUp(self):
        _parser = argparse.ArgumentParser()
        self.parser = _parser.add_subparsers(help='action')

    def test_list(self):
        # Settings pages support PUT; `modify` takes key/value positionals
        # instead of per-field optional arguments.
        page = OptionsPage.from_json({
            'actions': {
                'GET': {},
                'POST': {},
                'PUT': {},
            }
        })
        page.endpoint = '/settings/all/'
        ResourceOptionsParser(page, 'settings', self.parser)
        assert 'list' in self.parser.choices
        assert 'modify' in self.parser.choices
        out = StringIO()
        self.parser.choices['modify'].print_help(out)
        assert 'modify [-h] key value' in out.getvalue()

0
awxkit/test/pytest.ini Normal file
View File

View File

@ -0,0 +1,45 @@
from unittest.mock import patch
import pytest
from awxkit.api.pages import credentials
from awxkit.utils import PseudoNamespace
def set_config_cred_to_desired(config, location):
    """Set the dotted-path `location` under config.credentials to 'desired'.

    Intermediate path segments are (re)created as empty PseudoNamespaces;
    only the final segment receives the sentinel value.
    """
    *parents, leaf = location.split('.')
    node = config.credentials
    for name in parents:
        setattr(node, name, PseudoNamespace())
        node = node[name]
    setattr(node, leaf, 'desired')
class MockCredentialType(object):
    """Lightweight stand-in for a credential-type API object."""

    def __init__(self, name, kind, managed_by_tower=True):
        # Mirror the constructor arguments as public attributes.
        self.name, self.kind, self.managed_by_tower = name, kind, managed_by_tower
@pytest.mark.parametrize('field, kind, config_cred, desired_field, desired_value',
                         [('field', 'ssh', PseudoNamespace(field=123), 'field', 123),
                          ('subscription', 'azure', PseudoNamespace(subscription_id=123), 'subscription', 123),
                          ('project_id', 'gce', PseudoNamespace(project=123), 'project', 123),
                          ('authorize_password', 'net', PseudoNamespace(authorize=123), 'authorize_password', 123)])
def test_get_payload_field_and_value_from_config_cred(field, kind, config_cred, desired_field, desired_value):
    """With no kwargs supplied, the value is sourced from the config credential,
    resolving per-kind field aliases (e.g. azure subscription_id, gce project).
    """
    ret_field, ret_val = credentials.get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, {},
                                                                                            config_cred)
    assert ret_field == desired_field
    assert ret_val == desired_value
@pytest.mark.parametrize('field, kind, kwargs, desired_field, desired_value',
                         [('field', 'ssh', dict(field=123), 'field', 123),
                          ('subscription', 'azure', dict(subscription=123), 'subscription', 123),
                          ('project_id', 'gce', dict(project_id=123), 'project', 123),
                          ('authorize_password', 'net', dict(authorize_password=123), 'authorize_password', 123)])
def test_get_payload_field_and_value_from_kwarg(field, kind, kwargs, desired_field, desired_value):
    """Explicit kwargs take effect when the config credential is empty,
    again normalizing per-kind field names (e.g. project_id -> project).
    """
    ret_field, ret_val = credentials.get_payload_field_and_value_from_kwargs_or_config_cred(field, kind, kwargs,
                                                                                            PseudoNamespace())
    assert ret_field == desired_field
    assert ret_val == desired_value

View File

@ -0,0 +1,689 @@
from toposort import CircularDependencyError
import pytest
from awxkit.utils import filter_by_class
from awxkit.api.mixins import has_create
class MockHasCreate(has_create.HasCreate):
    """HasCreate subclass with no live API connection, used as the base for
    the dependency-graph fixture classes below."""

    # No real connection is needed for graph/teardown tests.
    connection = None

    def __str__(self):
        return "instance of {0.__class__.__name__} ({1})".format(self, hex(id(self)))

    def __init__(self, *a, **kw):
        # Tracks whether teardown/silent_cleanup has run on this instance.
        self.cleaned = False
        super(MockHasCreate, self).__init__()

    def silent_cleanup(self):
        self.cleaned = True
# Fixture dependency lattice used throughout this module:
#   B optionally uses A; C needs A and B; D needs A (B optional);
#   E needs D and C; F needs B (E optional); G needs D (F, E optional);
#   H optionally uses E and A.
class A(MockHasCreate):
    def create(self, **kw):
        return self


class B(MockHasCreate):
    optional_dependencies = [A]

    def create(self, a=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((a, A)))
        return self


class C(MockHasCreate):
    dependencies = [A, B]

    def create(self, a=A, b=B, **kw):
        self.create_and_update_dependencies(b, a)
        return self


class D(MockHasCreate):
    dependencies = [A]
    optional_dependencies = [B]

    def create(self, a=A, b=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((a, A), (b, B)))
        return self


class E(MockHasCreate):
    dependencies = [D, C]

    def create(self, c=C, d=D, **kw):
        self.create_and_update_dependencies(d, c)
        return self


class F(MockHasCreate):
    dependencies = [B]
    optional_dependencies = [E]

    def create(self, b=B, e=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((b, B), (e, E)))
        return self


class G(MockHasCreate):
    dependencies = [D]
    optional_dependencies = [F, E]

    def create(self, d=D, f=None, e=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((d, D), (f, F), (e, E)))
        return self


class H(MockHasCreate):
    optional_dependencies = [E, A]

    def create(self, a=None, e=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((a, A), (e, E)))
        return self


class MultipleWordClassName(MockHasCreate):
    def create(self, **kw):
        return self


class AnotherMultipleWordClassName(MockHasCreate):
    optional_dependencies = [MultipleWordClassName]

    def create(self, multiple_word_class_name=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((multiple_word_class_name, MultipleWordClassName)))
        return self
def test_dependency_graph_single_page():
    """confirms that `dependency_graph(Base)` will return a dependency graph
    consisting of only dependencies and dependencies of dependencies (if any)
    """
    desired = {}
    desired[G] = set([D])
    desired[D] = set([A])
    desired[A] = set()
    assert has_create.dependency_graph(G) == desired


def test_dependency_graph_page_with_optional():
    """confirms that `dependency_graph(Base, OptionalBase)` will return a dependency
    graph consisting of only dependencies and dependencies of dependencies (if any)
    with the exception that the OptionalBase and its dependencies are included as well.
    """
    desired = {}
    desired[G] = set([D])
    desired[E] = set([D, C])
    desired[C] = set([A, B])
    desired[D] = set([A])
    desired[B] = set()
    desired[A] = set()
    assert has_create.dependency_graph(G, E) == desired


def test_dependency_graph_page_with_additionals():
    """confirms that `dependency_graph(Base, AdditionalBaseOne, AdditionalBaseTwo)`
    will return a dependency graph consisting of only dependencies and dependencies
    of dependencies (if any) with the exception that the AdditionalBases
    are treated as a dependencies of Base (when they aren't) and their dependencies
    are included as well.
    """
    desired = {}
    desired[E] = set([D, C])
    desired[D] = set([A])
    desired[C] = set([A, B])
    desired[F] = set([B])
    desired[G] = set([D])
    desired[A] = set()
    desired[B] = set()
    assert has_create.dependency_graph(E, F, G) == desired


def test_optional_dependency_graph_single_page():
    """confirms that has_create.optional_dependency_graph(Base) returns a complete dependency tree
    including all optional_dependencies
    """
    desired = {}
    desired[H] = set([E, A])
    desired[E] = set([D, C])
    desired[D] = set([A, B])
    desired[C] = set([A, B])
    desired[B] = set([A])
    desired[A] = set()
    assert has_create.optional_dependency_graph(H) == desired


def test_optional_dependency_graph_with_additional():
    """confirms that has_create.optional_dependency_graph(Base) returns a complete dependency tree
    including all optional_dependencies with the AdditionalBases treated as a dependencies
    of Base (when they aren't) and their dependencies and optional_dependencies included as well.
    """
    desired = {}
    desired[F] = set([B, E])
    desired[H] = set([E, A])
    desired[E] = set([D, C])
    desired[D] = set([A, B])
    desired[C] = set([A, B])
    desired[B] = set([A])
    desired[A] = set()
    assert has_create.optional_dependency_graph(F, H, A) == desired
def test_creation_order():
    """confirms that `has_create.creation_order()` returns a valid creation order in the desired list of sets format"""
    # Each set is a "generation": its members depend only on earlier sets.
    dependency_graph = dict(eight=set(['seven', 'six']),
                            seven=set(['five']),
                            six=set(),
                            five=set(['two', 'one']),
                            four=set(['one']),
                            three=set(['two']),
                            two=set(['one']),
                            one=set())
    desired = [set(['one', 'six']),
               set(['two', 'four']),
               set(['three', 'five']),
               set(['seven']),
               set(['eight'])]
    assert has_create.creation_order(dependency_graph) == desired


def test_creation_order_with_loop():
    """confirms that `has_create.creation_order()` raises toposort.CircularDependencyError when evaluating
    a cyclic dependency graph
    """
    # one -> eight -> seven -> five -> one forms the cycle.
    dependency_graph = dict(eight=set(['seven', 'six']),
                            seven=set(['five']),
                            six=set(),
                            five=set(['two', 'one']),
                            four=set(['one']),
                            three=set(['two']),
                            two=set(['one']),
                            one=set(['eight']))
    with pytest.raises(CircularDependencyError):
        assert has_create.creation_order(dependency_graph)
# Second fixture lattice, used by the separate_async_optionals tests below.
class One(MockHasCreate):
    pass


class Two(MockHasCreate):
    dependencies = [One]


class Three(MockHasCreate):
    dependencies = [Two, One]


class Four(MockHasCreate):
    optional_dependencies = [Two]


class Five(MockHasCreate):
    dependencies = [Two]
    optional_dependencies = [One]


class IsntAHasCreate(object):
    # Deliberately NOT a HasCreate, to exercise non-HasCreate dependencies.
    pass


class Six(MockHasCreate, IsntAHasCreate):
    dependencies = [Two]


class Seven(MockHasCreate):
    dependencies = [IsntAHasCreate]
def test_separate_async_optionals_none_exist():
    """confirms that when creation group classes have no async optional dependencies the order is unchanged"""
    order = has_create.creation_order(has_create.optional_dependency_graph(Three, Two, One))
    assert has_create.separate_async_optionals(order) == order


def test_separate_async_optionals_two_exist():
    """confirms that when two creation group classes have async dependencies
    the class that has the shared item as a dependency occurs first in a separate creation group
    """
    order = has_create.creation_order(has_create.optional_dependency_graph(Four, Three, Two))
    assert has_create.separate_async_optionals(order) == [set([One]), set([Two]), set([Three]), set([Four])]


def test_separate_async_optionals_three_exist():
    """confirms that when three creation group classes have async dependencies
    the class that has the shared item as a dependency occurs first in a separate creation group
    """
    order = has_create.creation_order(has_create.optional_dependency_graph(Five, Four, Three))
    assert has_create.separate_async_optionals(order) == [set([One]), set([Two]), set([Three]),
                                                          set([Five]), set([Four])]


def test_separate_async_optionals_not_has_create():
    """confirms that when a dependency isn't a HasCreate has_create.separate_async_optionals doesn't
    unnecessarily move it from the initial creation group
    """
    order = has_create.creation_order(has_create.optional_dependency_graph(Seven, Six))
    assert has_create.separate_async_optionals(order) == [set([One, IsntAHasCreate]), set([Two, Seven]), set([Six])]
def test_page_creation_order_single_page():
    """confirms that `has_create.page_creation_order()` returns a valid creation order"""
    desired = [set([A]), set([D]), set([G])]
    assert has_create.page_creation_order(G) == desired


def test_page_creation_order_optionals_provided():
    """confirms that `has_create.page_creation_order()` returns a valid creation order
    when optional_dependencies are included
    """
    desired = [set([A]), set([B]), set([C]), set([D]), set([E]), set([H])]
    assert has_create.page_creation_order(H, A, E) == desired


def test_page_creation_order_additionals_provided():
    """confirms that `has_create.page_creation_order()` returns a valid creation order
    when additional pages are included
    """
    desired = [set([A]), set([B]), set([D]), set([F, H]), set([G])]
    assert has_create.page_creation_order(F, H, G) == desired
def test_all_instantiated_dependencies_single_page():
    # F pulls in a B; both should be reported.
    f = F().create()
    b = f._dependency_store[B]
    desired = set([b, f])
    assert set(has_create.all_instantiated_dependencies(f, A, B, C, D, E, F, G, H)) == desired


def test_all_instantiated_dependencies_single_page_are_ordered():
    # Dependencies come before their dependents in the returned list.
    f = F().create()
    b = f._dependency_store[B]
    desired = [b, f]
    assert has_create.all_instantiated_dependencies(f, A, B, C, D, E, F, G, H) == desired


def test_all_instantiated_dependencies_optionals():
    a = A().create()
    b = B().create(a=a)
    c = C().create(a=a, b=b)
    d = D().create(a=a, b=b)
    e = E().create(c=c, d=d)
    h = H().create(a=a, e=e)
    desired = set([a, b, c, d, e, h])
    assert set(has_create.all_instantiated_dependencies(h, A, B, C, D, E, F, G, H)) == desired


def test_all_instantiated_dependencies_optionals_are_ordered():
    a = A().create()
    b = B().create(a=a)
    c = C().create(a=a, b=b)
    d = D().create(a=a, b=b)
    e = E().create(c=c, d=d)
    h = H().create(a=a, e=e)
    desired = [a, b, c, d, e, h]
    assert has_create.all_instantiated_dependencies(h, A, B, C, D, E, F, G, H) == desired
def test_dependency_resolution_complete():
    # a=True / e=True ask H to create fresh optional dependencies itself.
    h = H().create(a=True, e=True)
    a = h._dependency_store[A]
    e = h._dependency_store[E]
    c = e._dependency_store[C]
    d = e._dependency_store[D]
    b = c._dependency_store[B]
    for item in (h, a, e, d, c, b):
        if item._dependency_store:
            assert all(item._dependency_store.values()
                       ), "{0} missing dependency: {0._dependency_store}".format(item)
    # Shared dependencies must be single instances, not per-dependent copies.
    assert a == b._dependency_store[A], "Duplicate dependency detected"
    assert a == c._dependency_store[A], "Duplicate dependency detected"
    assert a == d._dependency_store[A], "Duplicate dependency detected"
    assert b == c._dependency_store[B], "Duplicate dependency detected"
    assert b == d._dependency_store[B], "Duplicate dependency detected"


def test_ds_mapping():
    # The `ds` namespace mirrors _dependency_store keyed by snake_case name.
    h = H().create(a=True, e=True)
    a = h._dependency_store[A]
    e = h._dependency_store[E]
    c = e._dependency_store[C]
    d = e._dependency_store[D]
    b = c._dependency_store[B]
    assert a == h.ds.a
    assert e == h.ds.e
    assert c == e.ds.c
    assert d == e.ds.d
    assert b == c.ds.b


def test_ds_multiple_word_class_and_attribute_name():
    # CamelCase class names map to snake_case ds attributes.
    amwcn = AnotherMultipleWordClassName().create(multiple_word_class_name=True)
    mwcn = amwcn._dependency_store[MultipleWordClassName]
    assert amwcn.ds.multiple_word_class_name == mwcn


def test_ds_missing_dependency():
    a = A().create()
    with pytest.raises(AttributeError):
        a.ds.b
def test_teardown_calls_silent_cleanup():
    # Tearing down the root should cascade silent_cleanup to every dependency.
    g = G().create(f=True, e=True)
    f = g._dependency_store[F]
    e = g._dependency_store[E]
    b = f._dependency_store[B]
    d = e._dependency_store[D]
    c = e._dependency_store[C]
    a = c._dependency_store[A]
    instances = [g, f, e, b, d, c, a]
    for instance in instances:
        assert not instance.cleaned
    g.teardown()
    for instance in instances:
        assert instance.cleaned


def test_teardown_dependency_store_cleared():
    # After teardown, every _dependency_store entry is falsy.
    g = G().create(f=True, e=True)
    f = g._dependency_store[F]
    e = g._dependency_store[E]
    b = f._dependency_store[B]
    d = e._dependency_store[D]
    c = e._dependency_store[C]
    a = c._dependency_store[A]
    g.teardown()
    assert not g._dependency_store[F]
    assert not g._dependency_store[E]
    assert not f._dependency_store[B]
    assert not e._dependency_store[D]
    assert not e._dependency_store[C]
    assert not c._dependency_store[A]


def test_idempotent_teardown_dependency_store_cleared():
    # Calling teardown twice on every node must be safe.
    g = G().create(f=True, e=True)
    f = g._dependency_store[F]
    e = g._dependency_store[E]
    b = f._dependency_store[B]
    d = e._dependency_store[D]
    c = e._dependency_store[C]
    a = c._dependency_store[A]
    for item in (g, f, e, b, d, c, a):
        item.teardown()
        item.teardown()
    assert not g._dependency_store[F]
    assert not g._dependency_store[E]
    assert not f._dependency_store[B]
    assert not e._dependency_store[D]
    assert not e._dependency_store[C]
    assert not c._dependency_store[A]


def test_teardown_ds_cleared():
    # The ds namespace raises AttributeError for torn-down dependencies.
    g = G().create(f=True, e=True)
    f = g._dependency_store[F]
    e = g._dependency_store[E]
    b = f._dependency_store[B]
    d = e._dependency_store[D]
    c = e._dependency_store[C]
    a = c._dependency_store[A]
    g.teardown()
    for former_dep in ('f', 'e'):
        with pytest.raises(AttributeError):
            getattr(g.ds, former_dep)
    with pytest.raises(AttributeError):
        getattr(f.ds, 'b')
    for former_dep in ('d', 'c'):
        with pytest.raises(AttributeError):
            getattr(e.ds, former_dep)
    with pytest.raises(AttributeError):
        getattr(c.ds, 'a')
# Fixture classes whose create() records its kwargs, used to verify that
# (Class, kwargs) tuples passed for a dependency are honored.
class OneWithArgs(MockHasCreate):
    def create(self, **kw):
        self.kw = kw
        return self


class TwoWithArgs(MockHasCreate):
    dependencies = [OneWithArgs]

    def create(self, one_with_args=OneWithArgs, **kw):
        # make_one_with_args=True synthesizes a (Class, kwargs) tuple here.
        if not one_with_args and kw.pop('make_one_with_args', False):
            one_with_args = (OneWithArgs, dict(a='a', b='b', c='c'))
        self.create_and_update_dependencies(one_with_args)
        self.kw = kw
        return self


class ThreeWithArgs(MockHasCreate):
    dependencies = [OneWithArgs]
    optional_dependencies = [TwoWithArgs]

    def create(self, one_with_args=OneWithArgs, two_with_args=None, **kw):
        self.create_and_update_dependencies(*filter_by_class((one_with_args, OneWithArgs),
                                                             (two_with_args, TwoWithArgs)))
        self.kw = kw
        return self


class FourWithArgs(MockHasCreate):
    dependencies = [TwoWithArgs, ThreeWithArgs]

    def create(self, two_with_args=TwoWithArgs, three_with_args=ThreeWithArgs, **kw):
        self.create_and_update_dependencies(*filter_by_class((two_with_args, TwoWithArgs),
                                                             (three_with_args, ThreeWithArgs)))
        self.kw = kw
        return self
def test_single_kwargs_class_in_create_and_update_dependencies():
    # The synthesized (OneWithArgs, kwargs) tuple drives the dependency's create().
    two_wa = TwoWithArgs().create(one_with_args=False, make_one_with_args=True, two_with_args_kw_arg=123)
    assert isinstance(two_wa.ds.one_with_args, OneWithArgs)
    assert two_wa.ds.one_with_args.kw == dict(a='a', b='b', c='c')
    assert two_wa.kw == dict(two_with_args_kw_arg=123)


def test_no_tuple_for_class_arg_causes_shared_dependencies_staggered():
    three_wo = ThreeWithArgs().create(two_with_args=True)
    assert isinstance(three_wo.ds.one_with_args, OneWithArgs)
    assert isinstance(three_wo.ds.two_with_args, TwoWithArgs)
    assert isinstance(three_wo.ds.two_with_args.ds.one_with_args, OneWithArgs)
    # Both dependents share the same OneWithArgs instance.
    assert three_wo.ds.one_with_args == three_wo.ds.two_with_args.ds.one_with_args


def test_no_tuple_for_class_arg_causes_shared_dependencies_nested_staggering():
    four_wo = FourWithArgs().create()
    assert isinstance(four_wo.ds.two_with_args, TwoWithArgs)
    assert isinstance(four_wo.ds.three_with_args, ThreeWithArgs)
    assert isinstance(four_wo.ds.two_with_args.ds.one_with_args, OneWithArgs)
    assert isinstance(four_wo.ds.three_with_args.ds.one_with_args, OneWithArgs)
    assert isinstance(four_wo.ds.three_with_args.ds.two_with_args, TwoWithArgs)
    assert four_wo.ds.two_with_args.ds.one_with_args == four_wo.ds.three_with_args.ds.one_with_args
    assert four_wo.ds.two_with_args == four_wo.ds.three_with_args.ds.two_with_args


def test_tuple_for_class_arg_causes_unshared_dependencies_when_downstream():
    """Confirms that provided arg-tuple for dependency type is applied instead of chained dependency"""
    three_wa = ThreeWithArgs().create(two_with_args=(TwoWithArgs, dict(one_with_args=False,
                                                                       make_one_with_args=True,
                                                                       two_with_args_kw_arg=234)),
                                      three_with_args_kw_arg=345)
    assert isinstance(three_wa.ds.one_with_args, OneWithArgs)
    assert isinstance(three_wa.ds.two_with_args, TwoWithArgs)
    assert isinstance(three_wa.ds.two_with_args.ds.one_with_args, OneWithArgs)
    assert three_wa.ds.one_with_args != three_wa.ds.two_with_args.ds.one_with_args
    assert three_wa.ds.one_with_args.kw == dict()
    assert three_wa.ds.two_with_args.kw == dict(two_with_args_kw_arg=234)
    assert three_wa.ds.two_with_args.ds.one_with_args.kw == dict(a='a', b='b', c='c')
    assert three_wa.kw == dict(three_with_args_kw_arg=345)


def test_tuples_for_class_arg_cause_unshared_dependencies_when_downstream():
    """Confirms that provided arg-tuple for dependency type is applied instead of chained dependency"""
    four_wa = FourWithArgs().create(two_with_args=(TwoWithArgs, dict(one_with_args=False,
                                                                     make_one_with_args=True,
                                                                     two_with_args_kw_arg=456)),
                                    # No shared dependencies with four_wa.ds.two_with_args
                                    three_with_args=(ThreeWithArgs, dict(one_with_args=(OneWithArgs, {}),
                                                                         two_with_args=False)),
                                    four_with_args_kw=567)
    assert isinstance(four_wa.ds.two_with_args, TwoWithArgs)
    assert isinstance(four_wa.ds.three_with_args, ThreeWithArgs)
    assert isinstance(four_wa.ds.two_with_args.ds.one_with_args, OneWithArgs)
    assert isinstance(four_wa.ds.three_with_args.ds.one_with_args, OneWithArgs)
    assert four_wa.ds.three_with_args.ds.one_with_args != four_wa.ds.two_with_args.ds.one_with_args
    with pytest.raises(AttributeError):
        four_wa.ds.three_with_args.ds.two_with_args
    assert four_wa.kw == dict(four_with_args_kw=567)
# Fixtures for declaring dependencies via non-HasCreate markers and for
# resolving dynamically-chosen subclasses.
class NotHasCreate(object):
    pass


class MixinUserA(MockHasCreate, NotHasCreate):
    def create(self, **kw):
        return self


class MixinUserB(MockHasCreate, NotHasCreate):
    def create(self, **kw):
        return self


class MixinUserC(MixinUserB):
    def create(self, **kw):
        return self


class MixinUserD(MixinUserC):
    def create(self, **kw):
        return self


class NotHasCreateDependencyHolder(MockHasCreate):
    dependencies = [NotHasCreate]

    def create(self, not_has_create=MixinUserA):
        self.create_and_update_dependencies(not_has_create)
        return self


def test_not_has_create_default_dependency():
    """Confirms that HasCreates that claim non-HasCreates as dependencies claim them by correct kwarg
    class name in _dependency_store
    """
    dep_holder = NotHasCreateDependencyHolder().create()
    assert isinstance(dep_holder.ds.not_has_create, MixinUserA)


def test_not_has_create_passed_dependency():
    """Confirms that passed non-HasCreate subclasses are sourced as dependency"""
    dep = MixinUserB().create()
    assert isinstance(dep, MixinUserB)
    dep_holder = NotHasCreateDependencyHolder().create(not_has_create=dep)
    assert dep_holder.ds.not_has_create == dep


class HasCreateParentDependencyHolder(MockHasCreate):
    dependencies = [MixinUserB]

    def create(self, mixin_user_b=MixinUserC):
        self.create_and_update_dependencies(mixin_user_b)
        return self


def test_has_create_stored_as_parent_dependency():
    """Confirms that HasCreate subclasses are sourced as their parent"""
    dep = MixinUserC().create()
    assert isinstance(dep, MixinUserC)
    assert isinstance(dep, MixinUserB)
    dep_holder = HasCreateParentDependencyHolder().create(mixin_user_b=dep)
    assert dep_holder.ds.mixin_user_b == dep


class DynamicallyDeclaresNotHasCreateDependency(MockHasCreate):
    dependencies = [NotHasCreate]

    def create(self, not_has_create=MixinUserA):
        # Maps a string key to the concrete class to declare at create time.
        dynamic_dependency = dict(mixinusera=MixinUserA,
                                  mixinuserb=MixinUserB,
                                  mixinuserc=MixinUserC)
        self.create_and_update_dependencies(dynamic_dependency[not_has_create])
        return self


@pytest.mark.parametrize('dependency,dependency_class',
                         [('mixinusera', MixinUserA),
                          ('mixinuserb', MixinUserB),
                          ('mixinuserc', MixinUserC)])
def test_subclass_or_parent_dynamic_not_has_create_dependency_declaration(dependency, dependency_class):
    """Confirms that dependencies that dynamically declare dependencies subclassed from not HasCreate
    are properly linked
    """
    dep_holder = DynamicallyDeclaresNotHasCreateDependency().create(dependency)
    assert dep_holder.ds.not_has_create.__class__ == dependency_class


class DynamicallyDeclaresHasCreateDependency(MockHasCreate):
    dependencies = [MixinUserB]

    def create(self, mixin_user_b=MixinUserB):
        dynamic_dependency = dict(mixinuserb=MixinUserB,
                                  mixinuserc=MixinUserC,
                                  mixinuserd=MixinUserD)
        self.create_and_update_dependencies(dynamic_dependency[mixin_user_b])
        return self


@pytest.mark.parametrize('dependency,dependency_class',
                         [('mixinuserb', MixinUserB),
                          ('mixinuserc', MixinUserC),
                          ('mixinuserd', MixinUserD)])
def test_subclass_or_parent_dynamic_has_create_dependency_declaration(dependency, dependency_class):
    """Confirms that dependencies that dynamically declare dependencies subclassed from not HasCreate
    are properly linked
    """
    dep_holder = DynamicallyDeclaresHasCreateDependency().create(dependency)
    assert dep_holder.ds.mixin_user_b.__class__ == dependency_class

View File

@ -0,0 +1,254 @@
import pytest
from awxkit.api.registry import URLRegistry
class One(object):
    """First sentinel class registered against URL patterns in these tests."""


class Two(object):
    """Second sentinel class, distinct from One, for override/mixing cases."""
@pytest.fixture
def reg():
    # Fresh, empty registry per test.
    return URLRegistry()
def test_url_pattern(reg):
    # Patterns are anchored and tolerate a trailing query string.
    desired = r'^/some/resources/\d+/(\?.*)*$'
    assert reg.url_pattern(r'/some/resources/\d+/').pattern == desired


def test_methodless_get_from_empty_registry(reg):
    assert reg.get('nonexistent') is None


def test_method_get_from_empty_registry(reg):
    assert reg.get('nonexistent', 'method') is None


def test_methodless_setdefault_methodless_get(reg):
    # A methodless default applies to any path and any method.
    reg.setdefault(One)
    assert reg.get('some_path') is One


def test_methodless_setdefault_method_get(reg):
    reg.setdefault(One)
    assert reg.get('some_path', 'method') is One


def test_method_setdefault_methodless_get(reg):
    # A method-scoped default does not apply to methodless lookups.
    reg.setdefault('method', One)
    assert reg.get('some_path') is None


def test_method_setdefault_matching_method_get(reg):
    reg.setdefault('method', One)
    assert reg.get('some_path', 'method') is One


def test_method_setdefault_nonmatching_method_get(reg):
    reg.setdefault('method', One)
    assert reg.get('some_path', 'nonexistent') is None


def test_multimethod_setdefault_matching_method_get(reg):
    reg.setdefault(('method_one', 'method_two'), One)
    assert reg.get('some_path', 'method_one') is One
    assert reg.get('some_path', 'method_two') is One


def test_multimethod_setdefault_nonmatching_method_get(reg):
    reg.setdefault(('method_one', 'method_two'), One)
    assert reg.get('some_path') is None
    assert reg.get('some_path', 'nonexistent') is None


def test_wildcard_setdefault_methodless_get(reg):
    reg.setdefault('.*', One)
    assert reg.get('some_path') is One


def test_wildcard_setdefault_method_get(reg):
    reg.setdefault('.*', One)
    assert reg.get('some_path', 'method') is One


def test_regex_method_setdefaults_over_wildcard_method_get(reg):
    # A specific regex default beats the wildcard default; loop guards
    # against ordering randomness in the underlying storage.
    reg.setdefault('.*', One)
    reg.setdefault('reg.*ex', Two)
    for _ in range(1000):
        assert reg.get('some_path', 'regex') is Two


def test_methodless_registration_with_matching_path_methodless_get(reg):
    reg.register('some_path', One)
    assert reg.get('some_path') is One
def test_methodless_registration_with_nonmatching_path_methodless_get(reg):
    """A methodless registration for one path must not match another path.

    (Function renamed to fix the 'registraion' typo; no callers reference
    test functions by name.)
    """
    reg.register('some_path', One)
    assert reg.get('nonexistent') is None
def test_methodless_registration_with_matching_path_nonmatching_method_get(reg):
    # Methodless registrations do not answer method-scoped lookups.
    reg.register('some_path', One)
    assert reg.get('some_path', 'method') is None


def test_method_registration_with_matching_path_matching_method_get(reg):
    reg.register('some_path', 'method', One)
    assert reg.get('some_path', 'method') is One


def test_method_registration_with_matching_path_nonmatching_method_get(reg):
    reg.register('some_path', 'method_one', One)
    assert reg.get('some_path', 'method_two') is None


def test_multimethod_registration_with_matching_path_matching_method_get(reg):
    reg.register('some_path', ('method_one', 'method_two'), One)
    assert reg.get('some_path', 'method_one') is One
    assert reg.get('some_path', 'method_two') is One


def test_multimethod_registration_with_path_matching_method_get(reg):
    reg.register('some_path', ('method_one', 'method_two'), One)
    assert reg.get('some_path', 'method_three') is None


def test_multipath_methodless_registration_with_matching_path_methodless_get(reg):
    reg.register(('some_path_one', 'some_path_two'), One)
    assert reg.get('some_path_one') is One
    assert reg.get('some_path_two') is One


def test_multipath_methodless_registration_with_matching_path_nonmatching_method_get(reg):
    reg.register(('some_path_one', 'some_path_two'), One)
    assert reg.get('some_path_one', 'method') is None
    assert reg.get('some_path_two', 'method') is None


def test_multipath_method_registration_with_matching_path_matching_method_get(reg):
    reg.register((('some_path_one', 'method_one'), ('some_path_two', 'method_two')), One)
    assert reg.get('some_path_one', 'method_one') is One
    assert reg.get('some_path_two', 'method_two') is One


def test_multipath_partial_method_registration_with_matching_path_matching_method_get(reg):
    # Bare paths and (path, method) pairs may be mixed in one registration.
    reg.register(('some_path_one', ('some_path_two', 'method')), One)
    assert reg.get('some_path_one') is One
    assert reg.get('some_path_two', 'method') is One


def test_wildcard_method_registration_with_methodless_get(reg):
    reg.register('some_path', '.*', One)
    assert reg.get('some_path') is One


def test_wildcard_method_registration_with_method_get(reg):
    reg.register('some_path', '.*', One)
    assert reg.get('some_path', 'method') is One


def test_wildcard_and_specific_method_registration_acts_as_default(reg):
    # The '.*' registration is the fallback; exact methods still win.
    reg.register('some_path', 'method_one', Two)
    reg.register('some_path', '.*', One)
    reg.register('some_path', 'method_two', Two)
    for _ in range(1000):  # eliminate overt randomness
        assert reg.get('some_path', 'nonexistent') is One
        assert reg.get('some_path', 'method_one') is Two
        assert reg.get('some_path', 'method_two') is Two


@pytest.mark.parametrize('method', ('method', '.*'))
def test_multiple_method_registrations_disallowed_for_single_path_single_registration(reg, method):
    with pytest.raises(TypeError) as e:
        reg.register((('some_path', method), ('some_path', method)), One)
    assert str(e.value) == ('"{0.pattern}" already has registered method "{1}"'
                            .format(reg.url_pattern('some_path'), method))


@pytest.mark.parametrize('method', ('method', '.*'))
def test_multiple_method_registrations_disallowed_for_single_path_multiple_registrations(reg, method):
    reg.register('some_path', method, One)
    with pytest.raises(TypeError) as e:
        reg.register('some_path', method, One)
    assert str(e.value) == ('"{0.pattern}" already has registered method "{1}"'
                            .format(reg.url_pattern('some_path'), method))
def test_paths_can_be_patterns(reg):
reg.register('.*pattern.*', One)
assert reg.get('XYZpattern123') is One
def test_mixed_form_single_registration(reg):
reg.register([('some_path_one', 'method_one'),
'some_path_two',
('some_path_three', ('method_two', 'method_three')),
'some_path_four', 'some_path_five'], One)
assert reg.get('some_path_one', 'method_one') is One
assert reg.get('some_path_one') is None
assert reg.get('some_path_one', 'nonexistent') is None
assert reg.get('some_path_two') is One
assert reg.get('some_path_two', 'nonexistent') is None
assert reg.get('some_path_three', 'method_two') is One
assert reg.get('some_path_three', 'method_three') is One
assert reg.get('some_path_three') is None
assert reg.get('some_path_three', 'nonexistent') is None
assert reg.get('some_path_four') is One
assert reg.get('some_path_four', 'nonexistent') is None
assert reg.get('some_path_five') is One
assert reg.get('some_path_five', 'nonexistent') is None
def test_mixed_form_single_registration_with_methodless_default(reg):
reg.setdefault(One)
reg.register([('some_path_one', 'method_one'),
'some_path_two',
('some_path_three', ('method_two', 'method_three')),
'some_path_four', 'some_path_five'], Two)
assert reg.get('some_path_one', 'method_one') is Two
assert reg.get('some_path_one') is One
assert reg.get('some_path_one', 'nonexistent') is One
assert reg.get('some_path_two') is Two
assert reg.get('some_path_two', 'nonexistent') is One
assert reg.get('some_path_three', 'method_two') is Two
assert reg.get('some_path_three', 'method_three') is Two
assert reg.get('some_path_three') is One
assert reg.get('some_path_three', 'nonexistent') is One
assert reg.get('some_path_four') is Two
assert reg.get('some_path_four', 'nonexistent') is One
assert reg.get('some_path_five') is Two
assert reg.get('some_path_five', 'nonexistent') is One
def test_mixed_form_single_registration_with_method_default(reg):
    """A method-scoped default only answers lookups for that method name."""
    reg.setdefault('existent', One)
    reg.register([('some_path_one', 'method_one'),
                  'some_path_two',
                  ('some_path_three', ('method_two', 'method_three')),
                  'some_path_four', 'some_path_five'], Two)
    all_paths = ('some_path_one', 'some_path_two', 'some_path_three',
                 'some_path_four', 'some_path_five')
    # Explicit registrations win over the default.
    assert reg.get('some_path_one', 'method_one') is Two
    assert reg.get('some_path_three', 'method_two') is Two
    assert reg.get('some_path_three', 'method_three') is Two
    for bare in ('some_path_two', 'some_path_four', 'some_path_five'):
        assert reg.get(bare) is Two
    # The default covers exactly its own method name on any path...
    for path in all_paths:
        assert reg.get(path, 'existent') is One
    # ...so methodless lookups and unknown methods still miss entirely.
    for missing in ('some_path_one', 'some_path_three'):
        assert reg.get(missing) is None
    for path in all_paths:
        assert reg.get(path, 'nonexistent') is None

64
awxkit/test/test_rrule.py Normal file
View File

@ -0,0 +1,64 @@
from dateutil.relativedelta import relativedelta
from dateutil import rrule
from datetime import datetime
import pytest
from awxkit.rrule import RRule
from awxkit.utils import to_ical
@pytest.mark.parametrize('frequency,expected_rrule',
                         [('YEARLY', 'RRULE:FREQ=YEARLY;INTERVAL=1;WKST=MO;BYMONTH={0.month};'
                           'BYMONTHDAY={0.day};BYHOUR={0.hour};BYMINUTE={0.minute};BYSECOND={0.second}'),
                          ('MONTHLY', 'RRULE:FREQ=MONTHLY;INTERVAL=1;WKST=MO;BYMONTHDAY={0.day};BYHOUR={0.hour};'
                           'BYMINUTE={0.minute};BYSECOND={0.second}'),
                          ('WEEKLY', 'RRULE:FREQ=WEEKLY;INTERVAL=1;WKST=MO;BYWEEKDAY={1};BYHOUR={0.hour};'
                           'BYMINUTE={0.minute};BYSECOND={0.second}'),
                          ('DAILY', 'RRULE:FREQ=DAILY;INTERVAL=1;WKST=MO;BYHOUR={0.hour};'
                           'BYMINUTE={0.minute};BYSECOND={0.second}'),
                          ('HOURLY', 'RRULE:FREQ=HOURLY;INTERVAL=1;WKST=MO;BYMINUTE={0.minute};BYSECOND={0.second}'),
                          ('MINUTELY', 'RRULE:FREQ=MINUTELY;INTERVAL=1;WKST=MO;BYSECOND={0.second}'),
                          ('SECONDLY', 'RRULE:FREQ=SECONDLY;INTERVAL=1;WKST=MO')],
                         ids=('yearly', 'monthly', 'weekly', 'daily', 'hourly', 'minutely', 'secondly'))
def test_string_frequency(frequency, expected_rrule):
    """Each supported string frequency serializes to its expected RRULE form."""
    start = datetime.utcnow()
    generated = str(RRule(freq=getattr(rrule, frequency), dtstart=start))
    # iCal weekday abbreviation for the start date, for the WEEKLY case.
    weekdays = ('MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU')
    expected = 'DTSTART:{0} {1}'.format(
        to_ical(start), expected_rrule.format(start, weekdays[start.weekday()]))
    assert generated == expected
@pytest.mark.parametrize('frequency,expected_rrule',
                         [(0, 'RRULE:FREQ=YEARLY;INTERVAL=1;WKST=MO;BYMONTH={0.month};'
                           'BYMONTHDAY={0.day};BYHOUR={0.hour};BYMINUTE={0.minute};BYSECOND={0.second}'),
                          (1, 'RRULE:FREQ=MONTHLY;INTERVAL=1;WKST=MO;BYMONTHDAY={0.day};BYHOUR={0.hour};'
                           'BYMINUTE={0.minute};BYSECOND={0.second}'),
                          (2, 'RRULE:FREQ=WEEKLY;INTERVAL=1;WKST=MO;BYWEEKDAY={1};BYHOUR={0.hour};'
                           'BYMINUTE={0.minute};BYSECOND={0.second}'),
                          (3, 'RRULE:FREQ=DAILY;INTERVAL=1;WKST=MO;BYHOUR={0.hour};'
                           'BYMINUTE={0.minute};BYSECOND={0.second}'),
                          (4, 'RRULE:FREQ=HOURLY;INTERVAL=1;WKST=MO;BYMINUTE={0.minute};BYSECOND={0.second}'),
                          (5, 'RRULE:FREQ=MINUTELY;INTERVAL=1;WKST=MO;BYSECOND={0.second}'),
                          (6, 'RRULE:FREQ=SECONDLY;INTERVAL=1;WKST=MO')],
                         ids=('0-yearly', '1-monthly', '2-weekly', '3-daily', '4-hourly', '5-minutely', '6-secondly'))
def test_int_frequency(frequency, expected_rrule):
    """Integer frequency constants (0-6, dateutil order) serialize identically
    to their string counterparts."""
    start = datetime.utcnow()
    generated = str(RRule(freq=frequency, dtstart=start))
    # iCal weekday abbreviation for the start date, for the WEEKLY case.
    weekdays = ('MO', 'TU', 'WE', 'TH', 'FR', 'SA', 'SU')
    expected = 'DTSTART:{0} {1}'.format(
        to_ical(start), expected_rrule.format(start, weekdays[start.weekday()]))
    assert generated == expected
def test_count():
    """A count argument appears as COUNT=<n> in the serialized rule."""
    start = datetime.utcnow()
    rule = RRule(freq=rrule.YEARLY, dtstart=start, count=10)
    template = ('RRULE:FREQ=YEARLY;INTERVAL=1;WKST=MO;COUNT=10;BYMONTH={0.month};'
                'BYMONTHDAY={0.day};BYHOUR={0.hour};BYMINUTE={0.minute};BYSECOND={0.second}')
    assert str(rule) == 'DTSTART:{0} {1}'.format(to_ical(start), template.format(start))
def test_until():
    """An until argument appears as UNTIL=<ical datetime> in the rule."""
    start = datetime.utcnow()
    end = start + relativedelta(years=100)
    rule = RRule(freq=rrule.YEARLY, dtstart=start, until=end)
    template = ('RRULE:FREQ=YEARLY;INTERVAL=1;WKST=MO;UNTIL={1};BYMONTH={0.month};'
                'BYMONTHDAY={0.day};BYHOUR={0.hour};BYMINUTE={0.minute};BYSECOND={0.second}')
    assert str(rule) == 'DTSTART:{0} {1}'.format(to_ical(start), template.format(start, to_ical(end)))

400
awxkit/test/test_utils.py Normal file
View File

@ -0,0 +1,400 @@
# -*- coding: utf-8 -*-
from datetime import datetime
from unittest import mock
import pytest
from awxkit import utils
from awxkit import exceptions as exc
@pytest.mark.parametrize('inp, out',
                         [[True, True],
                          [False, False],
                          [1, True],
                          [0, False],
                          [1.0, True],
                          [0.0, False],
                          ['TrUe', True],
                          ['FalSe', False],
                          ['yEs', True],
                          ['No', False],
                          ['oN', True],
                          ['oFf', False],
                          ['asdf', True],
                          ['0', False],
                          ['', False],
                          [{1: 1}, True],
                          [{}, False],
                          [(0,), True],
                          [(), False],
                          [[1], True],
                          [[], False]])
def test_to_bool(inp, out):
    """to_bool accepts case-insensitive boolean words, falling back to Python
    truthiness for everything else."""
    result = utils.to_bool(inp)
    assert result == out
@pytest.mark.parametrize('inp, out',
                         [["{}", {}],
                          ["{'null': null}", {"null": None}],
                          ["{'bool': true}", {"bool": True}],
                          ["{'bool': false}", {"bool": False}],
                          ["{'int': 0}", {"int": 0}],
                          ["{'float': 1.0}", {"float": 1.0}],
                          ["{'str': 'abc'}", {"str": "abc"}],
                          ["{'obj': {}}", {"obj": {}}],
                          ["{'list': []}", {"list": []}],
                          ["---", None],
                          ["---\n'null': null", {'null': None}],
                          ["---\n'bool': true", {'bool': True}],
                          ["---\n'bool': false", {'bool': False}],
                          ["---\n'int': 0", {'int': 0}],
                          ["---\n'float': 1.0", {'float': 1.0}],
                          ["---\n'string': 'abc'", {'string': 'abc'}],
                          ["---\n'obj': {}", {'obj': {}}],
                          ["---\n'list': []", {'list': []}],
                          ["", None],
                          ["'null': null", {'null': None}],
                          ["'bool': true", {'bool': True}],
                          ["'bool': false", {'bool': False}],
                          ["'int': 0", {'int': 0}],
                          ["'float': 1.0", {'float': 1.0}],
                          ["'string': 'abc'", {'string': 'abc'}],
                          ["'obj': {}", {'obj': {}}],
                          ["'list': []", {'list': []}]])
def test_load_valid_json_or_yaml(inp, out):
    """JSON-ish strings, document-marker YAML, and bare YAML all parse to the
    same native Python values."""
    parsed = utils.load_json_or_yaml(inp)
    assert parsed == out
@pytest.mark.parametrize('inp', [True, False, 0, 1.0, {}, [], None])
def test_load_invalid_json_or_yaml(inp):
    """Non-string input is rejected with a TypeError rather than parsed."""
    with pytest.raises(TypeError):
        utils.load_json_or_yaml(inp)
@pytest.mark.parametrize('non_ascii', [True, False])
def test_random_titles_are_unicode(non_ascii):
    """random_title always returns a text (str) object, ascii or not."""
    title = utils.random_title(non_ascii=non_ascii)
    assert isinstance(title, str)
@pytest.mark.parametrize('non_ascii', [True, False])
def test_random_titles_generates_correct_characters(non_ascii):
    """Non-ascii titles must contain at least one character outside ascii;
    plain titles must be pure ascii. Either way the title is valid utf-8."""
    title = utils.random_title(non_ascii=non_ascii)
    if non_ascii:
        with pytest.raises(UnicodeEncodeError):
            title.encode('ascii')
    else:
        title.encode('ascii')
    # Both flavors must always round-trip through utf-8.
    title.encode('utf-8')
@pytest.mark.parametrize('inp, out',
                         [['ClassNameShouldChange', 'class_name_should_change'],
                          ['classnameshouldntchange', 'classnameshouldntchange'],
                          ['Classspacingshouldntchange', 'classspacingshouldntchange'],
                          ['Class1Name2Should3Change', 'class_1_name_2_should_3_change'],
                          ['Class123name234should345change456', 'class_123_name_234_should_345_change_456']])
def test_class_name_to_kw_arg(inp, out):
    """CamelCase class names convert to snake_case keyword-argument names,
    with digit runs treated as word boundaries."""
    converted = utils.class_name_to_kw_arg(inp)
    assert converted == out
@pytest.mark.parametrize('first, second, expected',
                         [['/api/v2/resources/', '/api/v2/resources/', True],
                          ['/api/v2/resources/', '/api/v2/resources/?test=ignored', True],
                          ['/api/v2/resources/?one=ignored', '/api/v2/resources/?two=ignored', True],
                          ['http://one.com', 'http://one.com', True],
                          ['http://one.com', 'http://www.one.com', True],
                          ['http://one.com', 'http://one.com?test=ignored', True],
                          ['http://one.com', 'http://www.one.com?test=ignored', True],
                          ['http://one.com', 'https://one.com', False],
                          ['http://one.com', 'https://one.com?test=ignored', False]])
def test_are_same_endpoint(first, second, expected):
    """Endpoint equality ignores query strings and a www. prefix, but the
    scheme (http vs https) must match."""
    result = utils.are_same_endpoint(first, second)
    assert result == expected
@pytest.mark.parametrize('endpoint, expected',
                         [['/api/v2/resources/', 'v2'],
                          ['/api/v2000/resources/', 'v2000'],
                          ['/api/', 'common']])
def test_version_from_endpoint(endpoint, expected):
    """The API version segment is extracted; a bare /api/ maps to 'common'."""
    version = utils.version_from_endpoint(endpoint)
    assert version == expected
class OneClass:
    # Stand-in class used as a filter_by_class target in the tests below.
    pass
class TwoClass:
    # Stand-in class used as a filter_by_class target in the tests below.
    pass
class ThreeClass:
    # Stand-in base class; FourClass subclasses it to exercise subclass handling.
    pass
class FourClass(ThreeClass):
    # Subclass of ThreeClass used to verify issubclass/isinstance acceptance.
    pass
def test_filter_by_class_with_subclass_class():
    """A subclass passed as the value is accepted in place of its base class."""
    result = utils.filter_by_class((OneClass, OneClass), (FourClass, ThreeClass))
    assert result == [OneClass, FourClass]
def test_filter_by_class_with_subclass_instance():
    """Instances pass through untouched, including subclass instances."""
    one_instance = OneClass()
    four_instance = FourClass()
    result = utils.filter_by_class((one_instance, OneClass), (four_instance, ThreeClass))
    assert result == [one_instance, four_instance]
def test_filter_by_class_no_arg_tuples():
    """True resolves to the filter class, False to None; instances pass through."""
    three_instance = ThreeClass()
    result = utils.filter_by_class((True, OneClass), (False, TwoClass), (three_instance, ThreeClass))
    assert result == [OneClass, None, three_instance]
def test_filter_by_class_with_arg_tuples_containing_class():
    """A (class, kwargs) argument tuple is passed through unchanged."""
    one_instance = OneClass()
    three_spec = (ThreeClass, dict(one=1, two=2))
    result = utils.filter_by_class((one_instance, OneClass), (False, TwoClass), (three_spec, ThreeClass))
    assert result == [one_instance, None, three_spec]
def test_filter_by_class_with_arg_tuples_containing_subclass():
    """A (subclass, kwargs) argument tuple is also passed through unchanged."""
    one_instance = OneClass()
    three_spec = (FourClass, dict(one=1, two=2))
    result = utils.filter_by_class((one_instance, OneClass), (False, TwoClass), (three_spec, ThreeClass))
    assert result == [one_instance, None, three_spec]
@pytest.mark.parametrize('truthy', (True, 123, 'yes'))
def test_filter_by_class_with_arg_tuples_containing_truthy(truthy):
    """A truthy non-class first element is normalized to the filter class."""
    one_instance = OneClass()
    three_spec = (truthy, dict(one=1, two=2))
    result = utils.filter_by_class((one_instance, OneClass), (False, TwoClass), (three_spec, ThreeClass))
    assert result == [one_instance, None, (ThreeClass, dict(one=1, two=2))]
@pytest.mark.parametrize('date_string,now,expected', [
    ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 2, 750000), 1.25),
    ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 1, 500000), 0.00),
    ('2017-12-20T00:00:01.5Z', datetime(2017, 12, 20, 0, 0, 0, 500000), -1.00),
])
def test_seconds_since_date_string(date_string, now, expected):
    """Elapsed seconds are computed against a mocked utcnow and may be
    negative when the date string lies in the future."""
    with mock.patch('awxkit.utils.utcnow', return_value=now):
        elapsed = utils.seconds_since_date_string(date_string)
    assert elapsed == expected
class RecordingCallback(object):
    """Callable that counts its invocations and returns a fixed value."""

    def __init__(self, value=True):
        # Number of times the instance has been called so far.
        self.call_count = 0
        # Value handed back on every call.
        self.value = value

    def __call__(self):
        self.call_count += 1
        return self.value
def test_suppress():
    # The callback tracks whether any statement after a raise executed; its
    # count must stay 0 because suppress() exits the block at the first
    # (suppressed) exception -- the remaining statements are deliberately
    # unreachable and document that ordering.
    callback = RecordingCallback()
    with utils.suppress(ZeroDivisionError, IndexError):
        raise ZeroDivisionError
        # Unreachable from here down: suppress() swallowed the exception above.
        callback()
        raise IndexError
        raise KeyError
    assert callback.call_count == 0
    with utils.suppress(ZeroDivisionError, IndexError):
        raise IndexError
        # Unreachable: same guarantee for the other suppressed type.
        callback()
        raise ZeroDivisionError
        raise KeyError
    assert callback.call_count == 0
    # An exception type not passed to suppress() must propagate immediately.
    with pytest.raises(KeyError):
        with utils.suppress(ZeroDivisionError, IndexError):
            raise KeyError
            # Unreachable: the KeyError escaped the suppress block.
            callback()
            raise ZeroDivisionError
            raise IndexError
    assert callback.call_count == 0
class TestPollUntil(object):
    """Tests for utils.poll_until timeout and return-value semantics."""

    @pytest.mark.parametrize('timeout', [0, 0.0, -0.5, -1, -9999999])
    def test_callback_called_once_for_non_positive_timeout(self, timeout):
        """A non-positive timeout still invokes the callback exactly once
        and never sleeps between attempts."""
        with mock.patch('awxkit.utils.logged_sleep') as sleep:
            callback = RecordingCallback()
            utils.poll_until(callback, timeout=timeout)
        assert not sleep.called
        assert callback.call_count == 1

    def test_exc_raised_on_timeout(self):
        """An always-falsey callback times out with WaitUntilTimeout."""
        with mock.patch('awxkit.utils.logged_sleep'):
            with pytest.raises(exc.WaitUntilTimeout):
                utils.poll_until(lambda: False, timeout=0)

    @pytest.mark.parametrize('callback_value', [{'hello': 1}, 'foo', True])
    def test_non_falsey_callback_value_is_returned(self, callback_value):
        """poll_until hands back whatever truthy value the callback produced."""
        with mock.patch('awxkit.utils.logged_sleep'):
            result = utils.poll_until(lambda: callback_value)
        assert result == callback_value
class TestPseudoNamespace(object):
    """Tests for utils.PseudoNamespace: attribute/item access equivalence and
    recursive casting of nested dicts inside dicts, lists, and tuples."""

    def test_set_item_check_item(self):
        ns = utils.PseudoNamespace()
        ns['key'] = 'value'
        assert ns['key'] == 'value'

    def test_set_item_check_attr(self):
        ns = utils.PseudoNamespace()
        ns['key'] = 'value'
        assert ns.key == 'value'

    def test_set_attr_check_item(self):
        ns = utils.PseudoNamespace()
        ns.key = 'value'
        assert ns['key'] == 'value'

    def test_set_attr_check_attr(self):
        ns = utils.PseudoNamespace()
        ns.key = 'value'
        assert ns.key == 'value'

    def test_auto_dicts_cast(self):
        """Assigned plain dicts become namespaces supporting attr access."""
        ns = utils.PseudoNamespace()
        ns.one = dict()
        ns.one.two = dict(three=3)
        assert ns.one.two.three == 3
        assert ns == dict(one=dict(two=dict(three=3)))

    def test_auto_list_of_dicts_cast(self):
        ns = utils.PseudoNamespace()
        ns.one = [dict(two=2), dict(three=3)]
        assert ns.one[0].two == 2
        assert ns == dict(one=[dict(two=2), dict(three=3)])

    def test_auto_tuple_of_dicts_cast(self):
        ns = utils.PseudoNamespace()
        ns.one = (dict(two=2), dict(three=3))
        assert ns.one[0].two == 2
        assert ns == dict(one=(dict(two=2), dict(three=3)))

    def test_instantiation_via_dict(self):
        ns = utils.PseudoNamespace(dict(one=1, two=2, three=3))
        assert ns.one == 1
        assert ns == dict(one=1, two=2, three=3)
        assert len(ns.keys()) == 3

    def test_instantiation_via_kwargs(self):
        ns = utils.PseudoNamespace(one=1, two=2, three=3)
        assert ns.one == 1
        assert ns == dict(one=1, two=2, three=3)
        assert len(ns.keys()) == 3

    def test_instantiation_via_dict_and_kwargs(self):
        ns = utils.PseudoNamespace(dict(one=1, two=2, three=3), four=4, five=5)
        assert ns.one == 1
        assert ns.four == 4
        assert ns == dict(one=1, two=2, three=3, four=4, five=5)
        assert len(ns.keys()) == 5

    def test_instantiation_via_nested_dict(self):
        ns = utils.PseudoNamespace(dict(one=1, two=2), three=dict(four=4, five=dict(six=6)))
        assert ns.one == 1
        assert ns.three.four == 4
        assert ns.three.five.six == 6
        assert ns == dict(one=1, two=2, three=dict(four=4, five=dict(six=6)))

    def test_instantiation_via_nested_dict_with_list(self):
        ns = utils.PseudoNamespace(dict(one=[dict(two=2), dict(three=3)]))
        assert ns.one[0].two == 2
        assert ns.one[1].three == 3
        assert ns == dict(one=[dict(two=2), dict(three=3)])

    def test_instantiation_via_nested_dict_with_lists(self):
        source = dict(one=[dict(two=2),
                           dict(three=dict(four=4,
                                           five=[dict(six=6),
                                                 dict(seven=7)]))])
        ns = utils.PseudoNamespace(source)
        assert ns.one[1].three.five[1].seven == 7

    def test_instantiation_via_nested_dict_with_tuple(self):
        ns = utils.PseudoNamespace(dict(one=(dict(two=2), dict(three=3))))
        assert ns.one[0].two == 2
        assert ns.one[1].three == 3
        assert ns == dict(one=(dict(two=2), dict(three=3)))

    def test_instantiation_via_nested_dict_with_tuples(self):
        source = dict(one=(dict(two=2),
                           dict(three=dict(four=4,
                                           five=(dict(six=6),
                                                 dict(seven=7))))))
        ns = utils.PseudoNamespace(source)
        assert ns.one[1].three.five[1].seven == 7

    def test_update_with_nested_dict(self):
        ns = utils.PseudoNamespace()
        ns.update(dict(one=1, two=2, three=3), four=4, five=5)
        assert ns.one == 1
        assert ns.four == 4
        assert ns == dict(one=1, two=2, three=3, four=4, five=5)
        assert len(ns.keys()) == 5

    def test_update_with_nested_dict_with_lists(self):
        ns = utils.PseudoNamespace()
        source = dict(one=[dict(two=2),
                           dict(three=dict(four=4,
                                           five=[dict(six=6),
                                                 dict(seven=7)]))])
        ns.update(source)
        assert ns.one[1].three.five[1].seven == 7

    def test_update_with_nested_dict_with_tuples(self):
        ns = utils.PseudoNamespace()
        source = dict(one=(dict(two=2),
                           dict(three=dict(four=4,
                                           five=(dict(six=6),
                                                 dict(seven=7))))))
        ns.update(source)
        assert ns.one[1].three.five[1].seven == 7
class TestUpdatePayload(object):
    """Tests for utils.update_payload field-copy semantics."""

    def test_empty_payload(self):
        """Fields present in kwargs are copied into an empty payload."""
        fields = ('one', 'two', 'three', 'four')
        source = dict(two=2, four=4)
        payload = {}
        utils.update_payload(payload, fields, source)
        assert payload == source

    def test_untouched_payload(self):
        """Fields absent from kwargs leave the payload as-is."""
        fields = ('not', 'in', 'kwargs')
        source = dict(one=1, two=2)
        payload = dict(three=3, four=4)
        utils.update_payload(payload, fields, source)
        assert payload == dict(three=3, four=4)

    def test_overwritten_payload(self):
        """kwargs values overwrite pre-existing payload entries."""
        fields = ('one', 'two')
        source = dict(one=1, two=2)
        payload = dict(one='one', two='two')
        utils.update_payload(payload, fields, source)
        assert payload == source

    def test_falsy_kwargs(self):
        """Falsy values (False, 0, '', empty containers, None) are still copied."""
        fields = ('one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight')
        source = dict(one=False, two=(), three='', four=None, five=0, six={}, seven=set(), eight=[])
        payload = {}
        utils.update_payload(payload, fields, source)
        assert payload == source

    def test_not_provided_strips_payload(self):
        """The not_provided sentinel removes that field from the payload."""
        fields = ('one', 'two')
        source = dict(one=utils.not_provided)
        payload = dict(one=1, two=2)
        utils.update_payload(payload, fields, source)
        assert payload == dict(two=2)
def test_to_ical():
    """to_ical renders a datetime as YYYYMMDDTHHMMSSZ with no separators."""
    now = datetime.utcnow()
    # Build the expectation with strftime instead of string surgery on
    # str(date())/str(time()); the two forms are equivalent.
    expected = '{}T{}Z'.format(now.strftime('%Y%m%d'), now.strftime('%H%M%S'))
    assert utils.to_ical(now) == expected

32
awxkit/test/test_ws.py Normal file
View File

@ -0,0 +1,32 @@
# -*- coding: utf-8 -*-
from collections import namedtuple
from unittest.mock import patch
import pytest
from awxkit.ws import WSClient
ParseResult = namedtuple("ParseResult", ["port", "hostname", "secure"])
def test_explicit_hostname():
    """Explicitly provided connection details are stored verbatim on the client."""
    client = WSClient("token", "some-hostname", 556, False)
    assert client.port == 556
    assert client.hostname == "some-hostname"
    # `is False` rather than `== False`: asserts the attribute is an actual
    # bool, not merely a falsy value (fixes flake8 E712 / PEP 8 idiom).
    assert client._use_ssl is False
    assert client.token == "token"
@pytest.mark.parametrize('url, result',
                         [['https://somename:123', ParseResult(123, "somename", True)],
                          ['http://othername:456', ParseResult(456, "othername", False)],
                          ['http://othername', ParseResult(80, "othername", False)],
                          ['https://othername', ParseResult(443, "othername", True)],
                          ])
def test_urlparsing(url, result):
    """Host, port, and SSL flag are derived from config.base_url, with the
    scheme default ports (80 for http, 443 for https) when none is given."""
    with patch("awxkit.ws.config") as mock_config:
        mock_config.base_url = url
        client = WSClient("token")
    assert client.port == result.port
    assert client.hostname == result.hostname
    assert client._use_ssl == result.secure

Some files were not shown because too many files have changed in this diff Show More