Merge pull request #6125 from jbradberry/import-export
Import export
Reviewed-by: Jeff Bradberry <https://github.com/jbradberry>

commit a0413b7e0b
@@ -1,6 +1,47 @@
import itertools
import logging

from awxkit.api.resources import resources
import awxkit.exceptions as exc
from . import base
from . import page
from .. import utils
from ..mixins import has_create

log = logging.getLogger(__name__)


EXPORTABLE_RESOURCES = [
    'users',
    'organizations',
    'teams',
    'credential_types',
    'credentials',
    'notification_templates',
    'projects',
    'inventory',
    'inventory_sources',
    'job_templates',
    'workflow_job_templates',
]


EXPORTABLE_RELATIONS = [
    'Roles',
    'NotificationTemplates',
    'WorkflowJobTemplateNodes',
    'Credentials',
]


EXPORTABLE_DEPENDENT_OBJECTS = [
    'Labels',
    'SurveySpec',
    'Schedules',
    # WFJT Nodes are a special case, we want full data for the create
    # view and natural keys for the attach views.
    'WorkflowJobTemplateNodes',
]


class Api(base.Base):
@@ -13,7 +54,269 @@ page.register_page(resources.api, Api)


class ApiV2(base.Base):

-    pass
+    # Export methods

    def _export(self, _page, post_fields):
        # Drop any (credential_type) assets that are being managed by the Tower instance.
        if _page.json.get('managed_by_tower'):
            log.debug("%s is managed by Tower, skipping.", _page.endpoint)
            return None
        if post_fields is None:  # Deprecated endpoint or insufficient permissions
            log.error("Object export failed: %s", _page.endpoint)
            return None

        # Note: doing _page[key] automatically parses json blob strings, which can be a problem.
        fields = {
            key: _page.json[key] for key in post_fields
            if key in _page.json and key not in _page.related and key != 'id'
        }

        for key in post_fields:
            if key not in _page.related:
                continue

            rel_endpoint = self._cache.get_page(_page.related[key])
            if rel_endpoint is None:  # This foreign key is unreadable
                if post_fields[key].get('required'):
                    log.error("Foreign key %r export failed for object %s.", key, _page.endpoint)
                    return None
                log.warning("Foreign key %r export failed for object %s, setting to null.", key, _page.endpoint)
                continue
            rel_natural_key = rel_endpoint.get_natural_key(self._cache)
            if rel_natural_key is None:
                log.error("Unable to construct a natural key for foreign key %r of object %s.",
                          key, _page.endpoint)
                return None  # This foreign key has unresolvable dependencies
            fields[key] = rel_natural_key

        related = {}
        for key, rel_endpoint in _page.related.items():
            if key in post_fields or not rel_endpoint:
                continue

            rel = rel_endpoint._create()
            is_relation = rel.__class__.__name__ in EXPORTABLE_RELATIONS
            is_dependent = rel.__class__.__name__ in EXPORTABLE_DEPENDENT_OBJECTS
            if not (is_relation or is_dependent):
                continue

            rel_post_fields = utils.get_post_fields(rel_endpoint, self._cache)
            if rel_post_fields is None:
                log.debug("%s is a read-only endpoint.", rel_endpoint)
                continue
            is_attach = 'id' in rel_post_fields  # This is not a create-only endpoint.

            if is_relation and is_attach:
                by_natural_key = True
            elif is_dependent:
                by_natural_key = False
            else:
                continue

            rel_page = self._cache.get_page(rel_endpoint)
            if rel_page is None:
                continue

            if 'results' in rel_page:
                results = (
                    x.get_natural_key(self._cache) if by_natural_key else self._export(x, rel_post_fields)
                    for x in rel_page.results
                )
                related[key] = [x for x in results if x is not None]
            else:
                related[key] = rel_page.json

        if related:
            fields['related'] = related

        natural_key = _page.get_natural_key(self._cache)
        if natural_key is None:
            log.error("Unable to construct a natural key for object %s.", _page.endpoint)
            return None
        fields['natural_key'] = natural_key

        return utils.remove_encrypted(fields)

    def _export_list(self, endpoint):
        post_fields = utils.get_post_fields(endpoint, self._cache)
        if post_fields is None:
            return None

        if isinstance(endpoint, page.TentativePage):
            endpoint = self._cache.get_page(endpoint)
            if endpoint is None:
                return None

        assets = (self._export(asset, post_fields) for asset in endpoint.results)
        return [asset for asset in assets if asset is not None]

    def _filtered_list(self, endpoint, value):
        if isinstance(value, int) or value.isdecimal():
            return endpoint.get(id=int(value))
        options = self._cache.get_options(endpoint)
        identifier = next(field for field in options['search_fields']
                          if field in ('name', 'username', 'hostname'))
        return endpoint.get(**{identifier: value})

    def export_assets(self, **kwargs):
        self._cache = page.PageCache()

        # If no resource kwargs are explicitly used, export everything.
        all_resources = all(kwargs.get(resource) is None for resource in EXPORTABLE_RESOURCES)

        data = {}
        for resource in EXPORTABLE_RESOURCES:
            value = kwargs.get(resource)
            if all_resources or value is not None:
                endpoint = getattr(self, resource)
                if value:
                    endpoint = self._filtered_list(endpoint, value)
                data[resource] = self._export_list(endpoint)

        return data
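
    # Illustrative shape of the data returned above (the values are hypothetical;
    # each exported asset carries its plain POST fields, a 'natural_key', and
    # optionally a 'related' mapping):
    #
    #   {'users': [{'username': 'alice',
    #               'natural_key': {'username': 'alice', 'type': 'user'},
    #               'related': {'roles': [...]}}],
    #    'organizations': [...],
    #    ...}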

    # Import methods

    def _dependent_resources(self, data):
        page_resource = {getattr(self, resource)._create().__item_class__: resource
                         for resource in self.json}
        data_pages = [getattr(self, resource)._create().__item_class__ for resource in EXPORTABLE_RESOURCES]

        for page_cls in itertools.chain(*has_create.page_creation_order(*data_pages)):
            yield page_resource[page_cls]

    def _import_list(self, endpoint, assets):
        log.debug("_import_list -- endpoint: %s, assets: %s", endpoint.endpoint, repr(assets))
        post_fields = utils.get_post_fields(endpoint, self._cache)

        changed = False

        for asset in assets:
            post_data = {}
            for field, value in asset.items():
                if field not in post_fields:
                    continue
                if post_fields[field]['type'] in ('id', 'integer') and isinstance(value, dict):
                    _page = self._cache.get_by_natural_key(value)
                    post_data[field] = _page['id'] if _page is not None else None
                else:
                    post_data[field] = value

            _page = self._cache.get_by_natural_key(asset['natural_key'])
            try:
                if _page is None:
                    if asset['natural_key']['type'] == 'user':
                        # We should only impose a default password if the resource doesn't exist.
                        post_data.setdefault('password', 'abc123')
                    _page = endpoint.post(post_data)
                    changed = True
                    if asset['natural_key']['type'] == 'project':
                        # When creating a project, we need to wait for its
                        # first project update to finish so that associated
                        # JTs have valid options for playbook names
                        _page.wait_until_completed()
                else:
                    _page = _page.put(post_data)
                    changed = True
            except (exc.Common, AssertionError) as e:
                log.error("Object import failed: %s.", e)
                log.debug("post_data: %r", post_data)
                continue

            self._cache.set_page(_page)

            # Queue up everything related to be either created or assigned.
            for name, S in asset.get('related', {}).items():
                if not S:
                    continue
                if name == 'roles':
                    self._roles.append((_page, S))
                else:
                    self._related.append((_page, name, S))

        return changed

    def _assign_role(self, endpoint, role):
        if 'content_object' not in role:
            return
        obj_page = self._cache.get_by_natural_key(role['content_object'])
        if obj_page is None:
            return
        role_page = obj_page.get_object_role(role['name'], by_name=True)
        try:
            endpoint.post({'id': role_page['id']})
        except exc.NoContent:  # desired exception on successful (dis)association
            pass
        except exc.Common as e:
            log.error("Role assignment failed: %s.", e)
            log.debug("post_data: %r", {'id': role_page['id']})

    def _assign_membership(self):
        for _page, roles in self._roles:
            role_endpoint = _page.json['related']['roles']
            for role in roles:
                if role['name'] == 'Member':
                    self._assign_role(role_endpoint, role)

    def _assign_roles(self):
        for _page, roles in self._roles:
            role_endpoint = _page.json['related']['roles']
            for role in roles:
                if role['name'] != 'Member':
                    self._assign_role(role_endpoint, role)

    def _assign_related(self):
        for _page, name, related_set in self._related:
            endpoint = _page.related[name]
            if isinstance(related_set, dict):  # Relateds that are just json blobs, e.g. survey_spec
                endpoint.post(related_set)
                return

            if 'natural_key' not in related_set[0]:  # It is an attach set
                # Try to impedance match
                related = endpoint.get(all_pages=True)
                existing = {rel['id'] for rel in related.results}
                for item in related_set:
                    rel_page = self._cache.get_by_natural_key(item)
                    if rel_page is None:
                        continue  # FIXME
                    if rel_page['id'] in existing:
                        continue
                    try:
                        post_data = {'id': rel_page['id']}
                        endpoint.post(post_data)
                        log.error("endpoint: %s, id: %s", endpoint.endpoint, rel_page['id'])
                    except exc.NoContent:  # desired exception on successful (dis)association
                        pass
                    except exc.Common as e:
                        log.error("Object association failed: %s.", e)
                        log.debug("post_data: %r", post_data)
            else:  # It is a create set
                self._cache.get_page(endpoint)
                self._import_list(endpoint, related_set)

        # FIXME: deal with pruning existing relations that do not match the import set

    def import_assets(self, data):
        self._cache = page.PageCache()
        self._related = []
        self._roles = []

        changed = False

        for resource in self._dependent_resources(data):
            endpoint = getattr(self, resource)
            # Load up existing objects, so that we can try to update or link to them
            self._cache.get_page(endpoint)
            imported = self._import_list(endpoint, data.get(resource) or [])
            changed = changed or imported
            # FIXME: should we delete existing unpatched assets?

        self._assign_related()
        self._assign_membership()
        self._assign_roles()

        return changed


page.register_page(resources.v2, ApiV2)
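
A minimal usage sketch for the two entry points added above (here `v2` stands for an already-authenticated ApiV2 page object, as obtained by the `awx export` / `awx import` CLI commands further below; the filter value is hypothetical):

    data = v2.export_assets(users='alice')  # export only the user named 'alice'
    changed = v2.import_assets(data)        # True if anything was created or updated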

@@ -149,6 +149,8 @@ def get_payload_field_and_value_from_kwargs_or_config_cred(

class CredentialType(HasCreate, base.Base):

    NATURAL_KEY = ('name', 'kind')

    def silent_delete(self):
        if not self.managed_by_tower:
            return super(CredentialType, self).silent_delete()
@@ -204,6 +206,7 @@ class Credential(HasCopy, HasCreate, base.Base):

    dependencies = [CredentialType]
    optional_dependencies = [Organization, User, Team]
    NATURAL_KEY = ('organization', 'name', 'credential_type')

    def payload(
            self,

@@ -32,6 +32,7 @@ log = logging.getLogger(__name__)

class Inventory(HasCopy, HasCreate, HasInstanceGroups, HasVariables, base.Base):

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def print_ini(self):
        """Print an ini version of the inventory"""
@@ -473,6 +474,7 @@ class InventorySource(HasCreate, HasNotifications, UnifiedJobTemplate):

    optional_schedule_fields = tuple()
    dependencies = [Inventory]
    optional_dependencies = [Credential, InventoryScript, Project]
    NATURAL_KEY = ('organization', 'name', 'inventory')

    def payload(
            self,

@@ -24,6 +24,7 @@ class JobTemplate(
        UnifiedJobTemplate):

    optional_dependencies = [Inventory, Credential, Project]
    NATURAL_KEY = ('organization', 'name')

    def launch(self, payload={}):
        """Launch the job_template using related->launch endpoint."""

@@ -9,6 +9,7 @@ from . import page

class Label(HasCreate, base.Base):

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def silent_delete(self):
        """Label pages do not support DELETE requests. Here, we override the base page object

@@ -24,6 +24,7 @@ notification_types = (

class NotificationTemplate(HasCopy, HasCreate, base.Base):

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def test(self):
        """Create test notification"""

@@ -8,6 +8,8 @@ from . import page

class Organization(HasCreate, HasInstanceGroups, HasNotifications, base.Base):

    NATURAL_KEY = ('name',)

    def add_admin(self, user):
        if isinstance(user, page.Page):
            user = user.json

@@ -15,6 +15,7 @@ from awxkit.utils import (
    is_list_or_tuple,
    to_str
)
from awxkit.api import utils
from awxkit.api.client import Connection
from awxkit.api.registry import URLRegistry
from awxkit.config import config
@@ -273,7 +274,7 @@ class Page(object):

    def get(self, all_pages=False, **query_parameters):
        r = self.connection.get(self.endpoint, query_parameters)
        page = self.page_identity(r)
-        if all_pages and page.next:
+        if all_pages and getattr(page, 'next', None):
            paged_results = [r.json()['results']]
            while page.next:
                r = self.connection.get(self.next, query_parameters)
@@ -317,6 +318,28 @@ class Page(object):
        page_cls = get_registered_page(endpoint)
        return page_cls(self.connection, endpoint=endpoint).get(**kw)

    def get_natural_key(self, cache=None):
        if cache is None:
            cache = PageCache()

        if not getattr(self, 'NATURAL_KEY', None):
            log.warning("This object does not have a natural key: %s", getattr(self, 'endpoint', ''))
            return None

        natural_key = {}
        for key in self.NATURAL_KEY:
            if key in self.related:
                related_endpoint = cache.get_page(self.related[key])
                if related_endpoint is not None:
                    natural_key[key] = related_endpoint.get_natural_key(cache=cache)
                else:
                    natural_key[key] = None
            elif key in self:
                natural_key[key] = self[key]

        natural_key['type'] = self['type']
        return natural_key
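        # Illustrative result (hypothetical names): for a job template whose
        # NATURAL_KEY is ('organization', 'name'), the recursion above yields
        #   {'organization': {'name': 'Default', 'type': 'organization'},
        #    'name': 'Demo Job Template',
        #    'type': 'job_template'}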


_exception_map = {http.NO_CONTENT: exc.NoContent,
                  http.NOT_FOUND: exc.NotFound,
@@ -334,6 +357,8 @@ def exception_from_status_code(status_code):

class PageList(object):

    NATURAL_KEY = None

    @property
    def __item_class__(self):
        """Returns the class representing a single 'Page' item
@@ -376,6 +401,10 @@ class PageList(object):
    def create(self, *a, **kw):
        return self.__item_class__(self.connection).create(*a, **kw)

    def get_natural_key(self, cache=None):
        log.warning("This object does not have a natural key: %s", getattr(self, 'endpoint', ''))
        return None


class TentativePage(str):

@@ -502,3 +531,67 @@ class TentativePage(str):

    def __ne__(self, other):
        return self.endpoint != other


class PageCache(object):

    def __init__(self):
        self.options = {}
        self.pages_by_url = {}
        self.pages_by_natural_key = {}

    def get_options(self, page):
        url = page.endpoint if isinstance(page, Page) else str(page)
        if url in self.options:
            return self.options[url]

        try:
            options = page.options()
        except exc.Common:
            log.error("This endpoint raised an error: %s", url)
            return self.options.setdefault(url, None)

        warning = options.r.headers.get('Warning', '')
        if '299' in warning and 'deprecated' in warning:
            log.warning("This endpoint is deprecated: %s", url)
            return self.options.setdefault(url, None)

        return self.options.setdefault(url, options)

    def set_page(self, page):
        log.debug("set_page: %s %s", type(page), page.endpoint)
        self.pages_by_url[page.endpoint] = page
        if getattr(page, 'NATURAL_KEY', None):
            log.debug("set_page has natural key fields.")
            natural_key = page.get_natural_key(cache=self)
            if natural_key is not None:
                log.debug("set_page natural_key: %s", repr(natural_key))
                self.pages_by_natural_key[utils.freeze(natural_key)] = page.endpoint
        if 'results' in page:
            for p in page.results:
                self.set_page(p)
        return page

    def get_page(self, page):
        url = page.endpoint if isinstance(page, Page) else str(page)
        if url in self.pages_by_url:
            return self.pages_by_url[url]

        try:
            page = page.get(all_pages=True)
        except exc.Common:
            log.error("This endpoint raised an error: %s", url)
            return self.pages_by_url.setdefault(url, None)

        warning = page.r.headers.get('Warning', '')
        if '299' in warning and 'deprecated' in warning:
            log.warning("This endpoint is deprecated: %s", url)
            return self.pages_by_url.setdefault(url, None)

        log.debug("get_page: %s", page.endpoint)
        return self.set_page(page)

    def get_by_natural_key(self, natural_key):
        endpoint = self.pages_by_natural_key.get(utils.freeze(natural_key))
        log.debug("get_by_natural_key: %s, endpoint: %s", repr(natural_key), endpoint)
        if endpoint:
            return self.get_page(endpoint)

@@ -14,6 +14,7 @@ class Project(HasCopy, HasCreate, HasNotifications, UnifiedJobTemplate):

    optional_dependencies = [Credential, Organization]
    optional_schedule_fields = tuple()
    NATURAL_KEY = ('organization', 'name')

    def payload(self, organization, scm_type='git', **kwargs):
        payload = PseudoNamespace(

@@ -1,11 +1,36 @@
import logging

from awxkit.api.resources import resources

from . import base
from . import page


log = logging.getLogger(__name__)


class Role(base.Base):

-    pass
+    NATURAL_KEY = ('name',)

    def get_natural_key(self, cache=None):
        if cache is None:
            cache = page.PageCache()

        natural_key = super(Role, self).get_natural_key(cache=cache)
        related_objs = [
            related for name, related in self.related.items()
            if name not in ('users', 'teams')
        ]
        if related_objs:
            related_endpoint = cache.get_page(related_objs[0])
            if related_endpoint is None:
                log.error("Unable to obtain content_object %s for role %s",
                          related_objs[0], self.endpoint)
                return None
            natural_key['content_object'] = related_endpoint.get_natural_key(cache=cache)

        return natural_key


page.register_page(resources.role, Role)

@@ -8,7 +8,7 @@ from . import base

class Schedule(UnifiedJob):

-    pass
+    NATURAL_KEY = ('unified_job_template', 'name')


page.register_page([resources.schedule,

@@ -11,6 +11,7 @@ from . import page

class Team(HasCreate, base.Base):

    dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def add_user(self, user):
        if isinstance(user, page.Page):

@@ -9,6 +9,8 @@ from . import page

class User(HasCreate, base.Base):

    NATURAL_KEY = ('username',)

    def payload(self, **kwargs):
        payload = PseudoNamespace(
            username=kwargs.get('username') or 'User-{}'.format(

@@ -10,6 +10,7 @@ from . import page

class WorkflowJobTemplateNode(HasCreate, base.Base):

    dependencies = [WorkflowJobTemplate, UnifiedJobTemplate]
    NATURAL_KEY = ('workflow_job_template', 'identifier')

    def payload(self, workflow_job_template, unified_job_template, **kwargs):
        if not unified_job_template:
@@ -122,8 +123,8 @@ class WorkflowJobTemplateNode(HasCreate, base.Base):


page.register_page([resources.workflow_job_template_node,
-                    (resources.workflow_job_template_nodes,
-                     'post')],
+                    (resources.workflow_job_template_nodes, 'post'),
+                    (resources.workflow_job_template_workflow_nodes, 'post')],
                    WorkflowJobTemplateNode)


@@ -13,6 +13,7 @@ from . import page

class WorkflowJobTemplate(HasCopy, HasCreate, HasNotifications, HasSurvey, UnifiedJobTemplate):

    optional_dependencies = [Organization]
    NATURAL_KEY = ('organization', 'name')

    def launch(self, payload={}):
        """Launch using related->launch endpoint."""
awxkit/awxkit/api/utils.py (new file, 50 lines)
@@ -0,0 +1,50 @@
import logging
import re


log = logging.getLogger(__name__)

descRE = re.compile(r'^[*] `(\w+)`: [^(]*\((\w+), ([^)]+)\)')


def freeze(key):
    if key is None:
        return None
    return frozenset((k, freeze(v) if isinstance(v, dict) else v) for k, v in key.items())
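
# Illustrative use of freeze() (hypothetical key): a nested natural key such as
#   {'name': 'Demo', 'organization': {'name': 'Default', 'type': 'organization'}, 'type': 'project'}
# becomes a hashable frozenset of (key, value) pairs, with the nested dict itself
# frozen, so PageCache can use it to index pages_by_natural_key.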


def parse_description(desc):
    options = {}
    for line in desc[desc.index('POST'):].splitlines():
        match = descRE.match(line)
        if not match:
            continue
        options[match.group(1)] = {'type': match.group(2),
                                   'required': match.group(3) == 'required'}
    return options
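
# Illustrative input/output for parse_description() (the sample line is made up,
# but follows the OPTIONS description format that descRE expects):
#   '* `name`: Name of this project. (string, required)'
# parses to
#   {'name': {'type': 'string', 'required': True}}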


def remove_encrypted(value):
    if value == '$encrypted$':
        return ''
    if isinstance(value, list):
        return [remove_encrypted(item) for item in value]
    if isinstance(value, dict):
        return {k: remove_encrypted(v) for k, v in value.items()}
    return value


def get_post_fields(page, cache):
    options_page = cache.get_options(page)
    if options_page is None:
        return None

    if 'POST' not in options_page.r.headers.get('Allow', ''):
        return None

    if 'POST' in options_page.json['actions']:
        return options_page.json['actions']['POST']
    else:
        log.warning(
            "Insufficient privileges on %s, inferring POST fields from description.", options_page.endpoint)
        return parse_description(options_page.json['description'])

@@ -59,9 +59,37 @@ Importing an SSH Key

        --name 'My SSH Key' --user 'alice' \
        --inputs '{"username": "server-login", "ssh_key_data": "@~/.ssh/id_rsa"}'

-Backup/Restore
---------------
+Import/Export
+-------------

-The AWX CLI doesn't currently have official support for backing up and restoring resources (similar to `tower-cli send` and `tower-cli receive`).
+Intended to be similar to `tower-cli send` and `tower-cli receive`.

-If you rely on this functionality, you should continue to use `tower-cli` at this time.
+Exporting everything:

.. code:: bash

    awx export

Exporting everything of some particular type or types:

.. code:: bash

    awx export --users

Exporting a particular named resource:

.. code:: bash

    awx export --users admin

Exporting a resource by id:

.. code:: bash

    awx export --users 42

Importing a set of resources stored as a file:

.. code:: bash

    awx import < resources.json
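
Chaining the two, for example to snapshot users and load them back (a sketch; the file name is arbitrary and this assumes the default JSON output format):

.. code:: bash

    awx export --users > users.json
    awx import < users.json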

@@ -1,8 +1,11 @@
import json
import os
import sys

from awxkit import api, config
from awxkit.utils import to_str
from awxkit.api.pages import Page
from awxkit.api.pages.api import EXPORTABLE_RESOURCES
from awxkit.cli.format import FORMATTERS, format_response, add_authentication_arguments
from awxkit.cli.utils import CustomRegistryMeta, cprint

@@ -123,6 +126,51 @@ class Config(CustomCommand):
        }


class Import(CustomCommand):
    name = 'import'
    help_text = 'import resources into Tower'

    def handle(self, client, parser):
        if client.help:
            parser.print_help()
            raise SystemExit()

        data = json.load(sys.stdin)

        client.authenticate()
        client.v2.import_assets(data)

        return {}


class Export(CustomCommand):
    name = 'export'
    help_text = 'export resources from Tower'

    def extend_parser(self, parser):
        resources = parser.add_argument_group('resources')

        for resource in EXPORTABLE_RESOURCES:
            # This parsing pattern will result in 3 different possible outcomes:
            # 1) the resource flag is not used at all, which will result in the attr being None
            # 2) the resource flag is used with no argument, which will result in the attr being ''
            # 3) the resource flag is used with an argument, and the attr will be that argument's value
            resources.add_argument('--{}'.format(resource), nargs='?', const='')

    def handle(self, client, parser):
        self.extend_parser(parser)

        if client.help:
            parser.print_help()
            raise SystemExit()

        parsed = parser.parse_known_args()[0]
        kwargs = {resource: getattr(parsed, resource, None) for resource in EXPORTABLE_RESOURCES}

        client.authenticate()
        return client.v2.export_assets(**kwargs)
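
    # Illustrative mapping (hypothetical invocation): `awx export --users --teams Ops`
    # yields kwargs {'users': '', 'teams': 'Ops', 'organizations': None, ...}; per the
    # three outcomes noted in extend_parser(), export_assets() then exports every user,
    # only the team named 'Ops', and none of the unspecified resource types.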


def parse_resource(client, skip_deprecated=False):
    subparsers = client.parser.add_subparsers(
        dest='resource',

@@ -101,3 +101,8 @@ class UnexpectedAWXState(Common):

class IsMigrating(Common):

    pass


class ImportExportError(Exception):

    pass