awxkit: allow to modify api base url (#14835)

Signed-off-by: Julen Landa Alustiza <jlanda@redhat.com>
Authored by Julen Landa Alustiza on 2024-02-07 12:26:42 +01:00; committed by GitHub
parent 8a902debd5
commit 8c9c02c975
17 changed files with 54 additions and 49 deletions

View File

@@ -43,8 +43,8 @@ class Connection(object):
self.session = requests.Session()
self.uses_session_cookie = False
def get_session_requirements(self, next='/api/'):
self.get('/api/') # this causes a cookie w/ the CSRF token to be set
def get_session_requirements(self, next=config.api_base_path):
self.get(config.api_base_path) # this causes a cookie w/ the CSRF token to be set
return dict(next=next)
def login(self, username=None, password=None, token=None, **kwargs):
@@ -52,7 +52,7 @@ class Connection(object):
_next = kwargs.get('next')
if _next:
headers = self.session.headers.copy()
response = self.post('/api/login/', headers=headers, data=dict(username=username, password=password, next=_next))
response = self.post(f"{config.api_base_path}login/", headers=headers, data=dict(username=username, password=password, next=_next))
# The login causes a redirect so we need to search the history of the request to find the header
for historical_response in response.history:
if 'X-API-Session-Cookie-Name' in historical_response.headers:
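
These Connection changes swap the hardcoded '/api/' prefix for config.api_base_path. A minimal sketch of what the rewritten URLs evaluate to, assuming AWXKIT_API_BASE_PATH is left unset so the default applies:

from awxkit.config import config

# With the default base path the session endpoints are byte-for-byte the old ones.
assert config.api_base_path == '/api/'
assert f"{config.api_base_path}login/" == '/api/login/'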

View File

@@ -3,6 +3,7 @@ import json
from awxkit.utils import poll_until
from awxkit.exceptions import WaitUntilTimeout
from awxkit.config import config
def bytes_to_str(obj):
@@ -83,7 +84,7 @@ class HasStatus(object):
if getattr(self, 'job_explanation', '').startswith('Previous Task Failed'):
try:
data = json.loads(self.job_explanation.replace('Previous Task Failed: ', ''))
dependency = self.walk('/api/v2/{0}s/{1}/'.format(data['job_type'], data['job_id']))
dependency = self.walk('/{0}v2/{1}s/{2}/'.format(config.api_base_path, data['job_type'], data['job_id']))
if hasattr(dependency, 'failure_output_details'):
msg += '\nDependency output:\n{}'.format(dependency.failure_output_details())
else:

View File

@@ -150,19 +150,21 @@ class Base(Page):
HTTPBasicAuth(client_id, client_secret)(req)
req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
resp = self.connection.post(
'/api/o/token/', data={"grant_type": "password", "username": username, "password": password, "scope": scope}, headers=req.headers
f"{config.api_base_path}o/token/",
data={"grant_type": "password", "username": username, "password": password, "scope": scope},
headers=req.headers,
)
elif client_id:
req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
resp = self.connection.post(
'/api/o/token/',
f"{config.api_base_path}o/token/",
data={"grant_type": "password", "username": username, "password": password, "client_id": client_id, "scope": scope},
headers=req.headers,
)
else:
HTTPBasicAuth(username, password)(req)
resp = self.connection.post(
'/api/v2/users/{}/personal_tokens/'.format(username),
'{0}v2/users/{1}/personal_tokens/'.format(config.api_base_path, username),
json={"description": description, "application": None, "scope": scope},
headers=req.headers,
)
@@ -207,7 +209,7 @@ class Base(Page):
jobs = []
for active_job in active_jobs:
job_type = active_job['type']
endpoint = '/api/v2/{}s/{}/'.format(job_type, active_job['id'])
endpoint = '{}v2/{}s/{}/'.format(config.api_base_path, job_type, active_job['id'])
job = self.walk(endpoint)
jobs.append(job)
job.cancel()
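
The token and job endpoints in Base follow the same pattern. A small illustration of the personal-token URL built above; the username 'alice' is hypothetical, not from the commit:

from awxkit.config import config

# Under the default '/api/' this yields '/api/v2/users/alice/personal_tokens/'.
endpoint = '{0}v2/users/{1}/personal_tokens/'.format(config.api_base_path, 'alice')
print(endpoint)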

View File

@@ -1,3 +1,6 @@
from awxkit.config import config
class Resources(object):
_activity = r'activity_stream/\d+/'
_activity_stream = 'activity_stream/'
@@ -285,6 +288,9 @@ class Resources(object):
common = api + r'v\d+/'
v2 = api + 'v2/'
def __init__(self, api):
self.api = api
def __getattr__(self, resource):
if resource[:3] == '___':
raise AttributeError('No existing resource: {}'.format(resource))
@@ -299,4 +305,4 @@
return '{0}{1}'.format(getattr(self, prefix), getattr(self, resource))
resources = Resources()
resources = Resources(api=config.api_base_path)
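
The Resources singleton is now constructed from the configurable base path instead of a hardcoded default. A quick check, assuming the module path awxkit.api.resources that awxkit currently uses:

from awxkit.config import config
from awxkit.api.resources import resources

# The singleton carries whatever base path config resolved at import time.
assert resources.api == config.api_base_path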

View File

@@ -122,5 +122,5 @@ def as_user(v, username, password=None):
def uses_sessions(connection):
session_login = connection.get('/api/login/')
session_login = connection.get(f"{config.api_base_path}login/")
return session_login.status_code == 200

View File

@@ -4,6 +4,7 @@ import json
from .stdout import monitor, monitor_workflow
from .utils import CustomRegistryMeta, color_enabled
from awxkit import api
from awxkit.config import config
from awxkit.exceptions import NoContent
@@ -479,7 +480,7 @@ class RoleMixin(object):
options = ', '.join(RoleMixin.roles[flag])
raise ValueError("invalid choice: '{}' must be one of {}".format(role, options))
value = kwargs[flag]
target = '/api/v2/{}/{}'.format(resource, value)
target = '{}v2/{}/{}'.format(config.api_base_path, resource, value)
detail = self.page.__class__(target, self.page.connection).get()
object_roles = detail['summary_fields']['object_roles']
actual_role = object_roles[role + '_role']

View File

@@ -6,6 +6,7 @@ import sys
import time
from .utils import cprint, color_enabled, STATUS_COLORS
from awxkit.config import config
from awxkit.utils import to_str
@@ -17,7 +18,7 @@ def monitor_workflow(response, session, print_stdout=True, action_timeout=None,
}
def fetch(seen):
results = response.connection.get('/api/v2/unified_jobs', payload).json()['results']
results = response.connection.get(f"{config.api_base_path}v2/unified_jobs", payload).json()['results']
# erase lines we've previously printed
if print_stdout and sys.stdout.isatty():

View File

@@ -32,3 +32,4 @@ config.assume_untrusted = config.get('assume_untrusted', True)
config.client_connection_attempts = int(os.getenv('AWXKIT_CLIENT_CONNECTION_ATTEMPTS', 5))
config.prevent_teardown = to_bool(os.getenv('AWXKIT_PREVENT_TEARDOWN', False))
config.use_sessions = to_bool(os.getenv('AWXKIT_SESSIONS', False))
config.api_base_path = os.getenv('AWXKIT_API_BASE_PATH', '/api/')
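
The new setting is read from the environment with '/api/' as the fallback, so callers can repoint awxkit at a different API prefix without code changes. A minimal sketch; the '/custom/api/' value is only an example, and the variable must be exported before awxkit.config is first imported, since the default is resolved at import time:

import os

# Hypothetical prefix for an AWX deployment served under a non-default path.
os.environ['AWXKIT_API_BASE_PATH'] = '/custom/api/'

from awxkit.config import config

assert config.api_base_path == '/custom/api/'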

View File

@@ -14,10 +14,8 @@ import yaml
from awxkit.words import words
from awxkit.exceptions import WaitUntilTimeout
log = logging.getLogger(__name__)
cloud_types = (
'aws',
'azure',

View File

@@ -16,11 +16,11 @@ pubdate = datetime.strptime(pubdateshort, '%Y-%m-%d').strftime('%B %d, %Y')
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None
html_title = 'Ansible AWX community documentation'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None
html_short_title = 'AWX community documentation'
htmlhelp_basename = 'AWX_docs'
@@ -54,8 +54,8 @@ release = 'AWX latest'
language = 'en'
locale_dirs = ['locale/'] # path is example but recommended.
gettext_compact = False # optional.
locale_dirs = ['locale/'] # path is example but recommended.
gettext_compact = False # optional.
rst_epilog = """
.. |atqi| replace:: *AWX Quick Installation Guide*
@ -88,4 +88,8 @@ rst_epilog = """
.. |rhaap| replace:: Red Hat Ansible Automation Platform
.. |RHAT| replace:: Red Hat Ansible Automation Platform controller
""" % (version, pubdateshort, pubdate)
""" % (
version,
pubdateshort,
pubdate,
)

View File

@@ -35,12 +35,8 @@ def assets(app, exception):
_, extension = os.path.splitext(asset)
if extension in ('py', 'pyc'):
continue
if not exception and os.path.exists(
os.path.join(app.outdir, '_static')
):
copyfile(
os.path.join(here, asset),
os.path.join(app.outdir, '_static', asset))
if not exception and os.path.exists(os.path.join(app.outdir, '_static')):
copyfile(os.path.join(here, asset), os.path.join(app.outdir, '_static', asset))
def setup(app):

View File

@@ -5,4 +5,5 @@
if __name__ == '__main__':
from awx import manage
manage()

View File

@@ -53,7 +53,7 @@ from awx.main.models import ( # noqa
WorkflowJobTemplateNode,
batch_role_ancestor_rebuilding,
)
from awx.main.models.schedules import Schedule #noqa
from awx.main.models.schedules import Schedule # noqa
from awx.main.signals import disable_activity_stream, disable_computed_fields # noqa
@@ -595,8 +595,6 @@ def make_the_data():
schedule._is_new = _
schedules.append(schedule)
print('# Creating %d Labels' % n_labels)
org_idx = 0
for n in spread(n_labels, n_organizations):

View File

@@ -40,13 +40,12 @@ def popen_wrapper(args, os_err_exc_type=Exception, stdout_encoding='utf-8'):
p = Popen(args, shell=False, stdout=PIPE, stderr=PIPE, close_fds=os.name != 'nt')
except OSError as e:
strerror = force_text(e.strerror, DEFAULT_LOCALE_ENCODING, strings_only=True)
raise Exception(os_err_exc_type, os_err_exc_type('Error executing %s: %s' %
(args[0], strerror)), sys.exc_info()[2])
raise Exception(os_err_exc_type, os_err_exc_type('Error executing %s: %s' % (args[0], strerror)), sys.exc_info()[2])
output, errors = p.communicate()
return (
force_text(output, stdout_encoding, strings_only=True, errors='strict'),
force_text(errors, DEFAULT_LOCALE_ENCODING, strings_only=True, errors='replace'),
p.returncode
p.returncode,
)
@@ -65,7 +64,13 @@ def get_system_encoding():
_PROTECTED_TYPES = (
type(None), int, float, Decimal, datetime.datetime, datetime.date, datetime.time,
type(None),
int,
float,
Decimal,
datetime.datetime,
datetime.date,
datetime.time,
)
@@ -111,8 +116,7 @@ def force_text(s, encoding='utf-8', strings_only=False, errors='strict'):
# working unicode method. Try to handle this without raising a
# further exception by individually forcing the exception args
# to unicode.
s = ' '.join(force_text(arg, encoding, strings_only, errors)
for arg in s)
s = ' '.join(force_text(arg, encoding, strings_only, errors) for arg in s)
return s
@ -140,17 +144,14 @@ if __name__ == "__main__":
print('processing file %s in %s\n' % (f, dirpath))
po_path = os.path.join(dirpath, f)
if has_bom(po_path):
raise Exception("The %s file has a BOM (Byte Order Mark). "
"Django only supports .po files encoded in "
"UTF-8 and without any BOM." % po_path)
raise Exception(
"The %s file has a BOM (Byte Order Mark). " "Django only supports .po files encoded in " "UTF-8 and without any BOM." % po_path
)
base_path = os.path.splitext(po_path)[0]
# Check writability on first location
if i == 0 and not is_writable((base_path + '.mo')):
raise Exception("The po files under %s are in a seemingly not writable location. "
"mo files will not be updated/created." % dirpath)
args = [program] + program_options + [
'-o', (base_path + '.mo'), (base_path + '.po')
]
raise Exception("The po files under %s are in a seemingly not writable location. " "mo files will not be updated/created." % dirpath)
args = [program] + program_options + ['-o', (base_path + '.mo'), (base_path + '.po')]
output, errors, status = popen_wrapper(args)
if status:
if errors:

View File

@@ -66,7 +66,7 @@ class YieldedRows(StringIO):
def __init__(self, job_id, rows, created_stamp, modified_stamp, *args, **kwargs):
self.rows = rows
self.rowlist = []
for (event, module) in itertools.product(EVENT_OPTIONS, MODULE_OPTIONS):
for event, module in itertools.product(EVENT_OPTIONS, MODULE_OPTIONS):
event_data_json = {"task_action": module, "name": "Do a {} thing".format(module), "task": "Do a {} thing".format(module)}
row = (
"\t".join(

View File

@@ -6,15 +6,11 @@ def _get_class_full_name(cls_):
class _ModelFieldRow(object):
def __init__(self, field):
self.field = field
self.name = field.name
self.type_ = _get_class_full_name(type(field))
if self.field.many_to_many\
or self.field.many_to_one\
or self.field.one_to_many\
or self.field.one_to_one:
if self.field.many_to_many or self.field.many_to_one or self.field.one_to_many or self.field.one_to_one:
self.related_model = _get_class_full_name(self.field.remote_field.model)
else:
self.related_model = 'N/A'

View File

@@ -66,7 +66,6 @@ class Controller(Plugin, RedHatPlugin):
short_desc = "Ansible Automation Platform controller information"
def setup(self):
for path in SOSREPORT_CONTROLLER_DIRS:
self.add_copy_spec(path)