Merge branch 'feature-django-upgrade' of github.com:ansible/awx into feature-django-upgrade
@@ -33,7 +33,7 @@ from rest_framework.negotiation import DefaultContentNegotiation
 # AWX
 from awx.api.filters import FieldLookupBackend
 from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
-from awx.main.access import access_registry
+from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
 from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
@@ -362,12 +362,7 @@ class GenericAPIView(generics.GenericAPIView, APIView):
             return self.queryset._clone()
         elif self.model is not None:
             qs = self.model._default_manager
-            if self.model in access_registry:
-                access_class = access_registry[self.model]
-                if access_class.select_related:
-                    qs = qs.select_related(*access_class.select_related)
-                if access_class.prefetch_related:
-                    qs = qs.prefetch_related(*access_class.prefetch_related)
+            qs = optimize_queryset(qs)
             return qs
         else:
             return super(GenericAPIView, self).get_queryset()
@@ -529,11 +524,7 @@ class SubListAPIView(ParentMixin, ListAPIView):
         self.check_parent_access(parent)
         sublist_qs = self.get_sublist_queryset(parent)
         if not self.filter_read_permission:
-            access_class = access_registry[self.model]
-            if access_class.prefetch_related:
-                return sublist_qs.prefetch_related(*access_class.prefetch_related)
-            if access_class.select_related:
-                return sublist_qs.select_related(*access_class.select_related)
+            return optimize_queryset(sublist_qs)
         qs = self.request.user.get_queryset(self.model).distinct()
         return qs & sublist_qs
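The rewritten `SubListAPIView.get_queryset` above makes `filter_read_permission = False` the single opt-out switch for sublists: instead of each view open-coding registry lookups, the base class now defers to `optimize_queryset` (added later in this commit) for the registered `select_related`/`prefetch_related` hints. A minimal sketch of a hypothetical sublist view opting out (class and model names are illustrative, not part of this commit):

```python
# Hypothetical sublist view. With filter_read_permission = False the base
# class returns optimize_queryset(sublist_qs) directly instead of
# intersecting the sublist with the user's permission-filtered queryset;
# the check_parent_access() call on the parent object still runs.
class MyWidgetSubList(SubListAPIView):
    model = models.Widget            # illustrative model
    serializer_class = serializers.WidgetSerializer
    parent_model = models.Container  # illustrative parent
    relationship = 'widgets'
    filter_read_permission = False
```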
@@ -62,7 +62,7 @@ from wsgiref.util import FileWrapper

 # AWX
 from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
-from awx.main.access import get_user_queryset, HostAccess
+from awx.main.access import get_user_queryset
 from awx.api.generics import (
     APIView,
     BaseUsersList,
@@ -794,13 +794,7 @@ class ExecutionEnvironmentActivityStreamList(SubListAPIView):
     parent_model = models.ExecutionEnvironment
     relationship = 'activitystream_set'
     search_fields = ('changes',)
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-
-        qs = self.request.user.get_queryset(self.model)
-        return qs.filter(execution_environment=parent)
+    filter_read_permission = False


 class ProjectList(ListCreateAPIView):
@@ -1634,13 +1628,7 @@ class InventoryHostsList(HostRelatedSearchMixin, SubListCreateAttachDetachAPIVie
     parent_model = models.Inventory
     relationship = 'hosts'
     parent_key = 'inventory'
-
-    def get_queryset(self):
-        inventory = self.get_parent_object()
-        qs = getattrd(inventory, self.relationship).all()
-        # Apply queryset optimizations
-        qs = qs.select_related(*HostAccess.select_related).prefetch_related(*HostAccess.prefetch_related)
-        return qs
+    filter_read_permission = False


 class HostGroupsList(SubListCreateAttachDetachAPIView):
@@ -2862,16 +2850,7 @@ class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, Su
     relationship = ''
     enforce_parent_relationship = 'workflow_job_template'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    '''
-    Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by
-    'relationship'
-    '''
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).all()
+    filter_read_permission = False

     def is_valid_relation(self, parent, sub, created=False):
         if created:
@@ -2946,14 +2925,7 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
     parent_model = models.WorkflowJobNode
     relationship = ''
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    #
-    # Limit the set of WorkflowJobNodes to the related nodes of specified by self.relationship
-    #
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).all()
+    filter_read_permission = False


 class WorkflowJobNodeSuccessNodesList(WorkflowJobNodeChildrenBaseList):
@@ -3132,11 +3104,8 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
     relationship = 'workflow_job_template_nodes'
     parent_key = 'workflow_job_template'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).order_by('id')
+    ordering = ('id',)  # assure ordering by id for consistency
+    filter_read_permission = False


 class WorkflowJobTemplateJobsList(SubListAPIView):
@@ -3228,11 +3197,8 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
     relationship = 'workflow_job_nodes'
     parent_key = 'workflow_job'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).order_by('id')
+    ordering = ('id',)  # assure ordering by id for consistency
+    filter_read_permission = False


 class WorkflowJobCancel(GenericCancelView):
@@ -3546,11 +3512,7 @@ class BaseJobHostSummariesList(SubListAPIView):
     relationship = 'job_host_summaries'
     name = _('Job Host Summaries List')
     search_fields = ('host_name',)
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).select_related('job', 'job__job_template', 'host')
+    filter_read_permission = False


 class HostJobHostSummariesList(BaseJobHostSummariesList):
@@ -61,12 +61,6 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     model = Organization
     serializer_class = OrganizationSerializer
-
-    def get_queryset(self):
-        qs = Organization.accessible_objects(self.request.user, 'read_role')
-        qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
-        qs = qs.prefetch_related('created_by', 'modified_by')
-        return qs


 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = Organization
@@ -2952,3 +2952,19 @@ class WorkflowApprovalTemplateAccess(BaseAccess):
 for cls in BaseAccess.__subclasses__():
     access_registry[cls.model] = cls
 access_registry[UnpartitionedJobEvent] = UnpartitionedJobEventAccess
+
+
+def optimize_queryset(queryset):
+    """
+    A utility method in case you already have a queryset and just want to
+    apply the standard optimizations for that model.
+    In other words, use if you do not want to start from filtered_queryset for some reason.
+    """
+    if not queryset.model or queryset.model not in access_registry:
+        return queryset
+    access_class = access_registry[queryset.model]
+    if access_class.select_related:
+        queryset = queryset.select_related(*access_class.select_related)
+    if access_class.prefetch_related:
+        queryset = queryset.prefetch_related(*access_class.prefetch_related)
+    return queryset
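The new `optimize_queryset` helper centralizes the per-model `select_related`/`prefetch_related` hints that each access class declares, which is what lets the view-level `get_queryset` overrides above be deleted. A hedged usage sketch (the model is an illustrative choice; any model with an entry in `access_registry` works):

```python
from awx.main.access import optimize_queryset
from awx.main.models import JobTemplate  # illustrative registered model

qs = JobTemplate.objects.filter(name__icontains='deploy')
qs = optimize_queryset(qs)  # no-op for models without a registered access class
```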
@@ -399,7 +399,10 @@ def _copy_table(table, query, path):
     file_path = os.path.join(path, table + '_table.csv')
     file = FileSplitter(filespec=file_path)
     with connection.cursor() as cursor:
-        cursor.copy_expert(query, file)
+        with cursor.copy(query) as copy:
+            while data := copy.read():
+                byte_data = bytes(data)
+                file.write(byte_data.decode())
     return file.file_list()
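This hunk shows the psycopg 2 to psycopg 3 porting pattern used throughout the commit: `cursor.copy_expert(sql, file)` no longer exists, and `cursor.copy(sql)` instead returns a context manager whose `read()` hands back successive byte buffers, returning an empty (falsy) buffer once the stream ends, so the caller drives the write loop. A minimal sketch of that loop, assuming `conn` is an open psycopg 3 connection and the query text is illustrative:

```python
import sys

# Assumed setup, not part of this commit: conn = psycopg.connect(...)
with conn.cursor() as cursor:
    with cursor.copy("COPY (SELECT stdout FROM main_jobevent) TO STDOUT") as copy:
        while data := copy.read():  # empty buffer ends the loop
            sys.stdout.write(bytes(data).decode())
```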
@@ -2,6 +2,8 @@ import json
 import logging
 import os
 import time
+import signal
+import sys

 from django.core.management.base import BaseCommand
 from django.conf import settings
@@ -50,6 +52,11 @@ class Command(BaseCommand):
         }
         return json.dumps(payload)

+    def notify_listener_and_exit(self, *args):
+        with pg_bus_conn(new_connection=False) as conn:
+            conn.notify('web_heartbeet', self.construct_payload(action='offline'))
+        sys.exit(0)
+
     def do_hearbeat_loop(self):
         with pg_bus_conn(new_connection=True) as conn:
             while True:
@@ -57,10 +64,10 @@ class Command(BaseCommand):
                 conn.notify('web_heartbeet', self.construct_payload())
                 time.sleep(settings.BROADCAST_WEBSOCKET_BEACON_FROM_WEB_RATE_SECONDS)

-    # TODO: Send a message with action=offline if we notice a SIGTERM or SIGINT
-    # (wsrelay can use this to remove the node quicker)
     def handle(self, *arg, **options):
         self.print_banner()
+        signal.signal(signal.SIGTERM, self.notify_listener_and_exit)
+        signal.signal(signal.SIGINT, self.notify_listener_and_exit)

         # Note: We don't really try any reconnect logic to pg_notify here,
         # just let supervisor restart if we fail.
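The new handler resolves the TODO above: on SIGTERM or SIGINT the command publishes one final `action='offline'` payload so wsrelay can drop the node promptly instead of waiting for the heartbeat to lapse. A self-contained sketch of the same shutdown pattern, with the `pg_bus_conn` notification replaced by a print since it needs a database:

```python
import signal
import sys
import time

def notify_listener_and_exit(*args):
    # The real command sends construct_payload(action='offline') over
    # pg_notify here before exiting.
    print("sending offline notification, exiting")
    sys.exit(0)

signal.signal(signal.SIGTERM, notify_listener_and_exit)
signal.signal(signal.SIGINT, notify_listener_and_exit)

while True:  # stand-in for do_hearbeat_loop()
    time.sleep(1)
```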
awx/main/migrations/0185_djanog_indexes.py (new file, 102 lines)
@@ -0,0 +1,102 @@
# Generated by Django 4.2 on 2023-04-28 19:21

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ('main', '0184_django_upgrade'),
    ]

    operations = [
        migrations.RenameIndex(
            model_name='adhoccommandevent',
            new_name='main_adhocc_ad_hoc__a57777_idx',
            old_fields=('ad_hoc_command', 'job_created', 'counter'),
        ),
        migrations.RenameIndex(
            model_name='adhoccommandevent',
            new_name='main_adhocc_ad_hoc__e72142_idx',
            old_fields=('ad_hoc_command', 'job_created', 'event'),
        ),
        migrations.RenameIndex(
            model_name='adhoccommandevent',
            new_name='main_adhocc_ad_hoc__1e4d24_idx',
            old_fields=('ad_hoc_command', 'job_created', 'uuid'),
        ),
        migrations.RenameIndex(
            model_name='inventoryupdateevent',
            new_name='main_invent_invento_f72b21_idx',
            old_fields=('inventory_update', 'job_created', 'uuid'),
        ),
        migrations.RenameIndex(
            model_name='inventoryupdateevent',
            new_name='main_invent_invento_364dcb_idx',
            old_fields=('inventory_update', 'job_created', 'counter'),
        ),
        migrations.RenameIndex(
            model_name='jobevent',
            new_name='main_jobeve_job_id_51c382_idx',
            old_fields=('job', 'job_created', 'counter'),
        ),
        migrations.RenameIndex(
            model_name='jobevent',
            new_name='main_jobeve_job_id_0ddc6b_idx',
            old_fields=('job', 'job_created', 'event'),
        ),
        migrations.RenameIndex(
            model_name='jobevent',
            new_name='main_jobeve_job_id_40a56d_idx',
            old_fields=('job', 'job_created', 'parent_uuid'),
        ),
        migrations.RenameIndex(
            model_name='jobevent',
            new_name='main_jobeve_job_id_3c4a4a_idx',
            old_fields=('job', 'job_created', 'uuid'),
        ),
        migrations.RenameIndex(
            model_name='projectupdateevent',
            new_name='main_projec_project_c44b7c_idx',
            old_fields=('project_update', 'job_created', 'event'),
        ),
        migrations.RenameIndex(
            model_name='projectupdateevent',
            new_name='main_projec_project_449bbd_idx',
            old_fields=('project_update', 'job_created', 'uuid'),
        ),
        migrations.RenameIndex(
            model_name='projectupdateevent',
            new_name='main_projec_project_69559a_idx',
            old_fields=('project_update', 'job_created', 'counter'),
        ),
        migrations.RenameIndex(
            model_name='role',
            new_name='main_rbac_r_content_979bdd_idx',
            old_fields=('content_type', 'object_id'),
        ),
        migrations.RenameIndex(
            model_name='roleancestorentry',
            new_name='main_rbac_r_ancesto_22b9f0_idx',
            old_fields=('ancestor', 'content_type_id', 'object_id'),
        ),
        migrations.RenameIndex(
            model_name='roleancestorentry',
            new_name='main_rbac_r_ancesto_b44606_idx',
            old_fields=('ancestor', 'content_type_id', 'role_field'),
        ),
        migrations.RenameIndex(
            model_name='roleancestorentry',
            new_name='main_rbac_r_ancesto_c87b87_idx',
            old_fields=('ancestor', 'descendent'),
        ),
        migrations.RenameIndex(
            model_name='systemjobevent',
            new_name='main_system_system__e39825_idx',
            old_fields=('system_job', 'job_created', 'uuid'),
        ),
        migrations.RenameIndex(
            model_name='systemjobevent',
            new_name='main_system_system__73537a_idx',
            old_fields=('system_job', 'job_created', 'counter'),
        ),
    ]
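Every operation in this migration is a rename rather than a drop-and-recreate: Django 4.2 deprecates `Meta.index_together` in favor of `Meta.indexes`, and `RenameIndex(..., old_fields=...)` points the previously unnamed composite indexes at Django's generated names so the database indexes survive untouched. A minimal sketch of the model-side change that pairs with such a migration (hypothetical model, not from this commit):

```python
from django.db import models

class ExampleEvent(models.Model):
    job_created = models.DateTimeField()
    counter = models.PositiveIntegerField()

    class Meta:
        app_label = 'example'  # hypothetical app
        # Before, deprecated as of Django 4.2:
        #     index_together = [('job_created', 'counter')]
        indexes = [models.Index(fields=['job_created', 'counter'])]
```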
@@ -8,7 +8,7 @@ logger = logging.getLogger('awx.main.migrations')
 def migrate_org_admin_to_use(apps, schema_editor):
     logger.info('Initiated migration from Org admin to use role')
     roles_added = 0
-    for org in Organization.objects.prefetch_related('admin_role__members').iterator():
+    for org in Organization.objects.prefetch_related('admin_role__members').iterator(chunk_size=1000):
         igs = list(org.instance_groups.all())
         if not igs:
             continue
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-

 import datetime
+from datetime import timezone
 import logging
 from collections import defaultdict

@@ -10,7 +11,7 @@ from django.db import models, DatabaseError
 from django.db.models.functions import Cast
 from django.utils.dateparse import parse_datetime
 from django.utils.text import Truncator
-from django.utils.timezone import utc, now
+from django.utils.timezone import now
 from django.utils.translation import gettext_lazy as _
 from django.utils.encoding import force_str

@@ -422,7 +423,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
             if not isinstance(kwargs['created'], datetime.datetime):
                 kwargs['created'] = parse_datetime(kwargs['created'])
             if not kwargs['created'].tzinfo:
-                kwargs['created'] = kwargs['created'].replace(tzinfo=utc)
+                kwargs['created'] = kwargs['created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('created', None)

@@ -432,7 +433,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
             if not isinstance(kwargs['job_created'], datetime.datetime):
                 kwargs['job_created'] = parse_datetime(kwargs['job_created'])
             if not kwargs['job_created'].tzinfo:
-                kwargs['job_created'] = kwargs['job_created'].replace(tzinfo=utc)
+                kwargs['job_created'] = kwargs['job_created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('job_created', None)

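`django.utils.timezone.utc` is just an alias of `datetime.timezone.utc` and is deprecated ahead of Django 5.0, so these hunks anchor naive timestamps with the stdlib constant instead. A tiny standalone illustration:

```python
from datetime import datetime, timezone

created = datetime(2018, 1, 1)  # naive, as parsed from serialized event data
if not created.tzinfo:
    created = created.replace(tzinfo=timezone.utc)  # stdlib, no Django import
assert created.tzinfo is timezone.utc
```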
@@ -470,11 +471,11 @@ class JobEvent(BasePlaybookEvent):
     class Meta:
         app_label = 'main'
         ordering = ('pk',)
-        index_together = [
-            ('job', 'job_created', 'event'),
-            ('job', 'job_created', 'uuid'),
-            ('job', 'job_created', 'parent_uuid'),
-            ('job', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['job', 'job_created', 'event']),
+            models.Index(fields=['job', 'job_created', 'uuid']),
+            models.Index(fields=['job', 'job_created', 'parent_uuid']),
+            models.Index(fields=['job', 'job_created', 'counter']),
         ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -632,10 +633,10 @@ class ProjectUpdateEvent(BasePlaybookEvent):
     class Meta:
         app_label = 'main'
         ordering = ('pk',)
-        index_together = [
-            ('project_update', 'job_created', 'event'),
-            ('project_update', 'job_created', 'uuid'),
-            ('project_update', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['project_update', 'job_created', 'event']),
+            models.Index(fields=['project_update', 'job_created', 'uuid']),
+            models.Index(fields=['project_update', 'job_created', 'counter']),
         ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -734,7 +735,7 @@ class BaseCommandEvent(CreatedModifiedModel):
             if not isinstance(kwargs['created'], datetime.datetime):
                 kwargs['created'] = parse_datetime(kwargs['created'])
             if not kwargs['created'].tzinfo:
-                kwargs['created'] = kwargs['created'].replace(tzinfo=utc)
+                kwargs['created'] = kwargs['created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('created', None)

@@ -770,10 +771,10 @@ class AdHocCommandEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('ad_hoc_command', 'job_created', 'event'),
-            ('ad_hoc_command', 'job_created', 'uuid'),
-            ('ad_hoc_command', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['ad_hoc_command', 'job_created', 'event']),
+            models.Index(fields=['ad_hoc_command', 'job_created', 'uuid']),
+            models.Index(fields=['ad_hoc_command', 'job_created', 'counter']),
         ]

     EVENT_TYPES = [
@@ -875,9 +876,9 @@ class InventoryUpdateEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('inventory_update', 'job_created', 'uuid'),
-            ('inventory_update', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['inventory_update', 'job_created', 'uuid']),
+            models.Index(fields=['inventory_update', 'job_created', 'counter']),
         ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -920,9 +921,9 @@ class SystemJobEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('system_job', 'job_created', 'uuid'),
-            ('system_job', 'job_created', 'counter'),
+        indexes = [
+            models.Index(fields=['system_job', 'job_created', 'uuid']),
+            models.Index(fields=['system_job', 'job_created', 'counter']),
         ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -141,7 +141,7 @@ class Role(models.Model):
         app_label = 'main'
         verbose_name_plural = _('roles')
         db_table = 'main_rbac_roles'
-        index_together = [("content_type", "object_id")]
+        indexes = [models.Index(fields=["content_type", "object_id"])]
         ordering = ("content_type", "object_id")

     role_field = models.TextField(null=False)
@@ -447,10 +447,10 @@ class RoleAncestorEntry(models.Model):
         app_label = 'main'
         verbose_name_plural = _('role_ancestors')
         db_table = 'main_rbac_role_ancestors'
-        index_together = [
-            ("ancestor", "content_type_id", "object_id"),  # used by get_roles_on_resource
-            ("ancestor", "content_type_id", "role_field"),  # used by accessible_objects
-            ("ancestor", "descendent"),  # used by rebuild_role_ancestor_list in the NOT EXISTS clauses.
+        indexes = [
+            models.Index(fields=["ancestor", "content_type_id", "object_id"]),  # used by get_roles_on_resource
+            models.Index(fields=["ancestor", "content_type_id", "role_field"]),  # used by accessible_objects
+            models.Index(fields=["ancestor", "descendent"]),  # used by rebuild_role_ancestor_list in the NOT EXISTS clauses.
         ]

     descendent = models.ForeignKey(Role, null=False, on_delete=models.CASCADE, related_name='+')
@@ -1137,11 +1137,9 @@ class UnifiedJob(
             if total > max_supported:
                 raise StdoutMaxBytesExceeded(total, max_supported)

-            # psycopg2's copy_expert writes bytes, but callers of this
+            # psycopg3's copy writes bytes, but callers of this
             # function assume a str-based fd will be returned; decode
             # .write() calls on the fly to maintain this interface
-            _write = fd.write
-            fd.write = lambda s: _write(smart_str(s))
             tbl = self._meta.db_table + 'event'
             created_by_cond = ''
             if self.has_unpartitioned_events:
@@ -1150,7 +1148,9 @@ class UnifiedJob(
                 created_by_cond = f"job_created='{self.created.isoformat()}' AND "

             sql = f"copy (select stdout from {tbl} where {created_by_cond}{self.event_parent_key}={self.id} and stdout != '' order by start_line) to stdout"  # nosql
-            cursor.copy_expert(sql, fd)
+            with cursor.copy(sql) as copy:
+                while data := copy.read():
+                    fd.write(smart_str(bytes(data)))

             if hasattr(fd, 'name'):
                 # If we're dealing with a physical file, use `sed` to clean
@@ -2,8 +2,8 @@ import pytest
 import tempfile
 import os
-import re
-import shutil
+import csv
+from io import StringIO

 from django.utils.timezone import now
 from datetime import timedelta
@@ -20,15 +20,16 @@ from awx.main.models import (
 )


-@pytest.fixture
-def sqlite_copy_expert(request):
-    # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
-    # behavior to test that it writes a file that contains stdout from events
-    path = tempfile.mkdtemp(prefix="copied_tables")
+class MockCopy:
+    headers = None
+    results = None
+    sent_data = False

-    def write_stdout(self, sql, fd):
+    def __init__(self, sql, parent_connection):
         # Would be cool if we instead properly disected the SQL query and verified
         # it that way. But instead, we just take the naive approach here.
+        self.results = None
+        self.headers = None
         sql = sql.strip()
         assert sql.startswith("COPY (")
         assert sql.endswith(") TO STDOUT WITH CSV HEADER")
@@ -51,29 +52,49 @@ def sqlite_copy_expert(request):
             elif not line.endswith(","):
                 sql_new[-1] = sql_new[-1].rstrip(",")
         sql = "\n".join(sql_new)
+        parent_connection.execute(sql)
+        self.results = parent_connection.fetchall()
+        self.headers = [i[0] for i in parent_connection.description]

-        self.execute(sql)
-        results = self.fetchall()
-        headers = [i[0] for i in self.description]
+    def read(self):
+        if not self.sent_data:
+            mem_file = StringIO()
+            csv_handle = csv.writer(
+                mem_file,
+                delimiter=",",
+                quoting=csv.QUOTE_ALL,
+                escapechar="\\",
+                lineterminator="\n",
+            )
+            if self.headers:
+                csv_handle.writerow(self.headers)
+            if self.results:
+                csv_handle.writerows(self.results)
+            self.sent_data = True
+            return memoryview((mem_file.getvalue()).encode())
+        return None

-        csv_handle = csv.writer(
-            fd,
-            delimiter=",",
-            quoting=csv.QUOTE_ALL,
-            escapechar="\\",
-            lineterminator="\n",
-        )
-        csv_handle.writerow(headers)
-        csv_handle.writerows(results)
+    def __enter__(self):
+        return self

-    setattr(SQLiteCursorWrapper, "copy_expert", write_stdout)
-    request.addfinalizer(lambda: shutil.rmtree(path))
-    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, "copy_expert"))
-    return path
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+
+@pytest.fixture
+def sqlite_copy(request, mocker):
+    # copy is postgres-specific, and SQLite doesn't support it; mock its
+    # behavior to test that it writes a file that contains stdout from events
+
+    def write_stdout(self, sql):
+        mock_copy = MockCopy(sql, self)
+        return mock_copy
+
+    mocker.patch.object(SQLiteCursorWrapper, 'copy', write_stdout, create=True)


 @pytest.mark.django_db
-def test_copy_tables_unified_job_query(sqlite_copy_expert, project, inventory, job_template):
+def test_copy_tables_unified_job_query(sqlite_copy, project, inventory, job_template):
     """
     Ensure that various unified job types are in the output of the query.
     """
@@ -127,7 +148,7 @@ def workflow_job(states=["new", "new", "new", "new", "new"]):


 @pytest.mark.django_db
-def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job):
+def test_copy_tables_workflow_job_node_query(sqlite_copy, workflow_job):
     time_start = now() - timedelta(hours=9)

     with tempfile.TemporaryDirectory() as tmpdir:
@@ -214,7 +214,7 @@ class TestControllerNode:
         return AdHocCommand.objects.create(inventory=inventory)

     @pytest.mark.django_db
-    def test_field_controller_node_exists(self, sqlite_copy_expert, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
+    def test_field_controller_node_exists(self, sqlite_copy, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
         system_job = system_job_factory()

         r = get(reverse('api:unified_job_list') + '?id={}'.format(job.id), admin_user, expect=200)
@@ -57,7 +57,7 @@ def _mk_inventory_update(created=None):
         [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
     ],
 )
-def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_text_stdout(sqlite_copy, Parent, Child, relation, view, get, admin):
     job = Parent()
     job.save()
     for i in range(3):
@@ -79,7 +79,7 @@ def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, adm
     ],
 )
 @pytest.mark.parametrize('download', [True, False])
-def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view, download, get, admin):
+def test_ansi_stdout_filtering(sqlite_copy, Parent, Child, relation, view, download, get, admin):
     job = Parent()
     job.save()
     for i in range(3):
@@ -111,7 +111,7 @@ def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view
         [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
     ],
 )
-def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_colorized_html_stdout(sqlite_copy, Parent, Child, relation, view, get, admin):
     job = Parent()
     job.save()
     for i in range(3):
@@ -134,7 +134,7 @@ def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view
         [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
     ],
 )
-def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_stdout_line_range(sqlite_copy, Parent, Child, relation, view, get, admin):
     job = Parent()
     job.save()
     for i in range(20):
@@ -146,7 +146,7 @@ def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, ge


 @pytest.mark.django_db
-def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):
+def test_text_stdout_from_system_job_events(sqlite_copy, get, admin):
     created = tz_now()
     job = SystemJob(created=created)
     job.save()
@@ -158,7 +158,7 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):


 @pytest.mark.django_db
-def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
+def test_text_stdout_with_max_stdout(sqlite_copy, get, admin):
     created = tz_now()
     job = SystemJob(created=created)
     job.save()
@@ -185,7 +185,7 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
 )
 @pytest.mark.parametrize('fmt', ['txt', 'ansi'])
 @mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None}))  # really slow for large strings
-def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
+def test_max_bytes_display(sqlite_copy, Parent, Child, relation, view, fmt, get, admin):
     created = tz_now()
     job = Parent(created=created)
     job.save()
@@ -255,7 +255,7 @@ def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
     ],
 )
 @pytest.mark.parametrize('fmt', ['txt', 'ansi', 'txt_download', 'ansi_download'])
-def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin, fmt):
+def test_text_with_unicode_stdout(sqlite_copy, Parent, Child, relation, view, get, admin, fmt):
     job = Parent()
     job.save()
     for i in range(3):
@@ -267,7 +267,7 @@ def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, v


 @pytest.mark.django_db
-def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
+def test_unicode_with_base64_ansi(sqlite_copy, get, admin):
     created = tz_now()
     job = Job(created=created)
     job.save()
@@ -1,8 +1,6 @@
 # Python
 import pytest
 from unittest import mock
-import tempfile
-import shutil
 import urllib.parse
 from unittest.mock import PropertyMock

@@ -789,25 +787,43 @@ def oauth_application(admin):
     return Application.objects.create(name='test app', user=admin, client_type='confidential', authorization_grant_type='password')


-@pytest.fixture
-def sqlite_copy_expert(request):
-    # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
-    # behavior to test that it writes a file that contains stdout from events
-    path = tempfile.mkdtemp(prefix='job-event-stdout')
+class MockCopy:
+    events = []
+    index = -1

-    def write_stdout(self, sql, fd):
-        # simulate postgres copy_expert support with ORM code
+    def __init__(self, sql):
+        self.events = []
         parts = sql.split(' ')
         tablename = parts[parts.index('from') + 1]
         for cls in (JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent):
             if cls._meta.db_table == tablename:
                 for event in cls.objects.order_by('start_line').all():
-                    fd.write(event.stdout)
+                    self.events.append(event.stdout)

-    setattr(SQLiteCursorWrapper, 'copy_expert', write_stdout)
-    request.addfinalizer(lambda: shutil.rmtree(path))
-    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, 'copy_expert'))
-    return path
+    def read(self):
+        self.index = self.index + 1
+        if self.index < len(self.events):
+            return memoryview(self.events[self.index].encode())
+
+        return None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+
+@pytest.fixture
+def sqlite_copy(request, mocker):
+    # copy is postgres-specific, and SQLite doesn't support it; mock its
+    # behavior to test that it writes a file that contains stdout from events
+
+    def write_stdout(self, sql):
+        mock_copy = MockCopy(sql)
+        return mock_copy
+
+    mocker.patch.object(SQLiteCursorWrapper, 'copy', write_stdout, create=True)


 @pytest.fixture
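Both fixtures emulate the psycopg 3 `Copy` protocol that the production code consumes: a context manager whose `read()` returns byte buffers until the stream is exhausted. A self-contained toy version of that protocol and the read loop it supports (all names here are illustrative):

```python
from io import StringIO

class FakeCopy:
    """Toy stand-in for the psycopg 3 Copy object the fixtures mock."""

    def __init__(self, chunks):
        self._chunks = iter(chunks)

    def read(self):
        # Hand back the next buffer, or a falsy value once exhausted,
        # matching the MockCopy implementations above.
        try:
            return memoryview(next(self._chunks).encode())
        except StopIteration:
            return None

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

buf = StringIO()
with FakeCopy(["line one\n", "line two\n"]) as copy:
    while data := copy.read():
        buf.write(bytes(data).decode())
assert buf.getvalue() == "line one\nline two\n"
```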
@@ -98,7 +98,7 @@ class TestJobNotificationMixin(object):

     @pytest.mark.django_db
     @pytest.mark.parametrize('JobClass', [AdHocCommand, InventoryUpdate, Job, ProjectUpdate, SystemJob, WorkflowJob])
-    def test_context(self, JobClass, sqlite_copy_expert, project, inventory_source):
+    def test_context(self, JobClass, sqlite_copy, project, inventory_source):
         """The Jinja context defines all of the fields that can be used by a template. Ensure that the context generated
         for each job type has the expected structure."""
         kwargs = {}
@@ -1,5 +1,5 @@
 from datetime import datetime
-from django.utils.timezone import utc
+from datetime import timezone
 import pytest

 from awx.main.models import JobEvent, ProjectUpdateEvent, AdHocCommandEvent, InventoryUpdateEvent, SystemJobEvent
@@ -18,7 +18,7 @@ from awx.main.models import JobEvent, ProjectUpdateEvent, AdHocCommandEvent, Inv
 @pytest.mark.parametrize('created', [datetime(2018, 1, 1).isoformat(), datetime(2018, 1, 1)])
 def test_event_parse_created(job_identifier, cls, created):
     event = cls.create_from_data(**{job_identifier: 123, 'created': created})
-    assert event.created == datetime(2018, 1, 1).replace(tzinfo=utc)
+    assert event.created == datetime(2018, 1, 1).replace(tzinfo=timezone.utc)


 @pytest.mark.parametrize(
@@ -195,9 +195,9 @@ function getRouteConfig(userProfile = {}) {
   deleteRoute('host_metrics');
   deleteRouteGroup('settings');
   deleteRoute('management_jobs');
+  if (userProfile?.isOrgAdmin) return routeConfig;
   deleteRoute('topology_view');
   deleteRoute('instances');
-  if (userProfile?.isOrgAdmin) return routeConfig;
   if (!userProfile?.isNotificationAdmin) deleteRoute('notification_templates');

   return routeConfig;
@@ -101,10 +101,8 @@ describe('getRouteConfig', () => {
       '/credential_types',
       '/notification_templates',
       '/instance_groups',
-      '/instances',
       '/applications',
       '/execution_environments',
-      '/topology_view',
     ]);
   });
@@ -237,10 +235,8 @@ describe('getRouteConfig', () => {
       '/credential_types',
       '/notification_templates',
       '/instance_groups',
-      '/instances',
       '/applications',
       '/execution_environments',
-      '/topology_view',
     ]);
   });
@@ -268,10 +264,8 @@ describe('getRouteConfig', () => {
       '/credential_types',
       '/notification_templates',
       '/instance_groups',
-      '/instances',
       '/applications',
       '/execution_environments',
-      '/topology_view',
     ]);
   });
 });
@@ -12,8 +12,13 @@ To encrypt secret fields, AWX uses AES in CBC mode with a 256-bit key for encryp
 If necessary, credentials and encrypted settings can be extracted using the AWX shell:

 ```python
-# awx-manage shell_plus
+$ awx-manage shell_plus
 >>> from awx.main.utils import decrypt_field
->>> cred = Credential.objects.get(name="my private key")
->>> print(decrypt_field(cred, "ssh_key_data"))
+>>> print(decrypt_field(Credential.objects.get(name="my private key"), "ssh_key_data"))  # Example for a credential
+>>> print(decrypt_field(Setting.objects.get(key='SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET'), 'value'))  # Example for a setting
 ```
+
+If you are running a kubernetes based deployment, you can execute awx-manage like this:
+```bash
+$ kubectl exec --stdin --tty [instance name]-task-[...] -c [instance name]-task -- awx-manage shell_plus
+```