Merge branch 'feature-django-upgrade' of github.com:ansible/awx into feature-django-upgrade

John Westcott IV committed 2023-05-02 11:45:51 -04:00
20 changed files with 276 additions and 164 deletions

View File

@@ -33,7 +33,7 @@ from rest_framework.negotiation import DefaultContentNegotiation
 # AWX
 from awx.api.filters import FieldLookupBackend
 from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
-from awx.main.access import access_registry
+from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
 from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
@@ -362,12 +362,7 @@ class GenericAPIView(generics.GenericAPIView, APIView):
             return self.queryset._clone()
         elif self.model is not None:
             qs = self.model._default_manager
-            if self.model in access_registry:
-                access_class = access_registry[self.model]
-                if access_class.select_related:
-                    qs = qs.select_related(*access_class.select_related)
-                if access_class.prefetch_related:
-                    qs = qs.prefetch_related(*access_class.prefetch_related)
+            qs = optimize_queryset(qs)
             return qs
         else:
             return super(GenericAPIView, self).get_queryset()
@@ -529,11 +524,7 @@ class SubListAPIView(ParentMixin, ListAPIView):
         self.check_parent_access(parent)
         sublist_qs = self.get_sublist_queryset(parent)
         if not self.filter_read_permission:
-            access_class = access_registry[self.model]
-            if access_class.prefetch_related:
-                return sublist_qs.prefetch_related(*access_class.prefetch_related)
-            if access_class.select_related:
-                return sublist_qs.select_related(*access_class.select_related)
+            return optimize_queryset(sublist_qs)
         qs = self.request.user.get_queryset(self.model).distinct()
         return qs & sublist_qs

View File

@@ -62,7 +62,7 @@ from wsgiref.util import FileWrapper
 # AWX
 from awx.main.tasks.system import send_notifications, update_inventory_computed_fields
-from awx.main.access import get_user_queryset, HostAccess
+from awx.main.access import get_user_queryset
 from awx.api.generics import (
     APIView,
     BaseUsersList,
@@ -794,13 +794,7 @@ class ExecutionEnvironmentActivityStreamList(SubListAPIView):
     parent_model = models.ExecutionEnvironment
     relationship = 'activitystream_set'
     search_fields = ('changes',)
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        qs = self.request.user.get_queryset(self.model)
-        return qs.filter(execution_environment=parent)
+    filter_read_permission = False


 class ProjectList(ListCreateAPIView):
@@ -1634,13 +1628,7 @@ class InventoryHostsList(HostRelatedSearchMixin, SubListCreateAttachDetachAPIView):
     parent_model = models.Inventory
     relationship = 'hosts'
     parent_key = 'inventory'
-
-    def get_queryset(self):
-        inventory = self.get_parent_object()
-        qs = getattrd(inventory, self.relationship).all()
-        # Apply queryset optimizations
-        qs = qs.select_related(*HostAccess.select_related).prefetch_related(*HostAccess.prefetch_related)
-        return qs
+    filter_read_permission = False


 class HostGroupsList(SubListCreateAttachDetachAPIView):
@@ -2862,16 +2850,7 @@ class WorkflowJobTemplateNodeChildrenBaseList(EnforceParentRelationshipMixin, SubListAPIView):
     relationship = ''
     enforce_parent_relationship = 'workflow_job_template'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    '''
-    Limit the set of WorkflowJobTemplateNodes to the related nodes of specified by
-    'relationship'
-    '''
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).all()
+    filter_read_permission = False

     def is_valid_relation(self, parent, sub, created=False):
         if created:
@@ -2946,14 +2925,7 @@ class WorkflowJobNodeChildrenBaseList(SubListAPIView):
     parent_model = models.WorkflowJobNode
     relationship = ''
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    #
-    # Limit the set of WorkflowJobNodes to the related nodes of specified by self.relationship
-    #
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).all()
+    filter_read_permission = False


 class WorkflowJobNodeSuccessNodesList(WorkflowJobNodeChildrenBaseList):
@@ -3132,11 +3104,8 @@ class WorkflowJobTemplateWorkflowNodesList(SubListCreateAPIView):
     relationship = 'workflow_job_template_nodes'
     parent_key = 'workflow_job_template'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).order_by('id')
+    ordering = ('id',)  # assure ordering by id for consistency
+    filter_read_permission = False


 class WorkflowJobTemplateJobsList(SubListAPIView):
@@ -3228,11 +3197,8 @@ class WorkflowJobWorkflowNodesList(SubListAPIView):
     relationship = 'workflow_job_nodes'
     parent_key = 'workflow_job'
     search_fields = ('unified_job_template__name', 'unified_job_template__description')
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).order_by('id')
+    ordering = ('id',)  # assure ordering by id for consistency
+    filter_read_permission = False


 class WorkflowJobCancel(GenericCancelView):
@@ -3546,11 +3512,7 @@ class BaseJobHostSummariesList(SubListAPIView):
     relationship = 'job_host_summaries'
     name = _('Job Host Summaries List')
     search_fields = ('host_name',)
-
-    def get_queryset(self):
-        parent = self.get_parent_object()
-        self.check_parent_access(parent)
-        return getattr(parent, self.relationship).select_related('job', 'job__job_template', 'host')
+    filter_read_permission = False


 class HostJobHostSummariesList(BaseJobHostSummariesList):
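
Every class in this file replaces a bespoke `get_queryset()` override with the declarative `filter_read_permission = False` flag. Combined with the `SubListAPIView.get_queryset()` change in the first file above, the effective behavior is roughly this condensed sketch (abridged from the diff, not a complete AWX view):

```python
# Condensed sketch of SubListAPIView.get_queryset() after this refactor.
def get_queryset(self):
    parent = self.get_parent_object()
    self.check_parent_access(parent)
    sublist_qs = self.get_sublist_queryset(parent)
    if not self.filter_read_permission:
        # Parent access was already checked; just apply the model's standard
        # select_related/prefetch_related hints and skip per-user filtering.
        return optimize_queryset(sublist_qs)
    # Default: intersect with the objects the requesting user may read.
    qs = self.request.user.get_queryset(self.model).distinct()
    return qs & sublist_qs
```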

View File

@@ -61,12 +61,6 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
     model = Organization
     serializer_class = OrganizationSerializer
-
-    def get_queryset(self):
-        qs = Organization.accessible_objects(self.request.user, 'read_role')
-        qs = qs.select_related('admin_role', 'auditor_role', 'member_role', 'read_role')
-        qs = qs.prefetch_related('created_by', 'modified_by')
-        return qs


 class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
     model = Organization

View File

@@ -2952,3 +2952,19 @@ class WorkflowApprovalTemplateAccess(BaseAccess):
 for cls in BaseAccess.__subclasses__():
     access_registry[cls.model] = cls
 access_registry[UnpartitionedJobEvent] = UnpartitionedJobEventAccess
+
+
+def optimize_queryset(queryset):
+    """
+    A utility method in case you already have a queryset and just want to
+    apply the standard optimizations for that model.
+    In other words, use this if you do not want to start from filtered_queryset for some reason.
+    """
+    if not queryset.model or queryset.model not in access_registry:
+        return queryset
+    access_class = access_registry[queryset.model]
+    if access_class.select_related:
+        queryset = queryset.select_related(*access_class.select_related)
+    if access_class.prefetch_related:
+        queryset = queryset.prefetch_related(*access_class.prefetch_related)
+    return queryset
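
As a usage note, the helper can be applied to any queryset whose model is registered; a hedged sketch (`Host`/`HostAccess` are one registered pairing, per the `HostAccess` import removed earlier in this commit):

```python
# Hedged usage sketch: apply the registered access-class query hints to an
# arbitrary queryset; the filter shown is illustrative.
from awx.main.access import optimize_queryset
from awx.main.models import Host

qs = optimize_queryset(Host.objects.filter(inventory_id=1))
# Roughly equivalent, per the registry above, to chaining
#   .select_related(*HostAccess.select_related)
#   .prefetch_related(*HostAccess.prefetch_related)
```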

View File

@@ -399,7 +399,10 @@ def _copy_table(table, query, path):
     file_path = os.path.join(path, table + '_table.csv')
     file = FileSplitter(filespec=file_path)
     with connection.cursor() as cursor:
-        cursor.copy_expert(query, file)
+        with cursor.copy(query) as copy:
+            while data := copy.read():
+                byte_data = bytes(data)
+                file.write(byte_data.decode())
     return file.file_list()
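
For readers unfamiliar with psycopg 3: `copy_expert()` is gone, and server-side COPY is driven through a `cursor.copy()` context manager whose `read()` returns successive bytes-like chunks (falsy when exhausted). A minimal standalone sketch, with a hypothetical connection string and query:

```python
# Minimal psycopg 3 COPY-read sketch; the DSN and query are hypothetical,
# not taken from AWX.
import psycopg

with psycopg.connect("dbname=example") as conn:
    with conn.cursor() as cursor:
        with cursor.copy("COPY (SELECT 1 AS x) TO STDOUT WITH CSV HEADER") as copy:
            while data := copy.read():  # bytes-like chunk; empty at end of data
                print(bytes(data).decode(), end="")
```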

View File

@@ -2,6 +2,8 @@ import json
 import logging
 import os
 import time
+import signal
+import sys

 from django.core.management.base import BaseCommand
 from django.conf import settings
@@ -50,6 +52,11 @@ class Command(BaseCommand):
         }
         return json.dumps(payload)

+    def notify_listener_and_exit(self, *args):
+        with pg_bus_conn(new_connection=False) as conn:
+            conn.notify('web_heartbeet', self.construct_payload(action='offline'))
+        sys.exit(0)
+
     def do_hearbeat_loop(self):
         with pg_bus_conn(new_connection=True) as conn:
             while True:
@@ -57,10 +64,10 @@ class Command(BaseCommand):
                 conn.notify('web_heartbeet', self.construct_payload())
                 time.sleep(settings.BROADCAST_WEBSOCKET_BEACON_FROM_WEB_RATE_SECONDS)

-    # TODO: Send a message with action=offline if we notice a SIGTERM or SIGINT
-    # (wsrelay can use this to remove the node quicker)
     def handle(self, *arg, **options):
         self.print_banner()
+        signal.signal(signal.SIGTERM, self.notify_listener_and_exit)
+        signal.signal(signal.SIGINT, self.notify_listener_and_exit)

         # Note: We don't really try any reconnect logic to pg_notify here,
         # just let supervisor restart if we fail.
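
This resolves the old TODO: the handler pattern itself is plain Python signal handling — register a callable for SIGTERM/SIGINT, send a final `offline` notification so wsrelay can drop the node quickly, then exit. A generic sketch with a stand-in `notify()` (`pg_bus_conn` is AWX-internal):

```python
# Generic sketch of the shutdown-notification pattern; notify() stands in
# for conn.notify() on the shared pg_notify connection.
import signal
import sys

def notify(channel, payload):
    print(f"NOTIFY {channel}, '{payload}'")

def notify_listener_and_exit(signum, frame):
    notify('web_heartbeet', '{"action": "offline"}')
    sys.exit(0)

signal.signal(signal.SIGTERM, notify_listener_and_exit)
signal.signal(signal.SIGINT, notify_listener_and_exit)
```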

View File

@@ -0,0 +1,102 @@
+# Generated by Django 4.2 on 2023-04-28 19:21
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ('main', '0184_django_upgrade'),
+    ]
+
+    operations = [
+        migrations.RenameIndex(
+            model_name='adhoccommandevent',
+            new_name='main_adhocc_ad_hoc__a57777_idx',
+            old_fields=('ad_hoc_command', 'job_created', 'counter'),
+        ),
+        migrations.RenameIndex(
+            model_name='adhoccommandevent',
+            new_name='main_adhocc_ad_hoc__e72142_idx',
+            old_fields=('ad_hoc_command', 'job_created', 'event'),
+        ),
+        migrations.RenameIndex(
+            model_name='adhoccommandevent',
+            new_name='main_adhocc_ad_hoc__1e4d24_idx',
+            old_fields=('ad_hoc_command', 'job_created', 'uuid'),
+        ),
+        migrations.RenameIndex(
+            model_name='inventoryupdateevent',
+            new_name='main_invent_invento_f72b21_idx',
+            old_fields=('inventory_update', 'job_created', 'uuid'),
+        ),
+        migrations.RenameIndex(
+            model_name='inventoryupdateevent',
+            new_name='main_invent_invento_364dcb_idx',
+            old_fields=('inventory_update', 'job_created', 'counter'),
+        ),
+        migrations.RenameIndex(
+            model_name='jobevent',
+            new_name='main_jobeve_job_id_51c382_idx',
+            old_fields=('job', 'job_created', 'counter'),
+        ),
+        migrations.RenameIndex(
+            model_name='jobevent',
+            new_name='main_jobeve_job_id_0ddc6b_idx',
+            old_fields=('job', 'job_created', 'event'),
+        ),
+        migrations.RenameIndex(
+            model_name='jobevent',
+            new_name='main_jobeve_job_id_40a56d_idx',
+            old_fields=('job', 'job_created', 'parent_uuid'),
+        ),
+        migrations.RenameIndex(
+            model_name='jobevent',
+            new_name='main_jobeve_job_id_3c4a4a_idx',
+            old_fields=('job', 'job_created', 'uuid'),
+        ),
+        migrations.RenameIndex(
+            model_name='projectupdateevent',
+            new_name='main_projec_project_c44b7c_idx',
+            old_fields=('project_update', 'job_created', 'event'),
+        ),
+        migrations.RenameIndex(
+            model_name='projectupdateevent',
+            new_name='main_projec_project_449bbd_idx',
+            old_fields=('project_update', 'job_created', 'uuid'),
+        ),
+        migrations.RenameIndex(
+            model_name='projectupdateevent',
+            new_name='main_projec_project_69559a_idx',
+            old_fields=('project_update', 'job_created', 'counter'),
+        ),
+        migrations.RenameIndex(
+            model_name='role',
+            new_name='main_rbac_r_content_979bdd_idx',
+            old_fields=('content_type', 'object_id'),
+        ),
+        migrations.RenameIndex(
+            model_name='roleancestorentry',
+            new_name='main_rbac_r_ancesto_22b9f0_idx',
+            old_fields=('ancestor', 'content_type_id', 'object_id'),
+        ),
+        migrations.RenameIndex(
+            model_name='roleancestorentry',
+            new_name='main_rbac_r_ancesto_b44606_idx',
+            old_fields=('ancestor', 'content_type_id', 'role_field'),
+        ),
+        migrations.RenameIndex(
+            model_name='roleancestorentry',
+            new_name='main_rbac_r_ancesto_c87b87_idx',
+            old_fields=('ancestor', 'descendent'),
+        ),
+        migrations.RenameIndex(
+            model_name='systemjobevent',
+            new_name='main_system_system__e39825_idx',
+            old_fields=('system_job', 'job_created', 'uuid'),
+        ),
+        migrations.RenameIndex(
+            model_name='systemjobevent',
+            new_name='main_system_system__73537a_idx',
+            old_fields=('system_job', 'job_created', 'counter'),
+        ),
+    ]
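
A note on what these operations do: `old_fields` matches the unnamed index that `Meta.index_together` created for those columns, and `new_name` is the name Django 4.2 auto-generates for the equivalent `models.Index`, so on PostgreSQL each rename should be metadata-only (no index rebuild). This is the migration `makemigrations` produces when models move from `index_together` (deprecated as of Django 4.2) to `Meta.indexes`, as the model files below do. The first operation, for instance, pairs with roughly this model-level change (hedged sketch):

```python
# Hedged sketch of the model change behind the first RenameIndex above.
from django.db import models

class Meta:
    # Before (index_together is deprecated as of Django 4.2):
    #     index_together = [('ad_hoc_command', 'job_created', 'counter')]
    # After:
    indexes = [models.Index(fields=['ad_hoc_command', 'job_created', 'counter'])]
```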

View File

@@ -8,7 +8,7 @@ logger = logging.getLogger('awx.main.migrations')
 def migrate_org_admin_to_use(apps, schema_editor):
     logger.info('Initiated migration from Org admin to use role')
     roles_added = 0
-    for org in Organization.objects.prefetch_related('admin_role__members').iterator():
+    for org in Organization.objects.prefetch_related('admin_role__members').iterator(chunk_size=1000):
         igs = list(org.instance_groups.all())
         if not igs:
             continue
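
Context for this one-argument change: since Django 4.1, `QuerySet.iterator()` only honors `prefetch_related()` when `chunk_size` is given, and iterating a prefetching queryset without it is deprecated. A hedged sketch of the two spellings:

```python
# Hedged sketch of the Django 4.1+ semantics this change relies on.
qs = Organization.objects.prefetch_related('admin_role__members')

for org in qs.iterator():                 # deprecated: prefetching needs a chunk size
    ...

for org in qs.iterator(chunk_size=1000):  # prefetch runs once per 1000-row chunk
    ...
```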

View File

@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 import datetime
+from datetime import timezone
 import logging

 from collections import defaultdict
@@ -10,7 +11,7 @@ from django.db import models, DatabaseError
 from django.db.models.functions import Cast
 from django.utils.dateparse import parse_datetime
 from django.utils.text import Truncator
-from django.utils.timezone import utc, now
+from django.utils.timezone import now
 from django.utils.translation import gettext_lazy as _
 from django.utils.encoding import force_str
@@ -422,7 +423,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
             if not isinstance(kwargs['created'], datetime.datetime):
                 kwargs['created'] = parse_datetime(kwargs['created'])
             if not kwargs['created'].tzinfo:
-                kwargs['created'] = kwargs['created'].replace(tzinfo=utc)
+                kwargs['created'] = kwargs['created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('created', None)
@@ -432,7 +433,7 @@ class BasePlaybookEvent(CreatedModifiedModel):
             if not isinstance(kwargs['job_created'], datetime.datetime):
                 kwargs['job_created'] = parse_datetime(kwargs['job_created'])
             if not kwargs['job_created'].tzinfo:
-                kwargs['job_created'] = kwargs['job_created'].replace(tzinfo=utc)
+                kwargs['job_created'] = kwargs['job_created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('job_created', None)
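
These `utc` replacements track a Django deprecation: `django.utils.timezone.utc` was only ever an alias of the stdlib `datetime.timezone.utc`, was deprecated in Django 4.1, and is removed in Django 5.0, so the stdlib spelling is the forward-compatible one.

```python
# The two spellings name the same object; only the import changes.
from datetime import datetime, timezone

aware = datetime(2018, 1, 1).replace(tzinfo=timezone.utc)
assert aware.tzinfo is timezone.utc
```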
@@ -470,11 +471,11 @@ class JobEvent(BasePlaybookEvent):
     class Meta:
         app_label = 'main'
         ordering = ('pk',)
-        index_together = [
-            ('job', 'job_created', 'event'),
-            ('job', 'job_created', 'uuid'),
-            ('job', 'job_created', 'parent_uuid'),
-            ('job', 'job_created', 'counter'),
-        ]
+        indexes = [
+            models.Index(fields=['job', 'job_created', 'event']),
+            models.Index(fields=['job', 'job_created', 'uuid']),
+            models.Index(fields=['job', 'job_created', 'parent_uuid']),
+            models.Index(fields=['job', 'job_created', 'counter']),
+        ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -632,10 +633,10 @@ class ProjectUpdateEvent(BasePlaybookEvent):
     class Meta:
         app_label = 'main'
         ordering = ('pk',)
-        index_together = [
-            ('project_update', 'job_created', 'event'),
-            ('project_update', 'job_created', 'uuid'),
-            ('project_update', 'job_created', 'counter'),
-        ]
+        indexes = [
+            models.Index(fields=['project_update', 'job_created', 'event']),
+            models.Index(fields=['project_update', 'job_created', 'uuid']),
+            models.Index(fields=['project_update', 'job_created', 'counter']),
+        ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -734,7 +735,7 @@ class BaseCommandEvent(CreatedModifiedModel):
             if not isinstance(kwargs['created'], datetime.datetime):
                 kwargs['created'] = parse_datetime(kwargs['created'])
             if not kwargs['created'].tzinfo:
-                kwargs['created'] = kwargs['created'].replace(tzinfo=utc)
+                kwargs['created'] = kwargs['created'].replace(tzinfo=timezone.utc)
         except (KeyError, ValueError):
             kwargs.pop('created', None)
@@ -770,10 +771,10 @@ class AdHocCommandEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('ad_hoc_command', 'job_created', 'event'),
-            ('ad_hoc_command', 'job_created', 'uuid'),
-            ('ad_hoc_command', 'job_created', 'counter'),
-        ]
+        indexes = [
+            models.Index(fields=['ad_hoc_command', 'job_created', 'event']),
+            models.Index(fields=['ad_hoc_command', 'job_created', 'uuid']),
+            models.Index(fields=['ad_hoc_command', 'job_created', 'counter']),
+        ]

     EVENT_TYPES = [
@@ -875,9 +876,9 @@ class InventoryUpdateEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('inventory_update', 'job_created', 'uuid'),
-            ('inventory_update', 'job_created', 'counter'),
-        ]
+        indexes = [
+            models.Index(fields=['inventory_update', 'job_created', 'uuid']),
+            models.Index(fields=['inventory_update', 'job_created', 'counter']),
+        ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
@@ -920,9 +921,9 @@ class SystemJobEvent(BaseCommandEvent):
     class Meta:
         app_label = 'main'
         ordering = ('-pk',)
-        index_together = [
-            ('system_job', 'job_created', 'uuid'),
-            ('system_job', 'job_created', 'counter'),
-        ]
+        indexes = [
+            models.Index(fields=['system_job', 'job_created', 'uuid']),
+            models.Index(fields=['system_job', 'job_created', 'counter']),
+        ]

     id = models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')
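
One hedged design note: `models.Index` also accepts an explicit `name`, which avoids the hash-suffixed auto-generated names (and the `RenameIndex` bookkeeping they require on later changes). Keeping the auto-generated names, as this commit does, presumably makes each rename a pure no-op against the existing schema. For illustration only:

```python
# Hypothetical alternative, not what this commit does: explicit index names.
indexes = [
    models.Index(fields=['job', 'job_created', 'counter'], name='jobevent_job_cr_counter'),
]
```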

View File

@@ -141,7 +141,7 @@ class Role(models.Model):
         app_label = 'main'
         verbose_name_plural = _('roles')
         db_table = 'main_rbac_roles'
-        index_together = [("content_type", "object_id")]
+        indexes = [models.Index(fields=["content_type", "object_id"])]
         ordering = ("content_type", "object_id")

     role_field = models.TextField(null=False)
@@ -447,10 +447,10 @@ class RoleAncestorEntry(models.Model):
         app_label = 'main'
         verbose_name_plural = _('role_ancestors')
         db_table = 'main_rbac_role_ancestors'
-        index_together = [
-            ("ancestor", "content_type_id", "object_id"),  # used by get_roles_on_resource
-            ("ancestor", "content_type_id", "role_field"),  # used by accessible_objects
-            ("ancestor", "descendent"),  # used by rebuild_role_ancestor_list in the NOT EXISTS clauses.
-        ]
+        indexes = [
+            models.Index(fields=["ancestor", "content_type_id", "object_id"]),  # used by get_roles_on_resource
+            models.Index(fields=["ancestor", "content_type_id", "role_field"]),  # used by accessible_objects
+            models.Index(fields=["ancestor", "descendent"]),  # used by rebuild_role_ancestor_list in the NOT EXISTS clauses.
+        ]

     descendent = models.ForeignKey(Role, null=False, on_delete=models.CASCADE, related_name='+')

View File

@@ -1137,11 +1137,9 @@ class UnifiedJob(
         if total > max_supported:
             raise StdoutMaxBytesExceeded(total, max_supported)

-        # psycopg2's copy_expert writes bytes, but callers of this
+        # psycopg3's copy writes bytes, but callers of this
         # function assume a str-based fd will be returned; decode
         # .write() calls on the fly to maintain this interface
-        _write = fd.write
-        fd.write = lambda s: _write(smart_str(s))
         tbl = self._meta.db_table + 'event'
         created_by_cond = ''
         if self.has_unpartitioned_events:
@@ -1150,7 +1148,9 @@ class UnifiedJob(
             created_by_cond = f"job_created='{self.created.isoformat()}' AND "

         sql = f"copy (select stdout from {tbl} where {created_by_cond}{self.event_parent_key}={self.id} and stdout != '' order by start_line) to stdout"  # nosql
-        cursor.copy_expert(sql, fd)
+        with cursor.copy(sql) as copy:
+            while data := copy.read():
+                fd.write(smart_str(bytes(data)))

         if hasattr(fd, 'name'):
             # If we're dealing with a physical file, use `sed` to clean
View File

@@ -2,8 +2,8 @@ import pytest
 import tempfile
 import os
 import re
-import shutil
 import csv
+from io import StringIO

 from django.utils.timezone import now
 from datetime import timedelta
@@ -20,15 +20,16 @@ from awx.main.models import (
 )


-@pytest.fixture
-def sqlite_copy_expert(request):
-    # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
-    # behavior to test that it writes a file that contains stdout from events
-    path = tempfile.mkdtemp(prefix="copied_tables")
-
-    def write_stdout(self, sql, fd):
+class MockCopy:
+    headers = None
+    results = None
+    sent_data = False
+
+    def __init__(self, sql, parent_connection):
         # Would be cool if we instead properly disected the SQL query and verified
         # it that way. But instead, we just take the naive approach here.
+        self.results = None
+        self.headers = None
         sql = sql.strip()
         assert sql.startswith("COPY (")
         assert sql.endswith(") TO STDOUT WITH CSV HEADER")
@@ -51,29 +52,49 @@
         elif not line.endswith(","):
             sql_new[-1] = sql_new[-1].rstrip(",")
         sql = "\n".join(sql_new)
+        parent_connection.execute(sql)
+        self.results = parent_connection.fetchall()
+        self.headers = [i[0] for i in parent_connection.description]

-        self.execute(sql)
-        results = self.fetchall()
-        headers = [i[0] for i in self.description]
+    def read(self):
+        if not self.sent_data:
+            mem_file = StringIO()
+            csv_handle = csv.writer(
+                mem_file,
+                delimiter=",",
+                quoting=csv.QUOTE_ALL,
+                escapechar="\\",
+                lineterminator="\n",
+            )
+            if self.headers:
+                csv_handle.writerow(self.headers)
+            if self.results:
+                csv_handle.writerows(self.results)
+            self.sent_data = True
+            return memoryview((mem_file.getvalue()).encode())
+        return None

-        csv_handle = csv.writer(
-            fd,
-            delimiter=",",
-            quoting=csv.QUOTE_ALL,
-            escapechar="\\",
-            lineterminator="\n",
-        )
-        csv_handle.writerow(headers)
-        csv_handle.writerows(results)
+    def __enter__(self):
+        return self

-    setattr(SQLiteCursorWrapper, "copy_expert", write_stdout)
-    request.addfinalizer(lambda: shutil.rmtree(path))
-    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, "copy_expert"))
-    return path
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+
+@pytest.fixture
+def sqlite_copy(request, mocker):
+    # copy is postgres-specific, and SQLite doesn't support it; mock its
+    # behavior to test that it writes a file that contains stdout from events
+    def write_stdout(self, sql):
+        mock_copy = MockCopy(sql, self)
+        return mock_copy
+
+    mocker.patch.object(SQLiteCursorWrapper, 'copy', write_stdout, create=True)


 @pytest.mark.django_db
-def test_copy_tables_unified_job_query(sqlite_copy_expert, project, inventory, job_template):
+def test_copy_tables_unified_job_query(sqlite_copy, project, inventory, job_template):
     """
     Ensure that various unified job types are in the output of the query.
     """
@@ -127,7 +148,7 @@ def workflow_job(states=["new", "new", "new", "new", "new"]):

 @pytest.mark.django_db
-def test_copy_tables_workflow_job_node_query(sqlite_copy_expert, workflow_job):
+def test_copy_tables_workflow_job_node_query(sqlite_copy, workflow_job):
     time_start = now() - timedelta(hours=9)

     with tempfile.TemporaryDirectory() as tmpdir:
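
The switch from `setattr`/`addfinalizer` to `mocker.patch.object(..., create=True)` is what lets the fixture attach a `copy` method that `SQLiteCursorWrapper` never defines, with automatic teardown. A self-contained illustration (`Target` and `fake_copy` are hypothetical, not part of AWX; pytest-mock's `mocker` wraps this same `unittest.mock` API):

```python
# Standalone illustration of patch.object(..., create=True).
from unittest import mock

class Target:
    pass

def fake_copy(self, sql):
    return f"copy of {sql!r}"

with mock.patch.object(Target, 'copy', fake_copy, create=True):
    assert Target().copy('select 1') == "copy of 'select 1'"  # exists while patched
assert not hasattr(Target, 'copy')  # and is removed again afterwards
```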

View File

@@ -214,7 +214,7 @@ class TestControllerNode:
         return AdHocCommand.objects.create(inventory=inventory)

     @pytest.mark.django_db
-    def test_field_controller_node_exists(self, sqlite_copy_expert, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
+    def test_field_controller_node_exists(self, sqlite_copy, admin_user, job, project_update, inventory_update, adhoc, get, system_job_factory):
         system_job = system_job_factory()

         r = get(reverse('api:unified_job_list') + '?id={}'.format(job.id), admin_user, expect=200)

View File

@@ -57,7 +57,7 @@ def _mk_inventory_update(created=None):
         [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
     ],
 )
-def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_text_stdout(sqlite_copy, Parent, Child, relation, view, get, admin):
     job = Parent()
     job.save()
     for i in range(3):
@@ -79,7 +79,7 @@ def test_text_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
     ],
 )
 @pytest.mark.parametrize('download', [True, False])
-def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view, download, get, admin):
+def test_ansi_stdout_filtering(sqlite_copy, Parent, Child, relation, view, download, get, admin):
     job = Parent()
     job.save()
     for i in range(3):
@@ -111,7 +111,7 @@ def test_ansi_stdout_filtering(sqlite_copy_expert, Parent, Child, relation, view, download, get, admin):
         [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
     ],
 )
-def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_colorized_html_stdout(sqlite_copy, Parent, Child, relation, view, get, admin):
     job = Parent()
     job.save()
     for i in range(3):
@@ -134,7 +134,7 @@ def test_colorized_html_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
         [_mk_inventory_update, InventoryUpdateEvent, 'inventory_update', 'api:inventory_update_stdout'],
     ],
 )
-def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, get, admin):
+def test_stdout_line_range(sqlite_copy, Parent, Child, relation, view, get, admin):
     job = Parent()
     job.save()
     for i in range(20):
@@ -146,7 +146,7 @@ def test_stdout_line_range(sqlite_copy_expert, Parent, Child, relation, view, get, admin):

 @pytest.mark.django_db
-def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):
+def test_text_stdout_from_system_job_events(sqlite_copy, get, admin):
     created = tz_now()
     job = SystemJob(created=created)
     job.save()
@@ -158,7 +158,7 @@ def test_text_stdout_from_system_job_events(sqlite_copy_expert, get, admin):

 @pytest.mark.django_db
-def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
+def test_text_stdout_with_max_stdout(sqlite_copy, get, admin):
     created = tz_now()
     job = SystemJob(created=created)
     job.save()
@@ -185,7 +185,7 @@ def test_text_stdout_with_max_stdout(sqlite_copy_expert, get, admin):
 )
 @pytest.mark.parametrize('fmt', ['txt', 'ansi'])
 @mock.patch('awx.main.redact.UriCleaner.SENSITIVE_URI_PATTERN', mock.Mock(**{'search.return_value': None}))  # really slow for large strings
-def test_max_bytes_display(sqlite_copy_expert, Parent, Child, relation, view, fmt, get, admin):
+def test_max_bytes_display(sqlite_copy, Parent, Child, relation, view, fmt, get, admin):
     created = tz_now()
     job = Parent(created=created)
     job.save()
@@ -255,7 +255,7 @@ def test_legacy_result_stdout_with_max_bytes(Cls, view, fmt, get, admin):
     ],
 )
 @pytest.mark.parametrize('fmt', ['txt', 'ansi', 'txt_download', 'ansi_download'])
-def test_text_with_unicode_stdout(sqlite_copy_expert, Parent, Child, relation, view, get, admin, fmt):
+def test_text_with_unicode_stdout(sqlite_copy, Parent, Child, relation, view, get, admin, fmt):
     job = Parent()
     job.save()
     for i in range(3):
@@ -267,7 +267,7 @@

 @pytest.mark.django_db
-def test_unicode_with_base64_ansi(sqlite_copy_expert, get, admin):
+def test_unicode_with_base64_ansi(sqlite_copy, get, admin):
     created = tz_now()
     job = Job(created=created)
     job.save()

View File

@@ -1,8 +1,6 @@
 # Python
 import pytest
 from unittest import mock
-import tempfile
-import shutil
 import urllib.parse
 from unittest.mock import PropertyMock
@@ -789,25 +787,43 @@ def oauth_application(admin):
     return Application.objects.create(name='test app', user=admin, client_type='confidential', authorization_grant_type='password')


-@pytest.fixture
-def sqlite_copy_expert(request):
-    # copy_expert is postgres-specific, and SQLite doesn't support it; mock its
-    # behavior to test that it writes a file that contains stdout from events
-    path = tempfile.mkdtemp(prefix='job-event-stdout')
+class MockCopy:
+    events = []
+    index = -1

-    def write_stdout(self, sql, fd):
-        # simulate postgres copy_expert support with ORM code
+    def __init__(self, sql):
+        self.events = []
         parts = sql.split(' ')
         tablename = parts[parts.index('from') + 1]
         for cls in (JobEvent, AdHocCommandEvent, ProjectUpdateEvent, InventoryUpdateEvent, SystemJobEvent):
             if cls._meta.db_table == tablename:
                 for event in cls.objects.order_by('start_line').all():
-                    fd.write(event.stdout)
+                    self.events.append(event.stdout)

-    setattr(SQLiteCursorWrapper, 'copy_expert', write_stdout)
-    request.addfinalizer(lambda: shutil.rmtree(path))
-    request.addfinalizer(lambda: delattr(SQLiteCursorWrapper, 'copy_expert'))
-    return path
+    def read(self):
+        self.index = self.index + 1
+        if self.index < len(self.events):
+            return memoryview(self.events[self.index].encode())
+        return None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+
+@pytest.fixture
+def sqlite_copy(request, mocker):
+    # copy is postgres-specific, and SQLite doesn't support it; mock its
+    # behavior to test that it writes a file that contains stdout from events
+    def write_stdout(self, sql):
+        mock_copy = MockCopy(sql)
+        return mock_copy
+
+    mocker.patch.object(SQLiteCursorWrapper, 'copy', write_stdout, create=True)


 @pytest.fixture

View File

@@ -98,7 +98,7 @@ class TestJobNotificationMixin(object):

     @pytest.mark.django_db
     @pytest.mark.parametrize('JobClass', [AdHocCommand, InventoryUpdate, Job, ProjectUpdate, SystemJob, WorkflowJob])
-    def test_context(self, JobClass, sqlite_copy_expert, project, inventory_source):
+    def test_context(self, JobClass, sqlite_copy, project, inventory_source):
         """The Jinja context defines all of the fields that can be used by a template. Ensure that the context generated
         for each job type has the expected structure."""
         kwargs = {}

View File

@@ -1,5 +1,5 @@
 from datetime import datetime
-from django.utils.timezone import utc
+from datetime import timezone

 import pytest

 from awx.main.models import JobEvent, ProjectUpdateEvent, AdHocCommandEvent, InventoryUpdateEvent, SystemJobEvent
@@ -18,7 +18,7 @@ from awx.main.models import JobEvent, ProjectUpdateEvent, AdHocCommandEvent, InventoryUpdateEvent, SystemJobEvent
 @pytest.mark.parametrize('created', [datetime(2018, 1, 1).isoformat(), datetime(2018, 1, 1)])
 def test_event_parse_created(job_identifier, cls, created):
     event = cls.create_from_data(**{job_identifier: 123, 'created': created})
-    assert event.created == datetime(2018, 1, 1).replace(tzinfo=utc)
+    assert event.created == datetime(2018, 1, 1).replace(tzinfo=timezone.utc)


 @pytest.mark.parametrize(

View File

@@ -195,9 +195,9 @@ function getRouteConfig(userProfile = {}) {
     deleteRoute('host_metrics');
     deleteRouteGroup('settings');
     deleteRoute('management_jobs');
-    if (userProfile?.isOrgAdmin) return routeConfig;
     deleteRoute('topology_view');
     deleteRoute('instances');
+    if (userProfile?.isOrgAdmin) return routeConfig;
     if (!userProfile?.isNotificationAdmin) deleteRoute('notification_templates');
     return routeConfig;
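
Net effect of moving the early return below the two `deleteRoute` calls: organization admins now lose the `topology_view` and `instances` routes as well, which the updated test expectations below confirm.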

View File

@@ -101,10 +101,8 @@ describe('getRouteConfig', () => {
       '/credential_types',
       '/notification_templates',
       '/instance_groups',
-      '/instances',
       '/applications',
       '/execution_environments',
-      '/topology_view',
     ]);
   });
@@ -237,10 +235,8 @@ describe('getRouteConfig', () => {
       '/credential_types',
      '/notification_templates',
       '/instance_groups',
-      '/instances',
       '/applications',
       '/execution_environments',
-      '/topology_view',
     ]);
   });
@@ -268,10 +264,8 @@ describe('getRouteConfig', () => {
       '/credential_types',
       '/notification_templates',
       '/instance_groups',
-      '/instances',
       '/applications',
       '/execution_environments',
-      '/topology_view',
     ]);
   });
 });

View File

@@ -12,8 +12,13 @@ To encrypt secret fields, AWX uses AES in CBC mode with a 256-bit key for encryption
 If necessary, credentials and encrypted settings can be extracted using the AWX shell:

 ```python
-# awx-manage shell_plus
+$ awx-manage shell_plus
 >>> from awx.main.utils import decrypt_field
->>> cred = Credential.objects.get(name="my private key")
->>> print(decrypt_field(cred, "ssh_key_data"))
+>>> print(decrypt_field(Credential.objects.get(name="my private key"), "ssh_key_data"))  # Example for a credential
+>>> print(decrypt_field(Setting.objects.get(key='SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET'), 'value'))  # Example for a setting
 ```
+
+If you are running a Kubernetes-based deployment, you can execute awx-manage like this:
+
+```bash
+$ kubectl exec --stdin --tty [instance name]-task-[...] -c [instance name]-task -- awx-manage shell_plus
+```