Removing psycopg2 references

This commit is contained in:
John Westcott IV
2023-05-09 09:26:46 -04:00
committed by John Westcott IV
parent 2b8ed66f3e
commit e47d30974c
7 changed files with 19 additions and 22 deletions

View File

@@ -52,7 +52,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
# Python packages to install only from source (not from binary wheels) # Python packages to install only from source (not from binary wheels)
# Comma separated list # Comma separated list
SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
# These should be upgraded in the AWX and Ansible venv before attempting # These should be upgraded in the AWX and Ansible venv before attempting
# to install the actual requirements # to install the actual requirements
VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4 VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4

View File

@@ -1,5 +1,5 @@
import os import os
import psycopg2 import psycopg
import select import select
from contextlib import contextmanager from contextlib import contextmanager
@@ -64,9 +64,9 @@ class PubSub(object):
if yield_timeouts: if yield_timeouts:
yield None yield None
else: else:
self.conn.poll() notification_generator = self.conn.notifies()
while self.conn.notifies: for notification in notification_generator:
yield self.conn.notifies.pop(0) yield notification
def close(self): def close(self):
self.conn.close() self.conn.close()
@@ -89,9 +89,8 @@ def pg_bus_conn(new_connection=False):
conf['OPTIONS'] = conf.get('OPTIONS', {}).copy() conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
# Modify the application name to distinguish from other connections the process might use # Modify the application name to distinguish from other connections the process might use
conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener') conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
conn = psycopg2.connect(dbname=conf['NAME'], host=conf['HOST'], user=conf['USER'], password=conf['PASSWORD'], port=conf['PORT'], **conf['OPTIONS']) connection_data = f"dbname={conf['NAME']} host={conf['HOST']} user={conf['USER']} password={conf['PASSWORD']} port={conf['PORT']}"
# Django connection.cursor().connection doesn't have autocommit=True on by default conn = psycopg.connect(connection_data, autocommit=True, **conf['OPTIONS'])
conn.set_session(autocommit=True)
else: else:
if pg_connection.connection is None: if pg_connection.connection is None:
pg_connection.connect() pg_connection.connect()

View File

@@ -7,7 +7,7 @@ import signal
import sys import sys
import redis import redis
import json import json
import psycopg2 import psycopg
import time import time
from uuid import UUID from uuid import UUID
from queue import Empty as QueueEmpty from queue import Empty as QueueEmpty
@@ -205,10 +205,10 @@ class AWXConsumerPG(AWXConsumerBase):
self.listen_start = time.time() self.listen_start = time.time()
if self.should_stop: if self.should_stop:
return return
except psycopg2.InterfaceError: except psycopg.InterfaceError:
logger.warning("Stale Postgres message bus connection, reconnecting") logger.warning("Stale Postgres message bus connection, reconnecting")
continue continue
except (db.DatabaseError, psycopg2.OperationalError): except (db.DatabaseError, psycopg.OperationalError):
# If we have attained steady state operation, tolerate short-term database hiccups # If we have attained steady state operation, tolerate short-term database hiccups
if not self.pg_is_down: if not self.pg_is_down:
logger.exception(f"Error consuming new events from postgres, will retry for {self.pg_max_wait} s") logger.exception(f"Error consuming new events from postgres, will retry for {self.pg_max_wait} s")

View File

@@ -124,7 +124,7 @@ Ansible source, set up a dedicated virtual environment:
``` ```
mkvirtualenv my_new_venv mkvirtualenv my_new_venv
# may need to replace psycopg2 with psycopg2-binary in requirements/requirements.txt # may need to replace psycopg with psycopg[binary] in requirements/requirements.txt
pip install -r requirements/requirements.txt -r requirements/requirements_dev.txt -r requirements/requirements_git.txt pip install -r requirements/requirements.txt -r requirements/requirements_dev.txt -r requirements/requirements_git.txt
make clean-api make clean-api
pip install -e <path to your Ansible> pip install -e <path to your Ansible>

View File

@@ -126,7 +126,7 @@ The websocket backplane is handled by the wsbroadcast service that is part of th
### Postgres ### Postgres
AWX is a Django application and uses the psycopg2 library to establish connections to the Postgres database. AWX is a Django application and uses the psycopg (version 3) library to establish connections to the Postgres database.
Only control nodes need direct access to the database. Only control nodes need direct access to the database.
Importantly AWX relies on the Postgres notify system for inter-process communication. The dispatcher system spawns separate processes/threads that run in parallel. For example, it runs the task manager periodically, and the task manager needs to be able to communicate with the main dispatcher thread. It does this via `pg_notify`. Importantly AWX relies on the Postgres notify system for inter-process communication. The dispatcher system spawns separate processes/threads that run in parallel. For example, it runs the task manager periodically, and the task manager needs to be able to communicate with the main dispatcher thread. It does this via `pg_notify`.

View File

@@ -40,14 +40,13 @@ RUN dnf -y update && dnf install -y 'dnf-command(config-manager)' && \
postgresql-devel \ postgresql-devel \
python3-devel \ python3-devel \
python3-pip \ python3-pip \
python3-psycopg2 \
python3-setuptools \ python3-setuptools \
swig \ swig \
unzip \ unzip \
xmlsec1-devel \ xmlsec1-devel \
xmlsec1-openssl-devel xmlsec1-openssl-devel
RUN pip3 install virtualenv build RUN pip3 install virtualenv build psycopg
{% if image_architecture == 'ppc64le' %} {% if image_architecture == 'ppc64le' %}
RUN dnf -y update && dnf install -y wget && \ RUN dnf -y update && dnf install -y wget && \
@@ -121,7 +120,6 @@ RUN dnf -y update && dnf install -y 'dnf-command(config-manager)' && \
python3-devel \ python3-devel \
python3-libselinux \ python3-libselinux \
python3-pip \ python3-pip \
python3-psycopg2 \
python3-setuptools \ python3-setuptools \
rsync \ rsync \
rsyslog-8.2102.0-106.el9 \ rsyslog-8.2102.0-106.el9 \
@@ -133,7 +131,7 @@ RUN dnf -y update && dnf install -y 'dnf-command(config-manager)' && \
xmlsec1-openssl && \ xmlsec1-openssl && \
dnf -y clean all dnf -y clean all
RUN pip3 install virtualenv supervisor dumb-init RUN pip3 install virtualenv supervisor dumb-init psycopg
RUN rm -rf /root/.cache && rm -rf /tmp/* RUN rm -rf /root/.cache && rm -rf /tmp/*

View File

@@ -38,7 +38,7 @@ from io import StringIO
from time import time from time import time
from uuid import uuid4 from uuid import uuid4
import psycopg2 import psycopg
from django import setup as setup_django from django import setup as setup_django
from django.db import connection from django.db import connection
@@ -111,7 +111,7 @@ class YieldedRows(StringIO):
def firehose(job, count, created_stamp, modified_stamp): def firehose(job, count, created_stamp, modified_stamp):
conn = psycopg2.connect(dsn) conn = psycopg.connect(dsn)
f = YieldedRows(job, count, created_stamp, modified_stamp) f = YieldedRows(job, count, created_stamp, modified_stamp)
with conn.cursor() as cursor: with conn.cursor() as cursor:
cursor.copy_expert( cursor.copy_expert(
@@ -133,7 +133,7 @@ def firehose(job, count, created_stamp, modified_stamp):
def cleanup(sql): def cleanup(sql):
print(sql) print(sql)
conn = psycopg2.connect(dsn) conn = psycopg.connect(dsn)
with conn.cursor() as cursor: with conn.cursor() as cursor:
cursor.execute(sql) cursor.execute(sql)
conn.commit() conn.commit()
@@ -221,7 +221,7 @@ def generate_jobs(jobs, batch_size, time_delta):
def generate_events(events, job, time_delta): def generate_events(events, job, time_delta):
conn = psycopg2.connect(dsn) conn = psycopg.connect(dsn)
cursor = conn.cursor() cursor = conn.cursor()
created_time = datetime.datetime.today() - time_delta - datetime.timedelta(seconds=5) created_time = datetime.datetime.today() - time_delta - datetime.timedelta(seconds=5)
@@ -282,7 +282,7 @@ if __name__ == '__main__':
days_delta = params.days_delta days_delta = params.days_delta
batch_size = params.batch_size batch_size = params.batch_size
try: try:
conn = psycopg2.connect(dsn) conn = psycopg.connect(dsn)
cursor = conn.cursor() cursor = conn.cursor()
# Drop all the indexes before generating jobs # Drop all the indexes before generating jobs