Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 19:44:43 -03:30)

Compare commits: 1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | 1a8c96a5e6 |  |
.github/workflows/devel_images.yml (vendored, 1 change)

@@ -3,7 +3,6 @@ name: Build/Push Development Images
 env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   workflow_dispatch:
   push:
     branches:
      - devel
.vscode/launch.json (vendored, 113 lines)

@@ -1,113 +0,0 @@ (file deleted)
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "run_ws_heartbeat",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_ws_heartbeat"],
            "django": true,
            "preLaunchTask": "stop awx-ws-heartbeat",
            "postDebugTask": "start awx-ws-heartbeat"
        },
        {
            "name": "run_cache_clear",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_cache_clear"],
            "django": true,
            "preLaunchTask": "stop awx-cache-clear",
            "postDebugTask": "start awx-cache-clear"
        },
        {
            "name": "run_callback_receiver",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_callback_receiver"],
            "django": true,
            "preLaunchTask": "stop awx-receiver",
            "postDebugTask": "start awx-receiver"
        },
        {
            "name": "run_dispatcher",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_dispatcher"],
            "django": true,
            "preLaunchTask": "stop awx-dispatcher",
            "postDebugTask": "start awx-dispatcher"
        },
        {
            "name": "run_rsyslog_configurer",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_rsyslog_configurer"],
            "django": true,
            "preLaunchTask": "stop awx-rsyslog-configurer",
            "postDebugTask": "start awx-rsyslog-configurer"
        },
        {
            "name": "run_cache_clear",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_cache_clear"],
            "django": true,
            "preLaunchTask": "stop awx-cache-clear",
            "postDebugTask": "start awx-cache-clear"
        },
        {
            "name": "run_wsrelay",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["run_wsrelay"],
            "django": true,
            "preLaunchTask": "stop awx-wsrelay",
            "postDebugTask": "start awx-wsrelay"
        },
        {
            "name": "daphne",
            "type": "debugpy",
            "request": "launch",
            "program": "/var/lib/awx/venv/awx/bin/daphne",
            "args": ["-b", "127.0.0.1", "-p", "8051", "awx.asgi:channel_layer"],
            "django": true,
            "preLaunchTask": "stop awx-daphne",
            "postDebugTask": "start awx-daphne"
        },
        {
            "name": "runserver(uwsgi alternative)",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["runserver", "127.0.0.1:8052"],
            "django": true,
            "preLaunchTask": "stop awx-uwsgi",
            "postDebugTask": "start awx-uwsgi"
        },
        {
            "name": "runserver_plus(uwsgi alternative)",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["runserver_plus", "127.0.0.1:8052"],
            "django": true,
            "preLaunchTask": "stop awx-uwsgi and install Werkzeug",
            "postDebugTask": "start awx-uwsgi"
        },
        {
            "name": "shell_plus",
            "type": "debugpy",
            "request": "launch",
            "program": "manage.py",
            "args": ["shell_plus"],
            "django": true,
        },
    ]
}
.vscode/tasks.json (vendored, 100 lines)

@@ -1,100 +0,0 @@ (file deleted)
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "start awx-cache-clear",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-cache-clear"
        },
        {
            "label": "stop awx-cache-clear",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-cache-clear"
        },
        {
            "label": "start awx-daphne",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-daphne"
        },
        {
            "label": "stop awx-daphne",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-daphne"
        },
        {
            "label": "start awx-dispatcher",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-dispatcher"
        },
        {
            "label": "stop awx-dispatcher",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-dispatcher"
        },
        {
            "label": "start awx-receiver",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-receiver"
        },
        {
            "label": "stop awx-receiver",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-receiver"
        },
        {
            "label": "start awx-rsyslog-configurer",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-rsyslog-configurer"
        },
        {
            "label": "stop awx-rsyslog-configurer",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-rsyslog-configurer"
        },
        {
            "label": "start awx-rsyslogd",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-rsyslogd"
        },
        {
            "label": "stop awx-rsyslogd",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-rsyslogd"
        },
        {
            "label": "start awx-uwsgi",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-uwsgi"
        },
        {
            "label": "stop awx-uwsgi",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-uwsgi"
        },
        {
            "label": "stop awx-uwsgi and install Werkzeug",
            "type": "shell",
            "command": "pip install Werkzeug; supervisorctl stop tower-processes:awx-uwsgi"
        },
        {
            "label": "start awx-ws-heartbeat",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-ws-heartbeat"
        },
        {
            "label": "stop awx-ws-heartbeat",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-ws-heartbeat"
        },
        {
            "label": "start awx-wsrelay",
            "type": "shell",
            "command": "supervisorctl start tower-processes:awx-wsrelay"
        },
        {
            "label": "stop awx-wsrelay",
            "type": "shell",
            "command": "supervisorctl stop tower-processes:awx-wsrelay"
        }
    ]
}
Makefile (9 changes)

@@ -216,6 +216,8 @@ collectstatic:
 	fi; \
 	$(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
 
+DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
+
 uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
@@ -223,7 +225,7 @@ uwsgi: collectstatic
 	uwsgi /etc/tower/uwsgi.ini
 
 awx-autoreload:
-	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
+	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
 
 daphne:
 	@if [ "$(VENV_BASE)" ]; then \
@@ -303,7 +305,7 @@ swagger: reports
 	@if [ "$(VENV_BASE)" ]; then \
 		. $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs | tee reports/$@.report)
+	(set -o pipefail && py.test $(PYTEST_ARGS) awx/conf/tests/functional awx/main/tests/functional/api awx/main/tests/docs --release=$(VERSION_TARGET) | tee reports/$@.report)
 
 check: black
 
@@ -629,6 +631,9 @@ clean-elk:
 	docker rm tools_elasticsearch_1
 	docker rm tools_kibana_1
 
+psql-container:
+	docker run -it --net tools_default --rm postgres:12 sh -c 'exec psql -h "postgres" -p "5432" -U postgres'
+
 VERSION:
 	@echo "awx: $(VERSION)"
@@ -154,12 +154,10 @@ def manage():
     from django.conf import settings
     from django.core.management import execute_from_command_line
 
-    # enforce the postgres version is a minimum of 12 (we need this for partitioning); if not, then terminate program with exit code of 1
-    # In the future if we require a feature of a version of postgres > 12 this should be updated to reflect that.
-    # The return of connection.pg_version is something like 12013
+    # enforce the postgres version is equal to 12. if not, then terminate program with exit code of 1
     if not os.getenv('SKIP_PG_VERSION_CHECK', False) and not MODE == 'development':
         if (connection.pg_version // 10000) < 12:
-            sys.stderr.write("At a minimum, postgres version 12 is required\n")
+            sys.stderr.write("Postgres version 12 is required\n")
             sys.exit(1)
 
     if len(sys.argv) >= 2 and sys.argv[1] in ('version', '--version'):  # pragma: no cover
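As a side note on the check above: Django's PostgreSQL backend exposes the server version as a single integer (for example 120013 for 12.13), which is why the code divides by 10000. A minimal standalone sketch of the gate, with illustrative version numbers:

```python
import sys


def check_pg_version(pg_version: int, required_major: int = 12) -> None:
    # e.g. connection.pg_version == 120013 for PostgreSQL 12.13;
    # integer division by 10000 extracts the major version.
    if pg_version // 10000 < required_major:
        sys.stderr.write("Postgres version %d is required\n" % required_major)
        sys.exit(1)


check_pg_version(120013)  # 12.x: passes; 110007 would exit(1)
```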
@@ -1,7 +1,6 @@
 # Python
 import contextlib
 import logging
-import psycopg
 import threading
 import time
 import os
@@ -14,7 +13,7 @@ from django.conf import settings, UserSettingsHolder
 from django.core.cache import cache as django_cache
 from django.core.exceptions import ImproperlyConfigured, SynchronousOnlyOperation
 from django.db import transaction, connection
-from django.db.utils import DatabaseError, ProgrammingError
+from django.db.utils import Error as DBError, ProgrammingError
 from django.utils.functional import cached_property
 
 # Django REST Framework
@@ -81,26 +80,18 @@ def _ctit_db_wrapper(trans_safe=False):
             logger.debug('Obtaining database settings in spite of broken transaction.')
             transaction.set_rollback(False)
         yield
-    except ProgrammingError as e:
-        # Exception raised for programming errors
-        # Examples may be table not found or already exists,
-        # this generally means we can't fetch Tower configuration
-        # because the database hasn't actually finished migrating yet;
-        # this is usually a sign that a service in a container (such as ws_broadcast)
-        # has come up *before* the database has finished migrating, and
-        # especially that the conf.settings table doesn't exist yet
-        # syntax error in the SQL statement, wrong number of parameters specified, etc.
-        if trans_safe:
-            logger.debug(f'Database settings are not available, using defaults. error: {str(e)}')
-        else:
-            logger.exception('Error modifying something related to database settings.')
-    except DatabaseError as e:
-        if trans_safe:
-            cause = e.__cause__
-            if cause and hasattr(cause, 'sqlstate'):
-                sqlstate = cause.sqlstate
-                sqlstate_str = psycopg.errors.lookup(sqlstate)
-                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
+    except DBError as exc:
+        if trans_safe:
+            level = logger.warning
+            if isinstance(exc, ProgrammingError):
+                if 'relation' in str(exc) and 'does not exist' in str(exc):
+                    # this generally means we can't fetch Tower configuration
+                    # because the database hasn't actually finished migrating yet;
+                    # this is usually a sign that a service in a container (such as ws_broadcast)
+                    # has come up *before* the database has finished migrating, and
+                    # especially that the conf.settings table doesn't exist yet
+                    level = logger.debug
+            level(f'Database settings are not available, using defaults. error: {str(exc)}')
         else:
             logger.exception('Error modifying something related to database settings.')
     finally:
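A compact sketch of the consolidated handler above (simplified names; it assumes only that Django DB exceptions subclass django.db.utils.Error): when trans_safe is set, a missing-table ProgrammingError is logged at debug level and defaults are used, while anything else is logged louder.

```python
import logging

from django.db.utils import Error as DBError, ProgrammingError

logger = logging.getLogger(__name__)


def read_setting(fetch, default, trans_safe=True):
    try:
        return fetch()
    except DBError as exc:
        if not trans_safe:
            logger.exception('Error modifying something related to database settings.')
            raise
        level = logger.warning
        if isinstance(exc, ProgrammingError) and 'relation' in str(exc) and 'does not exist' in str(exc):
            # migrations have not created the settings table yet
            level = logger.debug
        level('Database settings are not available, using defaults. error: %s', exc)
        return default
```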
@@ -419,7 +419,7 @@ def _events_table(since, full_path, until, tbl, where_column, project_job_create
             resolved_action,
             resolved_role,
             -- '-' operator listed here:
-            -- https://www.postgresql.org/docs/15/functions-json.html
+            -- https://www.postgresql.org/docs/12/functions-json.html
             -- note that operator is only supported by jsonb objects
             -- https://www.postgresql.org/docs/current/datatype-json.html
             (CASE WHEN event = 'playbook_on_stats' THEN {event_data} - 'artifact_data' END) as playbook_on_stats,
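For reference, the jsonb `-` operator used in that CASE expression deletes a key from a jsonb value, which is how `artifact_data` gets stripped from `playbook_on_stats` rows. An illustrative psycopg snippet (the connection string is a placeholder):

```python
import psycopg

with psycopg.connect("dbname=awx") as conn:
    row = conn.execute(
        """SELECT '{"changed": true, "artifact_data": {"secret": 1}}'::jsonb - 'artifact_data'"""
    ).fetchone()
    print(row[0])  # {'changed': True}
```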
@@ -14,7 +14,7 @@ __all__ = [
     'STANDARD_INVENTORY_UPDATE_ENV',
 ]
 
-CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights', 'terraform')
+CLOUD_PROVIDERS = ('azure_rm', 'ec2', 'gce', 'vmware', 'openstack', 'rhv', 'satellite6', 'controller', 'insights')
 PRIVILEGE_ESCALATION_METHODS = [
     ('sudo', _('Sudo')),
     ('su', _('Su')),
@@ -1,59 +0,0 @@ (file deleted)
# Generated by Django 4.2.6 on 2024-02-15 20:51

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0189_inbound_hop_nodes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='inventorysource',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                ],
                default=None,
                max_length=32,
            ),
        ),
        migrations.AlterField(
            model_name='inventoryupdate',
            name='source',
            field=models.CharField(
                choices=[
                    ('file', 'File, Directory or Script'),
                    ('constructed', 'Template additional groups and hostvars at runtime'),
                    ('scm', 'Sourced from a Project'),
                    ('ec2', 'Amazon EC2'),
                    ('gce', 'Google Compute Engine'),
                    ('azure_rm', 'Microsoft Azure Resource Manager'),
                    ('vmware', 'VMware vCenter'),
                    ('satellite6', 'Red Hat Satellite 6'),
                    ('openstack', 'OpenStack'),
                    ('rhv', 'Red Hat Virtualization'),
                    ('controller', 'Red Hat Ansible Automation Platform'),
                    ('insights', 'Red Hat Insights'),
                    ('terraform', 'Terraform State'),
                ],
                default=None,
                max_length=32,
            ),
        ),
    ]
@@ -925,7 +925,6 @@ class InventorySourceOptions(BaseModel):
         ('rhv', _('Red Hat Virtualization')),
         ('controller', _('Red Hat Ansible Automation Platform')),
         ('insights', _('Red Hat Insights')),
-        ('terraform', _('Terraform State')),
     ]
 
     # From the options of the Django management base command
@@ -1631,20 +1630,6 @@ class satellite6(PluginFileInjector):
         return ret
 
 
-class terraform(PluginFileInjector):
-    plugin_name = 'terraform_state'
-    base_injector = 'managed'
-    namespace = 'cloud'
-    collection = 'terraform'
-    use_fqcn = True
-
-    def inventory_as_dict(self, inventory_update, private_data_dir):
-        env = super(terraform, self).get_plugin_env(inventory_update, private_data_dir, None)
-        ret = super().inventory_as_dict(inventory_update, private_data_dir)
-        ret['backend_config_files'] = env["TF_BACKEND_CONFIG_FILE"]
-        return ret
-
-
 class controller(PluginFileInjector):
     plugin_name = 'tower'  # TODO: relying on routing for now, update after EEs pick up revised collection
     base_injector = 'template'
@@ -6,7 +6,6 @@ import itertools
 import json
 import logging
 import os
-import psycopg
 from io import StringIO
 from contextlib import redirect_stdout
 import shutil
@@ -417,7 +416,7 @@ def handle_removed_image(remove_images=None):
 
 @task(queue=get_task_queuename)
 def cleanup_images_and_files():
-    _cleanup_images_and_files(image_prune=True)
+    _cleanup_images_and_files()
 
 
 @task(queue=get_task_queuename)
@@ -631,18 +630,10 @@ def cluster_node_heartbeat(dispatch_time=None, worker_tasks=None):
                     logger.error("Host {} last checked in at {}, marked as lost.".format(other_inst.hostname, other_inst.last_seen))
 
             except DatabaseError as e:
-                cause = e.__cause__
-                if cause and hasattr(cause, 'sqlstate'):
-                    sqlstate = cause.sqlstate
-                    sqlstate_str = psycopg.errors.lookup(sqlstate)
-                    logger.debug('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
-
-                    if sqlstate == psycopg.errors.NoData:
-                        logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
-                    else:
-                        logger.exception("Error marking {} as lost.".format(other_inst.hostname))
+                if 'did not affect any rows' in str(e):
+                    logger.debug('Another instance has marked {} as lost'.format(other_inst.hostname))
                 else:
-                    logger.exception('No SQL state available. Error marking {} as lost'.format(other_inst.hostname))
+                    logger.exception('Error marking {} as lost'.format(other_inst.hostname))
 
     # Run local reaper
     if worker_tasks is not None:
@@ -797,19 +788,10 @@ def update_inventory_computed_fields(inventory_id):
     try:
         i.update_computed_fields()
     except DatabaseError as e:
         # https://github.com/django/django/blob/eff21d8e7a1cb297aedf1c702668b590a1b618f3/django/db/models/base.py#L1105
         # django raises DatabaseError("Forced update did not affect any rows.")
-
-        # if sqlstate is set then there was a database error and otherwise will re-raise that error
-        cause = e.__cause__
-        if cause and hasattr(cause, 'sqlstate'):
-            sqlstate = cause.sqlstate
-            sqlstate_str = psycopg.errors.lookup(sqlstate)
-            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
-            raise
-
-        # otherwise
-        logger.debug('Exiting duplicate update_inventory_computed_fields task.')
+        if 'did not affect any rows' in str(e):
+            logger.debug('Exiting duplicate update_inventory_computed_fields task.')
+            return
+        raise
 
 
 def update_smart_memberships_for_inventory(smart_inventory):
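The narrowed handler leans on the fact that Django raises DatabaseError("Forced update did not affect any rows.") when a forced UPDATE matches nothing, so matching on that message is what distinguishes a benign duplicate task from a real failure. A minimal sketch of the same pattern:

```python
from django.db import DatabaseError


def update_once(inventory, logger):
    try:
        inventory.update_computed_fields()
    except DatabaseError as e:
        if 'did not affect any rows' in str(e):
            # another task already updated (and possibly deleted) the row
            logger.debug('Exiting duplicate update_inventory_computed_fields task.')
            return
        raise
```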
@@ -1,3 +0,0 @@ (file deleted)
{
    "TF_BACKEND_CONFIG_FILE": "{{ file_reference }}"
}
@@ -1,8 +1,13 @@
 from awx.main.tests.functional.conftest import *  # noqa
-import os
 import pytest
 
 
-@pytest.fixture()
-def release():
-    return os.environ.get('VERSION_TARGET', '')
+def pytest_addoption(parser):
+    parser.addoption("--release", action="store", help="a release version number, e.g., 3.3.0")
+
+
+def pytest_generate_tests(metafunc):
+    # This is called for every test. Only get/set command line arguments
+    # if the argument is specified in the list of test "fixturenames".
+    option_value = metafunc.config.option.release
+    if 'release' in metafunc.fixturenames and option_value is not None:
+        metafunc.parametrize("release", [option_value])
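A hypothetical test consuming the hook above: running `py.test --release=3.3.0` makes pytest_generate_tests parametrize any test that lists `release` among its fixture names.

```python
def test_release_version_is_passed_through(release):
    # Parametrized by pytest_generate_tests from the --release option,
    # so release == '3.3.0' for the invocation above.
    assert release == '3.3.0'
```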
@@ -3,19 +3,15 @@ import pytest
 from unittest import mock
 import urllib.parse
 from unittest.mock import PropertyMock
-import importlib
 
 # Django
 from django.urls import resolve
 from django.http import Http404
-from django.apps import apps
 from django.core.handlers.exception import response_for_exception
 from django.contrib.auth.models import User
 from django.core.serializers.json import DjangoJSONEncoder
 from django.db.backends.sqlite3.base import SQLiteCursorWrapper
-
-from django.db.models.signals import post_migrate
 
 # AWX
 from awx.main.models.projects import Project
 from awx.main.models.ha import Instance
@@ -45,19 +41,10 @@ from awx.main.models.workflow import WorkflowJobTemplate
 from awx.main.models.ad_hoc_commands import AdHocCommand
 from awx.main.models.oauth import OAuth2Application as Application
 from awx.main.models.execution_environments import ExecutionEnvironment
-from awx.main.utils import is_testing
 
 __SWAGGER_REQUESTS__ = {}
 
-
-# HACK: the dab_resource_registry app required ServiceID in migrations which checks do not run
-dab_rr_initial = importlib.import_module('ansible_base.resource_registry.migrations.0001_initial')
-
-
-if is_testing():
-    post_migrate.connect(lambda **kwargs: dab_rr_initial.create_service_id(apps, None))
-
 
 @pytest.fixture(scope="session")
 def swagger_autogen(requests=__SWAGGER_REQUESTS__):
     return requests
@@ -193,7 +193,6 @@ class TestInventorySourceInjectors:
             ('satellite6', 'theforeman.foreman.foreman'),
             ('insights', 'redhatinsights.insights.insights'),
             ('controller', 'awx.awx.tower'),
-            ('terraform', 'cloud.terraform.terraform_state'),
         ],
     )
     def test_plugin_proper_names(self, source, proper_name):
@@ -107,7 +107,6 @@ def read_content(private_data_dir, raw_env, inventory_update):
         for filename in os.listdir(os.path.join(private_data_dir, subdir)):
             filename_list.append(os.path.join(subdir, filename))
     filename_list = sorted(filename_list, key=lambda fn: inverse_env.get(os.path.join(private_data_dir, fn), [fn])[0])
-    inventory_content = ""
     for filename in filename_list:
         if filename in ('args', 'project'):
             continue  # Ansible runner
@@ -131,7 +130,6 @@ def read_content(private_data_dir, raw_env, inventory_update):
             dir_contents[abs_file_path] = f.read()
         # Declare a reference to inventory plugin file if it exists
         if abs_file_path.endswith('.yml') and 'plugin: ' in dir_contents[abs_file_path]:
-            inventory_content = dir_contents[abs_file_path]
             referenced_paths.add(abs_file_path)  # used as inventory file
         elif cache_file_regex.match(abs_file_path):
             file_aliases[abs_file_path] = 'cache_file'
@@ -159,11 +157,7 @@ def read_content(private_data_dir, raw_env, inventory_update):
     content = {}
     for abs_file_path, file_content in dir_contents.items():
         # assert that all files laid down are used
-        if (
-            abs_file_path not in referenced_paths
-            and to_container_path(abs_file_path, private_data_dir) not in inventory_content
-            and abs_file_path not in ignore_files
-        ):
+        if abs_file_path not in referenced_paths and abs_file_path not in ignore_files:
             raise AssertionError(
                 "File {} is not referenced. References and files:\n{}\n{}".format(abs_file_path, json.dumps(env, indent=4), json.dumps(dir_contents, indent=4))
             )
@@ -411,14 +411,14 @@ def test_project_delete(delete, organization, admin_user):
 
 
 @pytest.mark.parametrize(
-    'order_by, expected_names',
+    'order_by, expected_names, expected_ids',
     [
-        ('name', ['alice project', 'bob project', 'shared project']),
-        ('-name', ['shared project', 'bob project', 'alice project']),
+        ('name', ['alice project', 'bob project', 'shared project'], [1, 2, 3]),
+        ('-name', ['shared project', 'bob project', 'alice project'], [3, 2, 1]),
     ],
 )
 @pytest.mark.django_db
-def test_project_list_ordering_by_name(get, order_by, expected_names, organization_factory):
+def test_project_list_ordering_by_name(get, order_by, expected_names, expected_ids, organization_factory):
     'ensure sorted order of project list is maintained correctly when the requested order is invalid or not applicable'
     objects = organization_factory(
         'org1',
@@ -426,11 +426,13 @@ def test_project_list_ordering_by_name(get, order_by, expected_names, organizati
         superusers=['admin'],
     )
     project_names = []
+    project_ids = []
     # TODO: ask for an order by here that doesn't apply
     results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
     for x in range(len(results)):
         project_names.append(results[x]['name'])
-    assert project_names == expected_names
+        project_ids.append(results[x]['id'])
+    assert project_names == expected_names and project_ids == expected_ids
 
 
 @pytest.mark.parametrize('order_by', ('name', '-name'))
@@ -448,8 +450,7 @@ def test_project_list_ordering_with_duplicate_names(get, order_by, organization_
     for x in range(3):
         results = get(reverse('api:project_list'), objects.superusers.admin, QUERY_STRING='order_by=%s' % order_by).data['results']
         project_ids[x] = [proj['id'] for proj in results]
-    assert project_ids[0] == project_ids[1] == project_ids[2]
-    assert project_ids[0] == sorted(project_ids[0])
+    assert project_ids[0] == project_ids[1] == project_ids[2] == [1, 2, 3, 4, 5]
 
 
 @pytest.mark.django_db
@@ -1,6 +1,11 @@
 # Python
 from unittest import mock
+import uuid
+
+# patch python-ldap
+with mock.patch('__main__.__builtins__.dir', return_value=[]):
+    import ldap  # NOQA
 
 # Load development settings for base variables.
 from awx.settings.development import *  # NOQA
@@ -1,64 +0,0 @@ (file deleted)
import pytest
from unittest.mock import MagicMock, patch
from awx.main.tasks.system import update_inventory_computed_fields
from awx.main.models import Inventory
from django.db import DatabaseError


@pytest.fixture
def mock_logger():
    with patch("awx.main.tasks.system.logger") as logger:
        yield logger


@pytest.fixture
def mock_inventory():
    return MagicMock(spec=Inventory)


def test_update_inventory_computed_fields_existing_inventory(mock_logger, mock_inventory):
    # Mocking the Inventory.objects.filter method to return a non-empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = True
        mock_filter.return_value.__getitem__.return_value = mock_inventory

        # Mocking the update_computed_fields method
        with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
            update_inventory_computed_fields(1)

            # Assertions
            mock_filter.assert_called_once_with(id=1)
            mock_update_computed_fields.assert_called_once()

    # You can add more assertions based on your specific requirements


def test_update_inventory_computed_fields_missing_inventory(mock_logger):
    # Mocking the Inventory.objects.filter method to return an empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = False

        update_inventory_computed_fields(1)

        # Assertions
        mock_filter.assert_called_once_with(id=1)
        mock_logger.error.assert_called_once_with("Update Inventory Computed Fields failed due to missing inventory: 1")


def test_update_inventory_computed_fields_database_error_nosqlstate(mock_logger, mock_inventory):
    # Mocking the Inventory.objects.filter method to return a non-empty queryset
    with patch("awx.main.tasks.system.Inventory.objects.filter") as mock_filter:
        mock_filter.return_value.exists.return_value = True
        mock_filter.return_value.__getitem__.return_value = mock_inventory

        # Mocking the update_computed_fields method
        with patch.object(mock_inventory, "update_computed_fields") as mock_update_computed_fields:
            # Simulating the update_computed_fields method to explicitly raise a DatabaseError
            mock_update_computed_fields.side_effect = DatabaseError("Some error")

            update_inventory_computed_fields(1)

            # Assertions
            mock_filter.assert_called_once_with(id=1)
            mock_update_computed_fields.assert_called_once()
            mock_inventory.update_computed_fields.assert_called_once()
@@ -121,10 +121,6 @@ def test_get_model_for_valid_type(model_type, model_class):
     assert common.get_model_for_type(model_type) == model_class
 
 
-def test_is_testing():
-    assert common.is_testing() is True
-
-
 @pytest.mark.parametrize("model_type,model_class", [(name, cls) for cls, name in TEST_MODELS])
 def test_get_capacity_type(model_type, model_class):
     if model_type in ('job', 'ad_hoc_command', 'inventory_update', 'job_template'):
@@ -7,7 +7,6 @@ import json
 import yaml
 import logging
 import time
-import psycopg
 import os
 import subprocess
 import re
@@ -24,7 +23,7 @@ from django.core.exceptions import ObjectDoesNotExist, FieldDoesNotExist
 from django.utils.dateparse import parse_datetime
 from django.utils.translation import gettext_lazy as _
 from django.utils.functional import cached_property
-from django.db import connection, DatabaseError, transaction, ProgrammingError, IntegrityError
+from django.db import connection, transaction, ProgrammingError, IntegrityError
 from django.db.models.fields.related import ForeignObjectRel, ManyToManyField
 from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor, ManyToManyDescriptor
 from django.db.models.query import QuerySet
@@ -137,7 +136,7 @@ def underscore_to_camelcase(s):
 @functools.cache
 def is_testing(argv=None):
     '''Return True if running django or py.test unit tests.'''
-    if os.environ.get('DJANGO_SETTINGS_MODULE') == 'awx.main.tests.settings_for_test':
+    if 'PYTEST_CURRENT_TEST' in os.environ.keys():
         return True
     argv = sys.argv if argv is None else argv
     if len(argv) >= 1 and ('py.test' in argv[0] or 'py/test.py' in argv[0]):
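The replacement check leans on pytest itself: pytest exports PYTEST_CURRENT_TEST into the environment while each test runs, so no settings-module convention is needed. A tiny illustration:

```python
import os


def running_under_pytest() -> bool:
    # Set by pytest for the duration of each test (setup/call/teardown).
    return 'PYTEST_CURRENT_TEST' in os.environ
```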
@@ -1156,26 +1155,11 @@ def create_partition(tblname, start=None):
                 f'ALTER TABLE {tblname} ATTACH PARTITION {tblname}_{partition_label} '
                 f'FOR VALUES FROM (\'{start_timestamp}\') TO (\'{end_timestamp}\');'
             )
 
     except (ProgrammingError, IntegrityError) as e:
-        cause = e.__cause__
-        if cause and hasattr(cause, 'sqlstate'):
-            # 42P07 = DuplicateTable
-            sqlstate = cause.sqlstate
-            sqlstate_str = psycopg.errors.lookup(sqlstate)
-
-            if psycopg.errors.DuplicateTable == sqlstate:
-                logger.info(f'Caught known error due to partition creation race: {e}')
-            else:
-                logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
-                raise
-    except DatabaseError as e:
-        cause = e.__cause__
-        if cause and hasattr(cause, 'sqlstate'):
-            sqlstate = cause.sqlstate
-            sqlstate_str = psycopg.errors.lookup(sqlstate)
-            logger.error('SQL Error state: {} - {}'.format(sqlstate, sqlstate_str))
-        raise
+        if 'already exists' in str(e):
+            logger.info(f'Caught known error due to partition creation race: {e}')
+        else:
+            raise
 
 
 def cleanup_new_process(func):
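The removed branches identified the partition-creation race by SQLSTATE rather than by message text. A hedged sketch of that older detection (it assumes psycopg 3, where the driver exception chained on `__cause__` carries a `sqlstate` attribute and `psycopg.errors.DuplicateTable` maps to code 42P07):

```python
import psycopg


def is_partition_race(exc: Exception) -> bool:
    # Django chains the driver error on __cause__; 42P07 is DuplicateTable.
    cause = exc.__cause__
    sqlstate = getattr(cause, 'sqlstate', None)
    return sqlstate == psycopg.errors.DuplicateTable.sqlstate
```

The surviving branch trades that precision for independence from the driver, matching on 'already exists' in the stringified exception instead.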
@@ -1,22 +0,0 @@ (file deleted)
from ansible_base.resource_registry.registry import ParentResource, ResourceConfig, ServiceAPIConfig, SharedResource
from ansible_base.resource_registry.shared_types import OrganizationType, TeamType, UserType

from awx.main import models


class APIConfig(ServiceAPIConfig):
    service_type = "awx"


RESOURCE_LIST = (
    ResourceConfig(
        models.Organization,
        shared_resource=SharedResource(serializer=OrganizationType, is_provider=False),
    ),
    ResourceConfig(models.User, shared_resource=SharedResource(serializer=UserType, is_provider=False), name_field="username"),
    ResourceConfig(
        models.Team,
        shared_resource=SharedResource(serializer=TeamType, is_provider=False),
        parent_resources=[ParentResource(model=models.Organization, field_name="organization")],
    ),
)
@@ -354,10 +354,8 @@ INSTALLED_APPS = [
     'solo',
     'ansible_base.rest_filters',
     'ansible_base.jwt_consumer',
-    'ansible_base.resource_registry',
 ]
 
-
 INTERNAL_IPS = ('127.0.0.1',)
 
 MAX_PAGE_SIZE = 200
@@ -759,14 +757,6 @@ SATELLITE6_INSTANCE_ID_VAR = 'foreman_id,foreman.id'
 INSIGHTS_INSTANCE_ID_VAR = 'insights_id'
 INSIGHTS_EXCLUDE_EMPTY_GROUPS = False
 
-# ----------------
-# -- Terraform State --
-# ----------------
-# TERRAFORM_ENABLED_VAR =
-# TERRAFORM_ENABLED_VALUE =
-TERRAFORM_INSTANCE_ID_VAR = 'id'
-TERRAFORM_EXCLUDE_EMPTY_GROUPS = True
-
 # ---------------------
 # ----- Custom -----
 # ---------------------
@@ -1120,7 +1110,6 @@ METRICS_SUBSYSTEM_CONFIG = {
 # django-ansible-base
 ANSIBLE_BASE_TEAM_MODEL = 'main.Team'
 ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization'
-ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'
 
 from ansible_base.lib import dynamic_config  # noqa: E402
 
@@ -72,8 +72,6 @@ AWX_CALLBACK_PROFILE = True
 # Allows user to trigger task managers directly for debugging and profiling purposes.
 # Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
 AWX_DISABLE_TASK_MANAGERS = False
-
-# Needed for launching runserver in debug mode
 # ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
 
 # Store a snapshot of default settings at this point before loading any
awx/ui/package-lock.json (generated, 46 changes)

@@ -13,7 +13,7 @@
         "@patternfly/react-table": "4.113.0",
         "ace-builds": "^1.10.1",
         "ansi-to-html": "0.7.2",
-        "axios": "^1.6.7",
+        "axios": "0.27.2",
         "d3": "7.6.1",
         "dagre": "^0.8.4",
         "dompurify": "2.4.0",
@@ -5940,13 +5940,12 @@
       }
     },
     "node_modules/axios": {
-      "version": "1.6.7",
-      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
-      "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
+      "version": "0.27.2",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
+      "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
       "dependencies": {
-        "follow-redirects": "^1.15.4",
-        "form-data": "^4.0.0",
-        "proxy-from-env": "^1.1.0"
+        "follow-redirects": "^1.14.9",
+        "form-data": "^4.0.0"
       }
     },
     "node_modules/axios/node_modules/form-data": {
@@ -10388,9 +10387,9 @@
       }
     },
     "node_modules/follow-redirects": {
-      "version": "1.15.5",
-      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
-      "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==",
+      "version": "1.15.1",
+      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
+      "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==",
       "funding": [
         {
           "type": "individual",
@@ -18350,11 +18349,6 @@
         "node": ">= 0.10"
       }
     },
-    "node_modules/proxy-from-env": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
-      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
-    },
     "node_modules/pseudolocale": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",
@@ -26921,13 +26915,12 @@
       "dev": true
     },
     "axios": {
-      "version": "1.6.7",
-      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.7.tgz",
-      "integrity": "sha512-/hDJGff6/c7u0hDkvkGxR/oy6CbCs8ziCsC7SqmhjfozqiJGc8Z11wrv9z9lYfY4K8l+H9TpjcMDX0xOZmx+RA==",
+      "version": "0.27.2",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz",
+      "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==",
       "requires": {
-        "follow-redirects": "^1.15.4",
-        "form-data": "^4.0.0",
-        "proxy-from-env": "^1.1.0"
+        "follow-redirects": "^1.14.9",
+        "form-data": "^4.0.0"
       },
       "dependencies": {
         "form-data": {
@@ -30378,9 +30371,9 @@
       }
     },
     "follow-redirects": {
-      "version": "1.15.5",
-      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz",
-      "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw=="
+      "version": "1.15.1",
+      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
+      "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA=="
     },
     "fork-ts-checker-webpack-plugin": {
       "version": "6.5.2",
@@ -36332,11 +36325,6 @@
         }
       }
     },
-    "proxy-from-env": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
-      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
-    },
     "pseudolocale": {
       "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/pseudolocale/-/pseudolocale-1.2.0.tgz",
@@ -13,7 +13,7 @@
     "@patternfly/react-table": "4.113.0",
     "ace-builds": "^1.10.1",
     "ansi-to-html": "0.7.2",
-    "axios": "^1.6.7",
+    "axios": "0.27.2",
     "d3": "7.6.1",
     "dagre": "^0.8.4",
     "dompurify": "2.4.0",
@@ -6,6 +6,7 @@ import sortErrorMessages from './sortErrorMessages';
 
 function FormSubmitError({ error }) {
   const [errorMessage, setErrorMessage] = useState(null);
+  const [fieldError, setFieldsMessage] = useState(null);
   const { values, setErrors } = useFormikContext();
 
   useEffect(() => {
@@ -15,6 +16,7 @@ function FormSubmitError({ error }) {
     }
     if (fieldErrors) {
       setErrors(fieldErrors);
+      setFieldsMessage(fieldErrors);
     }
   }, [error, setErrors, values]);
 
@@ -30,10 +32,34 @@ function FormSubmitError({ error }) {
         ouiaId="form-submit-error-alert"
         title={
           Array.isArray(errorMessage)
-            ? errorMessage.map((msg) => <div key={msg}>{msg}</div>)
-            : errorMessage
+            ? errorMessage.map((msg) => (
+                <div key={msg}>
+                  {msg.messages ? msg.messages : JSON.stringify(msg)}
+                </div>
+              ))
+            : errorMessage && (
+                <div>
+                  {errorMessage.messages
+                    ? errorMessage.messages
+                    : JSON.stringify(errorMessage)}
+                </div>
+              )
         }
-      />
+      >
+        {Array.isArray(fieldError)
+          ? fieldError.map((msg) => (
+              <div key={msg}>
+                {msg.messages ? msg.messages : JSON.stringify(msg)}
+              </div>
+            ))
+          : fieldError && (
+              <div>
+                {fieldError.messages
+                  ? fieldError.messages
+                  : JSON.stringify(fieldError)}
+              </div>
+            )}
+      </Alert>
     </FormFullWidthLayout>
   );
 }
@@ -5,14 +5,15 @@ import { mountWithContexts } from '../../../testUtils/enzymeHelpers';
 import FormSubmitError from './FormSubmitError';
 
 describe('<FormSubmitError>', () => {
-  test('should render null when no error present', () => {
+  test('should render null when no error present', async () => {
     const wrapper = mountWithContexts(
       <Formik>{() => <FormSubmitError error={null} />}</Formik>
     );
-    expect(wrapper.find('FormSubmitError').text()).toEqual('');
+    const ele = await wrapper.find('FormSubmitError').text();
+    expect(ele).toEqual('');
   });
 
-  test('should pass field errors to Formik', () => {
+  test('should pass field errors to Formik', async () => {
     const error = {
       response: {
         data: {
@@ -30,26 +31,7 @@ describe('<FormSubmitError>', () => {
       )}
       </Formik>
     );
-    expect(wrapper.find('p').text()).toEqual('invalid');
-  });
-
-  test('should display error message if field errors not provided', async () => {
-    const realConsole = global.console;
-    global.console = {
-      error: jest.fn(),
-    };
-    const error = {
-      message: 'There was an error',
-    };
-    let wrapper;
-    await act(async () => {
-      wrapper = mountWithContexts(
-        <Formik>{() => <FormSubmitError error={error} />}</Formik>
-      );
-    });
-    wrapper.update();
-    expect(wrapper.find('Alert').prop('title')).toEqual('There was an error');
-    expect(global.console.error).toHaveBeenCalledWith(error);
-    global.console = realConsole;
+    const pp = await wrapper.find('p').text();
+    expect(pp).toEqual('invalid');
   });
 });
@@ -9,15 +9,18 @@ export default function sortErrorMessages(error, formValues = {}) {
     Object.keys(error.response.data).length > 0
   ) {
     const parsed = parseFieldErrors(error.response.data, formValues);
 
     return {
-      formError: parsed.formErrors.join('; '),
+      formError:
+        parsed.formErrors.indexOf(';') > -1
+          ? parsed.formErrors.join('; ')
+          : 'Error in fields',
       fieldErrors: Object.keys(parsed.fieldErrors).length
         ? parsed.fieldErrors
         : null,
     };
   }
   /* eslint-disable-next-line no-console */
   console.error(error);
 
   return {
     formError: error.message,
     fieldErrors: null,
@@ -35,7 +35,7 @@ describe('sortErrorMessages', () => {
     };
     const parsed = sortErrorMessages(error, { foo: '', baz: '' });
     expect(parsed).toEqual({
-      formError: '',
+      formError: 'Error in fields',
       fieldErrors: {
         foo: 'bar',
         baz: 'bam',
@@ -54,7 +54,7 @@ describe('sortErrorMessages', () => {
     };
     const parsed = sortErrorMessages(error, { foo: '', baz: '' });
     expect(parsed).toEqual({
-      formError: 'oopsie',
+      formError: 'Error in fields',
       fieldErrors: {
         baz: 'bam',
       },
@@ -72,7 +72,7 @@ describe('sortErrorMessages', () => {
     };
     const parsed = sortErrorMessages(error, { foo: '', baz: '' });
     expect(parsed).toEqual({
-      formError: '',
+      formError: 'Error in fields',
       fieldErrors: {
         foo: 'bar; bar2',
         baz: 'bam',
@@ -103,7 +103,7 @@ describe('sortErrorMessages', () => {
     };
     const parsed = sortErrorMessages(error, formValues);
     expect(parsed).toEqual({
-      formError: '',
+      formError: 'Error in fields',
       fieldErrors: {
         inputs: {
           url: 'URL Error',
@@ -135,7 +135,7 @@ describe('sortErrorMessages', () => {
     };
     const parsed = sortErrorMessages(error, formValues);
     expect(parsed).toEqual({
-      formError: 'Other stuff error',
+      formError: 'Error in fields',
       fieldErrors: {
         inputs: {
           url: 'URL Error',
@@ -21,8 +21,6 @@ const ansibleDocUrls = {
     'https://docs.ansible.com/ansible/latest/collections/community/vmware/vmware_vm_inventory_inventory.html',
   constructed:
     'https://docs.ansible.com/ansible/latest/collections/ansible/builtin/constructed_inventory.html',
-  terraform:
-    'https://github.com/ansible-collections/cloud.terraform/blob/stable-statefile-inventory/plugins/inventory/terraform_state.py',
 };
 
 const getInventoryHelpTextStrings = () => ({
@@ -121,10 +119,10 @@ const getInventoryHelpTextStrings = () => ({
       <br />
       {value && (
         <div>
-          {t`If you want the Inventory Source to update on launch , click on Update on Launch,
-          and also go to `}
+          {t`If you want the Inventory Source to update on
+          launch and on project update, click on Update on launch, and also go to`}
           <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
-          {t`and click on Update Revision on Launch.`}
+          {t`and click on Update Revision on Launch`}
         </div>
       )}
     </>
@@ -140,8 +138,8 @@ const getInventoryHelpTextStrings = () => ({
       <br />
       {value && (
         <div>
-          {t`If you want the Inventory Source to update on launch , click on Update on Launch,
-          and also go to `}
+          {t`If you want the Inventory Source to update on
+          launch and on project update, click on Update on launch, and also go to`}
           <Link to={`/projects/${value.id}/details`}> {value.name} </Link>
           {t`and click on Update Revision on Launch`}
         </div>
@@ -23,7 +23,6 @@ import {
   SCMSubForm,
   SatelliteSubForm,
   ControllerSubForm,
-  TerraformSubForm,
   VMwareSubForm,
   VirtualizationSubForm,
 } from './InventorySourceSubForms';
@@ -215,14 +214,6 @@ const InventorySourceFormFields = ({
         }
       />
     ),
-    terraform: (
-      <TerraformSubForm
-        autoPopulateCredential={
-          !source?.id || source?.source !== 'terraform'
-        }
-        sourceOptions={sourceOptions}
-      />
-    ),
     vmware: (
       <VMwareSubForm
         autoPopulateCredential={
@@ -38,7 +38,6 @@ describe('<InventorySourceForm />', () => {
         ['openstack', 'OpenStack'],
         ['rhv', 'Red Hat Virtualization'],
         ['controller', 'Red Hat Ansible Automation Platform'],
-        ['terraform', 'Terraform State'],
       ],
     },
   },
@@ -1,59 +0,0 @@ (file deleted)
import React, { useCallback } from 'react';
import { useField, useFormikContext } from 'formik';
import { t } from '@lingui/macro';
import getDocsBaseUrl from 'util/getDocsBaseUrl';
import { useConfig } from 'contexts/Config';
import CredentialLookup from 'components/Lookup/CredentialLookup';
import { required } from 'util/validators';
import {
  OptionsField,
  VerbosityField,
  EnabledVarField,
  EnabledValueField,
  HostFilterField,
  SourceVarsField,
} from './SharedFields';
import getHelpText from '../Inventory.helptext';

const TerraformSubForm = ({ autoPopulateCredential }) => {
  const helpText = getHelpText();
  const { setFieldValue, setFieldTouched } = useFormikContext();
  const [credentialField, credentialMeta, credentialHelpers] =
    useField('credential');
  const config = useConfig();
  const handleCredentialUpdate = useCallback(
    (value) => {
      setFieldValue('credential', value);
      setFieldTouched('credential', true, false);
    },
    [setFieldValue, setFieldTouched]
  );
  const docsBaseUrl = getDocsBaseUrl(config);

  return (
    <>
      <CredentialLookup
        credentialTypeNamespace="terraform"
        label={t`Credential`}
        helperTextInvalid={credentialMeta.error}
        isValid={!credentialMeta.touched || !credentialMeta.error}
        onBlur={() => credentialHelpers.setTouched()}
        onChange={handleCredentialUpdate}
        value={credentialField.value}
        required
        autoPopulate={autoPopulateCredential}
        validate={required(t`Select a value for this field`)}
      />
      <VerbosityField />
      <HostFilterField />
      <EnabledVarField />
      <EnabledValueField />
      <OptionsField />
      <SourceVarsField
        popoverContent={helpText.sourceVars(docsBaseUrl, 'terraform')}
      />
    </>
  );
};

export default TerraformSubForm;
@@ -1,70 +0,0 @@ (file deleted)
import React from 'react';
import { act } from 'react-dom/test-utils';
import { Formik } from 'formik';
import { CredentialsAPI } from 'api';
import { mountWithContexts } from '../../../../../testUtils/enzymeHelpers';
import TerraformSubForm from './TerraformSubForm';

jest.mock('../../../../api');

const initialValues = {
  credential: null,
  overwrite: false,
  overwrite_vars: false,
  source_path: '',
  source_project: null,
  source_script: null,
  source_vars: '---\n',
  update_cache_timeout: 0,
  update_on_launch: true,
  verbosity: 1,
};

const mockSourceOptions = {
  actions: {
    POST: {},
  },
};

describe('<TerraformSubForm />', () => {
  let wrapper;

  beforeEach(async () => {
    CredentialsAPI.read.mockResolvedValue({
      data: { count: 0, results: [] },
    });
    await act(async () => {
      wrapper = mountWithContexts(
        <Formik initialValues={initialValues}>
          <TerraformSubForm sourceOptions={mockSourceOptions} />
        </Formik>
      );
    });
  });

  afterAll(() => {
    jest.clearAllMocks();
  });

  test('should render subform fields', () => {
    expect(wrapper.find('FormGroup[label="Credential"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Verbosity"]')).toHaveLength(1);
    expect(wrapper.find('FormGroup[label="Update options"]')).toHaveLength(1);
    expect(
      wrapper.find('FormGroup[label="Cache timeout (seconds)"]')
    ).toHaveLength(1);
    expect(
      wrapper.find('VariablesField[label="Source variables"]')
    ).toHaveLength(1);
  });

  test('should make expected api calls', () => {
    expect(CredentialsAPI.read).toHaveBeenCalledTimes(1);
    expect(CredentialsAPI.read).toHaveBeenCalledWith({
      credential_type__namespace: 'terraform',
      order_by: 'name',
      page: 1,
      page_size: 5,
    });
  });
});
@@ -6,6 +6,5 @@ export { default as OpenStackSubForm } from './OpenStackSubForm';
 export { default as SCMSubForm } from './SCMSubForm';
 export { default as SatelliteSubForm } from './SatelliteSubForm';
 export { default as ControllerSubForm } from './ControllerSubForm';
-export { default as TerraformSubForm } from './TerraformSubForm';
 export { default as VMwareSubForm } from './VMwareSubForm';
 export { default as VirtualizationSubForm } from './VirtualizationSubForm';
@@ -4,8 +4,6 @@
 from django.conf import settings
 from django.urls import re_path, include
 
-from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls
-
 from awx.main.views import handle_400, handle_403, handle_404, handle_500, handle_csp_violation, handle_login_redirect
 
 
@@ -13,7 +11,6 @@ urlpatterns = [
     re_path(r'', include('awx.ui.urls', namespace='ui')),
     re_path(r'^ui_next/.*', include('awx.ui_next.urls', namespace='ui_next')),
     re_path(r'^api/', include('awx.api.urls', namespace='api')),
-    re_path(r'^api/v2/', include(resource_api_urls)),
     re_path(r'^sso/', include('awx.sso.urls', namespace='sso')),
     re_path(r'^sso/', include('social_django.urls', namespace='social')),
     re_path(r'^(?:api/)?400.html$', handle_400),
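For context on the removed mount: django-ansible-base publishes its resource registry routes as a plain urlpatterns list, so exposing them under the API was a one-line include. A minimal standalone sketch of that pattern (module layout assumed, per the import in the diff above):

```python
from django.urls import include, re_path

from ansible_base.resource_registry.urls import urlpatterns as resource_api_urls

urlpatterns = [
    # mounts the registry's routes under the /api/v2/ prefix
    re_path(r'^api/v2/', include(resource_api_urls)),
]
```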
@@ -1,20 +0,0 @@ (file deleted)
The MIT License

Copyright (C) 2016-2018 Rob Wu <rob@robwu.nl>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -20,13 +20,9 @@ jupyter
 # matplotlib - Caused issues when bumping to setuptools 58
 backports.tempfile  # support in unit tests for py32+ tempfile.TemporaryDirectory
 git+https://github.com/artefactual-labs/mockldap.git@master#egg=mockldap
+sdb
+remote-pdb
 gprof2dot
 atomicwrites==1.4.0
 flake8
 yamllint
-pip>=21.3  # PEP 660 – Editable installs for pyproject.toml based builds (wheel based)
-
-# python debuggers
-debugpy
-remote-pdb
-sdb
@@ -5,4 +5,4 @@ git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner
 # specifically need https://github.com/robgolding/django-radius/pull/27
 git+https://github.com/ansible/django-radius.git@develop#egg=django-radius
 git+https://github.com/ansible/python3-saml.git@devel#egg=python3-saml
-django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest_filters,jwt_consumer,resource_registry]
+django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest_filters,jwt_consumer]
@@ -249,7 +249,7 @@ RUN for dir in \
       /var/lib/awx/.local/share/containers/storage \
       /var/run/awx-rsyslog \
       /var/log/nginx \
-      /var/lib/pgsql \
+      /var/lib/postgresql \
       /var/run/supervisor \
       /var/run/awx-receptor \
       /var/lib/nginx ; \
@@ -300,6 +300,7 @@ RUN ln -sf /dev/stdout /var/log/nginx/access.log && \
 {% endif %}
 
 ENV HOME="/var/lib/awx"
+ENV PATH="/usr/pgsql-12/bin:${PATH}"
 
 {% if build_dev|bool %}
 ENV PATH="/var/lib/awx/venv/awx/bin/:${PATH}"
@@ -36,7 +36,7 @@ stderr_logfile_maxbytes=0
 
 {% if kube_dev | bool %}
 [program:awx-autoreload]
-command = /awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
+command = /awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx 'supervisorctl -c /etc/supervisord_rsyslog.conf restart tower-processes:*'
 autostart = true
 autorestart = true
 stopasgroup=true
@@ -58,7 +58,7 @@ stderr_logfile_maxbytes=0
 
 {% if kube_dev | bool %}
 [program:awx-autoreload]
-command = /awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
+command = /awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx 'supervisorctl -c /etc/supervisord_task.conf restart tower-processes:*'
 autostart = true
 autorestart = true
 stopasgroup=true
@@ -91,7 +91,7 @@ stderr_logfile_maxbytes=0
 
 {% if kube_dev | bool %}
 [program:awx-autoreload]
-command = /awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx
+command = /awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx 'supervisorctl -c /etc/supervisord_web.conf restart tower-processes:*'
 autostart = true
 autorestart = true
 stopasgroup=true
@@ -207,16 +207,17 @@ services:
   #   context: ../../docker-compose
   #   dockerfile: Dockerfile-logstash
   postgres:
-    image: quay.io/sclorg/postgresql-15-c9s
+    image: postgres:12
     container_name: tools_postgres_1
     # additional logging settings for postgres can be found https://www.postgresql.org/docs/current/runtime-config-logging.html
-    command: run-postgresql -c log_destination=stderr -c log_min_messages=info -c log_min_duration_statement={{ pg_log_min_duration_statement|default(1000) }} -c max_connections={{ pg_max_connections|default(1024) }}
+    command: postgres -c log_destination=stderr -c log_min_messages=info -c log_min_duration_statement={{ pg_log_min_duration_statement|default(1000) }} -c max_connections={{ pg_max_connections|default(1024) }}
     environment:
-      POSTGRESQL_USER: {{ pg_username }}
-      POSTGRESQL_DATABASE: {{ pg_database }}
-      POSTGRESQL_PASSWORD: {{ pg_password }}
+      POSTGRES_HOST_AUTH_METHOD: trust
+      POSTGRES_USER: {{ pg_username }}
+      POSTGRES_DB: {{ pg_database }}
+      POSTGRES_PASSWORD: {{ pg_password }}
     volumes:
-      - "awx_db_15:/var/lib/pgsql/data"
+      - "awx_db:/var/lib/postgresql/data"
     networks:
       - awx
     ports:
@@ -304,9 +305,8 @@ services:
 {% endif %}
 
 volumes:
-  {# For the postgres 15 db upgrade we changed the mount name because 15 can't load a 12 DB #}
-  awx_db_15:
-    name: tools_awx_db_15
+  awx_db:
+    name: tools_awx_db
 {% for i in range(control_plane_node_count|int) -%}
 {% set container_postfix = loop.index %}
   redis_socket_{{ container_postfix }}:
@@ -29,10 +29,6 @@ http {
         server localhost:8050;
     }
 
-    upstream runserver {
-        server localhost:8052;
-    }
-
     upstream daphne {
         server localhost:8051;
     }
@@ -38,13 +38,4 @@ location {{ ingress_path }} {
     uwsgi_read_timeout 120s;
     uwsgi_pass uwsgi;
     include /etc/nginx/uwsgi_params;
-    error_page 502 = @fallback;
-}
-
-# Enable scenario where we shutdown uwsgi and launching runserver for debugging purposes
-location @fallback {
-    # Add trailing / if missing
-    rewrite ^(.*)$http_host(.*[^/])$ $1$http_host$2/ permanent;
-    proxy_pass http://runserver;
-    proxy_set_header Host $http_host;
-}
+}
@@ -1,8 +1,8 @@
 #!/bin/env bash
 
-if [ $# -lt 1 ]; then
+if [ $# -lt 2 ]; then
     echo "Usage:"
-    echo "  autoreload directory"
+    echo "  autoreload directory command"
     exit 1
 fi
 
@@ -13,14 +13,8 @@ inotifywait -mrq -e create,delete,attrib,close_write,move --exclude '(/awx_devel
     since_last=$((this_reload-last_reload))
     if [[ "$file" =~ ^[^.].*\.py$ ]] && [[ "$since_last" -gt 1 ]]; then
         echo "File changed: $file"
-        if [ -n "$SUPERVISOR_CONFIG_PATH" ]; then
-            supervisorctl_command="supervisorctl -c $SUPERVISOR_CONFIG_PATH"
-        else
-            supervisorctl_command="supervisorctl"
-        fi
-        tower_processes=`$supervisorctl_command status tower-processes:* | grep -v STOPPED | awk '{print $1}' | tr '\n' ' '`
-        echo echo "Running command: $supervisorctl_command restart $tower_processes"
-        eval $supervisorctl_command restart $tower_processes
+        echo "Running command: $2"
+        eval $2
         last_reload=`date +%s`
     fi
 done
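A rough Python equivalent of the reworked loop, for readers who do not parse bash quickly (the inotifywait invocation and one-second debounce mirror the script; the reload command is whatever the caller passes, like `$2` above):

```python
import subprocess
import sys
import time


def autoreload(directory: str, command: str) -> None:
    # stream filesystem events, recursively, forever
    watcher = subprocess.Popen(
        ['inotifywait', '-mrq', '-e', 'create,delete,attrib,close_write,move', directory],
        stdout=subprocess.PIPE,
        text=True,
    )
    last_reload = 0.0
    for line in watcher.stdout:
        filename = line.split()[-1]
        now = time.time()
        # only non-hidden .py files, at most one reload per second
        if filename.endswith('.py') and not filename.startswith('.') and now - last_reload > 1:
            print(f'File changed: {filename}')
            subprocess.run(command, shell=True)
            last_reload = now


if __name__ == '__main__':
    autoreload(sys.argv[1], sys.argv[2])
```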