Mirror of https://github.com/ansible/awx.git (synced 2026-02-05 03:24:50 -03:30)

Compare commits: constructe...dependabot (46 commits)
| SHA1 |
|---|
| ea87f07e53 |
| 2dcda04a9e |
| 52d46c88e4 |
| c2df22e0f0 |
| cf21eab7f4 |
| ccaace8b30 |
| 2902b40084 |
| 9669b9dd2f |
| d27aada817 |
| 2fca07ee4c |
| 335ac636b5 |
| f4bcc03ac7 |
| 3051384f95 |
| 811ecb8673 |
| 5e28f5dca1 |
| d088d36448 |
| 89e41597a6 |
| 283adc30a8 |
| 019e6a52fe |
| 35e5610642 |
| 3a303875bb |
| 4499a50019 |
| 3fe46e2e27 |
| 6d3f39fe92 |
| a3233b5fdd |
| fe3aa6ce2b |
| 77ec46f6cf |
| b5f240ce70 |
| fb2647ff7b |
| 35fbb94aa6 |
| f2ab8d637c |
| 166b586591 |
| d1c608a281 |
| b4803ca894 |
| 23a34c5dc9 |
| bef3da6fb2 |
| 7f50679e68 |
| 52d071f9d1 |
| 26a888547d |
| 951eee944c |
| 4630757f5f |
| eb9431ee1f |
| fd6605932a |
| 5d96ee084d |
| e2cee10767 |
| 31c2e1a450 |
.github/workflows/promote.yml (vendored, 1 line changed)
@@ -10,6 +10,7 @@ on:

 jobs:
   promote:
+    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
     steps:
       - name: Checkout awx
.github/workflows/stage.yml (vendored, 1 line changed)
@@ -21,6 +21,7 @@ on:

 jobs:
   stage:
+    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
     permissions:
       packages: write
Makefile (14 lines changed)
@@ -203,19 +203,7 @@ uwsgi: collectstatic
 	@if [ "$(VENV_BASE)" ]; then \
 	    . $(VENV_BASE)/awx/bin/activate; \
 	fi; \
-	uwsgi -b 32768 \
-	    --socket 127.0.0.1:8050 \
-	    --module=awx.wsgi:application \
-	    --home=/var/lib/awx/venv/awx \
-	    --chdir=/awx_devel/ \
-	    --vacuum \
-	    --processes=5 \
-	    --harakiri=120 --master \
-	    --no-orphans \
-	    --max-requests=1000 \
-	    --stats /tmp/stats.socket \
-	    --lazy-apps \
-	    --logformat "%(addr) %(method) %(uri) - %(proto) %(status)"
+	uwsgi /etc/tower/uwsgi.ini

 awx-autoreload:
 	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"
@@ -1,5 +1,4 @@
 # Django
-from django.conf import settings
 from django.utils.translation import gettext_lazy as _

 # Django REST Framework
@@ -9,6 +8,7 @@ from rest_framework import serializers
 from awx.conf import fields, register, register_validate
 from awx.api.fields import OAuth2ProviderField
 from oauth2_provider.settings import oauth2_settings
+from awx.sso.common import is_remote_auth_enabled


 register(
@@ -108,19 +108,8 @@ register(


 def authentication_validate(serializer, attrs):
-    remote_auth_settings = [
-        'AUTH_LDAP_SERVER_URI',
-        'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
-        'SOCIAL_AUTH_GITHUB_KEY',
-        'SOCIAL_AUTH_GITHUB_ORG_KEY',
-        'SOCIAL_AUTH_GITHUB_TEAM_KEY',
-        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
-        'RADIUS_SERVER',
-        'TACACSPLUS_HOST',
-    ]
-    if attrs.get('DISABLE_LOCAL_AUTH', False):
-        if not any(getattr(settings, s, None) for s in remote_auth_settings):
-            raise serializers.ValidationError(_("There are no remote authentication systems configured."))
+    if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
+        raise serializers.ValidationError(_("There are no remote authentication systems configured."))
    return attrs
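The hard-coded list above is what the new helper replaces; is_remote_auth_enabled() (added to awx.sso.common later in this diff) also discovers any SOCIAL_AUTH_*_KEY setting dynamically. A standalone sketch of that discovery against a stand-in settings object:

```python
# Standalone sketch of the discovery done by is_remote_auth_enabled() (see
# awx/sso/common.py later in this diff); FakeSettings is a stand-in object.
class FakeSettings:
    AUTH_LDAP_SERVER_URI = None
    SOCIAL_AUTH_OIDC_KEY = 'configured'  # missed by the old hard-coded list
    SOCIAL_AUTH_SAML_SP_PRIVATE_KEY = 'key material, not a login provider'

settings = FakeSettings()

fixed = ['AUTH_LDAP_SERVER_URI', 'SOCIAL_AUTH_SAML_ENABLED_IDPS', 'RADIUS_SERVER', 'TACACSPLUS_HOST']
dynamic = [s for s in dir(settings) if s.startswith('SOCIAL_AUTH_') and s.endswith('_KEY') and 'SAML' not in s]

assert any(getattr(settings, s, None) for s in fixed + dynamic)  # OIDC now counts
```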
@@ -28,7 +28,7 @@ from rest_framework import generics
 from rest_framework.response import Response
 from rest_framework import status
 from rest_framework import views
-from rest_framework.permissions import AllowAny
+from rest_framework.permissions import IsAuthenticated
 from rest_framework.renderers import StaticHTMLRenderer
 from rest_framework.negotiation import DefaultContentNegotiation

@@ -822,7 +822,7 @@ def trigger_delayed_deep_copy(*args, **kwargs):

 class CopyAPIView(GenericAPIView):
     serializer_class = CopySerializer
-    permission_classes = (AllowAny,)
+    permission_classes = (IsAuthenticated,)
     copy_return_serializer_class = None
     new_in_330 = True
     new_in_api_v2 = True
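The swap from AllowAny to IsAuthenticated means anonymous requests are rejected before the copy logic runs. A minimal DRF sketch (hypothetical view, not AWX code) with the same guard:

```python
# Hypothetical minimal DRF view showing the effect of the permission swap:
# with IsAuthenticated, anonymous requests get a 401/403 before post() runs.
from rest_framework.generics import GenericAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response


class CopyishView(GenericAPIView):
    permission_classes = (IsAuthenticated,)  # previously AllowAny

    def post(self, request, *args, **kwargs):
        return Response({'copied': True})
```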
@@ -108,7 +108,6 @@ from awx.main.utils import (
     extract_ansible_vars,
     encrypt_dict,
     prefetch_page_capabilities,
-    get_external_account,
     truncate_stdout,
 )
 from awx.main.utils.filters import SmartFilter
@@ -124,6 +123,8 @@ from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, Ver
 # AWX Utils
 from awx.api.validators import HostnameRegexValidator

+from awx.sso.common import get_external_account
+
 logger = logging.getLogger('awx.api.serializers')

 # Fields that should be summarized regardless of object type.
@@ -987,23 +988,8 @@ class UserSerializer(BaseSerializer):
     def _update_password(self, obj, new_password):
         # For now we're not raising an error, just not saving password for
         # users managed by LDAP who already have an unusable password set.
-        if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
-            try:
-                if obj.pk and obj.profile.ldap_dn and not obj.has_usable_password():
-                    new_password = None
-            except AttributeError:
-                pass
-        if (
-            getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
-            or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
-        ) and obj.social_auth.all():
-            new_password = None
-        if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and obj.enterprise_auth.all():
-            new_password = None
-        if new_password:
+        # get_external_account will return something like ldap or enterprise or None if the user isn't external. We only want to allow a password update for a None option
+        if new_password and not get_external_account(obj):
             obj.set_password(new_password)
             obj.save(update_fields=['password'])
@@ -4028,7 +4014,7 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
         # raw SCM URLs in their stdout (which *could* contain passwords)
         # attempt to detect and filter HTTP basic auth passwords in the stdout
         # of these types of events
-        if obj.event_data.get('task_action') in ('git', 'svn'):
+        if obj.event_data.get('task_action') in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
             try:
                 return json.loads(UriCleaner.remove_sensitive(json.dumps(obj.event_data)))
             except Exception:
@@ -4288,7 +4288,7 @@ class WorkflowApprovalTemplateJobsList(SubListAPIView):
     parent_key = 'workflow_approval_template'


-class WorkflowApprovalList(ListCreateAPIView):
+class WorkflowApprovalList(ListAPIView):
     model = models.WorkflowApproval
     serializer_class = serializers.WorkflowApprovalListSerializer
awx/conf/migrations/_ldap_group_type.py

@@ -1,7 +1,11 @@
 import inspect

 from django.conf import settings
 from django.utils.timezone import now

+import logging
+
+logger = logging.getLogger('awx.conf.migrations')
+

 def fill_ldap_group_type_params(apps, schema_editor):
@@ -15,7 +19,7 @@ def fill_ldap_group_type_params(apps, schema_editor):
         entry = qs[0]
         group_type_params = entry.value
     else:
-        entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())
+        return  # for new installs we prefer to use the default value

     init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:])
     for k in list(group_type_params.keys()):
@@ -23,4 +27,5 @@ def fill_ldap_group_type_params(apps, schema_editor):
             del group_type_params[k]

     entry.value = group_type_params
+    logger.warning(f'Migration updating AUTH_LDAP_GROUP_TYPE_PARAMS with value {entry.value}')
     entry.save()
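Helpers like this are normally invoked from a schema migration via RunPython; a minimal sketch of that wiring (hypothetical migration module, the actual caller is not shown in this diff):

```python
# Hypothetical Django migration wiring for the helper above; the migration
# file that actually calls fill_ldap_group_type_params is not part of this diff.
from django.db import migrations

from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params


class Migration(migrations.Migration):
    dependencies = [('conf', '0001_initial')]  # assumed dependency

    operations = [
        migrations.RunPython(fill_ldap_group_type_params, migrations.RunPython.noop),
    ]
```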
awx/conf/tests/functional/test_migrations.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+import pytest
+
+from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
+from awx.conf.models import Setting
+
+from django.apps import apps
+
+
+@pytest.mark.django_db
+def test_fill_group_type_params_no_op():
+    fill_ldap_group_type_params(apps, 'dont-use-me')
+    assert Setting.objects.count() == 0
+
+
+@pytest.mark.django_db
+def test_keep_old_setting_with_default_value():
+    Setting.objects.create(key='AUTH_LDAP_GROUP_TYPE', value={'name_attr': 'cn', 'member_attr': 'member'})
+    fill_ldap_group_type_params(apps, 'dont-use-me')
+    assert Setting.objects.count() == 1
+    s = Setting.objects.first()
+    assert s.value == {'name_attr': 'cn', 'member_attr': 'member'}
+
+
+# NOTE: would be good to test the removal of attributes by migration
+# but this requires fighting with the validator and is not done here
awx/main/management/commands/disable_instance.py (new file, 143 lines)
@@ -0,0 +1,143 @@
+import time
+from urllib.parse import urljoin
+
+from argparse import ArgumentTypeError
+
+from django.conf import settings
+from django.core.management.base import BaseCommand, CommandError
+from django.db.models import Q
+from django.utils.timezone import now
+
+from awx.main.models import Instance, UnifiedJob
+
+
+class AWXInstance:
+    def __init__(self, **filter):
+        self.filter = filter
+        self.get_instance()
+
+    def get_instance(self):
+        filter = self.filter if self.filter is not None else dict(hostname=settings.CLUSTER_HOST_ID)
+        qs = Instance.objects.filter(**filter)
+        if not qs.exists():
+            raise ValueError(f"No AWX instance found with {filter} parameters")
+        self.instance = qs.first()
+
+    def disable(self):
+        if self.instance.enabled:
+            self.instance.enabled = False
+            self.instance.save()
+            return True
+
+    def enable(self):
+        if not self.instance.enabled:
+            self.instance.enabled = True
+            self.instance.save()
+            return True
+
+    def jobs(self):
+        return UnifiedJob.objects.filter(
+            Q(controller_node=self.instance.hostname) | Q(execution_node=self.instance.hostname), status__in=("running", "waiting")
+        )
+
+    def jobs_pretty(self):
+        jobs = []
+        for j in self.jobs():
+            job_started = j.started if j.started else now()
+            # similar calculation of `elapsed` as the corresponding serializer
+            # does
+            td = now() - job_started
+            elapsed = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0)
+            elapsed = float(elapsed)
+            details = dict(
+                name=j.name,
+                url=j.get_ui_url(),
+                elapsed=elapsed,
+            )
+            jobs.append(details)
+
+        jobs = sorted(jobs, reverse=True, key=lambda j: j["elapsed"])
+
+        return ", ".join([f"[\"{j['name']}\"]({j['url']})" for j in jobs])
+
+    def instance_pretty(self):
+        instance = (
+            self.instance.hostname,
+            urljoin(settings.TOWER_URL_BASE, f"/#/instances/{self.instance.pk}/details"),
+        )
+        return f"[\"{instance[0]}\"]({instance[1]})"
+
+
+class Command(BaseCommand):
+    help = "Disable instance, optionally waiting for all its managed jobs to finish."
+
+    @staticmethod
+    def ge_1(arg):
+        if arg == "inf":
+            return float("inf")
+
+        int_arg = int(arg)
+        if int_arg < 1:
+            raise ArgumentTypeError(f"The value must be a positive number >= 1. Provided: \"{arg}\"")
+        return int_arg
+
+    def add_arguments(self, parser):
+        filter_group = parser.add_mutually_exclusive_group()
+
+        filter_group.add_argument(
+            "--hostname",
+            type=str,
+            default=settings.CLUSTER_HOST_ID,
+            help=f"{Instance.hostname.field.help_text} Defaults to the hostname of the machine where the Python interpreter is currently executing".strip(),
+        )
+        filter_group.add_argument("--id", type=self.ge_1, help=Instance.id.field.help_text)
+
+        parser.add_argument(
+            "--wait",
+            action="store_true",
+            help="Wait for jobs managed by the instance to finish. With default retry arguments waits ~1h",
+        )
+
+        parser.add_argument(
+            "--retry",
+            type=self.ge_1,
+            default=120,
+            help="Number of retries when waiting for jobs to finish. Default: 120. Also accepts \"inf\" to wait indefinitely",
+        )
+
+        parser.add_argument(
+            "--retry_sleep",
+            type=self.ge_1,
+            default=30,
+            help="Number of seconds to sleep before consecutive retries when waiting. Default: 30",
+        )
+
+    def handle(self, *args, **options):
+        try:
+            filter = dict(id=options["id"]) if options["id"] is not None else dict(hostname=options["hostname"])
+            instance = AWXInstance(**filter)
+        except ValueError as e:
+            raise CommandError(e)
+
+        if instance.disable():
+            self.stdout.write(self.style.SUCCESS(f"Instance {instance.instance_pretty()} has been disabled"))
+        else:
+            self.stdout.write(f"Instance {instance.instance_pretty()} has already been disabled")
+
+        if not options["wait"]:
+            return
+
+        rc = 1
+        while instance.jobs().count() > 0:
+            if rc < options["retry"]:
+                self.stdout.write(
+                    f"{rc}/{options['retry']}: Waiting {options['retry_sleep']}s before the next attempt to see if the following instance's managed jobs have finished: {instance.jobs_pretty()}"
+                )
+                rc += 1
+                time.sleep(options["retry_sleep"])
+            else:
+                raise CommandError(
+                    f"{rc}/{options['retry']}: No more retry attempts left, but the instance still has associated managed jobs: {instance.jobs_pretty()}"
+                )
+        else:
+            self.stdout.write(self.style.SUCCESS("Done waiting for instance's managed jobs to finish!"))
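Two side notes on the new command: it is exposed through AWX's management entrypoint (e.g. `awx-manage disable_instance --hostname <node> --wait`), and the hand-rolled `elapsed` arithmetic in jobs_pretty() is exactly timedelta.total_seconds(). The equivalence can be checked directly:

```python
# The manual microsecond arithmetic in jobs_pretty() is equivalent to
# datetime.timedelta.total_seconds().
from datetime import timedelta

td = timedelta(days=1, seconds=5, microseconds=250000)
manual = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0)
assert manual == td.total_seconds() == 86405.25
```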
@@ -14,7 +14,7 @@ from oauth2_provider.models import AbstractApplication, AbstractAccessToken
 from oauth2_provider.generators import generate_client_secret
 from oauthlib import oauth2

-from awx.main.utils import get_external_account
+from awx.sso.common import get_external_account
 from awx.main.fields import OAuth2ClientSecretField
@@ -27,8 +27,8 @@ class AWXProtocolTypeRouter(ProtocolTypeRouter):


 websocket_urlpatterns = [
-    re_path(r'websocket/$', consumers.EventConsumer.as_asgi()),
-    re_path(r'websocket/broadcast/$', consumers.BroadcastConsumer.as_asgi()),
+    re_path(r'websocket/', consumers.EventConsumer.as_asgi()),
+    re_path(r'websocket/broadcast/', consumers.BroadcastConsumer.as_asgi()),
 ]

 application = AWXProtocolTypeRouter(
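Removing the trailing `$` anchors turns both routes into prefix matches (plausibly to tolerate extra path segments when AWX is mounted under an ingress path; see the UWSGI_MOUNT_PATH/ingress_path changes later in this diff). The difference in plain `re` terms:

```python
# re.match anchors at the start only; without the trailing '$' the pattern
# accepts any suffix, so 'websocket/extra/' now matches as well.
import re

assert re.match(r'websocket/$', 'websocket/extra/') is None       # old: exact match
assert re.match(r'websocket/', 'websocket/extra/') is not None    # new: prefix match
```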
@@ -116,7 +116,7 @@ class RunnerCallback:
         # so it *should* have a negligible performance impact
         task = event_data.get('event_data', {}).get('task_action')
         try:
-            if task in ('git', 'svn'):
+            if task in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
                 event_data_json = json.dumps(event_data)
                 event_data_json = UriCleaner.remove_sensitive(event_data_json)
                 event_data = json.loads(event_data_json)
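Newer ansible-core reports actions by fully qualified collection name (FQCN), so checks against bare 'git'/'svn' silently stopped matching; the tuples in this diff now list both spellings. An alternative (a sketch, not what AWX does here) is to normalize the FQCN down to its short name:

```python
# Alternative normalization (a sketch, not AWX's approach): strip the
# collection prefix so 'ansible.builtin.git' and 'git' compare equal.
def is_scm_action(task_action):
    return task_action.rsplit('.', 1)[-1] in ('git', 'svn')

assert is_scm_action('git') and is_scm_action('ansible.builtin.git')
assert not is_scm_action('ansible.builtin.set_fact')
```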
@@ -219,7 +219,7 @@ class RunnerCallbackForProjectUpdate(RunnerCallback):
     def event_handler(self, event_data):
         super_return_value = super(RunnerCallbackForProjectUpdate, self).event_handler(event_data)
         returned_data = event_data.get('event_data', {})
-        if returned_data.get('task_action', '') == 'set_fact':
+        if returned_data.get('task_action', '') in ('set_fact', 'ansible.builtin.set_fact'):
             returned_facts = returned_data.get('res', {}).get('ansible_facts', {})
             if 'scm_version' in returned_facts:
                 self.playbook_new_revision = returned_facts['scm_version']
@@ -311,7 +311,7 @@ class BaseTask(object):
         env['AWX_PRIVATE_DATA_DIR'] = private_data_dir

         if self.instance.execution_environment is None:
-            raise RuntimeError('The project could not sync because there is no Execution Environment.')
+            raise RuntimeError(f'The {self.model.__name__} could not run because there is no Execution Environment.')

         return env
@@ -2008,7 +2008,7 @@ def test_project_update_no_ee(mock_me):
     with pytest.raises(RuntimeError) as e:
         task.build_env(job, {})

-    assert 'The project could not sync because there is no Execution Environment' in str(e.value)
+    assert 'The ProjectUpdate could not run because there is no Execution Environment' in str(e.value)


 @pytest.mark.parametrize(
@@ -80,7 +80,6 @@ __all__ = [
     'set_environ',
     'IllegalArgumentError',
     'get_custom_venv_choices',
-    'get_external_account',
     'ScheduleTaskManager',
     'ScheduleDependencyManager',
     'ScheduleWorkflowManager',
@@ -1089,29 +1088,6 @@ def has_model_field_prefetched(model_obj, field_name):
     return getattr(getattr(model_obj, field_name, None), 'prefetch_cache_name', '') in getattr(model_obj, '_prefetched_objects_cache', {})


-def get_external_account(user):
-    from django.conf import settings
-
-    account_type = None
-    if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
-        try:
-            if user.pk and user.profile.ldap_dn and not user.has_usable_password():
-                account_type = "ldap"
-        except AttributeError:
-            pass
-    if (
-        getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
-        or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
-        or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
-        or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
-        or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
-    ) and user.social_auth.all():
-        account_type = "social"
-    if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and user.enterprise_auth.all():
-        account_type = "enterprise"
-    return account_type
-
-
 class classproperty:
     def __init__(self, fget=None, fset=None, fdel=None, doc=None):
         self.fget = fget
@@ -1,4 +1,5 @@
 import os
+import logging
 from pathlib import Path

 from django.conf import settings
@@ -6,8 +7,15 @@ from django.conf import settings
 from awx.main.models.execution_environments import ExecutionEnvironment


+logger = logging.getLogger(__name__)
+
+
 def get_control_plane_execution_environment():
-    return ExecutionEnvironment.objects.filter(organization=None, managed=True).first()
+    ee = ExecutionEnvironment.objects.filter(organization=None, managed=True).first()
+    if ee is None:
+        logger.error('Failed to find control plane ee, there are no managed EEs without organizations')
+        raise RuntimeError("Failed to find default control plane EE")
+    return ee


 def get_default_execution_environment():
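The reason the guard is needed: Django's QuerySet.first() returns None rather than raising when nothing matches, so before this change callers received a silent None. A standalone restatement of the pattern:

```python
# Standalone restatement of the guard above: .first() yields None on an empty
# queryset, so the None is converted into a loud RuntimeError instead of
# leaking to callers.
def first_or_fail(queryset, message):
    obj = queryset.first()
    if obj is None:
        raise RuntimeError(message)
    return obj
```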
@@ -25,42 +25,47 @@
   connection: local
   name: Update source tree if necessary
   tasks:

-    - name: delete project directory before update
-      command: "find -delete" # volume mounted, cannot delete folder itself
+    - name: Delete project directory before update
+      ansible.builtin.shell: set -o pipefail && find . -delete -print | tail -2 # volume mounted, cannot delete folder itself
+      register: reg
+      changed_when: reg.stdout_lines | length > 1
       args:
         chdir: "{{ project_path }}"
       tags:
         - delete

-    - block:
-        - name: update project using git
-          git:
-            dest: "{{project_path|quote}}"
-            repo: "{{scm_url}}"
-            version: "{{scm_branch|quote}}"
-            refspec: "{{scm_refspec|default(omit)}}"
-            force: "{{scm_clean}}"
-            track_submodules: "{{scm_track_submodules|default(omit)}}"
-            accept_hostkey: "{{scm_accept_hostkey|default(omit)}}"
+    - name: Update project using git
+      tags:
+        - update_git
+      block:
+        - name: Update project using git
+          ansible.builtin.git:
+            dest: "{{ project_path | quote }}"
+            repo: "{{ scm_url }}"
+            version: "{{ scm_branch | quote }}"
+            refspec: "{{ scm_refspec | default(omit) }}"
+            force: "{{ scm_clean }}"
+            track_submodules: "{{ scm_track_submodules | default(omit) }}"
+            accept_hostkey: "{{ scm_accept_hostkey | default(omit) }}"
           register: git_result

         - name: Set the git repository version
-          set_fact:
+          ansible.builtin.set_fact:
             scm_version: "{{ git_result['after'] }}"
           when: "'after' in git_result"
-      tags:
-        - update_git

-    - block:
-        - name: update project using svn
-          subversion:
-            dest: "{{project_path|quote}}"
-            repo: "{{scm_url|quote}}"
-            revision: "{{scm_branch|quote}}"
-            force: "{{scm_clean}}"
-            username: "{{scm_username|default(omit)}}"
-            password: "{{scm_password|default(omit)}}"
+    - name: Update project using svn
+      tags:
+        - update_svn
+      block:
+        - name: Update project using svn
+          ansible.builtin.subversion:
+            dest: "{{ project_path | quote }}"
+            repo: "{{ scm_url | quote }}"
+            revision: "{{ scm_branch | quote }}"
+            force: "{{ scm_clean }}"
+            username: "{{ scm_username | default(omit) }}"
+            password: "{{ scm_password | default(omit) }}"
             # must be in_place because folder pre-existing, because it is mounted
             in_place: true
           environment:
@@ -68,85 +73,90 @@
           register: svn_result

         - name: Set the svn repository version
-          set_fact:
+          ansible.builtin.set_fact:
             scm_version: "{{ svn_result['after'] }}"
           when: "'after' in svn_result"

-        - name: parse subversion version string properly
-          set_fact:
-            scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}"
-      tags:
-        - update_svn
+        - name: Parse subversion version string properly
+          ansible.builtin.set_fact:
+            scm_version: "{{ scm_version | regex_replace('^.*Revision: ([0-9]+).*$', '\\1') }}"

-    - block:
-
+    - name: Project update for Insights
+      tags:
+        - update_insights
+      block:
         - name: Ensure the project directory is present
-          file:
-            dest: "{{project_path|quote}}"
+          ansible.builtin.file:
+            dest: "{{ project_path | quote }}"
             state: directory
             mode: '0755'

         - name: Fetch Insights Playbook(s)
           insights:
-            insights_url: "{{insights_url}}"
-            username: "{{scm_username}}"
-            password: "{{scm_password}}"
-            project_path: "{{project_path}}"
-            awx_license_type: "{{awx_license_type}}"
-            awx_version: "{{awx_version}}"
+            insights_url: "{{ insights_url }}"
+            username: "{{ scm_username }}"
+            password: "{{ scm_password }}"
+            project_path: "{{ project_path }}"
+            awx_license_type: "{{ awx_license_type }}"
+            awx_version: "{{ awx_version }}"
           register: results

         - name: Save Insights Version
-          set_fact:
-            scm_version: "{{results.version}}"
+          ansible.builtin.set_fact:
+            scm_version: "{{ results.version }}"
           when: results is defined
-      tags:
-        - update_insights

-    - block:
-
+    - name: Update project using archive
+      tags:
+        - update_archive
+      block:
         - name: Ensure the project archive directory is present
-          file:
-            dest: "{{ project_path|quote }}/.archive"
+          ansible.builtin.file:
+            dest: "{{ project_path | quote }}/.archive"
            state: directory
             mode: '0755'

         - name: Get archive from url
-          get_url:
-            url: "{{ scm_url|quote }}"
-            dest: "{{ project_path|quote }}/.archive/"
-            url_username: "{{ scm_username|default(omit) }}"
-            url_password: "{{ scm_password|default(omit) }}"
+          ansible.builtin.get_url:
+            url: "{{ scm_url | quote }}"
+            dest: "{{ project_path | quote }}/.archive/"
+            url_username: "{{ scm_username | default(omit) }}"
+            url_password: "{{ scm_password | default(omit) }}"
             force_basic_auth: true
             mode: '0755'
           register: get_archive

         - name: Unpack archive
           project_archive:
             src: "{{ get_archive.dest }}"
-            project_path: "{{ project_path|quote }}"
+            project_path: "{{ project_path | quote }}"
             force: "{{ scm_clean }}"
           when: get_archive.changed or scm_clean
           register: unarchived

         - name: Find previous archives
-          find:
-            paths: "{{ project_path|quote }}/.archive/"
+          ansible.builtin.find:
+            paths: "{{ project_path | quote }}/.archive/"
             excludes:
-              - "{{ get_archive.dest|basename }}"
+              - "{{ get_archive.dest | basename }}"
           when: unarchived.changed
           register: previous_archive

         - name: Remove previous archives
-          file:
+          ansible.builtin.file:
             path: "{{ item.path }}"
             state: absent
           loop: "{{ previous_archive.files }}"
-          when: previous_archive.files|default([])
+          when: previous_archive.files | default([])

         - name: Set scm_version to archive sha1 checksum
-          set_fact:
+          ansible.builtin.set_fact:
             scm_version: "{{ get_archive.checksum_src }}"
-      tags:
-        - update_archive

     - name: Repository Version
-      debug:
+      ansible.builtin.debug:
         msg: "Repository Version {{ scm_version }}"
       tags:
         - update_git
@@ -183,60 +193,59 @@
   additional_collections_env:
     # These environment variables are used for installing collections, in addition to galaxy_task_env
     # setting the collections paths silences warnings
-    ANSIBLE_COLLECTIONS_PATHS: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections"
+    ANSIBLE_COLLECTIONS_PATHS: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections"
     # Put the local tmp directory in same volume as collection destination
     # otherwise, files cannot be moved across volumes and will cause error
-    ANSIBLE_LOCAL_TEMP: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/tmp"
+    ANSIBLE_LOCAL_TEMP: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/tmp"
   tasks:

     - name: Check content sync settings
-      block:
-        - debug:
-            msg: >
-              Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and
-              AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization.
-
-        - meta: end_play
-
-      when: not roles_enabled|bool and not collections_enabled|bool
+      when: not roles_enabled | bool and not collections_enabled | bool
       tags:
         - install_roles
         - install_collections
+      block:
+        - name: Warn about disabled content sync
+          ansible.builtin.debug:
+            msg: >
+              Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and
+              AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization.
+        - name: End play due to disabled content sync
+          ansible.builtin.meta: end_play

-    - name: fetch galaxy roles from requirements.(yml/yaml)
-      command: >
+    - name: Fetch galaxy roles from requirements.(yml/yaml)
+      ansible.builtin.command: >
         ansible-galaxy role install -r {{ item }}
-        --roles-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_roles
+        --roles-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles
         {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
       args:
-        chdir: "{{project_path|quote}}"
+        chdir: "{{ project_path | quote }}"
       register: galaxy_result
       with_fileglob:
-        - "{{project_path|quote}}/roles/requirements.yaml"
-        - "{{project_path|quote}}/roles/requirements.yml"
+        - "{{ project_path | quote }}/roles/requirements.yaml"
+        - "{{ project_path | quote }}/roles/requirements.yml"
       changed_when: "'was installed successfully' in galaxy_result.stdout"
       environment: "{{ galaxy_task_env }}"
-      when: roles_enabled|bool
+      when: roles_enabled | bool
       tags:
         - install_roles

-    - name: fetch galaxy collections from collections/requirements.(yml/yaml)
-      command: >
+    - name: Fetch galaxy collections from collections/requirements.(yml/yaml)
+      ansible.builtin.command: >
         ansible-galaxy collection install -r {{ item }}
-        --collections-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections
+        --collections-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections
         {{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
       args:
-        chdir: "{{project_path|quote}}"
+        chdir: "{{ project_path | quote }}"
       register: galaxy_collection_result
       with_fileglob:
-        - "{{project_path|quote}}/collections/requirements.yaml"
-        - "{{project_path|quote}}/collections/requirements.yml"
-        - "{{project_path|quote}}/requirements.yaml"
-        - "{{project_path|quote}}/requirements.yml"
+        - "{{ project_path | quote }}/collections/requirements.yaml"
+        - "{{ project_path | quote }}/collections/requirements.yml"
+        - "{{ project_path | quote }}/requirements.yaml"
+        - "{{ project_path | quote }}/requirements.yml"
       changed_when: "'Installing ' in galaxy_collection_result.stdout"
       environment: "{{ additional_collections_env | combine(galaxy_task_env) }}"
       when:
         - "ansible_version.full is version_compare('2.9', '>=')"
-        - collections_enabled|bool
+        - collections_enabled | bool
       tags:
         - install_collections
@@ -385,10 +385,10 @@ def on_populate_user(sender, **kwargs):
             logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))

     org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
-    team_map = getattr(backend.settings, 'TEAM_MAP', {})
+    team_map_settings = getattr(backend.settings, 'TEAM_MAP', {})
     orgs_list = list(org_map.keys())
-    for team_name, team_opts in team_map.items():
+    team_map = {}
+    for team_name, team_opts in team_map_settings.items():
         if not team_opts.get('organization', None):
             # You can't save the LDAP config in the UI w/o an org (or '' or null as the org) so if we somehow got this condition its an error
             logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))
@@ -416,7 +416,7 @@ def on_populate_user(sender, **kwargs):

     # Compute in memory what the state is of the different LDAP teams
     desired_team_states = {}
-    for team_name, team_opts in team_map.items():
+    for team_name, team_opts in team_map_settings.items():
         if 'organization' not in team_opts:
             continue
         users_opts = team_opts.get('users', None)
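The rename is not cosmetic: the raw TEAM_MAP setting stays in team_map_settings, while team_map is rebuilt as a fresh local dict, so later code never iterates and mutates the settings-backed mapping at the same time. A condensed sketch of the split (variable names from the hunk; how team_map is populated happens outside this excerpt):

```python
# Condensed sketch of the settings-backed mapping vs. the locally built one;
# the population of team_map is hypothetical here and lives outside this hunk.
team_map_settings = {'Blue Team': {'organization': 'Default'}}  # raw setting

team_map = {}
for team_name, team_opts in team_map_settings.items():
    if not team_opts.get('organization', None):
        continue  # invalid entry: a team map requires an organization
    team_map[team_name] = dict(team_opts)  # hypothetical normalization step
```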
@@ -169,3 +169,45 @@ def get_or_create_org_with_default_galaxy_cred(**kwargs):
     else:
         logger.debug("Could not find default Ansible Galaxy credential to add to org")
     return org
+
+
+def get_external_account(user):
+    account_type = None
+
+    # Previously this method also checked for active configuration which meant that if a user logged in from LDAP
+    # and then LDAP was no longer configured it would "convert" the user from an LDAP account_type to none.
+    # This did have one benefit that if a login type was removed intentionally the user could be given a username password.
+    # But it had a limitation that the user would have to have an active session (or an admin would have to go set a temp password).
+    # It also led to the side effect that if LDAP was ever reconfigured the user would convert back to LDAP but still have a local password.
+    # That local password could then be used to bypass LDAP authentication.
+    try:
+        if user.pk and user.profile.ldap_dn and not user.has_usable_password():
+            account_type = "ldap"
+    except AttributeError:
+        pass
+
+    if user.social_auth.all():
+        account_type = "social"
+
+    if user.enterprise_auth.all():
+        account_type = "enterprise"
+
+    return account_type
+
+
+def is_remote_auth_enabled():
+    from django.conf import settings
+
+    # Append LDAP, Radius, TACACS+ and SAML options
+    settings_that_turn_on_remote_auth = [
+        'AUTH_LDAP_SERVER_URI',
+        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
+        'RADIUS_SERVER',
+        'TACACSPLUS_HOST',
+    ]
+    # Also include any SOCIAL_AUTH_*_KEY setting (except SAML)
+    for social_auth_key in dir(settings):
+        if social_auth_key.startswith('SOCIAL_AUTH_') and social_auth_key.endswith('_KEY') and 'SAML' not in social_auth_key:
+            settings_that_turn_on_remote_auth.append(social_auth_key)
+
+    return any(getattr(settings, s, None) for s in settings_that_turn_on_remote_auth)
@@ -2,9 +2,22 @@ import pytest
 from collections import Counter
 from django.core.exceptions import FieldError
 from django.utils.timezone import now
+from django.test.utils import override_settings

 from awx.main.models import Credential, CredentialType, Organization, Team, User
-from awx.sso.common import get_orgs_by_ids, reconcile_users_org_team_mappings, create_org_and_teams, get_or_create_org_with_default_galaxy_cred
+from awx.sso.common import (
+    get_orgs_by_ids,
+    reconcile_users_org_team_mappings,
+    create_org_and_teams,
+    get_or_create_org_with_default_galaxy_cred,
+    is_remote_auth_enabled,
+    get_external_account,
+)
+
+
+class MicroMockObject(object):
+    def all(self):
+        return True


 @pytest.mark.django_db
@@ -278,3 +291,87 @@ class TestCommonFunctions:

         for o in Organization.objects.all():
             assert o.galaxy_credentials.count() == 0
+
+    @pytest.mark.parametrize(
+        "enable_ldap, enable_social, enable_enterprise, expected_results",
+        [
+            (False, False, False, None),
+            (True, False, False, 'ldap'),
+            (True, True, False, 'social'),
+            (True, True, True, 'enterprise'),
+            (False, True, True, 'enterprise'),
+            (False, False, True, 'enterprise'),
+            (False, True, False, 'social'),
+        ],
+    )
+    def test_get_external_account(self, enable_ldap, enable_social, enable_enterprise, expected_results):
+        try:
+            user = User.objects.get(username="external_tester")
+        except User.DoesNotExist:
+            user = User(username="external_tester")
+            user.set_unusable_password()
+            user.save()
+
+        if enable_ldap:
+            user.profile.ldap_dn = 'test.dn'
+        if enable_social:
+            from social_django.models import UserSocialAuth
+
+            social_auth, _ = UserSocialAuth.objects.get_or_create(
+                uid='667ec049-cdf3-45d0-a4dc-0465f7505954',
+                provider='oidc',
+                extra_data={},
+                user_id=user.id,
+            )
+            user.social_auth.set([social_auth])
+        if enable_enterprise:
+            from awx.sso.models import UserEnterpriseAuth
+
+            enterprise_auth = UserEnterpriseAuth(user=user, provider='tacacs+')
+            enterprise_auth.save()
+
+        assert get_external_account(user) == expected_results
+
+    @pytest.mark.parametrize(
+        "setting, expected",
+        [
+            # Set none of the social auth settings
+            ('JUNK_SETTING', False),
+            # Set the hard coded settings
+            ('AUTH_LDAP_SERVER_URI', True),
+            ('SOCIAL_AUTH_SAML_ENABLED_IDPS', True),
+            ('RADIUS_SERVER', True),
+            ('TACACSPLUS_HOST', True),
+            # Set some SOCIAL_AUTH_*_KEY settings
+            ('SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True),
+            ('SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY', True),
+            ('SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY', True),
+            ('SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY', True),
+            ('SOCIAL_AUTH_GITHUB_KEY', True),
+            ('SOCIAL_AUTH_GITHUB_ORG_KEY', True),
+            ('SOCIAL_AUTH_GITHUB_TEAM_KEY', True),
+            ('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', True),
+            ('SOCIAL_AUTH_OIDC_KEY', True),
+            # Try a hypothetical future one
+            ('SOCIAL_AUTH_GIBBERISH_KEY', True),
+            # Do a SAML one
+            ('SOCIAL_AUTH_SAML_SP_PRIVATE_KEY', False),
+        ],
+    )
+    def test_is_remote_auth_enabled(self, setting, expected):
+        with override_settings(**{setting: True}):
+            assert is_remote_auth_enabled() == expected
+
+    @pytest.mark.parametrize(
+        "key_one, key_one_value, key_two, key_two_value, expected",
+        [
+            ('JUNK_SETTING', True, 'JUNK2_SETTING', True, False),
+            ('AUTH_LDAP_SERVER_URI', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True),
+            ('JUNK_SETTING', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True),
+            ('AUTH_LDAP_SERVER_URI', False, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', False, False),
+        ],
+    )
+    def test_is_remote_auth_enabled_multiple_keys(self, key_one, key_one_value, key_two, key_two_value, expected):
+        with override_settings(**{key_one: key_one_value}):
+            with override_settings(**{key_two: key_two_value}):
+                assert is_remote_auth_enabled() == expected
awx/ui/package-lock.json (generated, 106 lines changed)
@@ -6,7 +6,7 @@
     "": {
       "name": "ui",
       "dependencies": {
-        "@lingui/react": "3.14.0",
+        "@lingui/react": "3.17.2",
         "@patternfly/patternfly": "4.217.1",
         "@patternfly/react-core": "^4.264.0",
         "@patternfly/react-icons": "4.92.10",
@@ -1872,11 +1872,11 @@
       }
     },
     "node_modules/@babel/runtime": {
-      "version": "7.16.7",
-      "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz",
-      "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==",
+      "version": "7.21.0",
+      "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz",
+      "integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==",
       "dependencies": {
-        "regenerator-runtime": "^0.13.4"
+        "regenerator-runtime": "^0.13.11"
       },
       "engines": {
         "node": ">=6.9.0"
@@ -3724,16 +3724,16 @@
       }
     },
     "node_modules/@lingui/core": {
-      "version": "3.14.0",
-      "resolved": "https://registry.npmjs.org/@lingui/core/-/core-3.14.0.tgz",
-      "integrity": "sha512-ertREq9oi9B/umxpd/pInm9uFO8FLK2/0FXfDmMqvH5ydswWn/c9nY5YO4W1h4/8LWO45mewypOIyjoue4De1w==",
+      "version": "3.17.2",
+      "resolved": "https://registry.npmjs.org/@lingui/core/-/core-3.17.2.tgz",
+      "integrity": "sha512-YOd068NanznN8lLQqOKPlAY0ill3rrgmiAvPRKuYkrxzJMIHqlIFO/2Kcc/RH5vClOmLfg+wgR4rsHK/kLKelQ==",
       "dependencies": {
-        "@babel/runtime": "^7.11.2",
-        "make-plural": "^6.2.2",
-        "messageformat-parser": "^4.1.3"
+        "@babel/runtime": "^7.20.13",
+        "@messageformat/parser": "^5.0.0",
+        "make-plural": "^6.2.2"
       },
       "engines": {
-        "node": ">=10.0.0"
+        "node": ">=14.0.0"
       }
     },
     "node_modules/@lingui/loader": {
@@ -3769,20 +3769,28 @@
       }
     },
     "node_modules/@lingui/react": {
-      "version": "3.14.0",
-      "resolved": "https://registry.npmjs.org/@lingui/react/-/react-3.14.0.tgz",
-      "integrity": "sha512-ow9Mtru7f0T2S9AwnPWRejppcucCW0LmoDR3P4wqHjL+eH5f8a6nxd2doxGieC91/2i4qqW88y4K/zXJxwRSQw==",
+      "version": "3.17.2",
+      "resolved": "https://registry.npmjs.org/@lingui/react/-/react-3.17.2.tgz",
+      "integrity": "sha512-+PECPknIiQn/zdXgrXVTCbcJUSjPFZTtPOuIO72huGNh/QA+iRVsGTvef0/Q+pWkiPEOU6CmaGHkwdtgFMtJ8g==",
       "dependencies": {
-        "@babel/runtime": "^7.11.2",
-        "@lingui/core": "^3.14.0"
+        "@babel/runtime": "^7.20.13",
+        "@lingui/core": "3.17.2"
       },
       "engines": {
-        "node": ">=10.0.0"
+        "node": ">=14.0.0"
       },
       "peerDependencies": {
         "react": "^16.8.0 || ^17.0.0 || ^18.0.0"
       }
     },
+    "node_modules/@messageformat/parser": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/@messageformat/parser/-/parser-5.0.0.tgz",
+      "integrity": "sha512-WiDKhi8F0zQaFU8cXgqq69eYFarCnTVxKcvhAONufKf0oUxbqLMW6JX6rV4Hqh+BEQWGyKKKHY4g1XA6bCLylA==",
+      "dependencies": {
+        "moo": "^0.5.1"
+      }
+    },
     "node_modules/@nodelib/fs.scandir": {
       "version": "2.1.5",
       "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
@@ -16113,7 +16121,8 @@
     "node_modules/messageformat-parser": {
       "version": "4.1.3",
       "resolved": "https://registry.npmjs.org/messageformat-parser/-/messageformat-parser-4.1.3.tgz",
-      "integrity": "sha512-2fU3XDCanRqeOCkn7R5zW5VQHWf+T3hH65SzuqRvjatBK7r4uyFa5mEX+k6F9Bd04LVM5G4/BHBTUJsOdW7uyg=="
+      "integrity": "sha512-2fU3XDCanRqeOCkn7R5zW5VQHWf+T3hH65SzuqRvjatBK7r4uyFa5mEX+k6F9Bd04LVM5G4/BHBTUJsOdW7uyg==",
+      "dev": true
     },
     "node_modules/methods": {
       "version": "1.1.2",
@@ -16319,8 +16328,7 @@
     "node_modules/moo": {
       "version": "0.5.1",
       "resolved": "https://registry.npmjs.org/moo/-/moo-0.5.1.tgz",
-      "integrity": "sha512-I1mnb5xn4fO80BH9BLcF0yLypy2UKl+Cb01Fu0hJRkJjlCRtxZMWkTdAtDd5ZqCOxtCkhmRwyI57vWT+1iZ67w==",
-      "dev": true
+      "integrity": "sha512-I1mnb5xn4fO80BH9BLcF0yLypy2UKl+Cb01Fu0hJRkJjlCRtxZMWkTdAtDd5ZqCOxtCkhmRwyI57vWT+1iZ67w=="
     },
     "node_modules/ms": {
       "version": "2.1.2",
@@ -19214,9 +19222,9 @@
       }
     },
     "node_modules/regenerator-runtime": {
-      "version": "0.13.9",
-      "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz",
-      "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA=="
+      "version": "0.13.11",
+      "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
+      "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg=="
     },
     "node_modules/regenerator-transform": {
       "version": "0.14.5",
@@ -23830,11 +23838,11 @@
       }
     },
     "@babel/runtime": {
-      "version": "7.16.7",
-      "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.16.7.tgz",
-      "integrity": "sha512-9E9FJowqAsytyOY6LG+1KuueckRL+aQW+mKvXRXnuFGyRAyepJPmEo9vgMfXUA6O9u3IeEdv9MAkppFcaQwogQ==",
+      "version": "7.21.0",
+      "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz",
+      "integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==",
       "requires": {
-        "regenerator-runtime": "^0.13.4"
+        "regenerator-runtime": "^0.13.11"
       }
     },
     "@babel/runtime-corejs3": {
@@ -25306,13 +25314,13 @@
       }
     },
     "@lingui/core": {
-      "version": "3.14.0",
-      "resolved": "https://registry.npmjs.org/@lingui/core/-/core-3.14.0.tgz",
-      "integrity": "sha512-ertREq9oi9B/umxpd/pInm9uFO8FLK2/0FXfDmMqvH5ydswWn/c9nY5YO4W1h4/8LWO45mewypOIyjoue4De1w==",
+      "version": "3.17.2",
+      "resolved": "https://registry.npmjs.org/@lingui/core/-/core-3.17.2.tgz",
+      "integrity": "sha512-YOd068NanznN8lLQqOKPlAY0ill3rrgmiAvPRKuYkrxzJMIHqlIFO/2Kcc/RH5vClOmLfg+wgR4rsHK/kLKelQ==",
       "requires": {
-        "@babel/runtime": "^7.11.2",
-        "make-plural": "^6.2.2",
-        "messageformat-parser": "^4.1.3"
+        "@babel/runtime": "^7.20.13",
+        "@messageformat/parser": "^5.0.0",
+        "make-plural": "^6.2.2"
       }
     },
     "@lingui/loader": {
@@ -25339,12 +25347,20 @@
       }
     },
     "@lingui/react": {
-      "version": "3.14.0",
-      "resolved": "https://registry.npmjs.org/@lingui/react/-/react-3.14.0.tgz",
-      "integrity": "sha512-ow9Mtru7f0T2S9AwnPWRejppcucCW0LmoDR3P4wqHjL+eH5f8a6nxd2doxGieC91/2i4qqW88y4K/zXJxwRSQw==",
+      "version": "3.17.2",
+      "resolved": "https://registry.npmjs.org/@lingui/react/-/react-3.17.2.tgz",
+      "integrity": "sha512-+PECPknIiQn/zdXgrXVTCbcJUSjPFZTtPOuIO72huGNh/QA+iRVsGTvef0/Q+pWkiPEOU6CmaGHkwdtgFMtJ8g==",
      "requires": {
-        "@babel/runtime": "^7.11.2",
-        "@lingui/core": "^3.14.0"
+        "@babel/runtime": "^7.20.13",
+        "@lingui/core": "3.17.2"
       }
     },
+    "@messageformat/parser": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/@messageformat/parser/-/parser-5.0.0.tgz",
+      "integrity": "sha512-WiDKhi8F0zQaFU8cXgqq69eYFarCnTVxKcvhAONufKf0oUxbqLMW6JX6rV4Hqh+BEQWGyKKKHY4g1XA6bCLylA==",
+      "requires": {
+        "moo": "^0.5.1"
+      }
+    },
     "@nodelib/fs.scandir": {
@@ -34870,7 +34886,8 @@
     "messageformat-parser": {
       "version": "4.1.3",
       "resolved": "https://registry.npmjs.org/messageformat-parser/-/messageformat-parser-4.1.3.tgz",
-      "integrity": "sha512-2fU3XDCanRqeOCkn7R5zW5VQHWf+T3hH65SzuqRvjatBK7r4uyFa5mEX+k6F9Bd04LVM5G4/BHBTUJsOdW7uyg=="
+      "integrity": "sha512-2fU3XDCanRqeOCkn7R5zW5VQHWf+T3hH65SzuqRvjatBK7r4uyFa5mEX+k6F9Bd04LVM5G4/BHBTUJsOdW7uyg==",
+      "dev": true
     },
     "methods": {
       "version": "1.1.2",
@@ -35014,8 +35031,7 @@
     "moo": {
       "version": "0.5.1",
       "resolved": "https://registry.npmjs.org/moo/-/moo-0.5.1.tgz",
-      "integrity": "sha512-I1mnb5xn4fO80BH9BLcF0yLypy2UKl+Cb01Fu0hJRkJjlCRtxZMWkTdAtDd5ZqCOxtCkhmRwyI57vWT+1iZ67w==",
-      "dev": true
+      "integrity": "sha512-I1mnb5xn4fO80BH9BLcF0yLypy2UKl+Cb01Fu0hJRkJjlCRtxZMWkTdAtDd5ZqCOxtCkhmRwyI57vWT+1iZ67w=="
     },
     "ms": {
       "version": "2.1.2",
@@ -37116,9 +37132,9 @@
       }
     },
     "regenerator-runtime": {
-      "version": "0.13.9",
-      "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz",
-      "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA=="
+      "version": "0.13.11",
+      "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
+      "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg=="
     },
     "regenerator-transform": {
       "version": "0.14.5",
@@ -6,7 +6,7 @@
     "node": ">=16.13.1"
   },
   "dependencies": {
-    "@lingui/react": "3.14.0",
+    "@lingui/react": "3.17.2",
     "@patternfly/patternfly": "4.217.1",
     "@patternfly/react-core": "^4.264.0",
     "@patternfly/react-icons": "4.92.10",
@@ -196,7 +196,7 @@ class LookupModule(LookupBase):
             if isinstance(rule[field_name], int):
                 rule[field_name] = [rule[field_name]]
             # If it's not a list, we need to split it into a list
-            if isinstance(rule[field_name], list):
+            if not isinstance(rule[field_name], list):
                 rule[field_name] = rule[field_name].split(',')
             for value in rule[field_name]:
                 # If they have a list of strs we want to strip each str in case it's space delimited
@@ -210,7 +210,8 @@ class LookupModule(LookupBase):

     def process_list(self, field_name, rule, valid_list, rule_number):
         return_values = []
-        if isinstance(rule[field_name], list):
+        # If it's not a list, we need to split it into a list
+        if not isinstance(rule[field_name], list):
             rule[field_name] = rule[field_name].split(',')
         for value in rule[field_name]:
             value = value.strip()
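The inverted isinstance check is the actual bug fix: previously a comma-separated string was never split, because the split only ran for values that were already lists. In isolation:

```python
# Standalone illustration of the fix: only non-list values get split, so a
# comma-separated string becomes a list while an existing list passes through.
def normalize(value):
    if isinstance(value, int):
        value = [value]
    if not isinstance(value, list):  # the old code had `if isinstance(...)` here
        value = value.split(',')
    return [str(v).strip() for v in value]

assert normalize('monday, tuesday') == ['monday', 'tuesday']
assert normalize(['monday']) == ['monday']
assert normalize(3) == ['3']
```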
@@ -57,7 +57,15 @@ extends_documentation_fragment: awx.awx.auth


 EXAMPLES = """
-- name: Launch a workflow with a timeout of 10 seconds
+- name: Create a workflow approval node
+  workflow_job_template_node:
+    identifier: approval_test
+    approval_node:
+      name: approval_jt_name
+      timeout: 900
+    workflow: "Test Workflow"
+
+- name: Launch the workflow with a timeout of 10 seconds
   workflow_launch:
     workflow_template: "Test Workflow"
     wait: False
@@ -66,7 +74,7 @@ EXAMPLES = """
 - name: Wait for approval node to activate and approve
   workflow_approval:
     workflow_job_id: "{{ workflow.id }}"
-    name: Approve Me
+    name: approval_jt_name
     interval: 10
     timeout: 20
     action: deny
@@ -183,7 +183,21 @@ options:
             inventory:
               description:
                 - Inventory applied as a prompt, if job template prompts for inventory
-              type: str
+              type: dict
+              suboptions:
+                name:
+                  description:
+                    - Name of the inventory to be applied to the job as a launch-time prompt.
+                  type: str
+                organization:
+                  description:
+                    - Name of key for use in model for organizational reference
+                  type: dict
+                  suboptions:
+                    name:
+                      description:
+                        - The organization the inventory exists in.
+                      type: str
             scm_branch:
               description:
                 - SCM branch applied as a prompt, if job template prompts for SCM branch
@@ -544,6 +558,10 @@ EXAMPLES = '''
         type: job_template
       execution_environment:
         name: My EE
+      inventory:
+        name: Test inventory
+        organization:
+          name: Default
       related:
         credentials:
           - name: cyberark
@@ -613,10 +631,6 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id):
             if workflow_node['unified_job_template']['type'] != 'workflow_approval':
                 module.fail_json(msg="Unable to Find unified_job_template: {0}".format(search_fields))

-        inventory = workflow_node.get('inventory')
-        if inventory:
-            workflow_node_fields['inventory'] = module.resolve_name_to_id('inventories', inventory)
-
         # Lookup Values for other fields

         for field_name in (
@@ -645,6 +659,17 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id):
                 'execution_environments', name_or_id=workflow_node['execution_environment']['name']
             )['id']

+        # Two lookup methods are used based on a fix added in 21.11.0, and the awx export model
+        if 'inventory' in workflow_node:
+            if 'name' in workflow_node['inventory']:
+                inv_lookup_data = {}
+                if 'organization' in workflow_node['inventory']:
+                    inv_lookup_data['organization'] = module.resolve_name_to_id('organizations', workflow_node['inventory']['organization']['name'])
+                workflow_node_fields['inventory'] = module.get_one('inventories', name_or_id=workflow_node['inventory']['name'], data=inv_lookup_data)['id']
+            else:
+                workflow_node_fields['inventory'] = module.get_one('inventories', name_or_id=workflow_node['inventory'])['id']
+
         # Set Search fields
         search_fields['workflow_job_template'] = workflow_node_fields['workflow_job_template'] = workflow_id
@@ -16,7 +16,7 @@ import glob
 # Normally a read-only endpoint should not have a module (i.e. /api/v2/me) but sometimes we reuse a name
 # For example, we have a role module but /api/v2/roles is a read only endpoint.
 # This list indicates which read-only endpoints have associated modules with them.
-read_only_endpoints_with_modules = ['settings', 'role', 'project_update']
+read_only_endpoints_with_modules = ['settings', 'role', 'project_update', 'workflow_approval']

 # If a module should not be created for an endpoint and the endpoint is not read-only add it here
 # THINK HARD ABOUT DOING THIS
@@ -95,6 +95,22 @@
       - results is failed
       - "'In rule 2 end_on must either be an integer or in the format YYYY-MM-DD [HH:MM:SS]' in results.msg"

+- name: Every Monday
+  set_fact:
+    complex_rule: "{{ query(ruleset_plugin_name, '2022-04-30 10:30:45', rules=rrules, timezone='UTC' ) }}"
+  ignore_errors: True
+  register: results
+  vars:
+    rrules:
+      - frequency: 'day'
+        interval: 1
+        byweekday: 'monday'
+
+- assert:
+    that:
+      - results is success
+      - "'DTSTART;TZID=UTC:20220430T103045 RRULE:FREQ=DAILY;BYDAY=MO;INTERVAL=1' == complex_rule"
+
+
 - name: call rruleset with an invalid byweekday
   set_fact:
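The asserted DTSTART/RRULE string is standard iCalendar recurrence syntax; python-dateutil can parse and expand it, which is a quick way to sanity-check what the lookup builds (an assumption here: the rruleset lookup emits dateutil-compatible text; the TZID qualifier is omitted in the sketch for brevity):

```python
# Sanity check of the asserted rule string using python-dateutil; TZID is
# dropped here to keep the sketch timezone-free.
from dateutil.rrule import rrulestr

rule = rrulestr('DTSTART:20220430T103045\nRRULE:FREQ=DAILY;BYDAY=MO;INTERVAL=1')
first = next(iter(rule))
print(first)  # the first Monday on/after 2022-04-30, at 10:30:45
```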
@@ -0,0 +1,57 @@
+---
+- name: Generate a random string for names
+  set_fact:
+    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
+    test_prefix: AWX-Collection-tests-workflow_approval
+
+- name: Generate random names for test objects
+  set_fact:
+    org_name: "{{ test_prefix }}-org-{{ test_id }}"
+    approval_node_name: "{{ test_prefix }}-node-{{ test_id }}"
+    wfjt_name: "{{ test_prefix }}-wfjt-{{ test_id }}"
+
+- block:
+    - name: Create a new organization for test isolation
+      organization:
+        name: "{{ org_name }}"
+
+    - name: Create a workflow job template
+      workflow_job_template:
+        name: "{{ wfjt_name }}"
+        organization: "{{ org_name }}"
+
+    - name: Create approval node
+      workflow_job_template_node:
+        identifier: approval_test
+        approval_node:
+          name: "{{ approval_node_name }}"  # Referenced later on
+          timeout: 900
+        workflow: "{{ wfjt_name }}"
+
+    # Launch and approve the workflow
+    - name: Launch the workflow
+      workflow_launch:
+        workflow_template: "{{ wfjt_name }}"
+        wait: False
+      register: workflow_job
+
+    - name: Wait for approval node to activate and approve
+      workflow_approval:
+        workflow_job_id: "{{ workflow_job.id }}"
+        name: "{{ approval_node_name }}"
+        interval: 10
+        timeout: 20
+        action: approve
+      register: result
+
+    - assert:
+        that:
+          - "result is changed"
+          - "result is not failed"
+
+  always:
+    - name: Delete the workflow job template
+      workflow_job_template:
+        name: "{{ wfjt_name }}"
+        state: absent
+      ignore_errors: True
@@ -493,6 +493,7 @@
   workflow_job_template:
     name: "copy_{{ wfjt_name }}"
     organization: Default
+    ask_inventory_on_launch: true
     survey_spec:
       name: Basic Survey
       description: Basic Survey
@@ -737,6 +738,10 @@
             timeout: 23
             execution_environment:
               name: "{{ ee1 }}"
+            inventory:
+              name: Test inventory
+              organization:
+                name: Default
             related:
               credentials:
                 - name: "{{ scm_cred_name }}"
@@ -75,7 +75,8 @@ In the root of awx-operator:
     -e image_version=devel \
     -e image_pull_policy=Always \
     -e service_type=nodeport \
-    -e namespace=awx
+    -e namespace=awx \
+    -e nodeport_port=30080
 ```
 Check the operator with the following commands:
@@ -199,11 +199,11 @@ ADD tools/ansible/roles/dockerfile/files/rsyslog.conf /var/lib/awx/rsyslog/rsysl
 ADD tools/ansible/roles/dockerfile/files/wait-for-migrations /usr/local/bin/wait-for-migrations
 ADD tools/ansible/roles/dockerfile/files/stop-supervisor /usr/local/bin/stop-supervisor

+ADD tools/ansible/roles/dockerfile/files/uwsgi.ini /etc/tower/uwsgi.ini
+
 ## File mappings
 {% if build_dev|bool %}
 ADD tools/docker-compose/launch_awx.sh /usr/bin/launch_awx.sh
 ADD tools/docker-compose/nginx.conf /etc/nginx/nginx.conf
 ADD tools/docker-compose/nginx.vh.default.conf /etc/nginx/conf.d/nginx.vh.default.conf
 ADD tools/docker-compose/start_tests.sh /start_tests.sh
 ADD tools/docker-compose/bootstrap_development.sh /usr/bin/bootstrap_development.sh
 ADD tools/docker-compose/entrypoint.sh /entrypoint.sh
@@ -213,7 +213,6 @@ ADD https://raw.githubusercontent.com/containers/libpod/master/contrib/podmanima
 {% else %}
 ADD tools/ansible/roles/dockerfile/files/launch_awx.sh /usr/bin/launch_awx.sh
 ADD tools/ansible/roles/dockerfile/files/launch_awx_task.sh /usr/bin/launch_awx_task.sh
-ADD tools/ansible/roles/dockerfile/files/uwsgi.ini /etc/tower/uwsgi.ini
 ADD {{ template_dest }}/supervisor.conf /etc/supervisord.conf
 ADD {{ template_dest }}/supervisor_task.conf /etc/supervisord_task.conf
 {% endif %}
@@ -30,8 +30,8 @@ environment =
    DEV_RELOAD_COMMAND='supervisorctl -c /etc/supervisord_task.conf restart all; supervisorctl restart tower-processes:daphne tower-processes:wsbroadcast'
{% else %}
command = /var/lib/awx/venv/awx/bin/uwsgi /etc/tower/uwsgi.ini
directory = /var/lib/awx
{% endif %}
directory = /var/lib/awx
autorestart = true
startsecs = 30
stopasgroup=true
@@ -9,6 +9,7 @@ control_plane_node_count: 1
minikube_container_group: false
receptor_socket_file: /var/run/awx-receptor/receptor.sock
receptor_image: quay.io/ansible/receptor:devel
ingress_path: /

# Keys for signing work
receptor_rsa_bits: 4096
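`receptor_rsa_bits` sizes the keypair used to sign work submissions. A minimal generation sketch, assuming the community.crypto collection is available and using a hypothetical output path:

```
- name: Generate the work-signing RSA private key (sketch)
  community.crypto.openssl_privatekey:
    path: /etc/receptor/work-private-key.pem  # hypothetical path
    size: "{{ receptor_rsa_bits }}"
```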
@@ -49,18 +49,11 @@
    mode: '0600'
  with_items:
    - "database.py"
    - "local_settings.py"
    - "websocket_secret.py"
    - "haproxy.cfg"

- name: Delete old local_settings.py
  file:
    path: "{{ playbook_dir }}/../../../awx/settings/local_settings.py"
    state: absent

- name: Copy local_settings.py
  copy:
    src: "local_settings.py"
    dest: "{{ sources_dest }}/local_settings.py"
    - "nginx.conf"
    - "nginx.locations.conf"

- name: Get OS info for sdb
  shell: |
@@ -24,6 +24,7 @@ services:
      EXECUTION_NODE_COUNT: {{ execution_node_count|int }}
      AWX_LOGGING_MODE: stdout
      DJANGO_SUPERUSER_PASSWORD: {{ admin_password }}
      UWSGI_MOUNT_PATH: {{ ingress_path }}
{% if loop.index == 1 %}
      RUN_MIGRATIONS: 1
{% endif %}
@@ -40,6 +41,8 @@ services:
      - "../../docker-compose/_sources/database.py:/etc/tower/conf.d/database.py"
      - "../../docker-compose/_sources/websocket_secret.py:/etc/tower/conf.d/websocket_secret.py"
      - "../../docker-compose/_sources/local_settings.py:/etc/tower/conf.d/local_settings.py"
      - "../../docker-compose/_sources/nginx.conf:/etc/nginx/nginx.conf"
      - "../../docker-compose/_sources/nginx.locations.conf:/etc/nginx/conf.d/nginx.locations.conf"
      - "../../docker-compose/_sources/SECRET_KEY:/etc/tower/SECRET_KEY"
      - "../../docker-compose/_sources/receptor/receptor-awx-{{ loop.index }}.conf:/etc/receptor/receptor.conf"
      - "../../docker-compose/_sources/receptor/receptor-awx-{{ loop.index }}.conf.lock:/etc/receptor/receptor.conf.lock"
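Rendered, the `loop.index == 1` guard means only the first awx service runs migrations. A sketch of the resulting environment block for node 1, with illustrative values for the templated variables:

```
environment:
  EXECUTION_NODE_COUNT: 0              # illustrative
  AWX_LOGGING_MODE: stdout
  DJANGO_SUPERUSER_PASSWORD: password  # illustrative admin_password
  UWSGI_MOUNT_PATH: /
  RUN_MIGRATIONS: 1                    # omitted when loop.index > 1
```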
@@ -46,3 +46,5 @@ SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
BROADCAST_WEBSOCKET_PORT = 8013
BROADCAST_WEBSOCKET_VERIFY_CERT = False
BROADCAST_WEBSOCKET_PROTOCOL = 'http'

STATIC_URL = '{{ (ingress_path + '/static/').replace('//', '/') }}'
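The `replace('//', '/')` guard only matters when `ingress_path` is `/`; otherwise the concatenation is already clean. A debug sketch showing both renderings:

```
- name: Show how STATIC_URL collapses the doubled slash (sketch)
  ansible.builtin.debug:
    msg:
      - "{{ ('/' + '/static/') | replace('//', '/') }}"     # -> /static/
      - "{{ ('/awx' + '/static/') | replace('//', '/') }}"  # -> /awx/static/
```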
@@ -1,8 +1,7 @@
#user awx;

worker_processes 1;

pid /tmp/nginx.pid;
error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
    worker_connections 1024;
@@ -17,7 +16,7 @@ http {
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /dev/stdout main;
    access_log /var/log/nginx/access.log main;

    map $http_upgrade $connection_upgrade {
        default upgrade;
@@ -25,41 +24,17 @@ http {
    }

    sendfile on;
    #tcp_nopush on;
    #gzip on;

    upstream uwsgi {
        server 127.0.0.1:8050;
    }
        server localhost:8050;
    }

    upstream daphne {
        server 127.0.0.1:8051;
        server localhost:8051;
    }

{% if ssl_certificate is defined %}
    server {
        listen 8052 default_server;
        server_name _;

        # Redirect all HTTP links to the matching HTTPS page
        return 301 https://$host$request_uri;
    }
{% endif %}

    server {
{% if (ssl_certificate is defined) and (ssl_certificate_key is defined) %}
        listen 8053 ssl;

        ssl_certificate /etc/nginx/awxweb.pem;
        ssl_certificate_key /etc/nginx/awxweb_key.pem;
{% elif (ssl_certificate is defined) and (ssl_certificate_key is not defined) %}
        listen 8053 ssl;

        ssl_certificate /etc/nginx/awxweb.pem;
        ssl_certificate_key /etc/nginx/awxweb.pem;
{% else %}
        listen 8052 default_server;
{% endif %}
        listen 8013 default_server;

        # If you have a domain name, this is where to add it
        server_name _;
@@ -67,56 +42,35 @@ http {

        # HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
        add_header Strict-Transport-Security max-age=15768000;
        add_header X-Content-Type-Options nosniff;

        # Protect against click-jacking https://www.owasp.org/index.php/Testing_for_Clickjacking_(OTG-CLIENT-009)
        add_header X-Frame-Options "DENY";
        include /etc/nginx/conf.d/*.conf;
    }

        location /nginx_status {
            stub_status on;
            access_log off;
            allow 127.0.0.1;
            deny all;
        }
    server {
        listen 8043 default_server ssl;

        location /static/ {
            alias /var/lib/awx/public/static/;
        }
        # If you have a domain name, this is where to add it
        server_name _;
        keepalive_timeout 65;

        location /favicon.ico { alias /var/lib/awx/public/static/favicon.ico; }
        ssl_certificate /etc/nginx/nginx.crt;
        ssl_certificate_key /etc/nginx/nginx.key;

        location /websocket {
            # Pass request to the upstream alias
            proxy_pass http://daphne;
            # Require http version 1.1 to allow for upgrade requests
            proxy_http_version 1.1;
            # We want proxy_buffering off for proxying to websockets.
            proxy_buffering off;
            # http://en.wikipedia.org/wiki/X-Forwarded-For
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            # enable this if you use HTTPS:
            proxy_set_header X-Forwarded-Proto https;
            # pass the Host: header from the client for the sake of redirects
            proxy_set_header Host $http_host;
            # We've set the Host header, so we don't need Nginx to muddle
            # about with redirects
            proxy_redirect off;
            # Depending on the request value, set the Upgrade and
            # connection headers
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection $connection_upgrade;
        }
        ssl_session_timeout 1d;
        ssl_session_cache shared:SSL:50m;
        ssl_session_tickets off;

        location / {
            # Add trailing / if missing
            rewrite ^(.*)$http_host(.*[^/])$ $1$http_host$2/ permanent;
            uwsgi_read_timeout 120s;
            uwsgi_pass uwsgi;
            include /etc/nginx/uwsgi_params;
{%- if extra_nginx_include is defined %}
            include {{ extra_nginx_include }};
{%- endif %}
            proxy_set_header X-Forwarded-Port 443;
            uwsgi_param HTTP_X_FORWARDED_PORT 443;
        }
        # intermediate configuration. tweak to your needs.
        ssl_protocols TLSv1.2;
        ssl_ciphers 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256';
        ssl_prefer_server_ciphers on;

        # HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
        add_header Strict-Transport-Security max-age=15768000;
        add_header X-Content-Type-Options nosniff;


        include /etc/nginx/conf.d/*.conf;
    }
}
@@ -0,0 +1,37 @@
location {{ (ingress_path + '/static').replace('//', '/') }} {
    alias /var/lib/awx/public/static/;
}

location {{ (ingress_path + '/favicon.ico').replace('//', '/') }} {
    alias /awx_devel/awx/public/static/favicon.ico;
}

location {{ (ingress_path + '/websocket').replace('//', '/') }} {
    # Pass request to the upstream alias
    proxy_pass http://daphne;
    # Require http version 1.1 to allow for upgrade requests
    proxy_http_version 1.1;
    # We want proxy_buffering off for proxying to websockets.
    proxy_buffering off;
    # http://en.wikipedia.org/wiki/X-Forwarded-For
    proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    # enable this if you use HTTPS:
    proxy_set_header X-Forwarded-Proto https;
    # pass the Host: header from the client for the sake of redirects
    proxy_set_header Host $http_host;
    # We've set the Host header, so we don't need Nginx to muddle
    # about with redirects
    proxy_redirect off;
    # Depending on the request value, set the Upgrade and
    # connection headers
    proxy_set_header Upgrade $http_upgrade;
    proxy_set_header Connection $connection_upgrade;
}

location {{ ingress_path }} {
    # Add trailing / if missing
    rewrite ^(.*[^/])$ $1/ permanent;
    uwsgi_read_timeout 120s;
    uwsgi_pass uwsgi;
    include /etc/nginx/uwsgi_params;
}
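The rewritten nginx.conf pulls this file in via its `include /etc/nginx/conf.d/*.conf;` inside the server block, so rendering it is what wires a sub-path install together. A minimal sketch, assuming the template source name used in this change:

```
- name: Render the location blocks for a sub-path install (sketch)
  ansible.builtin.template:
    src: nginx.locations.conf.j2
    dest: /etc/nginx/conf.d/nginx.locations.conf
  vars:
    ingress_path: /awx  # example sub-path
```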
@@ -1,31 +0,0 @@
worker_processes 1;

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;

events {
    worker_connections 1024;
}

http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;
    server_tokens off;

    log_format main '$remote_addr - $remote_user [$time_local] "$request" '
                    '$status $body_bytes_sent "$http_referer" '
                    '"$http_user_agent" "$http_x_forwarded_for"';

    access_log /var/log/nginx/access.log main;

    map $http_upgrade $connection_upgrade {
        default upgrade;
        '' close;
    }

    sendfile on;
    #tcp_nopush on;
    #gzip on;

    include /etc/nginx/conf.d/*.conf;
}
@@ -1,124 +0,0 @@
upstream uwsgi {
    server localhost:8050;
}

upstream daphne {
    server localhost:8051;
}

# server {
#     listen 8013 default_server;
#     listen [::]:8013 default_server;
#     server_name _;
#     return 301 https://$host:8043$request_uri;
# }

server {
    listen 8013 default_server;

    # If you have a domain name, this is where to add it
    server_name _;
    keepalive_timeout 65;

    # HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
    add_header Strict-Transport-Security max-age=15768000;
    add_header X-Content-Type-Options nosniff;

    location /static/ {
        alias /var/lib/awx/public/static/;
    }

    location /favicon.ico { alias /awx_devel/awx/public/static/favicon.ico; }

    location ~ ^/websocket {
        # Pass request to the upstream alias
        proxy_pass http://daphne;
        # Require http version 1.1 to allow for upgrade requests
        proxy_http_version 1.1;
        # We want proxy_buffering off for proxying to websockets.
        proxy_buffering off;
        # http://en.wikipedia.org/wiki/X-Forwarded-For
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        # enable this if you use HTTPS:
        proxy_set_header X-Forwarded-Proto https;
        # pass the Host: header from the client for the sake of redirects
        proxy_set_header Host $http_host;
        # We've set the Host header, so we don't need Nginx to muddle
        # about with redirects
        proxy_redirect off;
        # Depending on the request value, set the Upgrade and
        # connection headers
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection $connection_upgrade;
    }

    location / {
        # Add trailing / if missing
        rewrite ^(.*[^/])$ $1/ permanent;
        uwsgi_read_timeout 120s;
        uwsgi_pass uwsgi;
        include /etc/nginx/uwsgi_params;
    }
}

server {
    listen 8043 default_server ssl;

    # If you have a domain name, this is where to add it
    server_name _;
    keepalive_timeout 65;

    ssl_certificate /etc/nginx/nginx.crt;
    ssl_certificate_key /etc/nginx/nginx.key;

    ssl_session_timeout 1d;
    ssl_session_cache shared:SSL:50m;
    ssl_session_tickets off;

    # intermediate configuration. tweak to your needs.
    ssl_protocols TLSv1.2;
    ssl_ciphers 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256';
    ssl_prefer_server_ciphers on;

    # HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
    add_header Strict-Transport-Security max-age=15768000;
    add_header X-Content-Type-Options nosniff;

    location /static/ {
        alias /var/lib/awx/public/static/;
        access_log off;
        sendfile off;
    }

    location /favicon.ico { alias /awx_devel/awx/public/static/favicon.ico; }

    location ~ ^/websocket {
        # Pass request to the upstream alias
        proxy_pass http://daphne;
        # Require http version 1.1 to allow for upgrade requests
        proxy_http_version 1.1;
        # We want proxy_buffering off for proxying to websockets.
        proxy_buffering off;
        # http://en.wikipedia.org/wiki/X-Forwarded-For
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        # enable this if you use HTTPS:
        proxy_set_header X-Forwarded-Proto https;
        # pass the Host: header from the client for the sake of redirects
        proxy_set_header Host $http_host;
        # We've set the Host header, so we don't need Nginx to muddle
        # about with redirects
        proxy_redirect off;
        # Depending on the request value, set the Upgrade and
        # connection headers
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection $connection_upgrade;
    }

    location / {
        # Add trailing / if missing
        rewrite ^(.*[^/])$ $1/ permanent;
        uwsgi_read_timeout 120s;
        uwsgi_pass uwsgi;
        include /etc/nginx/uwsgi_params;
    }
}