Mirror of https://github.com/ansible/awx.git (synced 2026-02-06 12:04:44 -03:30)

Compare commits: constructe ... 21.13.0 (55 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | f0c967c1b2 |  |
|  | 2ca0b7bc01 |  |
|  | 1411d11a0e |  |
|  | 2fe1ea94bd |  |
|  | a47cfc55ab |  |
|  | 0eb9de02f3 |  |
|  | 39ee4285ce |  |
|  | 2dcda04a9e |  |
|  | 52d46c88e4 |  |
|  | c2df22e0f0 |  |
|  | cf21eab7f4 |  |
|  | 98b2f51c18 |  |
|  | 327352feaf |  |
|  | ccaace8b30 |  |
|  | 2902b40084 |  |
|  | 9669b9dd2f |  |
|  | d27aada817 |  |
|  | 2fca07ee4c |  |
|  | 335ac636b5 |  |
|  | f4bcc03ac7 |  |
|  | 3051384f95 |  |
|  | 811ecb8673 |  |
|  | 5e28f5dca1 |  |
|  | d088d36448 |  |
|  | 89e41597a6 |  |
|  | 283adc30a8 |  |
|  | 019e6a52fe |  |
|  | 35e5610642 |  |
|  | 3a303875bb |  |
|  | 4499a50019 |  |
|  | 3fe46e2e27 |  |
|  | 6d3f39fe92 |  |
|  | a3233b5fdd |  |
|  | fe3aa6ce2b |  |
|  | 77ec46f6cf |  |
|  | b5f240ce70 |  |
|  | fb2647ff7b |  |
|  | 35fbb94aa6 |  |
|  | f2ab8d637c |  |
|  | 166b586591 |  |
|  | d1c608a281 |  |
|  | b4803ca894 |  |
|  | ce7f597c7e |  |
|  | 23a34c5dc9 |  |
|  | bef3da6fb2 |  |
|  | 7f50679e68 |  |
|  | 52d071f9d1 |  |
|  | 26a888547d |  |
|  | 951eee944c |  |
|  | 4630757f5f |  |
|  | eb9431ee1f |  |
|  | fd6605932a |  |
|  | 5d96ee084d |  |
|  | e2cee10767 |  |
|  | 31c2e1a450 |  |
1 .github/workflows/promote.yml (vendored)
@@ -10,6 +10,7 @@ on:

jobs:
  promote:
    if: endsWith(github.repository, '/awx')
    runs-on: ubuntu-latest
    steps:
      - name: Checkout awx
1 .github/workflows/stage.yml (vendored)
@@ -21,6 +21,7 @@ on:

jobs:
  stage:
    if: endsWith(github.repository, '/awx')
    runs-on: ubuntu-latest
    permissions:
      packages: write
31 Makefile
@@ -1,4 +1,5 @@

PYTHON ?= python3.9
DOCKER_COMPOSE ?= docker-compose
OFFICIAL ?= no
NODE ?= node
NPM_BIN ?= npm

@@ -203,19 +204,7 @@ uwsgi: collectstatic

	@if [ "$(VENV_BASE)" ]; then \
	. $(VENV_BASE)/awx/bin/activate; \
	fi; \
	uwsgi -b 32768 \
	--socket 127.0.0.1:8050 \
	--module=awx.wsgi:application \
	--home=/var/lib/awx/venv/awx \
	--chdir=/awx_devel/ \
	--vacuum \
	--processes=5 \
	--harakiri=120 --master \
	--no-orphans \
	--max-requests=1000 \
	--stats /tmp/stats.socket \
	--lazy-apps \
	--logformat "%(addr) %(method) %(uri) - %(proto) %(status)"
	uwsgi /etc/tower/uwsgi.ini

awx-autoreload:
	@/awx_devel/tools/docker-compose/awx-autoreload /awx_devel/awx "$(DEV_RELOAD_COMMAND)"

@@ -509,20 +498,20 @@ docker-compose-sources: .git/hooks/pre-commit

docker-compose: awx/projects docker-compose-sources
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans

docker-compose-credential-plugins: awx/projects docker-compose-sources
	echo -e "\033[0;31mTo generate a CyberArk Conjur API key: docker exec -it tools_conjur_1 conjurctl account create quick-start\033[0m"
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/docker-credential-plugins-override.yml up --no-recreate awx_1 --remove-orphans

docker-compose-test: awx/projects docker-compose-sources
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /bin/bash

docker-compose-runtest: awx/projects docker-compose-sources
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports awx_1 /start_tests.sh

docker-compose-build-swagger: awx/projects docker-compose-sources
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml run --rm --service-ports --no-deps awx_1 /start_tests.sh swagger

SCHEMA_DIFF_BASE_BRANCH ?= devel
detect-schema-change: genschema

@@ -531,7 +520,7 @@ detect-schema-change: genschema

	diff -u -b reference-schema.json schema.json

docker-compose-clean: awx/projects
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml rm -sf
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml rm -sf

docker-compose-container-group-clean:
	@if [ -f "tools/docker-compose-minikube/_sources/minikube" ]; then \

@@ -559,10 +548,10 @@ docker-refresh: docker-clean docker-compose

## Docker Development Environment with Elastic Stack Connected
docker-compose-elk: awx/projects docker-compose-sources
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

docker-compose-cluster-elk: awx/projects docker-compose-sources
	docker-compose -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate
	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml -f tools/elastic/docker-compose.logstash-link-cluster.yml -f tools/elastic/docker-compose.elastic-override.yml up --no-recreate

docker-compose-container-group:
	MINIKUBE_CONTAINER_GROUP=true make docker-compose
@@ -1,5 +1,4 @@

# Django
from django.conf import settings
from django.utils.translation import gettext_lazy as _

# Django REST Framework

@@ -9,6 +8,7 @@ from rest_framework import serializers

from awx.conf import fields, register, register_validate
from awx.api.fields import OAuth2ProviderField
from oauth2_provider.settings import oauth2_settings
from awx.sso.common import is_remote_auth_enabled


register(

@@ -108,19 +108,8 @@ register(


def authentication_validate(serializer, attrs):
    remote_auth_settings = [
        'AUTH_LDAP_SERVER_URI',
        'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY',
        'SOCIAL_AUTH_GITHUB_KEY',
        'SOCIAL_AUTH_GITHUB_ORG_KEY',
        'SOCIAL_AUTH_GITHUB_TEAM_KEY',
        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
        'RADIUS_SERVER',
        'TACACSPLUS_HOST',
    ]
    if attrs.get('DISABLE_LOCAL_AUTH', False):
        if not any(getattr(settings, s, None) for s in remote_auth_settings):
            raise serializers.ValidationError(_("There are no remote authentication systems configured."))
    if attrs.get('DISABLE_LOCAL_AUTH', False) and not is_remote_auth_enabled():
        raise serializers.ValidationError(_("There are no remote authentication systems configured."))
    return attrs
@@ -155,7 +155,7 @@ class FieldLookupBackend(BaseFilterBackend):

        'search',
    )

    # A list of fields that we know can be filtered on without the possiblity
    # A list of fields that we know can be filtered on without the possibility
    # of introducing duplicates
    NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)

@@ -268,7 +268,7 @@ class FieldLookupBackend(BaseFilterBackend):

                continue

            # HACK: make `created` available via API for the Django User ORM model
            # so it keep compatiblity with other objects which exposes the `created` attr.
            # so it keep compatibility with other objects which exposes the `created` attr.
            if queryset.model._meta.object_name == 'User' and key.startswith('created'):
                key = key.replace('created', 'date_joined')
@@ -28,7 +28,7 @@ from rest_framework import generics

from rest_framework.response import Response
from rest_framework import status
from rest_framework import views
from rest_framework.permissions import AllowAny
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import StaticHTMLRenderer
from rest_framework.negotiation import DefaultContentNegotiation

@@ -674,7 +674,7 @@ class SubListCreateAttachDetachAPIView(SubListCreateAPIView):

        location = None
        created = True

        # Retrive the sub object (whether created or by ID).
        # Retrieve the sub object (whether created or by ID).
        sub = get_object_or_400(self.model, pk=sub_id)

        # Verify we have permission to attach.

@@ -822,7 +822,7 @@ def trigger_delayed_deep_copy(*args, **kwargs):

class CopyAPIView(GenericAPIView):
    serializer_class = CopySerializer
    permission_classes = (AllowAny,)
    permission_classes = (IsAuthenticated,)
    copy_return_serializer_class = None
    new_in_330 = True
    new_in_api_v2 = True
@@ -60,7 +60,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):

            delattr(renderer_context['view'], '_request')

    def get_raw_data_form(self, data, view, method, request):
        # Set a flag on the view to indiciate to the view/serializer that we're
        # Set a flag on the view to indicate to the view/serializer that we're
        # creating a raw data form for the browsable API. Store the original
        # request method to determine how to populate the raw data form.
        if request.method in {'OPTIONS', 'DELETE'}:
@@ -108,7 +108,6 @@ from awx.main.utils import (
|
||||
extract_ansible_vars,
|
||||
encrypt_dict,
|
||||
prefetch_page_capabilities,
|
||||
get_external_account,
|
||||
truncate_stdout,
|
||||
)
|
||||
from awx.main.utils.filters import SmartFilter
|
||||
@@ -124,6 +123,8 @@ from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, Ver
|
||||
# AWX Utils
|
||||
from awx.api.validators import HostnameRegexValidator
|
||||
|
||||
from awx.sso.common import get_external_account
|
||||
|
||||
logger = logging.getLogger('awx.api.serializers')
|
||||
|
||||
# Fields that should be summarized regardless of object type.
|
||||
@@ -536,7 +537,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
|
||||
#
|
||||
# This logic is to force rendering choice's on an uneditable field.
|
||||
# Note: Consider expanding this rendering for more than just choices fields
|
||||
# Note: This logic works in conjuction with
|
||||
# Note: This logic works in conjunction with
|
||||
if hasattr(model_field, 'choices') and model_field.choices:
|
||||
was_editable = model_field.editable
|
||||
model_field.editable = True
|
||||
@@ -987,23 +988,8 @@ class UserSerializer(BaseSerializer):
|
||||
def _update_password(self, obj, new_password):
|
||||
# For now we're not raising an error, just not saving password for
|
||||
# users managed by LDAP who already have an unusable password set.
|
||||
if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
|
||||
try:
|
||||
if obj.pk and obj.profile.ldap_dn and not obj.has_usable_password():
|
||||
new_password = None
|
||||
except AttributeError:
|
||||
pass
|
||||
if (
|
||||
getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
|
||||
or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
|
||||
or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
|
||||
or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
|
||||
or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
|
||||
) and obj.social_auth.all():
|
||||
new_password = None
|
||||
if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and obj.enterprise_auth.all():
|
||||
new_password = None
|
||||
if new_password:
|
||||
# Get external password will return something like ldap or enterprise or None if the user isn't external. We only want to allow a password update for a None option
|
||||
if new_password and not self.get_external_account(obj):
|
||||
obj.set_password(new_password)
|
||||
obj.save(update_fields=['password'])
|
||||
|
||||
@@ -3997,7 +3983,7 @@ class JobEventSerializer(BaseSerializer):
|
||||
# Show full stdout for playbook_on_* events.
|
||||
if obj and obj.event.startswith('playbook_on'):
|
||||
return data
|
||||
# If the view logic says to not trunctate (request was to the detail view or a param was used)
|
||||
# If the view logic says to not truncate (request was to the detail view or a param was used)
|
||||
if self.context.get('no_truncate', False):
|
||||
return data
|
||||
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
|
||||
@@ -4028,7 +4014,7 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
|
||||
# raw SCM URLs in their stdout (which *could* contain passwords)
|
||||
# attempt to detect and filter HTTP basic auth passwords in the stdout
|
||||
# of these types of events
|
||||
if obj.event_data.get('task_action') in ('git', 'svn'):
|
||||
if obj.event_data.get('task_action') in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
|
||||
try:
|
||||
return json.loads(UriCleaner.remove_sensitive(json.dumps(obj.event_data)))
|
||||
except Exception:
|
||||
@@ -4072,7 +4058,7 @@ class AdHocCommandEventSerializer(BaseSerializer):
|
||||
|
||||
def to_representation(self, obj):
|
||||
data = super(AdHocCommandEventSerializer, self).to_representation(obj)
|
||||
# If the view logic says to not trunctate (request was to the detail view or a param was used)
|
||||
# If the view logic says to not truncate (request was to the detail view or a param was used)
|
||||
if self.context.get('no_truncate', False):
|
||||
return data
|
||||
max_bytes = settings.EVENT_STDOUT_MAX_BYTES_DISPLAY
|
||||
@@ -4765,7 +4751,7 @@ class ScheduleSerializer(LaunchConfigurationBaseSerializer, SchedulePreviewSeria
|
||||
),
|
||||
)
|
||||
until = serializers.SerializerMethodField(
|
||||
help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an emptry string will be returned'),
|
||||
help_text=_('The date this schedule will end. This field is computed from the RRULE. If the schedule does not end an empty string will be returned'),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -3,7 +3,7 @@ Make a GET request to this resource to retrieve aggregate statistics about inven

Including fetching the number of total hosts tracked by Tower over an amount of time and the current success or
failed status of hosts which have run jobs within an Inventory.

## Parmeters and Filtering
## Parameters and Filtering

The `period` of the data can be adjusted with:

@@ -24,7 +24,7 @@ Data about the number of hosts will be returned in the following format:

Each element contains an epoch timestamp represented in seconds and a numerical value indicating
the number of hosts that exist at a given moment

Data about failed and successfull hosts by inventory will be given as:
Data about failed and successful hosts by inventory will be given as:

    {
        "sources": [

@@ -2,7 +2,7 @@

Make a GET request to this resource to retrieve aggregate statistics about job runs suitable for graphing.

## Parmeters and Filtering
## Parameters and Filtering

The `period` of the data can be adjusted with:

@@ -18,7 +18,7 @@ inventory sources:

* `inventory_update`: ID of the inventory update job that was started.
  (integer, read-only)
* `project_update`: ID of the project update job that was started if this inventory source is an SCM source.
  (interger, read-only, optional)
  (integer, read-only, optional)

Note: All manual inventory sources (source="") will be ignored by the update_inventory_sources endpoint. This endpoint will not update inventory sources for Smart Inventories.
@@ -152,7 +152,7 @@ def api_exception_handler(exc, context):

    if 'awx.named_url_rewritten' in req.environ and not str(getattr(exc, 'status_code', 0)).startswith('2'):
        # if the URL was rewritten, and it's not a 2xx level status code,
        # revert the request.path to its original value to avoid leaking
        # any context about the existance of resources
        # any context about the existence of resources
        req.path = req.environ['awx.named_url_rewritten']
    if exc.status_code == 403:
        exc = NotFound(detail=_('Not found.'))

@@ -172,7 +172,7 @@ class DashboardView(APIView):

        user_inventory = get_user_queryset(request.user, models.Inventory)
        inventory_with_failed_hosts = user_inventory.filter(hosts_with_active_failures__gt=0)
        user_inventory_external = user_inventory.filter(has_inventory_sources=True)
        # if there are *zero* inventories, this aggregrate query will be None, fall back to 0
        # if there are *zero* inventories, this aggregate query will be None, fall back to 0
        failed_inventory = user_inventory.aggregate(Sum('inventory_sources_with_failures'))['inventory_sources_with_failures__sum'] or 0
        data['inventories'] = {
            'url': reverse('api:inventory_list', request=request),

@@ -1667,7 +1667,7 @@ class GroupList(ListCreateAPIView):

class EnforceParentRelationshipMixin(object):
    """
    Useful when you have a self-refering ManyToManyRelationship.
    Useful when you have a self-referring ManyToManyRelationship.
    * Tower uses a shallow (2-deep only) url pattern. For example:

    When an object hangs off of a parent object you would have the url of the

@@ -2415,7 +2415,7 @@ class JobTemplateSurveySpec(GenericAPIView):

                status=status.HTTP_400_BAD_REQUEST,
            )
        # if it's a multiselect or multiple choice, it must have coices listed
        # choices and defualts must come in as strings seperated by /n characters.
        # choices and defaults must come in as strings separated by /n characters.
        if qtype == 'multiselect' or qtype == 'multiplechoice':
            if 'choices' in survey_item:
                if isinstance(survey_item['choices'], str):

@@ -3430,7 +3430,7 @@ class JobCreateSchedule(RetrieveAPIView):

        config = obj.launch_config

        # Make up a name for the schedule, guarentee that it is unique
        # Make up a name for the schedule, guarantee that it is unique
        name = 'Auto-generated schedule from job {}'.format(obj.id)
        existing_names = models.Schedule.objects.filter(name__startswith=name).values_list('name', flat=True)
        if name in existing_names:

@@ -3621,7 +3621,7 @@ class JobJobEventsChildrenSummary(APIView):

        # key is counter of meta events (i.e. verbose), value is uuid of the assigned parent
        map_meta_counter_nested_uuid = {}

        # collapsable tree view in the UI only makes sense for tree-like
        # collapsible tree view in the UI only makes sense for tree-like
        # hierarchy. If ansible is ran with a strategy like free or host_pinned, then
        # events can be out of sequential order, and no longer follow a tree structure
        # E1

@@ -4288,7 +4288,7 @@ class WorkflowApprovalTemplateJobsList(SubListAPIView):

    parent_key = 'workflow_approval_template'


class WorkflowApprovalList(ListCreateAPIView):
class WorkflowApprovalList(ListAPIView):
    model = models.WorkflowApproval
    serializer_class = serializers.WorkflowApprovalListSerializer
@@ -21,7 +21,7 @@ logger = logging.getLogger('awx.conf.fields')

# Use DRF fields to convert/validate settings:
# - to_representation(obj) should convert a native Python object to a primitive
# serializable type. This primitive type will be what is presented in the API
# and stored in the JSON field in the datbase.
# and stored in the JSON field in the database.
# - to_internal_value(data) should convert the primitive type back into the
# appropriate Python type to be used in settings.
@@ -1,7 +1,11 @@

import inspect

from django.conf import settings
from django.utils.timezone import now

import logging


logger = logging.getLogger('awx.conf.migrations')


def fill_ldap_group_type_params(apps, schema_editor):

@@ -15,7 +19,7 @@ def fill_ldap_group_type_params(apps, schema_editor):

        entry = qs[0]
        group_type_params = entry.value
    else:
        entry = Setting(key='AUTH_LDAP_GROUP_TYPE_PARAMS', value=group_type_params, created=now(), modified=now())
        return  # for new installs we prefer to use the default value

    init_attrs = set(inspect.getfullargspec(group_type.__init__).args[1:])
    for k in list(group_type_params.keys()):

@@ -23,4 +27,5 @@ def fill_ldap_group_type_params(apps, schema_editor):

            del group_type_params[k]

    entry.value = group_type_params
    logger.warning(f'Migration updating AUTH_LDAP_GROUP_TYPE_PARAMS with value {entry.value}')
    entry.save()
25 awx/conf/tests/functional/test_migrations.py (Normal file)
@@ -0,0 +1,25 @@

import pytest

from awx.conf.migrations._ldap_group_type import fill_ldap_group_type_params
from awx.conf.models import Setting

from django.apps import apps


@pytest.mark.django_db
def test_fill_group_type_params_no_op():
    fill_ldap_group_type_params(apps, 'dont-use-me')
    assert Setting.objects.count() == 0


@pytest.mark.django_db
def test_keep_old_setting_with_default_value():
    Setting.objects.create(key='AUTH_LDAP_GROUP_TYPE', value={'name_attr': 'cn', 'member_attr': 'member'})
    fill_ldap_group_type_params(apps, 'dont-use-me')
    assert Setting.objects.count() == 1
    s = Setting.objects.first()
    assert s.value == {'name_attr': 'cn', 'member_attr': 'member'}


# NOTE: would be good to test the removal of attributes by migration
# but this requires fighting with the validator and is not done here
@@ -180,7 +180,7 @@ class SettingLoggingTest(GenericAPIView):

            if not port:
                return Response({'error': 'Port required for ' + protocol}, status=status.HTTP_400_BAD_REQUEST)
        else:
            # if http/https by this point, domain is reacheable
            # if http/https by this point, domain is reachable
            return Response(status=status.HTTP_202_ACCEPTED)

        if protocol == 'udp':
@@ -1972,7 +1972,7 @@ msgid ""
|
||||
"HTTP headers and meta keys to search to determine remote host name or IP. "
|
||||
"Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if "
|
||||
"behind a reverse proxy. See the \"Proxy Support\" section of the "
|
||||
"Adminstrator guide for more details."
|
||||
"Administrator guide for more details."
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/conf.py:85
|
||||
@@ -2457,7 +2457,7 @@ msgid ""
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/conf.py:631
|
||||
msgid "Maximum disk persistance for external log aggregation (in GB)"
|
||||
msgid "Maximum disk persistence for external log aggregation (in GB)"
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/conf.py:633
|
||||
@@ -2548,7 +2548,7 @@ msgid "Enable"
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/constants.py:27
|
||||
msgid "Doas"
|
||||
msgid "Does"
|
||||
msgstr ""
|
||||
|
||||
#: awx/main/constants.py:28
|
||||
@@ -4801,7 +4801,7 @@ msgstr ""
|
||||
|
||||
#: awx/main/models/workflow.py:251
|
||||
msgid ""
|
||||
"An identifier coresponding to the workflow job template node that this node "
|
||||
"An identifier corresponding to the workflow job template node that this node "
|
||||
"was created from."
|
||||
msgstr ""
|
||||
|
||||
@@ -5521,7 +5521,7 @@ msgstr ""
|
||||
#: awx/sso/conf.py:606
|
||||
msgid ""
|
||||
"Extra arguments for Google OAuth2 login. You can restrict it to only allow a "
|
||||
"single domain to authenticate, even if the user is logged in with multple "
|
||||
"single domain to authenticate, even if the user is logged in with multiple "
|
||||
"Google accounts. Refer to the documentation for more detail."
|
||||
msgstr ""
|
||||
|
||||
@@ -5905,7 +5905,7 @@ msgstr ""
|
||||
|
||||
#: awx/sso/conf.py:1290
|
||||
msgid ""
|
||||
"Create a keypair to use as a service provider (SP) and include the "
|
||||
"Create a key pair to use as a service provider (SP) and include the "
|
||||
"certificate content here."
|
||||
msgstr ""
|
||||
|
||||
@@ -5915,7 +5915,7 @@ msgstr ""
|
||||
|
||||
#: awx/sso/conf.py:1302
|
||||
msgid ""
|
||||
"Create a keypair to use as a service provider (SP) and include the private "
|
||||
"Create a key pair to use as a service provider (SP) and include the private "
|
||||
"key content here."
|
||||
msgstr ""
|
||||
|
||||
|
||||
@@ -1971,7 +1971,7 @@ msgid ""
|
||||
"HTTP headers and meta keys to search to determine remote host name or IP. "
|
||||
"Add additional items to this list, such as \"HTTP_X_FORWARDED_FOR\", if "
|
||||
"behind a reverse proxy. See the \"Proxy Support\" section of the "
|
||||
"Adminstrator guide for more details."
|
||||
"Administrator guide for more details."
|
||||
msgstr "Los encabezados HTTP y las llaves de activación para buscar y determinar el nombre de host remoto o IP. Añada elementos adicionales a esta lista, como \"HTTP_X_FORWARDED_FOR\", si está detrás de un proxy inverso. Consulte la sección \"Soporte de proxy\" de la guía del adminstrador para obtener más información."
|
||||
|
||||
#: awx/main/conf.py:85
|
||||
@@ -4804,7 +4804,7 @@ msgstr "Indica que un trabajo no se creará cuando es sea True. La semántica de
|
||||
|
||||
#: awx/main/models/workflow.py:251
|
||||
msgid ""
|
||||
"An identifier coresponding to the workflow job template node that this node "
|
||||
"An identifier corresponding to the workflow job template node that this node "
|
||||
"was created from."
|
||||
msgstr "Un identificador que corresponde al nodo de plantilla de tarea del flujo de trabajo a partir del cual se creó este nodo."
|
||||
|
||||
@@ -5526,7 +5526,7 @@ msgstr "Argumentos adicionales para Google OAuth2"
|
||||
#: awx/sso/conf.py:606
|
||||
msgid ""
|
||||
"Extra arguments for Google OAuth2 login. You can restrict it to only allow a "
|
||||
"single domain to authenticate, even if the user is logged in with multple "
|
||||
"single domain to authenticate, even if the user is logged in with multiple "
|
||||
"Google accounts. Refer to the documentation for more detail."
|
||||
msgstr "Argumentos adicionales para el inicio de sesión en Google OAuth2. Puede limitarlo para permitir la autenticación de un solo dominio, incluso si el usuario ha iniciado sesión con varias cuentas de Google. Consulte la documentación para obtener información detallada."
|
||||
|
||||
@@ -5910,7 +5910,7 @@ msgstr "Certificado público del proveedor de servicio SAML"
|
||||
|
||||
#: awx/sso/conf.py:1290
|
||||
msgid ""
|
||||
"Create a keypair to use as a service provider (SP) and include the "
|
||||
"Create a key pair to use as a service provider (SP) and include the "
|
||||
"certificate content here."
|
||||
msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido del certificado aquí."
|
||||
|
||||
@@ -5920,7 +5920,7 @@ msgstr "Clave privada del proveedor de servicio SAML"
|
||||
|
||||
#: awx/sso/conf.py:1302
|
||||
msgid ""
|
||||
"Create a keypair to use as a service provider (SP) and include the private "
|
||||
"Create a key pair to use as a service provider (SP) and include the private "
|
||||
"key content here."
|
||||
msgstr "Crear un par de claves para usar como proveedor de servicio (SP) e incluir el contenido de la clave privada aquí."
|
||||
|
||||
|
||||
@@ -70,7 +70,7 @@ def aim_backend(**kwargs):

    client_cert = kwargs.get('client_cert', None)
    client_key = kwargs.get('client_key', None)
    verify = kwargs['verify']
    webservice_id = kwargs['webservice_id']
    webservice_id = kwargs.get('webservice_id', '')
    app_id = kwargs['app_id']
    object_query = kwargs['object_query']
    object_query_format = kwargs['object_query_format']
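The hunk above swaps a direct dictionary lookup for `.get()` with a default. A minimal sketch of the behavioural difference, using hypothetical input values that are not part of the diff:

```python
# Hypothetical credential-plugin kwargs; 'webservice_id' intentionally omitted.
kwargs = {'app_id': 'MyApp', 'object_query': 'Safe=Test;Object=db-password',
          'object_query_format': 'Exact', 'verify': True}

# Old lookup: kwargs['webservice_id'] would raise KeyError for this payload.
# New lookup: a missing key falls back to an empty string instead.
webservice_id = kwargs.get('webservice_id', '')
assert webservice_id == ''
```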
143 awx/main/management/commands/disable_instance.py (Normal file)
@@ -0,0 +1,143 @@

import time
from urllib.parse import urljoin

from argparse import ArgumentTypeError

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from django.utils.timezone import now

from awx.main.models import Instance, UnifiedJob


class AWXInstance:
    def __init__(self, **filter):
        self.filter = filter
        self.get_instance()

    def get_instance(self):
        filter = self.filter if self.filter is not None else dict(hostname=settings.CLUSTER_HOST_ID)
        qs = Instance.objects.filter(**filter)
        if not qs.exists():
            raise ValueError(f"No AWX instance found with {filter} parameters")
        self.instance = qs.first()

    def disable(self):
        if self.instance.enabled:
            self.instance.enabled = False
            self.instance.save()
            return True

    def enable(self):
        if not self.instance.enabled:
            self.instance.enabled = True
            self.instance.save()
            return True

    def jobs(self):
        return UnifiedJob.objects.filter(
            Q(controller_node=self.instance.hostname) | Q(execution_node=self.instance.hostname), status__in=("running", "waiting")
        )

    def jobs_pretty(self):
        jobs = []
        for j in self.jobs():
            job_started = j.started if j.started else now()
            # similar calculation of `elapsed` as the corresponding serializer
            # does
            td = now() - job_started
            elapsed = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / (10**6 * 1.0)
            elapsed = float(elapsed)
            details = dict(
                name=j.name,
                url=j.get_ui_url(),
                elapsed=elapsed,
            )
            jobs.append(details)

        jobs = sorted(jobs, reverse=True, key=lambda j: j["elapsed"])

        return ", ".join([f"[\"{j['name']}\"]({j['url']})" for j in jobs])

    def instance_pretty(self):
        instance = (
            self.instance.hostname,
            urljoin(settings.TOWER_URL_BASE, f"/#/instances/{self.instance.pk}/details"),
        )
        return f"[\"{instance[0]}\"]({instance[1]})"


class Command(BaseCommand):
    help = "Disable instance, optionally waiting for all its managed jobs to finish."

    @staticmethod
    def ge_1(arg):
        if arg == "inf":
            return float("inf")

        int_arg = int(arg)
        if int_arg < 1:
            raise ArgumentTypeError(f"The value must be a positive number >= 1. Provided: \"{arg}\"")
        return int_arg

    def add_arguments(self, parser):
        filter_group = parser.add_mutually_exclusive_group()

        filter_group.add_argument(
            "--hostname",
            type=str,
            default=settings.CLUSTER_HOST_ID,
            help=f"{Instance.hostname.field.help_text} Defaults to the hostname of the machine where the Python interpreter is currently executing".strip(),
        )
        filter_group.add_argument("--id", type=self.ge_1, help=Instance.id.field.help_text)

        parser.add_argument(
            "--wait",
            action="store_true",
            help="Wait for jobs managed by the instance to finish. With default retry arguments waits ~1h",
        )

        parser.add_argument(
            "--retry",
            type=self.ge_1,
            default=120,
            help="Number of retries when waiting for jobs to finish. Default: 120. Also accepts \"inf\" to wait indefinitely",
        )

        parser.add_argument(
            "--retry_sleep",
            type=self.ge_1,
            default=30,
            help="Number of seconds to sleep before consequtive retries when waiting. Default: 30",
        )

    def handle(self, *args, **options):
        try:
            filter = dict(id=options["id"]) if options["id"] is not None else dict(hostname=options["hostname"])
            instance = AWXInstance(**filter)
        except ValueError as e:
            raise CommandError(e)

        if instance.disable():
            self.stdout.write(self.style.SUCCESS(f"Instance {instance.instance_pretty()} has been disabled"))
        else:
            self.stdout.write(f"Instance {instance.instance_pretty()} has already been disabled")

        if not options["wait"]:
            return

        rc = 1
        while instance.jobs().count() > 0:
            if rc < options["retry"]:
                self.stdout.write(
                    f"{rc}/{options['retry']}: Waiting {options['retry_sleep']}s before the next attempt to see if the following instance' managed jobs have finished: {instance.jobs_pretty()}"
                )
                rc += 1
                time.sleep(options["retry_sleep"])
            else:
                raise CommandError(
                    f"{rc}/{options['retry']}: No more retry attempts left, but the instance still has associated managed jobs: {instance.jobs_pretty()}"
                )
        else:
            self.stdout.write(self.style.SUCCESS("Done waiting for instance' managed jobs to finish!"))
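A usage sketch for the new command (illustrative only; the hostname and retry values below are hypothetical). Besides the awx-manage CLI, it can be driven from Python through Django's standard call_command helper:

```python
# Illustrative invocation via Django's management API (e.g. from an awx-manage shell).
from django.core.management import call_command

# Disable the instance and poll up to 10 times, 5 seconds apart, for its jobs to drain.
call_command("disable_instance", hostname="awx-task-1.example.org", wait=True, retry=10, retry_sleep=5)
```

The equivalent CLI form would be: awx-manage disable_instance --hostname awx-task-1.example.org --wait --retry 10 --retry_sleep 5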
@@ -14,7 +14,7 @@ from oauth2_provider.models import AbstractApplication, AbstractAccessToken

from oauth2_provider.generators import generate_client_secret
from oauthlib import oauth2

from awx.main.utils import get_external_account
from awx.sso.common import get_external_account
from awx.main.fields import OAuth2ClientSecretField
@@ -116,7 +116,7 @@ class RunnerCallback:

        # so it *should* have a negligible performance impact
        task = event_data.get('event_data', {}).get('task_action')
        try:
            if task in ('git', 'svn'):
            if task in ('git', 'svn', 'ansible.builtin.git', 'ansible.builtin.svn'):
                event_data_json = json.dumps(event_data)
                event_data_json = UriCleaner.remove_sensitive(event_data_json)
                event_data = json.loads(event_data_json)

@@ -219,7 +219,7 @@ class RunnerCallbackForProjectUpdate(RunnerCallback):

    def event_handler(self, event_data):
        super_return_value = super(RunnerCallbackForProjectUpdate, self).event_handler(event_data)
        returned_data = event_data.get('event_data', {})
        if returned_data.get('task_action', '') == 'set_fact':
        if returned_data.get('task_action', '') in ('set_fact', 'ansible.builtin.set_fact'):
            returned_facts = returned_data.get('res', {}).get('ansible_facts', {})
            if 'scm_version' in returned_facts:
                self.playbook_new_revision = returned_facts['scm_version']
@@ -311,7 +311,7 @@ class BaseTask(object):

        env['AWX_PRIVATE_DATA_DIR'] = private_data_dir

        if self.instance.execution_environment is None:
            raise RuntimeError('The project could not sync because there is no Execution Environment.')
            raise RuntimeError(f'The {self.model.__name__} could not run because there is no Execution Environment.')

        return env
@@ -2008,7 +2008,7 @@ def test_project_update_no_ee(mock_me):

    with pytest.raises(RuntimeError) as e:
        task.build_env(job, {})

    assert 'The project could not sync because there is no Execution Environment' in str(e.value)
    assert 'The ProjectUpdate could not run because there is no Execution Environment' in str(e.value)


@pytest.mark.parametrize(
@@ -80,7 +80,6 @@ __all__ = [

    'set_environ',
    'IllegalArgumentError',
    'get_custom_venv_choices',
    'get_external_account',
    'ScheduleTaskManager',
    'ScheduleDependencyManager',
    'ScheduleWorkflowManager',

@@ -1089,29 +1088,6 @@ def has_model_field_prefetched(model_obj, field_name):

    return getattr(getattr(model_obj, field_name, None), 'prefetch_cache_name', '') in getattr(model_obj, '_prefetched_objects_cache', {})


def get_external_account(user):
    from django.conf import settings

    account_type = None
    if getattr(settings, 'AUTH_LDAP_SERVER_URI', None):
        try:
            if user.pk and user.profile.ldap_dn and not user.has_usable_password():
                account_type = "ldap"
        except AttributeError:
            pass
    if (
        getattr(settings, 'SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', None)
        or getattr(settings, 'SOCIAL_AUTH_GITHUB_KEY', None)
        or getattr(settings, 'SOCIAL_AUTH_GITHUB_ORG_KEY', None)
        or getattr(settings, 'SOCIAL_AUTH_GITHUB_TEAM_KEY', None)
        or getattr(settings, 'SOCIAL_AUTH_SAML_ENABLED_IDPS', None)
    ) and user.social_auth.all():
        account_type = "social"
    if (getattr(settings, 'RADIUS_SERVER', None) or getattr(settings, 'TACACSPLUS_HOST', None)) and user.enterprise_auth.all():
        account_type = "enterprise"
    return account_type


class classproperty:
    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self.fget = fget
@@ -1,4 +1,5 @@

import os
import logging
from pathlib import Path

from django.conf import settings

@@ -6,8 +7,15 @@ from django.conf import settings

from awx.main.models.execution_environments import ExecutionEnvironment


logger = logging.getLogger(__name__)


def get_control_plane_execution_environment():
    return ExecutionEnvironment.objects.filter(organization=None, managed=True).first()
    ee = ExecutionEnvironment.objects.filter(organization=None, managed=True).first()
    if ee == None:
        logger.error('Failed to find control plane ee, there are no managed EEs without organizations')
        raise RuntimeError("Failed to find default control plane EE")
    return ee


def get_default_execution_environment():
@@ -25,42 +25,47 @@
|
||||
connection: local
|
||||
name: Update source tree if necessary
|
||||
tasks:
|
||||
|
||||
- name: delete project directory before update
|
||||
command: "find -delete" # volume mounted, cannot delete folder itself
|
||||
- name: Delete project directory before update
|
||||
ansible.builtin.shell: set -o pipefail && find . -delete -print | tail -2 # volume mounted, cannot delete folder itself
|
||||
register: reg
|
||||
changed_when: reg.stdout_lines | length > 1
|
||||
args:
|
||||
chdir: "{{ project_path }}"
|
||||
tags:
|
||||
- delete
|
||||
|
||||
- block:
|
||||
- name: update project using git
|
||||
git:
|
||||
dest: "{{project_path|quote}}"
|
||||
repo: "{{scm_url}}"
|
||||
version: "{{scm_branch|quote}}"
|
||||
refspec: "{{scm_refspec|default(omit)}}"
|
||||
force: "{{scm_clean}}"
|
||||
track_submodules: "{{scm_track_submodules|default(omit)}}"
|
||||
accept_hostkey: "{{scm_accept_hostkey|default(omit)}}"
|
||||
- name: Update project using git
|
||||
tags:
|
||||
- update_git
|
||||
block:
|
||||
- name: Update project using git
|
||||
ansible.builtin.git:
|
||||
dest: "{{ project_path | quote }}"
|
||||
repo: "{{ scm_url }}"
|
||||
version: "{{ scm_branch | quote }}"
|
||||
refspec: "{{ scm_refspec | default(omit) }}"
|
||||
force: "{{ scm_clean }}"
|
||||
track_submodules: "{{ scm_track_submodules | default(omit) }}"
|
||||
accept_hostkey: "{{ scm_accept_hostkey | default(omit) }}"
|
||||
register: git_result
|
||||
|
||||
- name: Set the git repository version
|
||||
set_fact:
|
||||
ansible.builtin.set_fact:
|
||||
scm_version: "{{ git_result['after'] }}"
|
||||
when: "'after' in git_result"
|
||||
tags:
|
||||
- update_git
|
||||
|
||||
- block:
|
||||
- name: update project using svn
|
||||
subversion:
|
||||
dest: "{{project_path|quote}}"
|
||||
repo: "{{scm_url|quote}}"
|
||||
revision: "{{scm_branch|quote}}"
|
||||
force: "{{scm_clean}}"
|
||||
username: "{{scm_username|default(omit)}}"
|
||||
password: "{{scm_password|default(omit)}}"
|
||||
- name: Update project using svn
|
||||
tags:
|
||||
- update_svn
|
||||
block:
|
||||
- name: Update project using svn
|
||||
ansible.builtin.subversion:
|
||||
dest: "{{ project_path | quote }}"
|
||||
repo: "{{ scm_url | quote }}"
|
||||
revision: "{{ scm_branch | quote }}"
|
||||
force: "{{ scm_clean }}"
|
||||
username: "{{ scm_username | default(omit) }}"
|
||||
password: "{{ scm_password | default(omit) }}"
|
||||
# must be in_place because folder pre-existing, because it is mounted
|
||||
in_place: true
|
||||
environment:
|
||||
@@ -68,85 +73,90 @@
|
||||
register: svn_result
|
||||
|
||||
- name: Set the svn repository version
|
||||
set_fact:
|
||||
ansible.builtin.set_fact:
|
||||
scm_version: "{{ svn_result['after'] }}"
|
||||
when: "'after' in svn_result"
|
||||
|
||||
- name: parse subversion version string properly
|
||||
set_fact:
|
||||
scm_version: "{{scm_version|regex_replace('^.*Revision: ([0-9]+).*$', '\\1')}}"
|
||||
tags:
|
||||
- update_svn
|
||||
- name: Parse subversion version string properly
|
||||
ansible.builtin.set_fact:
|
||||
scm_version: "{{ scm_version | regex_replace('^.*Revision: ([0-9]+).*$', '\\1') }}"
|
||||
|
||||
- block:
|
||||
|
||||
- name: Project update for Insights
|
||||
tags:
|
||||
- update_insights
|
||||
block:
|
||||
- name: Ensure the project directory is present
|
||||
file:
|
||||
dest: "{{project_path|quote}}"
|
||||
ansible.builtin.file:
|
||||
dest: "{{ project_path | quote }}"
|
||||
state: directory
|
||||
mode: '0755'
|
||||
|
||||
- name: Fetch Insights Playbook(s)
|
||||
insights:
|
||||
insights_url: "{{insights_url}}"
|
||||
username: "{{scm_username}}"
|
||||
password: "{{scm_password}}"
|
||||
project_path: "{{project_path}}"
|
||||
awx_license_type: "{{awx_license_type}}"
|
||||
awx_version: "{{awx_version}}"
|
||||
insights_url: "{{ insights_url }}"
|
||||
username: "{{ scm_username }}"
|
||||
password: "{{ scm_password }}"
|
||||
project_path: "{{ project_path }}"
|
||||
awx_license_type: "{{ awx_license_type }}"
|
||||
awx_version: "{{ awx_version }}"
|
||||
register: results
|
||||
|
||||
- name: Save Insights Version
|
||||
set_fact:
|
||||
scm_version: "{{results.version}}"
|
||||
ansible.builtin.set_fact:
|
||||
scm_version: "{{ results.version }}"
|
||||
when: results is defined
|
||||
tags:
|
||||
- update_insights
|
||||
|
||||
- block:
|
||||
|
||||
- name: Update project using archive
|
||||
tags:
|
||||
- update_archive
|
||||
block:
|
||||
- name: Ensure the project archive directory is present
|
||||
file:
|
||||
dest: "{{ project_path|quote }}/.archive"
|
||||
ansible.builtin.file:
|
||||
dest: "{{ project_path | quote }}/.archive"
|
||||
state: directory
|
||||
mode: '0755'
|
||||
|
||||
- name: Get archive from url
|
||||
get_url:
|
||||
url: "{{ scm_url|quote }}"
|
||||
dest: "{{ project_path|quote }}/.archive/"
|
||||
url_username: "{{ scm_username|default(omit) }}"
|
||||
url_password: "{{ scm_password|default(omit) }}"
|
||||
ansible.builtin.get_url:
|
||||
url: "{{ scm_url | quote }}"
|
||||
dest: "{{ project_path | quote }}/.archive/"
|
||||
url_username: "{{ scm_username | default(omit) }}"
|
||||
url_password: "{{ scm_password | default(omit) }}"
|
||||
force_basic_auth: true
|
||||
mode: '0755'
|
||||
register: get_archive
|
||||
|
||||
- name: Unpack archive
|
||||
project_archive:
|
||||
src: "{{ get_archive.dest }}"
|
||||
project_path: "{{ project_path|quote }}"
|
||||
project_path: "{{ project_path | quote }}"
|
||||
force: "{{ scm_clean }}"
|
||||
when: get_archive.changed or scm_clean
|
||||
register: unarchived
|
||||
|
||||
- name: Find previous archives
|
||||
find:
|
||||
paths: "{{ project_path|quote }}/.archive/"
|
||||
ansible.builtin.find:
|
||||
paths: "{{ project_path | quote }}/.archive/"
|
||||
excludes:
|
||||
- "{{ get_archive.dest|basename }}"
|
||||
- "{{ get_archive.dest | basename }}"
|
||||
when: unarchived.changed
|
||||
register: previous_archive
|
||||
|
||||
- name: Remove previous archives
|
||||
file:
|
||||
ansible.builtin.file:
|
||||
path: "{{ item.path }}"
|
||||
state: absent
|
||||
loop: "{{ previous_archive.files }}"
|
||||
when: previous_archive.files|default([])
|
||||
when: previous_archive.files | default([])
|
||||
|
||||
- name: Set scm_version to archive sha1 checksum
|
||||
set_fact:
|
||||
ansible.builtin.set_fact:
|
||||
scm_version: "{{ get_archive.checksum_src }}"
|
||||
tags:
|
||||
- update_archive
|
||||
|
||||
- name: Repository Version
|
||||
debug:
|
||||
ansible.builtin.debug:
|
||||
msg: "Repository Version {{ scm_version }}"
|
||||
tags:
|
||||
- update_git
|
||||
@@ -183,60 +193,59 @@
|
||||
additional_collections_env:
|
||||
# These environment variables are used for installing collections, in addition to galaxy_task_env
|
||||
# setting the collections paths silences warnings
|
||||
ANSIBLE_COLLECTIONS_PATHS: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections"
|
||||
ANSIBLE_COLLECTIONS_PATHS: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections"
|
||||
# Put the local tmp directory in same volume as collection destination
|
||||
# otherwise, files cannot be moved accross volumes and will cause error
|
||||
ANSIBLE_LOCAL_TEMP: "{{projects_root}}/.__awx_cache/{{local_path}}/stage/tmp"
|
||||
ANSIBLE_LOCAL_TEMP: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/tmp"
|
||||
tasks:
|
||||
|
||||
- name: Check content sync settings
|
||||
block:
|
||||
- debug:
|
||||
msg: >
|
||||
Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and
|
||||
AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization.
|
||||
|
||||
- meta: end_play
|
||||
|
||||
when: not roles_enabled|bool and not collections_enabled|bool
|
||||
when: not roles_enabled | bool and not collections_enabled | bool
|
||||
tags:
|
||||
- install_roles
|
||||
- install_collections
|
||||
block:
|
||||
- name: Warn about disabled content sync
|
||||
ansible.builtin.debug:
|
||||
msg: >
|
||||
Collection and role syncing disabled. Check the AWX_ROLES_ENABLED and
|
||||
AWX_COLLECTIONS_ENABLED settings and Galaxy credentials on the project's organization.
|
||||
- name: End play due to disabled content sync
|
||||
ansible.builtin.meta: end_play
|
||||
|
||||
- name: fetch galaxy roles from requirements.(yml/yaml)
|
||||
command: >
|
||||
- name: Fetch galaxy roles from requirements.(yml/yaml)
|
||||
ansible.builtin.command: >
|
||||
ansible-galaxy role install -r {{ item }}
|
||||
--roles-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_roles
|
||||
--roles-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles
|
||||
{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
|
||||
args:
|
||||
chdir: "{{project_path|quote}}"
|
||||
chdir: "{{ project_path | quote }}"
|
||||
register: galaxy_result
|
||||
with_fileglob:
|
||||
- "{{project_path|quote}}/roles/requirements.yaml"
|
||||
- "{{project_path|quote}}/roles/requirements.yml"
|
||||
- "{{ project_path | quote }}/roles/requirements.yaml"
|
||||
- "{{ project_path | quote }}/roles/requirements.yml"
|
||||
changed_when: "'was installed successfully' in galaxy_result.stdout"
|
||||
environment: "{{ galaxy_task_env }}"
|
||||
when: roles_enabled|bool
|
||||
when: roles_enabled | bool
|
||||
tags:
|
||||
- install_roles
|
||||
|
||||
- name: fetch galaxy collections from collections/requirements.(yml/yaml)
|
||||
command: >
|
||||
- name: Fetch galaxy collections from collections/requirements.(yml/yaml)
|
||||
ansible.builtin.command: >
|
||||
ansible-galaxy collection install -r {{ item }}
|
||||
--collections-path {{projects_root}}/.__awx_cache/{{local_path}}/stage/requirements_collections
|
||||
--collections-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections
|
||||
{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
|
||||
args:
|
||||
chdir: "{{project_path|quote}}"
|
||||
chdir: "{{ project_path | quote }}"
|
||||
register: galaxy_collection_result
|
||||
with_fileglob:
|
||||
- "{{project_path|quote}}/collections/requirements.yaml"
|
||||
- "{{project_path|quote}}/collections/requirements.yml"
|
||||
- "{{project_path|quote}}/requirements.yaml"
|
||||
- "{{project_path|quote}}/requirements.yml"
|
||||
- "{{ project_path | quote }}/collections/requirements.yaml"
|
||||
- "{{ project_path | quote }}/collections/requirements.yml"
|
||||
- "{{ project_path | quote }}/requirements.yaml"
|
||||
- "{{ project_path | quote }}/requirements.yml"
|
||||
changed_when: "'Installing ' in galaxy_collection_result.stdout"
|
||||
environment: "{{ additional_collections_env | combine(galaxy_task_env) }}"
|
||||
when:
|
||||
- "ansible_version.full is version_compare('2.9', '>=')"
|
||||
- collections_enabled|bool
|
||||
- collections_enabled | bool
|
||||
tags:
|
||||
- install_collections
|
||||
|
||||
@@ -385,10 +385,10 @@ def on_populate_user(sender, **kwargs):

            logger.warning('LDAP user {} has {} > max {} characters'.format(user.username, field, max_len))

    org_map = getattr(backend.settings, 'ORGANIZATION_MAP', {})
    team_map = getattr(backend.settings, 'TEAM_MAP', {})
    team_map_settings = getattr(backend.settings, 'TEAM_MAP', {})
    orgs_list = list(org_map.keys())
    team_map = {}
    for team_name, team_opts in team_map.items():
    for team_name, team_opts in team_map_settings.items():
        if not team_opts.get('organization', None):
            # You can't save the LDAP config in the UI w/o an org (or '' or null as the org) so if we somehow got this condition its an error
            logger.error("Team named {} in LDAP team map settings is invalid due to missing organization".format(team_name))

@@ -416,7 +416,7 @@ def on_populate_user(sender, **kwargs):

    # Compute in memory what the state is of the different LDAP teams
    desired_team_states = {}
    for team_name, team_opts in team_map.items():
    for team_name, team_opts in team_map_settings.items():
        if 'organization' not in team_opts:
            continue
        users_opts = team_opts.get('users', None)
@@ -169,3 +169,45 @@ def get_or_create_org_with_default_galaxy_cred(**kwargs):

        else:
            logger.debug("Could not find default Ansible Galaxy credential to add to org")
    return org


def get_external_account(user):
    account_type = None

    # Previously this method also checked for active configuration which meant that if a user logged in from LDAP
    # and then LDAP was no longer configured it would "convert" the user from an LDAP account_type to none.
    # This did have one benefit that if a login type was removed intentionally the user could be given a username password.
    # But it had a limitation that the user would have to have an active session (or an admin would have to go set a temp password).
    # It also lead to the side affect that if LDAP was ever reconfigured the user would convert back to LDAP but still have a local password.
    # That local password could then be used to bypass LDAP authentication.
    try:
        if user.pk and user.profile.ldap_dn and not user.has_usable_password():
            account_type = "ldap"
    except AttributeError:
        pass

    if user.social_auth.all():
        account_type = "social"

    if user.enterprise_auth.all():
        account_type = "enterprise"

    return account_type


def is_remote_auth_enabled():
    from django.conf import settings

    # Append LDAP, Radius, TACACS+ and SAML options
    settings_that_turn_on_remote_auth = [
        'AUTH_LDAP_SERVER_URI',
        'SOCIAL_AUTH_SAML_ENABLED_IDPS',
        'RADIUS_SERVER',
        'TACACSPLUS_HOST',
    ]
    # Also include any SOCAIL_AUTH_*KEY (except SAML)
    for social_auth_key in dir(settings):
        if social_auth_key.startswith('SOCIAL_AUTH_') and social_auth_key.endswith('_KEY') and 'SAML' not in social_auth_key:
            settings_that_turn_on_remote_auth.append(social_auth_key)

    return any(getattr(settings, s, None) for s in settings_that_turn_on_remote_auth)
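A quick check of the new helper's behaviour (a sketch only, assuming an AWX Django settings module is loaded; the key value below is a placeholder, not real configuration):

```python
# Any truthy SOCIAL_AUTH_*_KEY setting other than the SAML ones now counts as remote auth.
from django.test.utils import override_settings
from awx.sso.common import is_remote_auth_enabled

with override_settings(SOCIAL_AUTH_GITHUB_KEY="placeholder-client-id"):
    print(is_remote_auth_enabled())  # True
```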
@@ -2,9 +2,22 @@ import pytest
|
||||
from collections import Counter
|
||||
from django.core.exceptions import FieldError
|
||||
from django.utils.timezone import now
|
||||
from django.test.utils import override_settings
|
||||
|
||||
from awx.main.models import Credential, CredentialType, Organization, Team, User
|
||||
from awx.sso.common import get_orgs_by_ids, reconcile_users_org_team_mappings, create_org_and_teams, get_or_create_org_with_default_galaxy_cred
|
||||
from awx.sso.common import (
|
||||
get_orgs_by_ids,
|
||||
reconcile_users_org_team_mappings,
|
||||
create_org_and_teams,
|
||||
get_or_create_org_with_default_galaxy_cred,
|
||||
is_remote_auth_enabled,
|
||||
get_external_account,
|
||||
)
|
||||
|
||||
|
||||
class MicroMockObject(object):
|
||||
def all(self):
|
||||
return True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -278,3 +291,87 @@ class TestCommonFunctions:
|
||||
|
||||
for o in Organization.objects.all():
|
||||
assert o.galaxy_credentials.count() == 0
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"enable_ldap, enable_social, enable_enterprise, expected_results",
|
||||
[
|
||||
(False, False, False, None),
|
||||
(True, False, False, 'ldap'),
|
||||
(True, True, False, 'social'),
|
||||
(True, True, True, 'enterprise'),
|
||||
(False, True, True, 'enterprise'),
|
||||
(False, False, True, 'enterprise'),
|
||||
(False, True, False, 'social'),
|
||||
],
|
||||
)
|
||||
def test_get_external_account(self, enable_ldap, enable_social, enable_enterprise, expected_results):
|
||||
try:
|
||||
user = User.objects.get(username="external_tester")
|
||||
except User.DoesNotExist:
|
||||
user = User(username="external_tester")
|
||||
user.set_unusable_password()
|
||||
user.save()
|
||||
|
||||
if enable_ldap:
|
||||
user.profile.ldap_dn = 'test.dn'
|
||||
if enable_social:
|
||||
from social_django.models import UserSocialAuth
|
||||
|
||||
social_auth, _ = UserSocialAuth.objects.get_or_create(
|
||||
uid='667ec049-cdf3-45d0-a4dc-0465f7505954',
|
||||
provider='oidc',
|
||||
extra_data={},
|
||||
user_id=user.id,
|
||||
)
|
||||
user.social_auth.set([social_auth])
|
||||
if enable_enterprise:
|
||||
from awx.sso.models import UserEnterpriseAuth
|
||||
|
||||
enterprise_auth = UserEnterpriseAuth(user=user, provider='tacacs+')
|
||||
enterprise_auth.save()
|
||||
|
||||
assert get_external_account(user) == expected_results
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"setting, expected",
|
||||
[
|
||||
# Set none of the social auth settings
|
||||
('JUNK_SETTING', False),
|
||||
# Set the hard coded settings
|
||||
('AUTH_LDAP_SERVER_URI', True),
|
||||
('SOCIAL_AUTH_SAML_ENABLED_IDPS', True),
|
||||
('RADIUS_SERVER', True),
|
||||
('TACACSPLUS_HOST', True),
|
||||
# Set some SOCIAL_SOCIAL_AUTH_OIDC_KEYAUTH_*_KEY settings
|
||||
('SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_ORG_KEY', True),
|
||||
('SOCIAL_AUTH_GITHUB_TEAM_KEY', True),
|
||||
('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', True),
|
||||
('SOCIAL_AUTH_OIDC_KEY', True),
|
||||
# Try a hypothetical future one
|
||||
('SOCIAL_AUTH_GIBBERISH_KEY', True),
|
||||
# Do a SAML one
|
||||
('SOCIAL_AUTH_SAML_SP_PRIVATE_KEY', False),
|
||||
],
|
||||
)
|
||||
def test_is_remote_auth_enabled(self, setting, expected):
|
||||
with override_settings(**{setting: True}):
|
||||
assert is_remote_auth_enabled() == expected
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"key_one, key_one_value, key_two, key_two_value, expected",
|
||||
[
|
||||
('JUNK_SETTING', True, 'JUNK2_SETTING', True, False),
|
||||
('AUTH_LDAP_SERVER_URI', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True),
|
||||
('JUNK_SETTING', True, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', True, True),
|
||||
('AUTH_LDAP_SERVER_URI', False, 'SOCIAL_AUTH_AZUREAD_OAUTH2_KEY', False, False),
|
||||
],
|
||||
)
|
||||
def test_is_remote_auth_enabled_multiple_keys(self, key_one, key_one_value, key_two, key_two_value, expected):
|
||||
with override_settings(**{key_one: key_one_value}):
|
||||
with override_settings(**{key_two: key_two_value}):
|
||||
assert is_remote_auth_enabled() == expected
|
||||
|
||||
@@ -22,14 +22,19 @@ import { CredentialsAPI } from 'api';
|
||||
import CredentialDetail from './CredentialDetail';
|
||||
import CredentialEdit from './CredentialEdit';
|
||||
|
||||
const jobTemplateCredentialTypes = [
|
||||
'machine',
|
||||
'cloud',
|
||||
'net',
|
||||
'ssh',
|
||||
'vault',
|
||||
'kubernetes',
|
||||
'cryptography',
|
||||
const unacceptableCredentialTypes = [
|
||||
'centrify_vault_kv',
|
||||
'aim',
|
||||
'conjur',
|
||||
'hashivault_kv',
|
||||
'hashivault_ssh',
|
||||
'azure_kv',
|
||||
'thycotic_dsv',
|
||||
'thycotic_tss',
|
||||
'galaxy_api_token',
|
||||
'insights',
|
||||
'registry',
|
||||
'scm',
|
||||
];
|
||||
|
||||
function Credential({ setBreadcrumb }) {
|
||||
@@ -86,7 +91,10 @@ function Credential({ setBreadcrumb }) {
|
||||
id: 1,
|
||||
},
|
||||
];
|
||||
if (jobTemplateCredentialTypes.includes(credential?.kind)) {
|
||||
if (
|
||||
!unacceptableCredentialTypes.includes(credential?.kind) &&
|
||||
credential !== null
|
||||
) {
|
||||
tabsArray.push({
|
||||
name: t`Job Templates`,
|
||||
link: `/credentials/${id}/job_templates`,
|
||||
@@ -115,12 +123,14 @@ function Credential({ setBreadcrumb }) {
|
||||
</PageSection>
|
||||
);
|
||||
}
|
||||
if (hasContentLoading) {
|
||||
return <ContentLoading />;
|
||||
}
|
||||
|
||||
return (
|
||||
<PageSection>
|
||||
<Card>
|
||||
{showCardHeader && <RoutedTabs tabsArray={tabsArray} />}
|
||||
{hasContentLoading && <ContentLoading />}
|
||||
{!hasContentLoading && credential && (
|
||||
<Switch>
|
||||
<Redirect
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
} from '../../../testUtils/enzymeHelpers';
|
||||
import mockMachineCredential from './shared/data.machineCredential.json';
|
||||
import mockSCMCredential from './shared/data.scmCredential.json';
|
||||
import mockCyberArkCredential from './shared/data.cyberArkCredential.json';
|
||||
import Credential from './Credential';
|
||||
|
||||
jest.mock('../../api');
|
||||
@@ -21,6 +22,11 @@ jest.mock('react-router-dom', () => ({
|
||||
|
||||
describe('<Credential />', () => {
|
||||
let wrapper;
|
||||
afterEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
wrapper.unmount();
|
||||
});
|
||||
|
||||
test('initially renders user-based machine credential successfully', async () => {
|
||||
CredentialsAPI.readDetail.mockResolvedValueOnce({
|
||||
@@ -61,6 +67,19 @@ describe('<Credential />', () => {
|
||||
});
|
||||
});
|
||||
|
||||
test('should not render job template tab', async () => {
|
||||
CredentialsAPI.readDetail.mockResolvedValueOnce({
|
||||
data: { ...mockCyberArkCredential, kind: 'registry' },
|
||||
});
|
||||
const expectedTabs = ['Back to Credentials', 'Details', 'Access'];
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(<Credential setBreadcrumb={() => {}} />);
|
||||
});
|
||||
wrapper.find('RoutedTabs li').forEach((tab, index) => {
|
||||
expect(tab.text()).toEqual(expectedTabs[index]);
|
||||
});
|
||||
});
|
||||
|
||||
test('should show content error when user attempts to navigate to erroneous route', async () => {
|
||||
const history = createMemoryHistory({
|
||||
initialEntries: ['/credentials/2/foobar'],
|
||||
@@ -85,3 +104,4 @@ describe('<Credential />', () => {
|
||||
await waitForElement(wrapper, 'ContentError', (el) => el.length === 1);
|
||||
});
|
||||
});
|
||||
describe('<Credential> should not show job template tab', () => {});
|
||||
|
||||
@@ -196,7 +196,7 @@ class LookupModule(LookupBase):
|
||||
if isinstance(rule[field_name], int):
|
||||
rule[field_name] = [rule[field_name]]
|
||||
# If its not a list, we need to split it into a list
|
||||
if isinstance(rule[field_name], list):
|
||||
if not isinstance(rule[field_name], list):
|
||||
rule[field_name] = rule[field_name].split(',')
|
||||
for value in rule[field_name]:
|
||||
# If they have a list of strs we want to strip each str in case it is space delimited
|
||||
@@ -210,7 +210,8 @@ class LookupModule(LookupBase):
|
||||
|
||||
def process_list(self, field_name, rule, valid_list, rule_number):
|
||||
return_values = []
|
||||
if isinstance(rule[field_name], list):
|
||||
# If its not a list, we need to split it into a list
|
||||
if not isinstance(rule[field_name], list):
|
||||
rule[field_name] = rule[field_name].split(',')
|
||||
for value in rule[field_name]:
|
||||
value = value.strip()
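Both lookup-plugin hunks converge on the same normalization: a rule field may arrive as an int, a comma-separated string, or a list, and the plugin ends up iterating over stripped string values. A self-contained sketch of that pattern (hypothetical helper name, not part of the plugin):

def normalize_rule_field(value):
    # Accept an int, a comma-separated string, or a list and always
    # return a list of stripped strings.
    if isinstance(value, int):
        value = [value]
    if not isinstance(value, list):
        value = value.split(',')
    return [str(item).strip() for item in value]

# normalize_rule_field('monday, tuesday') -> ['monday', 'tuesday']
# normalize_rule_field(3) -> ['3']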
@@ -57,7 +57,15 @@ extends_documentation_fragment: awx.awx.auth
|
||||
|
||||
|
||||
EXAMPLES = """
|
||||
- name: Launch a workflow with a timeout of 10 seconds
|
||||
- name: Create a workflow approval node
|
||||
workflow_job_template_node:
|
||||
identifier: approval_test
|
||||
approval_node:
|
||||
name: approval_jt_name
|
||||
timeout: 900
|
||||
workflow: "Test Workflow"
|
||||
|
||||
- name: Launch the workflow with a timeout of 10 seconds
|
||||
workflow_launch:
|
||||
workflow_template: "Test Workflow"
|
||||
wait: False
|
||||
@@ -66,7 +74,7 @@ EXAMPLES = """
|
||||
- name: Wait for approval node to activate and approve
|
||||
workflow_approval:
|
||||
workflow_job_id: "{{ workflow.id }}"
|
||||
name: Approve Me
|
||||
name: approval_jt_name
|
||||
interval: 10
|
||||
timeout: 20
|
||||
action: deny
|
||||
|
||||
@@ -183,7 +183,21 @@ options:
|
||||
inventory:
|
||||
description:
|
||||
- Inventory applied as a prompt, if job template prompts for inventory
|
||||
type: str
|
||||
type: dict
|
||||
suboptions:
|
||||
name:
|
||||
description:
|
||||
- Name of the inventory to be applied to the job as a launch-time prompt.
|
||||
type: str
|
||||
organization:
|
||||
description:
|
||||
- Organization to use when looking up the inventory by name.
|
||||
type: dict
|
||||
suboptions:
|
||||
name:
|
||||
description:
|
||||
- The name of the organization the inventory exists in.
|
||||
type: str
|
||||
scm_branch:
|
||||
description:
|
||||
- SCM branch applied as a prompt, if job template prompts for SCM branch
|
||||
@@ -544,6 +558,10 @@ EXAMPLES = '''
|
||||
type: job_template
|
||||
execution_environment:
|
||||
name: My EE
|
||||
inventory:
|
||||
name: Test inventory
|
||||
organization:
|
||||
name: Default
|
||||
related:
|
||||
credentials:
|
||||
- name: cyberark
|
||||
@@ -613,10 +631,6 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id):
|
||||
if workflow_node['unified_job_template']['type'] != 'workflow_approval':
|
||||
module.fail_json(msg="Unable to Find unified_job_template: {0}".format(search_fields))
|
||||
|
||||
inventory = workflow_node.get('inventory')
|
||||
if inventory:
|
||||
workflow_node_fields['inventory'] = module.resolve_name_to_id('inventories', inventory)
|
||||
|
||||
# Lookup Values for other fields
|
||||
|
||||
for field_name in (
|
||||
@@ -645,6 +659,17 @@ def create_workflow_nodes(module, response, workflow_nodes, workflow_id):
|
||||
'execution_environments', name_or_id=workflow_node['execution_environment']['name']
|
||||
)['id']
|
||||
|
||||
# Two lookup methods are used based on a fix added in 21.11.0, and the awx export model
|
||||
if 'inventory' in workflow_node:
|
||||
if 'name' in workflow_node['inventory']:
|
||||
inv_lookup_data = {}
|
||||
if 'organization' in workflow_node['inventory']:
|
||||
inv_lookup_data['organization'] = module.resolve_name_to_id('organizations', workflow_node['inventory']['organization']['name'])
|
||||
workflow_node_fields['inventory'] = module.get_one(
|
||||
'inventories', name_or_id=workflow_node['inventory']['name'], data=inv_lookup_data)['id']
|
||||
else:
|
||||
workflow_node_fields['inventory'] = module.get_one('inventories', name_or_id=workflow_node['inventory'])['id']
|
||||
|
||||
# Set Search fields
|
||||
search_fields['workflow_job_template'] = workflow_node_fields['workflow_job_template'] = workflow_id
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ import glob
|
||||
# Normally a read-only endpoint should not have a module (i.e. /api/v2/me) but sometimes we reuse a name
|
||||
# For example, we have a role module but /api/v2/roles is a read only endpoint.
|
||||
# This list indicates which read-only endpoints have associated modules with them.
|
||||
read_only_endpoints_with_modules = ['settings', 'role', 'project_update']
|
||||
read_only_endpoints_with_modules = ['settings', 'role', 'project_update', 'workflow_approval']
|
||||
|
||||
# If a module should not be created for an endpoint and the endpoint is not read-only add it here
|
||||
# THINK HARD ABOUT DOING THIS
|
||||
|
||||
@@ -95,6 +95,22 @@
|
||||
- results is failed
|
||||
- "'In rule 2 end_on must either be an integer or in the format YYYY-MM-DD [HH:MM:SS]' in results.msg"
|
||||
|
||||
- name: Every Monday
|
||||
set_fact:
|
||||
complex_rule: "{{ query(ruleset_plugin_name, '2022-04-30 10:30:45', rules=rrules, timezone='UTC' ) }}"
|
||||
ignore_errors: True
|
||||
register: results
|
||||
vars:
|
||||
rrules:
|
||||
- frequency: 'day'
|
||||
interval: 1
|
||||
byweekday: 'monday'
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- results is success
|
||||
- "'DTSTART;TZID=UTC:20220430T103045 RRULE:FREQ=DAILY;BYDAY=MO;INTERVAL=1' == complex_rule"
|
||||
|
||||
|
||||
- name: call rruleset with an invalid byweekday
|
||||
set_fact:
|
||||
|
||||
@@ -0,0 +1,57 @@
|
||||
---
|
||||
- name: Generate a random string for names
|
||||
set_fact:
|
||||
test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
|
||||
test_prefix: AWX-Collection-tests-workflow_approval
|
||||
|
||||
- name: Generate random names for test objects
|
||||
set_fact:
|
||||
org_name: "{{ test_prefix }}-org-{{ test_id }}"
|
||||
approval_node_name: "{{ test_prefix }}-node-{{ test_id }}"
|
||||
wfjt_name: "{{ test_prefix }}-wfjt-{{ test_id }}"
|
||||
|
||||
- block:
|
||||
- name: Create a new organization for test isolation
|
||||
organization:
|
||||
name: "{{ org_name }}"
|
||||
|
||||
- name: Create a workflow job template
|
||||
workflow_job_template:
|
||||
name: "{{ wfjt_name }}"
|
||||
organization: "{{ org_name }}"
|
||||
|
||||
- name: Create approval node
|
||||
workflow_job_template_node:
|
||||
identifier: approval_test
|
||||
approval_node:
|
||||
name: "{{ approval_node_name }}" # Referenced later on
|
||||
timeout: 900
|
||||
workflow: "{{ wfjt_name }}"
|
||||
|
||||
# Launch and approve the workflow
|
||||
- name: Launch the workflow
|
||||
workflow_launch:
|
||||
workflow_template: "{{ wfjt_name }}"
|
||||
wait: False
|
||||
register: workflow_job
|
||||
|
||||
- name: Wait for approval node to activate and approve
|
||||
workflow_approval:
|
||||
workflow_job_id: "{{ workflow_job.id }}"
|
||||
name: "{{ approval_node_name }}"
|
||||
interval: 10
|
||||
timeout: 20
|
||||
action: approve
|
||||
register: result
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is changed"
|
||||
- "result is not failed"
|
||||
|
||||
always:
|
||||
- name: Delete the workflow job template
|
||||
workflow_job_template:
|
||||
name: "{{ wfjt_name }}"
|
||||
state: absent
|
||||
ignore_errors: True
|
||||
@@ -493,6 +493,7 @@
|
||||
workflow_job_template:
|
||||
name: "copy_{{ wfjt_name }}"
|
||||
organization: Default
|
||||
ask_inventory_on_launch: true
|
||||
survey_spec:
|
||||
name: Basic Survey
|
||||
description: Basic Survey
|
||||
@@ -737,6 +738,10 @@
|
||||
timeout: 23
|
||||
execution_environment:
|
||||
name: "{{ ee1 }}"
|
||||
inventory:
|
||||
name: Test inventory
|
||||
organization:
|
||||
name: Default
|
||||
related:
|
||||
credentials:
|
||||
- name: "{{ scm_cred_name }}"
|
||||
|
||||
@@ -75,7 +75,8 @@ In the root of awx-operator:
|
||||
-e image_version=devel \
|
||||
-e image_pull_policy=Always \
|
||||
-e service_type=nodeport \
|
||||
-e namespace=awx
|
||||
-e namespace=awx \
|
||||
-e nodeport_port=30080
|
||||
```
|
||||
Check the operator with the following commands:
|
||||
|
||||
|
||||
@@ -199,11 +199,11 @@ ADD tools/ansible/roles/dockerfile/files/rsyslog.conf /var/lib/awx/rsyslog/rsysl
|
||||
ADD tools/ansible/roles/dockerfile/files/wait-for-migrations /usr/local/bin/wait-for-migrations
|
||||
ADD tools/ansible/roles/dockerfile/files/stop-supervisor /usr/local/bin/stop-supervisor
|
||||
|
||||
ADD tools/ansible/roles/dockerfile/files/uwsgi.ini /etc/tower/uwsgi.ini
|
||||
|
||||
## File mappings
|
||||
{% if build_dev|bool %}
|
||||
ADD tools/docker-compose/launch_awx.sh /usr/bin/launch_awx.sh
|
||||
ADD tools/docker-compose/nginx.conf /etc/nginx/nginx.conf
|
||||
ADD tools/docker-compose/nginx.vh.default.conf /etc/nginx/conf.d/nginx.vh.default.conf
|
||||
ADD tools/docker-compose/start_tests.sh /start_tests.sh
|
||||
ADD tools/docker-compose/bootstrap_development.sh /usr/bin/bootstrap_development.sh
|
||||
ADD tools/docker-compose/entrypoint.sh /entrypoint.sh
|
||||
@@ -213,7 +213,6 @@ ADD https://raw.githubusercontent.com/containers/libpod/master/contrib/podmanima
|
||||
{% else %}
|
||||
ADD tools/ansible/roles/dockerfile/files/launch_awx.sh /usr/bin/launch_awx.sh
|
||||
ADD tools/ansible/roles/dockerfile/files/launch_awx_task.sh /usr/bin/launch_awx_task.sh
|
||||
ADD tools/ansible/roles/dockerfile/files/uwsgi.ini /etc/tower/uwsgi.ini
|
||||
ADD {{ template_dest }}/supervisor.conf /etc/supervisord.conf
|
||||
ADD {{ template_dest }}/supervisor_task.conf /etc/supervisord_task.conf
|
||||
{% endif %}
|
||||
|
||||
@@ -30,8 +30,8 @@ environment =
|
||||
DEV_RELOAD_COMMAND='supervisorctl -c /etc/supervisord_task.conf restart all; supervisorctl restart tower-processes:daphne tower-processes:wsbroadcast'
|
||||
{% else %}
|
||||
command = /var/lib/awx/venv/awx/bin/uwsgi /etc/tower/uwsgi.ini
|
||||
directory = /var/lib/awx
|
||||
{% endif %}
|
||||
directory = /var/lib/awx
|
||||
autorestart = true
|
||||
startsecs = 30
|
||||
stopasgroup=true
|
||||
|
||||
@@ -33,8 +33,6 @@ Notable files:
|
||||
### Prerequisites
|
||||
|
||||
- [Docker](https://docs.docker.com/engine/installation/) on the host where AWX will be deployed. After installing Docker, the Docker service must be started (depending on your OS, you may have to add the local user that uses Docker to the `docker` group, refer to the documentation for details)
|
||||
- [docker-compose](https://pypi.org/project/docker-compose/) Python module.
|
||||
- This also installs the `docker` Python module, which is incompatible with [`docker-py`](https://pypi.org/project/docker-py/). If you have previously installed `docker-py`, please uninstall it.
|
||||
- [Docker Compose](https://docs.docker.com/compose/install/).
|
||||
- [Ansible](https://docs.ansible.com/ansible/latest/installation_guide/intro_installation.html) will need to be installed as we use it to template files needed for the docker-compose.
|
||||
- OpenSSL.
|
||||
|
||||
@@ -9,6 +9,7 @@ control_plane_node_count: 1
|
||||
minikube_container_group: false
|
||||
receptor_socket_file: /var/run/awx-receptor/receptor.sock
|
||||
receptor_image: quay.io/ansible/receptor:devel
|
||||
ingress_path: /
|
||||
|
||||
# Keys for signing work
|
||||
receptor_rsa_bits: 4096
|
||||
|
||||
@@ -49,18 +49,11 @@
|
||||
mode: '0600'
|
||||
with_items:
|
||||
- "database.py"
|
||||
- "local_settings.py"
|
||||
- "websocket_secret.py"
|
||||
- "haproxy.cfg"
|
||||
|
||||
- name: Delete old local_settings.py
|
||||
file:
|
||||
path: "{{ playbook_dir }}/../../../awx/settings/local_settings.py"
|
||||
state: absent
|
||||
|
||||
- name: Copy local_settings.py
|
||||
copy:
|
||||
src: "local_settings.py"
|
||||
dest: "{{ sources_dest }}/local_settings.py"
|
||||
- "nginx.conf"
|
||||
- "nginx.locations.conf"
|
||||
|
||||
- name: Get OS info for sdb
|
||||
shell: |
|
||||
|
||||
@@ -24,6 +24,7 @@ services:
|
||||
EXECUTION_NODE_COUNT: {{ execution_node_count|int }}
|
||||
AWX_LOGGING_MODE: stdout
|
||||
DJANGO_SUPERUSER_PASSWORD: {{ admin_password }}
|
||||
UWSGI_MOUNT_PATH: {{ ingress_path }}
|
||||
{% if loop.index == 1 %}
|
||||
RUN_MIGRATIONS: 1
|
||||
{% endif %}
|
||||
@@ -40,6 +41,8 @@ services:
|
||||
- "../../docker-compose/_sources/database.py:/etc/tower/conf.d/database.py"
|
||||
- "../../docker-compose/_sources/websocket_secret.py:/etc/tower/conf.d/websocket_secret.py"
|
||||
- "../../docker-compose/_sources/local_settings.py:/etc/tower/conf.d/local_settings.py"
|
||||
- "../../docker-compose/_sources/nginx.conf:/etc/nginx/nginx.conf"
|
||||
- "../../docker-compose/_sources/nginx.locations.conf:/etc/nginx/conf.d/nginx.locations.conf"
|
||||
- "../../docker-compose/_sources/SECRET_KEY:/etc/tower/SECRET_KEY"
|
||||
- "../../docker-compose/_sources/receptor/receptor-awx-{{ loop.index }}.conf:/etc/receptor/receptor.conf"
|
||||
- "../../docker-compose/_sources/receptor/receptor-awx-{{ loop.index }}.conf.lock:/etc/receptor/receptor.conf.lock"
|
||||
|
||||
@@ -46,3 +46,5 @@ SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
BROADCAST_WEBSOCKET_PORT = 8013
|
||||
BROADCAST_WEBSOCKET_VERIFY_CERT = False
|
||||
BROADCAST_WEBSOCKET_PROTOCOL = 'http'
|
||||
|
||||
STATIC_URL = '{{ (ingress_path + '/static/').replace('//', '/') }}'
|
||||
@@ -1,8 +1,7 @@
|
||||
#user awx;
|
||||
|
||||
worker_processes 1;
|
||||
|
||||
pid /tmp/nginx.pid;
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pid /var/run/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
@@ -17,7 +16,7 @@ http {
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
|
||||
access_log /dev/stdout main;
|
||||
access_log /var/log/nginx/access.log main;
|
||||
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
@@ -25,41 +24,17 @@ http {
|
||||
}
|
||||
|
||||
sendfile on;
|
||||
#tcp_nopush on;
|
||||
#gzip on;
|
||||
|
||||
upstream uwsgi {
|
||||
server 127.0.0.1:8050;
|
||||
}
|
||||
server localhost:8050;
|
||||
}
|
||||
|
||||
upstream daphne {
|
||||
server 127.0.0.1:8051;
|
||||
server localhost:8051;
|
||||
}
|
||||
|
||||
{% if ssl_certificate is defined %}
|
||||
server {
|
||||
listen 8052 default_server;
|
||||
server_name _;
|
||||
|
||||
# Redirect all HTTP links to the matching HTTPS page
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
{%endif %}
|
||||
|
||||
server {
|
||||
{% if (ssl_certificate is defined) and (ssl_certificate_key is defined) %}
|
||||
listen 8053 ssl;
|
||||
|
||||
ssl_certificate /etc/nginx/awxweb.pem;
|
||||
ssl_certificate_key /etc/nginx/awxweb_key.pem;
|
||||
{% elif (ssl_certificate is defined) and (ssl_certificate_key is not defined) %}
|
||||
listen 8053 ssl;
|
||||
|
||||
ssl_certificate /etc/nginx/awxweb.pem;
|
||||
ssl_certificate_key /etc/nginx/awxweb.pem;
|
||||
{% else %}
|
||||
listen 8052 default_server;
|
||||
{% endif %}
|
||||
listen 8013 default_server;
|
||||
|
||||
# If you have a domain name, this is where to add it
|
||||
server_name _;
|
||||
@@ -67,56 +42,35 @@ http {
|
||||
|
||||
# HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
|
||||
add_header Strict-Transport-Security max-age=15768000;
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
|
||||
# Protect against click-jacking https://www.owasp.org/index.php/Testing_for_Clickjacking_(OTG-CLIENT-009)
|
||||
add_header X-Frame-Options "DENY";
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
}
|
||||
|
||||
location /nginx_status {
|
||||
stub_status on;
|
||||
access_log off;
|
||||
allow 127.0.0.1;
|
||||
deny all;
|
||||
}
|
||||
server {
|
||||
listen 8043 default_server ssl;
|
||||
|
||||
location /static/ {
|
||||
alias /var/lib/awx/public/static/;
|
||||
}
|
||||
# If you have a domain name, this is where to add it
|
||||
server_name _;
|
||||
keepalive_timeout 65;
|
||||
|
||||
location /favicon.ico { alias /var/lib/awx/public/static/favicon.ico; }
|
||||
ssl_certificate /etc/nginx/nginx.crt;
|
||||
ssl_certificate_key /etc/nginx/nginx.key;
|
||||
|
||||
location /websocket {
|
||||
# Pass request to the upstream alias
|
||||
proxy_pass http://daphne;
|
||||
# Require http version 1.1 to allow for upgrade requests
|
||||
proxy_http_version 1.1;
|
||||
# We want proxy_buffering off for proxying to websockets.
|
||||
proxy_buffering off;
|
||||
# http://en.wikipedia.org/wiki/X-Forwarded-For
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# enable this if you use HTTPS:
|
||||
proxy_set_header X-Forwarded-Proto https;
|
||||
# pass the Host: header from the client for the sake of redirects
|
||||
proxy_set_header Host $http_host;
|
||||
# We've set the Host header, so we don't need Nginx to muddle
|
||||
# about with redirects
|
||||
proxy_redirect off;
|
||||
# Depending on the request value, set the Upgrade and
|
||||
# connection headers
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
}
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:SSL:50m;
|
||||
ssl_session_tickets off;
|
||||
|
||||
location / {
|
||||
# Add trailing / if missing
|
||||
rewrite ^(.*)$http_host(.*[^/])$ $1$http_host$2/ permanent;
|
||||
uwsgi_read_timeout 120s;
|
||||
uwsgi_pass uwsgi;
|
||||
include /etc/nginx/uwsgi_params;
|
||||
{%- if extra_nginx_include is defined %}
|
||||
include {{ extra_nginx_include }};
|
||||
{%- endif %}
|
||||
proxy_set_header X-Forwarded-Port 443;
|
||||
uwsgi_param HTTP_X_FORWARDED_PORT 443;
|
||||
}
|
||||
# intermediate configuration. tweak to your needs.
|
||||
ssl_protocols TLSv1.2;
|
||||
ssl_ciphers 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256';
|
||||
ssl_prefer_server_ciphers on;
|
||||
|
||||
# HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
|
||||
add_header Strict-Transport-Security max-age=15768000;
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
|
||||
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
location {{ (ingress_path + '/static').replace('//', '/') }} {
|
||||
alias /var/lib/awx/public/static/;
|
||||
}
|
||||
|
||||
location {{ (ingress_path + '/favicon.ico').replace('//', '/') }} {
|
||||
alias /awx_devel/awx/public/static/favicon.ico;
|
||||
}
|
||||
|
||||
location {{ (ingress_path + '/websocket').replace('//', '/') }} {
|
||||
# Pass request to the upstream alias
|
||||
proxy_pass http://daphne;
|
||||
# Require http version 1.1 to allow for upgrade requests
|
||||
proxy_http_version 1.1;
|
||||
# We want proxy_buffering off for proxying to websockets.
|
||||
proxy_buffering off;
|
||||
# http://en.wikipedia.org/wiki/X-Forwarded-For
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# enable this if you use HTTPS:
|
||||
proxy_set_header X-Forwarded-Proto https;
|
||||
# pass the Host: header from the client for the sake of redirects
|
||||
proxy_set_header Host $http_host;
|
||||
# We've set the Host header, so we don't need Nginx to muddle
|
||||
# about with redirects
|
||||
proxy_redirect off;
|
||||
# Depending on the request value, set the Upgrade and
|
||||
# connection headers
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
}
|
||||
|
||||
location {{ ingress_path }} {
|
||||
# Add trailing / if missing
|
||||
rewrite ^(.*[^/])$ $1/ permanent;
|
||||
uwsgi_read_timeout 120s;
|
||||
uwsgi_pass uwsgi;
|
||||
include /etc/nginx/uwsgi_params;
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
worker_processes 1;
|
||||
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pid /var/run/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
server_tokens off;
|
||||
|
||||
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
|
||||
access_log /var/log/nginx/access.log main;
|
||||
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
sendfile on;
|
||||
#tcp_nopush on;
|
||||
#gzip on;
|
||||
|
||||
include /etc/nginx/conf.d/*.conf;
|
||||
}
|
||||
@@ -1,124 +0,0 @@
|
||||
upstream uwsgi {
|
||||
server localhost:8050;
|
||||
}
|
||||
|
||||
upstream daphne {
|
||||
server localhost:8051;
|
||||
}
|
||||
|
||||
# server {
|
||||
# listen 8013 default_server;
|
||||
# listen [::]:8013 default_server;
|
||||
# server_name _;
|
||||
# return 301 https://$host:8043$request_uri;
|
||||
# }
|
||||
|
||||
server {
|
||||
listen 8013 default_server;
|
||||
|
||||
# If you have a domain name, this is where to add it
|
||||
server_name _;
|
||||
keepalive_timeout 65;
|
||||
|
||||
# HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
|
||||
add_header Strict-Transport-Security max-age=15768000;
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
|
||||
location /static/ {
|
||||
alias /var/lib/awx/public/static/;
|
||||
}
|
||||
|
||||
location /favicon.ico { alias /awx_devel/awx/public/static/favicon.ico; }
|
||||
|
||||
location ~ ^/websocket {
|
||||
# Pass request to the upstream alias
|
||||
proxy_pass http://daphne;
|
||||
# Require http version 1.1 to allow for upgrade requests
|
||||
proxy_http_version 1.1;
|
||||
# We want proxy_buffering off for proxying to websockets.
|
||||
proxy_buffering off;
|
||||
# http://en.wikipedia.org/wiki/X-Forwarded-For
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# enable this if you use HTTPS:
|
||||
proxy_set_header X-Forwarded-Proto https;
|
||||
# pass the Host: header from the client for the sake of redirects
|
||||
proxy_set_header Host $http_host;
|
||||
# We've set the Host header, so we don't need Nginx to muddle
|
||||
# about with redirects
|
||||
proxy_redirect off;
|
||||
# Depending on the request value, set the Upgrade and
|
||||
# connection headers
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
}
|
||||
|
||||
location / {
|
||||
# Add trailing / if missing
|
||||
rewrite ^(.*[^/])$ $1/ permanent;
|
||||
uwsgi_read_timeout 120s;
|
||||
uwsgi_pass uwsgi;
|
||||
include /etc/nginx/uwsgi_params;
|
||||
}
|
||||
}
|
||||
|
||||
server {
|
||||
listen 8043 default_server ssl;
|
||||
|
||||
# If you have a domain name, this is where to add it
|
||||
server_name _;
|
||||
keepalive_timeout 65;
|
||||
|
||||
ssl_certificate /etc/nginx/nginx.crt;
|
||||
ssl_certificate_key /etc/nginx/nginx.key;
|
||||
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:SSL:50m;
|
||||
ssl_session_tickets off;
|
||||
|
||||
# intermediate configuration. tweak to your needs.
|
||||
ssl_protocols TLSv1.2;
|
||||
ssl_ciphers 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256';
|
||||
ssl_prefer_server_ciphers on;
|
||||
|
||||
# HSTS (ngx_http_headers_module is required) (15768000 seconds = 6 months)
|
||||
add_header Strict-Transport-Security max-age=15768000;
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
|
||||
location /static/ {
|
||||
alias /var/lib/awx/public/static/;
|
||||
access_log off;
|
||||
sendfile off;
|
||||
}
|
||||
|
||||
location /favicon.ico { alias /awx_devel/awx/public/static/favicon.ico; }
|
||||
|
||||
location ~ ^/websocket {
|
||||
# Pass request to the upstream alias
|
||||
proxy_pass http://daphne;
|
||||
# Require http version 1.1 to allow for upgrade requests
|
||||
proxy_http_version 1.1;
|
||||
# We want proxy_buffering off for proxying to websockets.
|
||||
proxy_buffering off;
|
||||
# http://en.wikipedia.org/wiki/X-Forwarded-For
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# enable this if you use HTTPS:
|
||||
proxy_set_header X-Forwarded-Proto https;
|
||||
# pass the Host: header from the client for the sake of redirects
|
||||
proxy_set_header Host $http_host;
|
||||
# We've set the Host header, so we don't need Nginx to muddle
|
||||
# about with redirects
|
||||
proxy_redirect off;
|
||||
# Depending on the request value, set the Upgrade and
|
||||
# connection headers
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
}
|
||||
|
||||
location / {
|
||||
# Add trailing / if missing
|
||||
rewrite ^(.*[^/])$ $1/ permanent;
|
||||
uwsgi_read_timeout 120s;
|
||||
uwsgi_pass uwsgi;
|
||||
include /etc/nginx/uwsgi_params;
|
||||
}
|
||||
}
|
||||