Mirror of https://github.com/ansible/awx.git (synced 2026-02-11 06:34:42 -03:30)
Compare commits (1 commit): 24.2.0...dmzoneill-
| Author | SHA1 | Date |
|---|---|---|
| | 5ca017d359 | |
8 .github/PULL_REQUEST_TEMPLATE.md (vendored)

@@ -7,6 +7,14 @@ commit message and your description; but you should still explain what
the change does.
-->

##### Depends on
<!---
Please provide links to any other PR dependanices.
Indicating these should be merged first prior to this PR.
-->
- #12345
- https://github.com/xxx/yyy/pulls/1234

##### ISSUE TYPE
<!--- Pick one below and delete the rest: -->
- Breaking Change

2 .github/actions/run_awx_devel/action.yml (vendored)

@@ -71,7 +71,7 @@ runs:
id: data
shell: bash
run: |
AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks.awx.IPAddress}}' tools_awx_1)
AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks._sources_awx.IPAddress}}' tools_awx_1)
ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT

12 .github/workflows/ci.yml (vendored)

@@ -94,11 +94,11 @@ jobs:
- name: Build AWX image
working-directory: awx
run: |
VERSION=`make version-for-buildyml` make awx-kube-build
env:
COMPOSE_TAG: ci
DEV_DOCKER_TAG_BASE: local
HEADLESS: yes
ansible-playbook -v tools/ansible/build.yml \
-e headless=yes \
-e awx_image=awx \
-e awx_image_tag=ci \
-e ansible_python_interpreter=$(which python3)

- name: Run test deployment with awx-operator
working-directory: awx-operator
@@ -109,7 +109,7 @@ jobs:
make kustomize
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule -v test -s kind -- --skip-tags=replicas
env:
AWX_TEST_IMAGE: local/awx
AWX_TEST_IMAGE: awx
AWX_TEST_VERSION: ci

collection-sanity:

96 .github/workflows/stage.yml (vendored)

@@ -49,20 +49,7 @@ jobs:
with:
path: awx

- name: Checkout awx-operator
uses: actions/checkout@v3
with:
repository: ${{ github.repository_owner }}/awx-operator
path: awx-operator

- name: Checkout awx-logos
uses: actions/checkout@v3
with:
repository: ansible/awx-logos
path: awx-logos

- name: Get python version from Makefile
working-directory: awx
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

- name: Install python ${{ env.py_version }}
@@ -70,76 +57,63 @@ jobs:
with:
python-version: ${{ env.py_version }}

- name: Checkout awx-logos
uses: actions/checkout@v3
with:
repository: ansible/awx-logos
path: awx-logos

- name: Checkout awx-operator
uses: actions/checkout@v3
with:
repository: ${{ github.repository_owner }}/awx-operator
path: awx-operator

- name: Install playbook dependencies
run: |
python3 -m pip install docker

- name: Build and stage AWX
working-directory: awx
run: |
ansible-playbook -v tools/ansible/build.yml \
-e registry=ghcr.io \
-e registry_username=${{ github.actor }} \
-e registry_password=${{ secrets.GITHUB_TOKEN }} \
-e awx_image=${{ github.repository }} \
-e awx_version=${{ github.event.inputs.version }} \
-e ansible_python_interpreter=$(which python3) \
-e push=yes \
-e awx_official=yes

- name: Log into registry ghcr.io
uses: docker/login-action@v3
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Copy logos for inclusion in sdist for official build
working-directory: awx
run: |
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/

- name: Setup node and npm
uses: actions/setup-node@v2
- name: Log into registry quay.io
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0
with:
node-version: '16.13.1'

- name: Prebuild UI for awx image (to speed up build process)
working-directory: awx
run: |
sudo apt-get install gettext
make ui-release
make ui-next

- name: Set build env variables
run: |
echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
echo "COMPOSE_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
echo "VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
echo "AWX_TEST_VERSION=${{ github.event.inputs.version }}" >> $GITHUB_ENV
echo "AWX_TEST_IMAGE=ghcr.io/${OWNER,,}/awx" >> $GITHUB_ENV
echo "AWX_EE_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-ee:${{ github.event.inputs.version }}" >> $GITHUB_ENV
echo "AWX_OPERATOR_TEST_IMAGE=ghcr.io/${OWNER,,}/awx-operator:${{ github.event.inputs.operator_version }}" >> $GITHUB_ENV
env:
OWNER: ${{ github.repository_owner }}

- name: Build and stage AWX
working-directory: awx
env:
DOCKER_BUILDX_PUSH: true
HEADLESS: false
PLATFORMS: linux/amd64,linux/arm64
run: |
make awx-kube-buildx
registry: quay.io
username: ${{ secrets.QUAY_USER }}
password: ${{ secrets.QUAY_TOKEN }}

- name: tag awx-ee:latest with version input
run: |
docker buildx imagetools create \
quay.io/ansible/awx-ee:latest \
--tag ${AWX_EE_TEST_IMAGE}
--tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}

- name: Stage awx-operator image
working-directory: awx-operator
run: |
BUILD_ARGS="--build-arg DEFAULT_AWX_VERSION=${{ github.event.inputs.version}} \
--build-arg OPERATOR_VERSION=${{ github.event.inputs.operator_version }}" \
IMG=${AWX_OPERATOR_TEST_IMAGE} \
IMG=ghcr.io/${{ github.repository_owner }}/awx-operator:${{ github.event.inputs.operator_version }} \
make docker-buildx

- name: Pulling images for test deployment with awx-operator
# awx operator molecue test expect to kind load image and buildx exports image to registry and not local
run: |
docker pull ${AWX_OPERATOR_TEST_IMAGE}
docker pull ${AWX_EE_TEST_IMAGE}
docker pull ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}

- name: Run test deployment with awx-operator
working-directory: awx-operator
run: |
@@ -148,6 +122,10 @@ jobs:
sudo rm -f $(which kustomize)
make kustomize
KUSTOMIZE_PATH=$(readlink -f bin/kustomize) molecule test -s kind
env:
AWX_TEST_IMAGE: ${{ github.repository }}
AWX_TEST_VERSION: ${{ github.event.inputs.version }}
AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}

- name: Create draft release for AWX
working-directory: awx

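The `${OWNER,,}` expansions in the "Set build env variables" step lowercase the repository owner so the ghcr.io image references stay valid. A minimal hedged illustration (the owner value is invented):

```bash
# ${VAR,,} is bash 4+ lowercase expansion:
OWNER="MyOrg"
echo "ghcr.io/${OWNER,,}/awx"   # prints ghcr.io/myorg/awx
```
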
2 Makefile

@@ -2,7 +2,7 @@

PYTHON := $(notdir $(shell for i in python3.11 python3; do command -v $$i; done|sed 1q))
SHELL := bash
DOCKER_COMPOSE ?= docker compose
DOCKER_COMPOSE ?= docker-compose
OFFICIAL ?= no
NODE ?= node
NPM_BIN ?= npm

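Because `DOCKER_COMPOSE` is assigned with `?=`, the new `docker compose` default can still be overridden from the environment. A hedged sketch (the `docker-compose` make target is assumed from the AWX Makefile and is not shown in this diff):

```bash
# Fall back to the legacy standalone binary for a single invocation:
DOCKER_COMPOSE="docker-compose" make docker-compose
```
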
@@ -2,21 +2,32 @@
# All Rights Reserved.

from django.conf import settings
from django.urls import NoReverseMatch

from rest_framework.reverse import reverse as drf_reverse
from rest_framework.reverse import _reverse
from rest_framework.versioning import URLPathVersioning as BaseVersioning


def is_optional_api_urlpattern_prefix_request(request):
def drf_reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra):
"""
Copy and monkey-patch `rest_framework.reverse.reverse` to prevent adding unwarranted
query string parameters.
"""
scheme = getattr(request, 'versioning_scheme', None)
if scheme is not None:
try:
url = scheme.reverse(viewname, args, kwargs, request, format, **extra)
except NoReverseMatch:
# In case the versioning scheme reversal fails, fallback to the
# default implementation
url = _reverse(viewname, args, kwargs, request, format, **extra)
else:
url = _reverse(viewname, args, kwargs, request, format, **extra)

if settings.OPTIONAL_API_URLPATTERN_PREFIX and request:
if request.path.startswith(f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}"):
return True
return False
url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")


def transform_optional_api_urlpattern_prefix_url(request, url):
if is_optional_api_urlpattern_prefix_request(request):
url = url.replace('/api', f"/api/{settings.OPTIONAL_API_URLPATTERN_PREFIX}")
return url


@@ -29,9 +40,7 @@ def reverse(viewname, args=None, kwargs=None, request=None, format=None, **extra
kwargs = {}
if 'version' not in kwargs:
kwargs['version'] = settings.REST_FRAMEWORK['DEFAULT_VERSION']
url = drf_reverse(viewname, args, kwargs, request, format, **extra)

return transform_optional_api_urlpattern_prefix_url(request, url)
return drf_reverse(viewname, args, kwargs, request, format, **extra)


class URLPathVersioning(BaseVersioning):

@@ -48,23 +48,23 @@ class AnalyticsRootView(APIView):

def get(self, request, format=None):
data = OrderedDict()
data['authorized'] = reverse('api:analytics_authorized', request=request)
data['reports'] = reverse('api:analytics_reports_list', request=request)
data['report_options'] = reverse('api:analytics_report_options_list', request=request)
data['adoption_rate'] = reverse('api:analytics_adoption_rate', request=request)
data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options', request=request)
data['event_explorer'] = reverse('api:analytics_event_explorer', request=request)
data['event_explorer_options'] = reverse('api:analytics_event_explorer_options', request=request)
data['host_explorer'] = reverse('api:analytics_host_explorer', request=request)
data['host_explorer_options'] = reverse('api:analytics_host_explorer_options', request=request)
data['job_explorer'] = reverse('api:analytics_job_explorer', request=request)
data['job_explorer_options'] = reverse('api:analytics_job_explorer_options', request=request)
data['probe_templates'] = reverse('api:analytics_probe_templates_explorer', request=request)
data['probe_templates_options'] = reverse('api:analytics_probe_templates_options', request=request)
data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer', request=request)
data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options', request=request)
data['roi_templates'] = reverse('api:analytics_roi_templates_explorer', request=request)
data['roi_templates_options'] = reverse('api:analytics_roi_templates_options', request=request)
data['authorized'] = reverse('api:analytics_authorized')
data['reports'] = reverse('api:analytics_reports_list')
data['report_options'] = reverse('api:analytics_report_options_list')
data['adoption_rate'] = reverse('api:analytics_adoption_rate')
data['adoption_rate_options'] = reverse('api:analytics_adoption_rate_options')
data['event_explorer'] = reverse('api:analytics_event_explorer')
data['event_explorer_options'] = reverse('api:analytics_event_explorer_options')
data['host_explorer'] = reverse('api:analytics_host_explorer')
data['host_explorer_options'] = reverse('api:analytics_host_explorer_options')
data['job_explorer'] = reverse('api:analytics_job_explorer')
data['job_explorer_options'] = reverse('api:analytics_job_explorer_options')
data['probe_templates'] = reverse('api:analytics_probe_templates_explorer')
data['probe_templates_options'] = reverse('api:analytics_probe_templates_options')
data['probe_template_for_hosts'] = reverse('api:analytics_probe_template_for_hosts_explorer')
data['probe_template_for_hosts_options'] = reverse('api:analytics_probe_template_for_hosts_options')
data['roi_templates'] = reverse('api:analytics_roi_templates_explorer')
data['roi_templates_options'] = reverse('api:analytics_roi_templates_options')
return Response(data)

@@ -28,7 +28,7 @@ from awx.main.analytics import all_collectors
from awx.main.ha import is_ha_environment
from awx.main.utils import get_awx_version, get_custom_venv_choices
from awx.main.utils.licensing import validate_entitlement_manifest
from awx.api.versioning import URLPathVersioning, is_optional_api_urlpattern_prefix_request, reverse, drf_reverse
from awx.api.versioning import reverse, drf_reverse
from awx.main.constants import PRIVILEGE_ESCALATION_METHODS
from awx.main.models import Project, Organization, Instance, InstanceGroup, JobTemplate
from awx.main.utils import set_environ
@@ -40,19 +40,19 @@ logger = logging.getLogger('awx.api.views.root')
class ApiRootView(APIView):
permission_classes = (AllowAny,)
name = _('REST API')
versioning_class = URLPathVersioning
versioning_class = None
swagger_topic = 'Versioning'

@method_decorator(ensure_csrf_cookie)
def get(self, request, format=None):
'''List supported API versions'''
v2 = reverse('api:api_v2_root_view', request=request, kwargs={'version': 'v2'})

v2 = reverse('api:api_v2_root_view', kwargs={'version': 'v2'})
data = OrderedDict()
data['description'] = _('AWX REST API')
data['current_version'] = v2
data['available_versions'] = dict(v2=v2)
if not is_optional_api_urlpattern_prefix_request(request):
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
data['oauth2'] = drf_reverse('api:oauth_authorization_root_view')
data['custom_logo'] = settings.CUSTOM_LOGO
data['custom_login_info'] = settings.CUSTOM_LOGIN_INFO
data['login_redirect_override'] = settings.LOGIN_REDIRECT_OVERRIDE

@@ -1,7 +1,6 @@
import os
import psycopg
import select
from copy import deepcopy

from contextlib import contextmanager

@@ -95,8 +94,8 @@ class PubSub(object):


def create_listener_connection():
conf = deepcopy(settings.DATABASES['default'])
conf['OPTIONS'] = deepcopy(conf.get('OPTIONS', {}))
conf = settings.DATABASES['default'].copy()
conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
# Modify the application name to distinguish from other connections the process might use
conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')

@@ -1599,8 +1599,7 @@ class UnifiedJob(
extra["controller_node"] = self.controller_node or "NOT_SET"
elif state == "execution_node_chosen":
extra["execution_node"] = self.execution_node or "NOT_SET"

logger_job_lifecycle.info(f"{msg} {json.dumps(extra)}")
logger_job_lifecycle.info(msg, extra=extra)

@property
def launched_by(self):

@@ -6,7 +6,7 @@ from django.test import Client
from rest_framework.test import APIRequestFactory

from awx.api.generics import LoggedLoginView
from rest_framework.reverse import reverse as drf_reverse
from awx.api.versioning import drf_reverse


@pytest.mark.django_db

@@ -8,10 +8,8 @@ from django.db import connection
from django.test.utils import override_settings
from django.utils.encoding import smart_str, smart_bytes

from rest_framework.reverse import reverse as drf_reverse

from awx.main.utils.encryption import decrypt_value, get_encryption_key
from awx.api.versioning import reverse
from awx.api.versioning import reverse, drf_reverse
from awx.main.models.oauth import OAuth2Application as Application, OAuth2AccessToken as AccessToken
from awx.main.tests.functional import immediate_on_commit
from awx.sso.models import UserEnterpriseAuth

@@ -3,6 +3,7 @@

from copy import copy
import json
import json_log_formatter
import logging
import traceback
import socket
@@ -14,6 +15,15 @@ from django.core.serializers.json import DjangoJSONEncoder
from django.conf import settings


class JobLifeCycleFormatter(json_log_formatter.JSONFormatter):
def json_record(self, message: str, extra: dict, record: logging.LogRecord):
if 'time' not in extra:
extra['time'] = now()
if record.exc_info:
extra['exc_info'] = self.formatException(record.exc_info)
return extra


class TimeFormatter(logging.Formatter):
"""
Custom log formatter used for inventory imports

@@ -2,7 +2,6 @@ import json
import logging
import asyncio
from typing import Dict
from copy import deepcopy

import ipaddress

@@ -303,17 +302,14 @@ class WebSocketRelayManager(object):
self.stats_mgr.start()

# Set up a pg_notify consumer for allowing web nodes to "provision" and "deprovision" themselves gracefully.
database_conf = deepcopy(settings.DATABASES['default'])
database_conf['OPTIONS'] = deepcopy(database_conf.get('OPTIONS', {}))
database_conf = settings.DATABASES['default'].copy()
database_conf['OPTIONS'] = database_conf.get('OPTIONS', {}).copy()

for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
database_conf[k] = v
for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
database_conf['OPTIONS'][k] = v

if 'PASSWORD' in database_conf:
database_conf['OPTIONS']['password'] = database_conf.pop('PASSWORD')

task = None

# Establishes a websocket connection to /websocket/relay on all API servers
@@ -324,6 +320,7 @@ class WebSocketRelayManager(object):
dbname=database_conf['NAME'],
host=database_conf['HOST'],
user=database_conf['USER'],
password=database_conf['PASSWORD'],
port=database_conf['PORT'],
**database_conf.get("OPTIONS", {}),
)

@@ -849,6 +849,7 @@ LOGGING = {
'json': {'()': 'awx.main.utils.formatters.LogstashFormatter'},
'timed_import': {'()': 'awx.main.utils.formatters.TimeFormatter', 'format': '%(relativeSeconds)9.3f %(levelname)-8s %(message)s'},
'dispatcher': {'format': '%(asctime)s %(levelname)-8s [%(guid)s] %(name)s PID:%(process)d %(message)s'},
'job_lifecycle': {'()': 'awx.main.utils.formatters.JobLifeCycleFormatter'},
},
# Extended below based on install scenario. You probably don't want to add something directly here.
# See 'handler_config' below.
@@ -916,7 +917,7 @@ handler_config = {
'wsrelay': {'filename': 'wsrelay.log'},
'task_system': {'filename': 'task_system.log'},
'rbac_migrations': {'filename': 'tower_rbac_migrations.log'},
'job_lifecycle': {'filename': 'job_lifecycle.log'},
'job_lifecycle': {'filename': 'job_lifecycle.log', 'formatter': 'job_lifecycle'},
'rsyslog_configurer': {'filename': 'rsyslog_configurer.log'},
'cache_clear': {'filename': 'cache_clear.log'},
'ws_heartbeat': {'filename': 'ws_heartbeat.log'},

@@ -39,16 +39,6 @@ options:
description:
- Limit to use for the I(job_template).
type: str
tags:
description:
- Specific tags to apply from the I(job_template).
type: list
elements: str
skip_tags:
description:
- Specific tags to skip from the I(job_template).
type: list
elements: str
scm_branch:
description:
- A specific branch of the SCM project to run the template on.
@@ -110,8 +100,6 @@ def main():
organization=dict(),
inventory=dict(),
limit=dict(),
tags=dict(type='list', elements='str'),
skip_tags=dict(type='list', elements='str'),
scm_branch=dict(),
extra_vars=dict(type='dict'),
wait=dict(required=False, default=True, type='bool'),
@@ -140,14 +128,6 @@ def main():
if field_val is not None:
optional_args[field_name] = field_val

# Special treatment of tags parameters
job_tags = module.params.get('tags')
if job_tags is not None:
optional_args['job_tags'] = ",".join(job_tags)
skip_tags = module.params.get('skip_tags')
if skip_tags is not None:
optional_args['skip_tags'] = ",".join(skip_tags)

# Create a datastructure to pass into our job launch
post_data = {}
for arg_name, arg_value in optional_args.items():
@@ -172,8 +152,6 @@ def main():
check_vars_to_prompts = {
'inventory': 'ask_inventory_on_launch',
'limit': 'ask_limit_on_launch',
'job_tags': 'ask_tags_on_launch',
'skip_tags': 'ask_skip_tags_on_launch',
'scm_branch': 'ask_scm_branch_on_launch',
}

@@ -4,7 +4,7 @@ __metaclass__ = type

import pytest

from awx.main.models import WorkflowJobTemplate, WorkflowJob, NotificationTemplate
from awx.main.models import WorkflowJobTemplate, NotificationTemplate


@pytest.mark.django_db
@@ -135,37 +135,6 @@ def test_associate_only_on_success(run_module, admin_user, organization, project
assert list(wfjt.notification_templates_error.values_list('id', flat=True)) == [nt1.id]


@pytest.mark.django_db
def test_workflow_launch_with_prompting(run_module, admin_user, organization, inventory):
WorkflowJobTemplate.objects.create(
name='foo-workflow-launch-test',
organization=organization,
ask_variables_on_launch=True,
ask_inventory_on_launch=True,
ask_tags_on_launch=True,
ask_skip_tags_on_launch=True,
)
result = run_module(
'workflow_launch',
dict(
name='foo-workflow-launch-test',
inventory=inventory.name,
wait=False,
extra_vars={"var1": "My First Variable", "var2": "My Second Variable", "var3": "My Third Variable"},
tags=["my_tag"],
skip_tags=["your_tag", "their_tag"],
),
admin_user,
)
assert result.get('changed', True), result

job = WorkflowJob.objects.get(id=result['id'])
assert job.extra_vars == '{"var1": "My First Variable", "var2": "My Second Variable", "var3": "My Third Variable"}'
assert job.inventory == inventory
assert job.job_tags == "my_tag"
assert job.skip_tags == "your_tag,their_tag"


@pytest.mark.django_db
def test_delete_with_spec(run_module, admin_user, organization, survey_spec):
WorkflowJobTemplate.objects.create(organization=organization, name='foo-workflow', survey_enabled=True, survey_spec=survey_spec)

@@ -2,10 +2,12 @@

## Build & Push Image

To build a custom awx image to use with the awx-operator:
To build a custom awx image to use with the awx-operator, use the `build_image` role:

```
make awx-kube-build
$ ansible-playbook tools/ansible/build.yml \
-e awx_image=registry.example.com/ansible/awx \
-e awx_image_tag=test -v
```

> Note: The development image (`make docker-compose-build`) will not work with the awx-operator, the UI is not built in that image, among other things (see Dockerfile.j2 for more info).

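To also push the resulting image, the same playbook accepts registry parameters; the stage.yml workflow earlier in this diff passes them the same way. A hedged example invocation (the registry, credentials, and image name below are placeholders, not from the docs):

```bash
# Build and push in one run; flags mirror the stage.yml call to tools/ansible/build.yml.
ansible-playbook -v tools/ansible/build.yml \
  -e registry=ghcr.io \
  -e registry_username="$GITHUB_ACTOR" \
  -e registry_password="$GITHUB_TOKEN" \
  -e awx_image=myorg/awx \
  -e awx_image_tag=test \
  -e push=yes
```
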
Binary file not shown. (Before: 75 KiB)
Binary file not shown. (Before: 210 KiB)

@@ -646,39 +646,6 @@ Source Control credentials have several attributes that may be configured:
If you are using a GitHub account for a Source Control credential and you have 2FA (Two Factor Authentication) enabled on your account, you will need to use your Personal Access Token in the password field rather than your account password.


.. _ug_credentials_terraform:

Terraform backend configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

.. index::
pair: credential types; Terraform
pair: backend configuration; Terraform


Terraform is a HashiCorp tool used to automate various infrastructure tasks. Select this credential type to enable synchronization with the Terraform inventory source.

The Terraform credential requires the **Backend configuration** attribute which should contain the data from a `Terraform backend block <https://developer.hashicorp.com/terraform/language/settings/backends/configuration>`_. You can paste, drag a file, browse to upload a file, or click the (|key icon|) button to populate the field from an external :ref:`ug_credential_plugins`. An example configuration for an S3 backend:

.. |key icon| image:: ../common/images/key-mgmt-button.png
:alt: Credentials - create Terraform backend configuration credential form

::

bucket = "my-terraform-state-bucket"
key = "path/to/terraform-state-file"
region = "us-east-1"
access_key = "my-aws-access-key"
secret_key = "my-aws-secret-access-key"

|Credentials - create terraform credential|

.. |Credentials - create terraform credential| image:: ../common/images/credentials-create-terraform-credential.png
:alt: Credentials - create Terraform backend configuration credential form

Saving it stores the file path to the backend configuration in an environment variable ``TF_BACKEND_CONFIG_FILE`` that is made available to any job with the credential attached.

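A job with the credential attached could then point Terraform at that injected file; this usage sketch is illustrative and not part of the documented text (only the ``TF_BACKEND_CONFIG_FILE`` variable name comes from the docs above):

```bash
# Initialize Terraform against the backend configuration supplied by the credential:
terraform init -backend-config="$TF_BACKEND_CONFIG_FILE"
```
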
Thycotic DevOps Secrets Vault
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
This is considered part of the secret management capability. See :ref:`ug_credentials_thycoticvault` for more detail.

@@ -481,7 +481,6 @@ Inventory updates use dynamically-generated YAML files which are parsed by their
- :ref:`ug_source_openstack`
- :ref:`ug_source_rhv`
- :ref:`ug_source_rhaap`
- :ref:`ug_source_terraform`


Newly created configurations for inventory sources will contain the default plugin configuration values. If you want your newly created inventory sources to match the output of legacy sources, you must apply a specific set of configuration values for that source. To ensure backward compatibility, AWX uses "templates" for each of these sources to force the output of inventory plugins into the legacy format. Refer to :ref:`ir_inv_plugin_templates_reference` section of this guide for each source and their respective templates to help you migrate to the new style inventory plugin output.
@@ -1085,41 +1084,6 @@ Red Hat Ansible Automation Platform

4. Use the **Source Variables** field to override variables used by the ``controller`` inventory plugin. Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.


.. _ug_source_terraform:

Terraform State
~~~~~~~~~~~~~~~~

.. index::
pair: inventories; Terraform
pair: inventory source; Terraform state


This inventory source uses the `terraform_state <https://github.com/ansible-collections/cloud.terraform/blob/main/plugins/inventory/terraform_state.py>`_ inventory plugin from the `cloud.terraform <https://github.com/ansible-collections/cloud.terraform>`_ collection. The plugin will parse a terraform state file and add hosts for AWS EC2, GCE, and Azure instances.

1. To configure this type of sourced inventory, select **Terraform State** from the Source field.

2. The Create new source window expands with the required **Credential** field. Choose from an existing Terraform backend Credential. For more information, refer to :ref:`ug_credentials`.

3. You can optionally specify the verbosity, host filter, enabled variable/value, and update options as described in the main procedure for :ref:`adding a source <ug_add_inv_common_fields>`. For Terraform, enable **Overwrite** and **Update on launch** options.

4. Use the **Source Variables** field to override variables used by the ``controller`` inventory plugin. Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two. For more information on these variables, see the `terraform_state <https://github.com/ansible-collections/cloud.terraform/blob/main/plugins/inventory/terraform_state.py>`_ file for detail.

The ``backend_type`` variable is required by the Terraform state inventory plugin. This should match the remote backend configured in the Terraform backend credential, here is an example for an Amazon S3 backend:

::

---
backend_type: s3

5. Enter an |ee| in the **Execution Environment** field that contains a Terraform binary. This is required for the inventory plugin to run the Terraform commands that read inventory data from the Terraform state file. Refer to the `Terraform EE readme <https://github.com/ansible-cloud/terraform_ee>`_ that contains an example |ee| configuration with a Terraform binary.

.. image:: ../common/images/inventories-create-source-terraform-example.png

6. To add hosts for AWS EC2, GCE, and Azure instances, the Terraform state file in the backend must contain state for resources already deployed to EC2, GCE, or Azure. Refer to each of the Terraform providers' respective documentation to provision instances.


.. _ug_customscripts:

Export old inventory scripts

25 tools/ansible/build.yml (new file)

@@ -0,0 +1,25 @@
---
- name: Build AWX Docker Images
hosts: localhost
gather_facts: true
tasks:
- name: Get version from SCM if not explicitly provided
command: make version-for-buildyml
args:
chdir: '../../'
register: scm_version
failed_when: not scm_version.stdout
when: awx_version is not defined

- name: Set awx_version
set_fact:
awx_version: "{{ scm_version.stdout }}"
when: awx_version is not defined

- include_role:
name: dockerfile
- include_role:
name: image_build
- include_role:
name: image_push
when: push | default(false) | bool

@@ -242,8 +242,7 @@ ADD tools/scripts/awx-python /usr/bin/awx-python

{% if (build_dev|bool) or (kube_dev|bool) %}
RUN echo /awx_devel > /var/lib/awx/venv/awx/lib/python3.11/site-packages/awx.egg-link
RUN echo /awx_devel > /var/lib/awx/venv/awx/lib/python3.11/site-packages/awx.pth
RUN ln -sf /awx_devel/tools/docker-compose/awx-manage /usr/local/bin/awx-manage
ADD tools/docker-compose/awx-manage /usr/local/bin/awx-manage
RUN ln -sf /awx_devel/tools/scripts/awx-python /usr/bin/awx-python
RUN ln -sf /awx_devel/tools/scripts/rsyslog-4xx-recovery /usr/bin/rsyslog-4xx-recovery
{% endif %}

5 tools/ansible/roles/image_build/defaults/main.yml (new file)

@@ -0,0 +1,5 @@
---
awx_image: ansible/awx
awx_image_tag: "{{ awx_version }}"
dockerfile_name: 'Dockerfile'
headless: no

30 tools/ansible/roles/image_build/tasks/main.yml (new file)

@@ -0,0 +1,30 @@
---
- name: Verify awx-logos directory exists for official install
stat:
path: "../../../awx-logos"
register: logosdir
failed_when: logosdir.stat.isdir is not defined or not logosdir.stat.isdir
when: awx_official|default(false)|bool

- name: Copy logos for inclusion in sdist
copy:
src: "../../../awx-logos/awx/ui/client/assets/"
dest: "../../awx/ui/public/static/media/"
when: awx_official|default(false)|bool

- set_fact:
command_to_run: |
docker build -t {{ awx_image }}:{{ awx_image_tag }} \
-f {{ dockerfile_name }} \
--build-arg VERSION={{ awx_version }} \
--build-arg SETUPTOOLS_SCM_PRETEND_VERSION={{ awx_version }} \
--build-arg HEADLESS={{ headless }} \
.

# Calling Docker directly because docker-py doesnt support BuildKit
- name: Build AWX image
shell: "{{ command_to_run }}"
environment:
DOCKER_BUILDKIT: 1
args:
chdir: "{{ playbook_dir }}/../../"

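For a concrete sense of what the role's shell task ends up running, here is a hedged expansion with the role defaults substituted and an illustrative version number (the values are examples, not taken from the diff):

```bash
# Roughly equivalent command for awx_version=24.2.0 with default image name and tag:
DOCKER_BUILDKIT=1 docker build -t ansible/awx:24.2.0 \
  -f Dockerfile \
  --build-arg VERSION=24.2.0 \
  --build-arg SETUPTOOLS_SCM_PRETEND_VERSION=24.2.0 \
  --build-arg HEADLESS=no \
  .
```
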
4 tools/ansible/roles/image_push/defaults/main.yml (new file)

@@ -0,0 +1,4 @@
---
registry: quay.io
awx_image: ansible/awx
awx_image_tag: "{{ awx_version }}"

22 tools/ansible/roles/image_push/tasks/main.yml (new file)

@@ -0,0 +1,22 @@
---
- name: Authenticate with Docker registry if registry password given
  docker_login:
registry: "{{ registry }}"
username: "{{ registry_username }}"
password: "{{ registry_password }}"
reauthorize: true
when:
- registry is defined
- registry_username is defined
- registry_password is defined

- name: Tag and Push Container Images
docker_image:
name: "{{ awx_image }}:{{ awx_image_tag }}"
repository: "{{ registry }}/{{ awx_image }}:{{ item }}"
force_tag: yes
push: true
source: local
with_items:
- "latest"
- "{{ awx_image_tag }}"

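The same tag-and-push loop expressed as plain Docker CLI calls, with the role defaults and an illustrative version filled in (a hedged sketch, not part of the role):

```bash
# Log in once, then push both the versioned tag and latest (values are examples):
echo "$REGISTRY_PASSWORD" | docker login quay.io -u "$REGISTRY_USERNAME" --password-stdin
docker tag ansible/awx:24.2.0 quay.io/ansible/awx:latest
docker tag ansible/awx:24.2.0 quay.io/ansible/awx:24.2.0
docker push quay.io/ansible/awx:latest
docker push quay.io/ansible/awx:24.2.0
```
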
@@ -27,7 +27,7 @@ services:
user: "{{ ansible_user_uid }}"
image: "{{ awx_image }}:{{ awx_image_tag }}"
container_name: tools_awx_{{ container_postfix }}
hostname: awx-{{ container_postfix }}
hostname: awx_{{ container_postfix }}
command: launch_awx.sh
environment:
OS: "{{ os_info.stdout }}"

@@ -363,7 +363,6 @@ volumes:

networks:
awx:
name: awx
service-mesh:
name: service-mesh
{% if minikube_container_group|bool %}

@@ -4,10 +4,12 @@ global:
scrape_interval: {{ scrape_interval }} # Set the scrape interval to something faster. Default is every 1 minute.

scrape_configs:
- job_name: 'awx-metrics'
- job_name: 'awx'
static_configs:
- targets:
- awx-1:8013
# metrics are broadcast to all nodes in the cluster,
# so no need to track nodes individually.
- awx1:8013
metrics_path: /api/v2/metrics
scrape_interval: {{ scrape_interval }}
scheme: http
@@ -16,16 +18,3 @@ scrape_configs:
basic_auth:
username: admin
password: {{ admin_password }}

- job_name: 'awx-wsrelay'
static_configs:
- targets:
{% for i in range(control_plane_node_count|int) %}
{% set container_postfix = loop.index %}
- awx-{{ container_postfix }}:8016
{% endfor %}
metrics_path: /
scrape_interval: {{ scrape_interval }}
scheme: http
params:
format: ['txt']

@@ -1,9 +1,9 @@
---
- node:
id: awx-{{ item }}
id: awx_{{ item }}
firewallrules:
- action: "reject"
tonode: awx-{{ item }}
tonode: awx_{{ item }}
toservice: "control"

- log-level: info
@@ -24,7 +24,7 @@

{% for i in range(item | int + 1, control_plane_node_count | int + 1) %}
- tcp-peer:
address: awx-{{ i }}:2222
address: awx_{{ i }}:2222
redial: true
{% endfor %}

@@ -1,17 +1,8 @@
#!/usr/bin/awx-python
# EASY-INSTALL-ENTRY-SCRIPT: 'awx','console_scripts','awx-manage'
import sys
from importlib.metadata import distribution


def load_entry_point(dist, group, name):
dist_name, _, _ = dist.partition('==')
matches = (
entry_point
for entry_point in distribution(dist_name).entry_points
if entry_point.group == group and entry_point.name == name
)
return next(matches).load()

from pkg_resources import load_entry_point
__requires__ = 'awx'

if __name__ == '__main__':
sys.exit(

@@ -49,14 +49,14 @@ awx-manage register_queue --queuename=default --instance_percent=100
if [[ -n "$RUN_MIGRATIONS" ]]; then
for (( i=1; i<$CONTROL_PLANE_NODE_COUNT; i++ )); do
for (( j=i + 1; j<=$CONTROL_PLANE_NODE_COUNT; j++ )); do
awx-manage register_peers "awx-$i" --peers "awx-$j"
awx-manage register_peers "awx_$i" --peers "awx_$j"
done
done

if [[ $EXECUTION_NODE_COUNT > 0 ]]; then
awx-manage provision_instance --hostname="receptor-hop" --node_type="hop"
awx-manage add_receptor_address --instance="receptor-hop" --address="receptor-hop" --port=5555 --canonical
awx-manage register_peers "receptor-hop" --peers "awx-1"
awx-manage register_peers "receptor-hop" --peers "awx_1"
for (( e=1; e<=$EXECUTION_NODE_COUNT; e++ )); do
awx-manage provision_instance --hostname="receptor-$e" --node_type="execution"
awx-manage register_peers "receptor-$e" --peers "receptor-hop"

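As a quick illustration of what the renamed nested loop emits, with CONTROL_PLANE_NODE_COUNT=3 (the node count is an example value):

```bash
# Full-mesh peering between the three control-plane nodes:
awx-manage register_peers "awx_1" --peers "awx_2"
awx-manage register_peers "awx_1" --peers "awx_3"
awx-manage register_peers "awx_2" --peers "awx_3"
```
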
@@ -179,7 +179,7 @@ groups:
datasourceUid: awx_prometheus
model:
editorMode: code
expr: irate(callback_receiver_events_insert_db{node='awx-1'}[1m])
expr: irate(callback_receiver_events_insert_db{node='awx_1'}[1m])
hide: false
intervalMs: 1000
legendFormat: __auto
@@ -228,7 +228,7 @@ groups:
type: prometheus
uid: awx_prometheus
editorMode: code
expr: callback_receiver_events_queue_size_redis{node='awx-1'}
expr: callback_receiver_events_queue_size_redis{node='awx_1'}
hide: false
intervalMs: 1000
legendFormat: __auto
