Compare commits

...

18 Commits

Author SHA1 Message Date
John Westcott IV
cb5a8aa194 Fix black pre-commit hook (#14212) 2023-07-06 16:36:50 -04:00
Seth Foster
8b49f910c7 Add settings.RECEPTOR_LOG_LEVEL, update work signing key path (#14098) 2023-07-06 11:39:30 -04:00
kialam
a4f808df34 Schedules form - pass time prop as string. (#14206) 2023-07-06 07:57:55 -07:00
Alan Rominger
82abd18927 Fix DELETE 500 KeyError due to eventless model events (#14172) 2023-07-05 15:37:52 -04:00
John Westcott IV
5e9d514e5e Added CSRF Origin in settings (#14062) 2023-07-05 15:18:23 -04:00
Rick Elrod
4a34ee1f1e Add optional pgbouncer to dev environment (#14083)
Signed-off-by: Rick Elrod <rick@elrod.me>
2023-07-05 13:41:47 -05:00
John Westcott IV
3624fe2cac Add combined roles/collection requirements on project sync (#14081) 2023-07-05 13:25:44 -03:00
Cesar Francisco San Nicolas Martinez
0f96d9aca2 Rename/relocate receptor crt in install bundle (#14201) 2023-07-05 14:50:55 +02:00
Shane McDonald
989b80e771 Fix selinux errors with Redis mount in dev env 2023-07-03 09:57:01 -04:00
John Westcott IV
cc64be937d Fix spelling errors in readme of awx_collection/tools
Signed-off-by: John Westcott <john.westcott.iv@redhat.com>
2023-06-30 15:41:47 -04:00
John Westcott IV
94183d602c Enhancing vault integration
Added persistent storage

Auto-create vault and awx via playbooks

Create a new pattern for custom containers where we can do initialization

Auto-install roles needed for plumbing via the Makefile
2023-06-30 10:05:15 -04:00
Vidya Nambiar
ac4ef141bf Fix filter experience when assigning access to teams (#14175) 2023-06-29 15:15:32 -04:00
jainnikhil30
86f6b54eec add the bulk api swagger topic for API reference docs (#14181) 2023-06-28 21:55:38 +05:30
Michael Abashian
bd8108b27c Fixed bug where a weekly rrule string without a BYDAY would result in the UI throwing a TypeError (#14182) 2023-06-28 11:10:49 -04:00
Alan Rominger
aed96fb365 Use the proper queryset to filter project update events (#14166) 2023-06-26 21:41:08 -04:00
Alan Rominger
fe2da52eec Upgrade Github actions issue labeler to fix 404 errors (#14163) 2023-06-26 17:14:53 -04:00
Alan Rominger
974465e46a Add hashivault option as docker-compose optional container (#14161)
Co-authored-by: Sarabraj Singh <singh.sarabraj@gmail.com>
2023-06-26 15:48:58 -04:00
Alan Rominger
c736986023 Try to fix CI by adding dropped coreapi lib (#14165) 2023-06-26 15:11:12 -04:00
39 changed files with 435 additions and 62 deletions

View File

@@ -17,7 +17,7 @@ jobs:
steps:
- name: Label Issue
uses: github/issue-labeler@v2.4.1
uses: github/issue-labeler@v3.1
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
not-before: 2021-12-07T07:00:00Z

View File

@@ -27,6 +27,8 @@ COLLECTION_TEMPLATE_VERSION ?= false
# NOTE: This defaults the container image version to the branch that's active
COMPOSE_TAG ?= $(GIT_BRANCH)
MAIN_NODE_TYPE ?= hybrid
# If set to true docker-compose will also start a pgbouncer instance and use it
PGBOUNCER ?= false
# If set to true docker-compose will also start a keycloak instance
KEYCLOAK ?= false
# If set to true docker-compose will also start an ldap instance
@@ -37,6 +39,8 @@ SPLUNK ?= false
PROMETHEUS ?= false
# If set to true docker-compose will also start a grafana instance
GRAFANA ?= false
# If set to true docker-compose will also start a hashicorp vault instance
VAULT ?= false
# If set to true docker-compose will also start a tacacs+ instance
TACACS ?= false
@@ -520,15 +524,20 @@ docker-compose-sources: .git/hooks/pre-commit
-e control_plane_node_count=$(CONTROL_PLANE_NODE_COUNT) \
-e execution_node_count=$(EXECUTION_NODE_COUNT) \
-e minikube_container_group=$(MINIKUBE_CONTAINER_GROUP) \
-e enable_pgbouncer=$(PGBOUNCER) \
-e enable_keycloak=$(KEYCLOAK) \
-e enable_ldap=$(LDAP) \
-e enable_splunk=$(SPLUNK) \
-e enable_prometheus=$(PROMETHEUS) \
-e enable_grafana=$(GRAFANA) \
-e enable_vault=$(VAULT) \
-e enable_tacacs=$(TACACS) \
$(EXTRA_SOURCES_ANSIBLE_OPTS)
docker-compose: awx/projects docker-compose-sources
ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
-e enable_vault=$(VAULT);
$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
docker-compose-credential-plugins: awx/projects docker-compose-sources
@@ -580,7 +589,7 @@ docker-clean:
-$(foreach image_id,$(shell docker images --filter=reference='*/*/*awx_devel*' --filter=reference='*/*awx_devel*' --filter=reference='*awx_devel*' -aq),docker rmi --force $(image_id);)
docker-clean-volumes: docker-compose-clean docker-compose-container-group-clean
docker volume rm -f tools_awx_db tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
docker volume rm -f tools_awx_db tools_vault_1 tools_grafana_storage tools_prometheus_storage $(docker volume ls --filter name=tools_redis_socket_ -q)
docker-refresh: docker-clean docker-compose

View File

@@ -1629,8 +1629,8 @@ class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):
fields = ('*', 'host_status_counts', 'playbook_counts')
def get_playbook_counts(self, obj):
task_count = obj.project_update_events.filter(event='playbook_on_task_start').count()
play_count = obj.project_update_events.filter(event='playbook_on_play_start').count()
task_count = obj.get_event_queryset().filter(event='playbook_on_task_start').count()
play_count = obj.get_event_queryset().filter(event='playbook_on_play_start').count()
data = {'play_count': play_count, 'task_count': task_count}
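
The serializer now counts events through `get_event_queryset()` rather than the `project_update_events` relation, which lets job types that never write events answer with zero instead of erroring. A minimal sketch of that contract, using hypothetical class names rather than AWX's real models:

```python
# Illustrative sketch only -- class names are hypothetical stand-ins, not AWX's
# actual models. The idea: every job type exposes get_event_queryset(), and job
# types that store no events return an empty collection, so callers can always
# .filter(...).count() safely.

class EmptyEventQuerySet:
    """Stand-in for an empty Django queryset."""
    def filter(self, **kwargs):
        return self

    def count(self):
        return 0

class FilterableEvents:
    def __init__(self, events):
        self._events = events

    def filter(self, **kwargs):
        return FilterableEvents(
            [e for e in self._events if all(e.get(k) == v for k, v in kwargs.items())]
        )

    def count(self):
        return len(self._events)

class EventfulJob:
    def __init__(self, events):
        self._events = events  # e.g. [{'event': 'playbook_on_task_start'}, ...]

    def get_event_queryset(self):
        return FilterableEvents(self._events)

class EventlessJob:
    """e.g. a workflow job or approval, which stores no events at all."""
    def get_event_queryset(self):
        return EmptyEventQuerySet()

job = EventfulJob([{'event': 'playbook_on_task_start'}, {'event': 'playbook_on_play_start'}])
print(job.get_event_queryset().filter(event='playbook_on_task_start').count())        # 1
print(EventlessJob().get_event_queryset().filter(event='playbook_on_task_start').count())  # 0
```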

View File

@@ -12,7 +12,7 @@ receptor_work_commands:
custom_worksign_public_keyfile: receptor/work_public_key.pem
custom_tls_certfile: receptor/tls/receptor.crt
custom_tls_keyfile: receptor/tls/receptor.key
custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
custom_ca_certfile: receptor/tls/ca/mesh-CA.crt
receptor_protocol: 'tcp'
receptor_listener: true
receptor_port: {{ instance.listener_port }}

View File

@@ -1,5 +1,7 @@
from collections import OrderedDict
from django.utils.translation import gettext_lazy as _
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
from rest_framework.reverse import reverse
@@ -18,6 +20,9 @@ from awx.api import (
class BulkView(APIView):
name = _('Bulk')
swagger_topic = 'Bulk'
permission_classes = [IsAuthenticated]
renderer_classes = [
renderers.BrowsableAPIRenderer,
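
The added `permission_classes = [IsAuthenticated]` is standard Django REST Framework behavior: anonymous requests are rejected before the handler runs (401 or 403 depending on the authentication classes). A standalone sketch of the same pattern, not AWX's actual BulkView:

```python
# Standalone DRF sketch of the pattern added above; the view body and route are
# hypothetical, only the permission_classes mechanism mirrors the change.
from rest_framework.views import APIView
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response

class BulkRootView(APIView):
    permission_classes = [IsAuthenticated]  # anonymous requests never reach get()

    def get(self, request, format=None):
        # Only reached for authenticated users.
        return Response({'host_create': '/api/v2/bulk/host_create/'})
```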

View File

@@ -14,7 +14,7 @@ class ConfConfig(AppConfig):
def ready(self):
self.module.autodiscover()
if not set(sys.argv) & {'migrate', 'check_migrations'}:
if not set(sys.argv) & {'migrate', 'check_migrations', 'showmigrations'}:
from .settings import SettingsWrapper
SettingsWrapper.initialize()
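
The guard is a plain set intersection against `sys.argv`: when any migration-related management command is being invoked, the database-backed settings wrapper is not initialized, since the tables it reads may not exist yet. The check in isolation:

```python
# Minimal illustration of the argv guard above; the print statements are just
# placeholders for the real initialization.
import sys

SKIP_FOR = {'migrate', 'check_migrations', 'showmigrations'}

# e.g. `python manage.py showmigrations` -> sys.argv == ['manage.py', 'showmigrations']
if not set(sys.argv) & SKIP_FOR:
    print("safe to initialize database-backed settings")
else:
    print("migration command detected; skipping settings initialization")
```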

View File

@@ -94,6 +94,20 @@ register(
category_slug='system',
)
register(
'CSRF_TRUSTED_ORIGINS',
default=[],
field_class=fields.StringListField,
label=_('CSRF Trusted Origins List'),
help_text=_(
"If the service is behind a reverse proxy/load balancer, use this setting "
"to configure the schema://addresses from which the service should trust "
"Origin header values. "
),
category=_('System'),
category_slug='system',
)
register(
'LICENSE',
field_class=fields.DictField,
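
Once registered, the setting can be managed through the API/UI like the other system settings, or pinned in a settings file. A hedged example of the kind of value it expects; the hostname is a placeholder, not anything from this changeset:

```python
# Example value only -- the origin below stands in for the external
# scheme://address that browsers use to reach the service behind the proxy.
CSRF_TRUSTED_ORIGINS = [
    "https://awx.example.com",
]
```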

View File

@@ -265,6 +265,8 @@ def kv_backend(**kwargs):
if secret_key:
try:
if (secret_key != 'data') and (secret_key not in json['data']) and ('data' in json['data']):
return json['data']['data'][secret_key]
return json['data'][secret_key]
except KeyError:
raise RuntimeError('{} is not present at {}'.format(secret_key, secret_path))
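
The extra branch accounts for KV version 2 responses, where Vault nests the payload one level deeper under `data`. A small sketch of the lookup against both response shapes; the sample payloads are made up, but the branch mirrors the one added above:

```python
# Made-up sample payloads showing the two KV response shapes the plugin copes with.
kv_v1_response = {'data': {'my_key': 'this_is_the_secret_value'}}
kv_v2_response = {'data': {'data': {'my_key': 'this_is_the_secret_value'},
                           'metadata': {'version': 1}}}

def lookup(json, secret_key):
    if (secret_key != 'data') and (secret_key not in json['data']) and ('data' in json['data']):
        return json['data']['data'][secret_key]   # KV v2: payload nested under data.data
    return json['data'][secret_key]               # KV v1, or key stored at the top level

print(lookup(kv_v1_response, 'my_key'))  # this_is_the_secret_value
print(lookup(kv_v2_response, 'my_key'))  # this_is_the_secret_value
```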

View File

@@ -661,7 +661,11 @@ class WorkflowJob(UnifiedJob, WorkflowJobOptions, SurveyJobMixin, JobNotificatio
@property
def event_processing_finished(self):
return True
return True # workflow jobs do not have events
@property
def has_unpartitioned_events(self):
return False # workflow jobs do not have events
def _get_parent_field_name(self):
if self.job_template_id:
@@ -914,7 +918,11 @@ class WorkflowApproval(UnifiedJob, JobNotificationMixin):
@property
def event_processing_finished(self):
return True
return True # approval jobs do not have events
@property
def has_unpartitioned_events(self):
return False # approval jobs do not have events
def send_approval_notification(self, approval_status):
from awx.main.tasks.system import send_notifications # avoid circular import

View File

@@ -639,7 +639,7 @@ class AWXReceptorJob:
#
RECEPTOR_CONFIG_STARTER = (
{'local-only': None},
{'log-level': 'info'},
{'log-level': settings.RECEPTOR_LOG_LEVEL},
{'node': {'firewallrules': [{'action': 'reject', 'tonode': settings.CLUSTER_HOST_ID, 'toservice': 'control'}]}},
{'control-service': {'service': 'control', 'filename': '/var/run/receptor/receptor.sock', 'permissions': '0660'}},
{'work-command': {'worktype': 'local', 'command': 'ansible-runner', 'params': 'worker', 'allowruntimeparams': True}},

View File

@@ -189,11 +189,12 @@
connection: local
name: Install content with ansible-galaxy command if necessary
vars:
galaxy_task_env: # configure in settings
additional_collections_env:
# These environment variables are used for installing collections, in addition to galaxy_task_env
# setting the collections paths silences warnings
galaxy_task_env: # configured in settings
# additional_galaxy_env contains environment variables that are used for installing roles and collections and will take precedence over items in galaxy_task_env
additional_galaxy_env:
# These paths control where ansible-galaxy installs collections and roles on top of the filesystem
ANSIBLE_COLLECTIONS_PATHS: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections"
ANSIBLE_ROLES_PATH: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles"
# Put the local tmp directory in the same volume as the collection destination
# otherwise, files cannot be moved across volumes and will cause an error
ANSIBLE_LOCAL_TEMP: "{{ projects_root }}/.__awx_cache/{{ local_path }}/stage/tmp"
@@ -212,40 +213,50 @@
- name: End play due to disabled content sync
ansible.builtin.meta: end_play
- name: Fetch galaxy roles from requirements.(yml/yaml)
ansible.builtin.command: >
ansible-galaxy role install -r {{ item }}
--roles-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_roles
{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
args:
chdir: "{{ project_path | quote }}"
register: galaxy_result
with_fileglob:
- "{{ project_path | quote }}/roles/requirements.yaml"
- "{{ project_path | quote }}/roles/requirements.yml"
changed_when: "'was installed successfully' in galaxy_result.stdout"
environment: "{{ galaxy_task_env }}"
when: roles_enabled | bool
tags:
- install_roles
- block:
- name: Fetch galaxy roles from roles/requirements.(yml/yaml)
ansible.builtin.command:
cmd: "ansible-galaxy role install -r {{ item }} {{ verbosity }}"
register: galaxy_result
with_fileglob:
- "{{ project_path | quote }}/roles/requirements.yaml"
- "{{ project_path | quote }}/roles/requirements.yml"
changed_when: "'was installed successfully' in galaxy_result.stdout"
when: roles_enabled | bool
tags:
- install_roles
- name: Fetch galaxy collections from collections/requirements.(yml/yaml)
ansible.builtin.command: >
ansible-galaxy collection install -r {{ item }}
--collections-path {{ projects_root }}/.__awx_cache/{{ local_path }}/stage/requirements_collections
{{ ' -' + 'v' * ansible_verbosity if ansible_verbosity else '' }}
args:
chdir: "{{ project_path | quote }}"
register: galaxy_collection_result
with_fileglob:
- "{{ project_path | quote }}/collections/requirements.yaml"
- "{{ project_path | quote }}/collections/requirements.yml"
- "{{ project_path | quote }}/requirements.yaml"
- "{{ project_path | quote }}/requirements.yml"
changed_when: "'Installing ' in galaxy_collection_result.stdout"
environment: "{{ additional_collections_env | combine(galaxy_task_env) }}"
when:
- "ansible_version.full is version_compare('2.9', '>=')"
- collections_enabled | bool
tags:
- install_collections
- name: Fetch galaxy collections from collections/requirements.(yml/yaml)
ansible.builtin.command:
cmd: "ansible-galaxy collection install -r {{ item }} {{ verbosity }}"
register: galaxy_collection_result
with_fileglob:
- "{{ project_path | quote }}/collections/requirements.yaml"
- "{{ project_path | quote }}/collections/requirements.yml"
changed_when: "'Nothing to do.' not in galaxy_collection_result.stdout"
when:
- "ansible_version.full is version_compare('2.9', '>=')"
- collections_enabled | bool
tags:
- install_collections
- name: Fetch galaxy roles and collections from requirements.(yml/yaml)
ansible.builtin.command:
cmd: "ansible-galaxy install -r {{ item }} {{ verbosity }}"
register: galaxy_combined_result
with_fileglob:
- "{{ project_path | quote }}/requirements.yaml"
- "{{ project_path | quote }}/requirements.yml"
changed_when: "'Nothing to do.' not in galaxy_combined_result.stdout"
when:
- "ansible_version.full is version_compare('2.10', '>=')"
- collections_enabled | bool
- roles_enabled | bool
tags:
- install_collections
- install_roles
# We combine our additional_galaxy_env into galaxy_task_env so that our values are preferred over anything a user would set
environment: "{{ galaxy_task_env | combine(additional_galaxy_env) }}"
vars:
verbosity: "{{ (ansible_verbosity) | ternary('-'+'v'*ansible_verbosity, '') }}"

View File

@@ -158,6 +158,11 @@ REMOTE_HOST_HEADERS = ['REMOTE_ADDR', 'REMOTE_HOST']
# REMOTE_HOST_HEADERS will be trusted unconditionally')
PROXY_IP_ALLOWED_LIST = []
# If we are behind a reverse proxy/load balancer, use this setting to
# allow the scheme://addresses from which Tower should trust csrf requests
# If this setting is an empty list (the default), we will only trust ourself
CSRF_TRUSTED_ORIGINS = []
CUSTOM_VENV_PATHS = []
# Warning: this is a placeholder for a database setting
@@ -959,6 +964,9 @@ AWX_RUNNER_KEEPALIVE_SECONDS = 0
# Delete completed work units in receptor
RECEPTOR_RELEASE_WORK = True
# K8S only. Use receptor_log_level on AWX spec to set this properly
RECEPTOR_LOG_LEVEL = 'info'
MIDDLEWARE = [
'django_guid.middleware.guid_middleware',
'awx.main.middleware.SettingsCacheMiddleware',
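
On a local or docker-compose deployment the new default can be overridden from a settings override file; on Kubernetes the comment above points at `receptor_log_level` in the AWX spec instead. An illustrative sketch of how the value feeds the receptor config entry shown in the earlier hunk (`'debug'` is just an example level):

```python
# Illustrative only: 'debug' is an example override; the dict mirrors the
# RECEPTOR_CONFIG_STARTER entry changed in the receptor.py hunk above.
RECEPTOR_LOG_LEVEL = 'debug'   # shipped default is 'info'

receptor_config_entry = {'log-level': RECEPTOR_LOG_LEVEL}
print(receptor_config_entry)   # {'log-level': 'debug'}
```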

View File

@@ -50,7 +50,7 @@ const userSortColumns = [
const teamSearchColumns = [
{
name: t`Name`,
key: 'name',
key: 'name__icontains',
isDefault: true,
},
{
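
Changing the column key to `name__icontains` makes the UI issue a case-insensitive substring filter instead of an exact name match. The same filter can be exercised directly against the API; the host, token, and search string below are placeholders, not values from this changeset:

```python
# Placeholder host and token -- substitute your own AWX instance and OAuth token.
import requests

resp = requests.get(
    "https://awx.example.com/api/v2/teams/",
    params={"name__icontains": "ops"},          # case-insensitive substring match
    headers={"Authorization": "Bearer TOKEN"},
)
print(resp.json()["count"])
```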

View File

@@ -94,7 +94,7 @@ export default function FrequencyDetails({
value={getRunEveryLabel()}
dataCy={`${prefix}-run-every`}
/>
{type === 'week' ? (
{type === 'week' && options.daysOfWeek ? (
<Detail
label={t`On days`}
value={options.daysOfWeek

View File

@@ -24,10 +24,10 @@ function DateTimePicker({ dateFieldName, timeFieldName, label }) {
validate: combine([required(null), validateTime()]),
});
const onDateChange = (inputDate, newDate) => {
const onDateChange = (_, dateString, date) => {
dateHelpers.setTouched();
if (isValidDate(newDate) && inputDate === yyyyMMddFormat(newDate)) {
dateHelpers.setValue(inputDate);
if (isValidDate(date) && dateString === yyyyMMddFormat(date)) {
dateHelpers.setValue(dateString);
}
};
@@ -62,7 +62,7 @@ function DateTimePicker({ dateFieldName, timeFieldName, label }) {
}
time={timeField.value}
{...timeField}
onChange={(time) => timeHelpers.setValue(time)}
onChange={(_, time) => timeHelpers.setValue(time)}
/>
</DateTimeGroup>
</FormGroup>

View File

@@ -43,10 +43,11 @@ describe('<DateTimePicker/>', () => {
await act(async () => {
wrapper.find('DatePicker').prop('onChange')(
null,
'2021-05-29',
new Date('Sat May 29 2021 00:00:00 GMT-0400 (Eastern Daylight Time)')
);
wrapper.find('TimePicker').prop('onChange')('7:15 PM');
wrapper.find('TimePicker').prop('onChange')(null, '7:15 PM');
});
wrapper.update();
expect(wrapper.find('DatePicker').prop('value')).toBe('2021-05-29');

View File

@@ -885,6 +885,7 @@ describe('<ScheduleForm />', () => {
).toBe(true);
await act(async () => {
wrapper.find('DatePicker[aria-label="End date"]').prop('onChange')(
null,
'2020-03-14',
new Date('2020-03-14')
);
@@ -905,6 +906,7 @@ describe('<ScheduleForm />', () => {
const laterTime = DateTime.now().plus({ hours: 1 }).toFormat('h:mm a');
await act(async () => {
wrapper.find('DatePicker[aria-label="End date"]').prop('onChange')(
null,
today,
new Date(today)
);
@@ -919,6 +921,7 @@ describe('<ScheduleForm />', () => {
);
await act(async () => {
wrapper.find('TimePicker[aria-label="End time"]').prop('onChange')(
null,
laterTime
);
});

View File

@@ -59,6 +59,7 @@ function MiscSystemDetail() {
'TOWER_URL_BASE',
'DEFAULT_EXECUTION_ENVIRONMENT',
'PROXY_IP_ALLOWED_LIST',
'CSRF_TRUSTED_ORIGINS',
'AUTOMATION_ANALYTICS_LAST_GATHER',
'AUTOMATION_ANALYTICS_LAST_ENTRIES',
'UI_NEXT'

View File

@@ -29,6 +29,7 @@ describe('<MiscSystemDetail />', () => {
TOWER_URL_BASE: 'https://towerhost',
REMOTE_HOST_HEADERS: [],
PROXY_IP_ALLOWED_LIST: [],
CSRF_TRUSTED_ORIGINS: [],
LICENSE: null,
REDHAT_USERNAME: 'name1',
REDHAT_PASSWORD: '$encrypted$',

View File

@@ -53,6 +53,7 @@ function MiscSystemEdit() {
'TOWER_URL_BASE',
'DEFAULT_EXECUTION_ENVIRONMENT',
'PROXY_IP_ALLOWED_LIST',
'CSRF_TRUSTED_ORIGINS',
'UI_NEXT'
);
@@ -95,6 +96,7 @@ function MiscSystemEdit() {
await submitForm({
...form,
PROXY_IP_ALLOWED_LIST: formatJson(form.PROXY_IP_ALLOWED_LIST),
CSRF_TRUSTED_ORIGINS: formatJson(form.CSRF_TRUSTED_ORIGINS),
REMOTE_HOST_HEADERS: formatJson(form.REMOTE_HOST_HEADERS),
DEFAULT_EXECUTION_ENVIRONMENT:
form.DEFAULT_EXECUTION_ENVIRONMENT?.id || null,
@@ -239,6 +241,11 @@ function MiscSystemEdit() {
config={system.PROXY_IP_ALLOWED_LIST}
isRequired
/>
<ObjectField
name="CSRF_TRUSTED_ORIGINS"
config={system.CSRF_TRUSTED_ORIGINS}
isRequired
/>
{submitError && <FormSubmitError error={submitError} />}
{revertError && <FormSubmitError error={revertError} />}
</FormColumnLayout>

View File

@@ -39,6 +39,7 @@ const systemData = {
REMOTE_HOST_HEADERS: ['REMOTE_ADDR', 'REMOTE_HOST'],
TOWER_URL_BASE: 'https://localhost:3000',
PROXY_IP_ALLOWED_LIST: [],
CSRF_TRUSTED_ORIGINS: [],
UI_NEXT: false,
};

View File

@@ -78,6 +78,20 @@
"read_only": false
}
},
"CSRF_TRUSTED_ORIGINS": {
"type": "list",
"required": true,
"label": "CSRF Origins List",
"help_text": "If the service is behind a reverse proxy/load balancer, use this setting to configure the schema://addresses from which the service should trust Origin header values. ",
"category": "System",
"category_slug": "system",
"default": [],
"child": {
"type": "string",
"required": true,
"read_only": false
}
},
"REDHAT_USERNAME": {
"type": "string",
"required": false,
@@ -4487,6 +4501,17 @@
"type": "string"
}
},
"CSRF_TRUSTED_ORIGINS": {
"type": "list",
"label": "CSRF Origins List",
"help_text": "If the service is behind a reverse proxy/load balancer, use this setting to configure the schema://addresses from which the service should trust Origin header values. ",
"category": "System",
"category_slug": "system",
"defined_in_file": false,
"child": {
"type": "string"
}
},
"LICENSE": {
"type": "nested object",
"label": "License",

View File

@@ -9,6 +9,7 @@
"REMOTE_HOST"
],
"PROXY_IP_ALLOWED_LIST": [],
"CSRF_TRUSTED_ORIGINS": [],
"LICENSE": {},
"REDHAT_USERNAME": "",
"REDHAT_PASSWORD": "",

View File

@@ -19,8 +19,8 @@ It is intended as a tool for writing new modules or enforcing consistency.
These instructions assume you have ansible-core and the collection installed.
To install the collection in-place (to pick up any local changes to source)
the `make symlink_collection` will simplink the `awx_collection/` folder to
the approprate place under `~/.ansible/collections`.
the `make symlink_collection` will symlink the `awx_collection/` folder to
the appropriate place under `~/.ansible/collections`.
This is a shortcut for quick validation of tests that bypasses `ansible-test`.
To use this, you need the `~/.tower_cli.cfg` config file populated,

View File

@@ -1,5 +1,5 @@
if [ -z $AWX_IGNORE_BLACK ] ; then
python_files_changed=$(git diff --cached --name-only --diff-filter=AM | grep -E '\.py')
python_files_changed=$(git diff --cached --name-only --diff-filter=AM | grep -E '\.py$')
if [ "x$python_files_changed" != "x" ] ; then
black --check $python_files_changed || \
if [ $? != 0 ] ; then
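
The added `$` anchors the pattern to the end of the filename, so staged files that merely contain `.py` somewhere in their name are no longer handed to black. The difference, shown with Python's `re` module (the filenames are invented for illustration; `grep -E` applies the same extended-regex semantics here):

```python
import re

files = ["awx/main/models/jobs.py", "requirements/requirements.txt",
         "tools/scripts/foo.pyc", "docs/build.py.orig"]

unanchored = [f for f in files if re.search(r"\.py", f)]
anchored = [f for f in files if re.search(r"\.py$", f)]

print(unanchored)  # ['awx/main/models/jobs.py', 'tools/scripts/foo.pyc', 'docs/build.py.orig']
print(anchored)    # ['awx/main/models/jobs.py']
```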

View File

@@ -1,4 +1,5 @@
build
coreapi
django-debug-toolbar==3.2.4
drf-yasg
# pprofile - re-add once https://github.com/vpelletier/pprofile/issues/41 is addressed

View File

@@ -303,7 +303,7 @@ To bring up a 1 node AWX + minikube that is accessible from AWX run the followin
Start minikube
```bash
(host)$minikube start --cpus=4 --memory=8g --addons=ingress`
(host)$minikube start --cpus=4 --memory=8g --addons=ingress
```
Start AWX
@@ -497,6 +497,62 @@ ansible-playbook tools/docker-compose/ansible/plumb_tacacs.yml
Once the playbook is done running, tacacs+ should be set up in your development environment. This server has the accounts listed on https://hub.docker.com/r/dchidell/docker-tacacs
### HashiVault Integration
Run a HashiVault container alongside AWX.
```bash
VAULT=true make docker-compose
```
You can find the initialization data at `tools/docker-compose/_sources/secrets/vault_init.yml`;
this includes the unseal keys and a root token.
You will need to unseal the HashiVault each time the container is started.
The easiest way to do that is to run:
```bash
ansible-playbook tools/docker-compose/ansible/unseal_vault.yml
```
This will perform the unseal and also display the root token for login.
For demo purposes, Vault will be auto-configured to include a Key Value (KV) vault called `my_engine` along with a secret called `my_key` in `/my_engine/my_root/my_folder`.
The secret value is `this_is_the_secret_value`.
To create a secret connected to this vault in AWX you can run the following playbook:
```bash
export CONTROLLER_USERNAME=<your username>
export CONTROLLER_PASSWORD=<your password>
ansible-playbook tools/docker-compose/ansible/plumb_vault.yml
```
This will create the following items in your AWX instance:
* A credential called `Vault Lookup Cred` tied to the vault instance.
* A custom credential type called `Vault Custom Cred Type`.
* A credential called `Credential From Vault` which is of the created type using the `Vault Lookup Cred` to get the password.
The custom credential type adds a variable when used in a playbook called `the_secret_from_vault`.
If you have a playbook like:
```
---
- name: Show a vault secret
hosts: localhost
connection: local
gather_facts: False
tasks:
- debug:
var: the_secret_from_vault
```
Run it through AWX with the credential `Credential From Vault` attached, and the debug task should print `this_is_the_secret_value`.
The extremely non-obvious part is that Vault unexpectedly prefixes the secret path with `data/`.
This was discovered by inspecting the secret with the vault CLI, which may help with future troubleshooting.
```
docker exec -it -e VAULT_TOKEN=<token> tools_vault_1 vault kv get --address=http://127.0.0.1:1234 my_engine/my_root/my_folder
```
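
The same secret can be fetched over Vault's HTTP API, which makes the `data/` prefix visible: for a KV v2 engine the read path is `/v1/<mount>/data/<path>`, not `/v1/<mount>/<path>`. A sketch against the dev container, assuming the demo engine is KV v2 (which is what the prefix behaviour above implies); the token is a placeholder for the `Initial_Root_Token` from `vault_init.yml`:

```python
# Token is a placeholder -- use the Initial_Root_Token from vault_init.yml.
import requests

VAULT_ADDR = "http://localhost:1234"
resp = requests.get(
    f"{VAULT_ADDR}/v1/my_engine/data/my_root/my_folder",   # note the extra 'data/' segment
    headers={"X-Vault-Token": "s.PLACEHOLDER"},
)
print(resp.json()["data"]["data"]["my_key"])  # this_is_the_secret_value
```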
### Prometheus and Grafana integration
See docs at https://github.com/ansible/awx/blob/devel/tools/grafana/README.md

View File

@@ -0,0 +1,2 @@
---
sources_dest: '../_sources'

View File

@@ -0,0 +1,10 @@
---
- name: Run any pre-hooks for other container
hosts: localhost
gather_facts: false
tasks:
- name: Initialize vault
include_role:
name: vault
tasks_from: initialize
when: enable_vault | bool

View File

@@ -0,0 +1,8 @@
---
- name: Plumb AWX for Vault
hosts: localhost
gather_facts: False
tasks:
- include_role:
name: vault
tasks_from: plumb

View File

@@ -0,0 +1,5 @@
---
collections:
- awx.awx
- flowerysong.hvault
- community.docker

View File

@@ -1,5 +1,4 @@
---
sources_dest: '../_sources'
compose_name: 'docker-compose.yml'
awx_image: 'ghcr.io/ansible/awx_devel'
pg_port: 5432
@@ -36,3 +35,8 @@ enable_splunk: false
enable_grafana: false
enable_prometheus: false
scrape_interval: '5s'
# pgbouncer
enable_pgbouncer: false
pgbouncer_port: 6432
pgbouncer_max_pool_size: 70

View File

@@ -5,7 +5,12 @@ DATABASES = {
'NAME': "{{ pg_database }}",
'USER': "{{ pg_username }}",
'PASSWORD': "{{ pg_password }}",
{% if enable_pgbouncer|bool %}
'HOST': "pgbouncer",
'PORT': "{{ pgbouncer_port }}",
{% else %}
'HOST': "{{ pg_hostname | default('postgres') }}",
'PORT': "{{ pg_port }}",
{% endif %}
}
}

View File

@@ -71,7 +71,7 @@ services:
image: redis:latest
container_name: tools_redis_{{ container_postfix }}
volumes:
- "../../redis/redis.conf:/usr/local/etc/redis/redis.conf"
- "../../redis/redis.conf:/usr/local/etc/redis/redis.conf:Z"
- "redis_socket_{{ container_postfix }}:/var/run/redis/:rw"
entrypoint: ["redis-server"]
command: ["/usr/local/etc/redis/redis.conf"]
@@ -201,6 +201,25 @@ services:
POSTGRES_PASSWORD: {{ pg_password }}
volumes:
- "awx_db:/var/lib/postgresql/data"
{% if enable_pgbouncer|bool %}
pgbouncer:
image: bitnami/pgbouncer:latest
container_name: tools_pgbouncer_1
hostname: pgbouncer
environment:
POSTGRESQL_USERNAME: {{ pg_username }}
POSTGRESQL_DATABASE: {{ pg_database }}
PGBOUNCER_DATABASE: {{ pg_database }}
POSTGRESQL_PASSWORD: {{ pg_password }}
POSTGRESQL_HOST: {{ pg_hostname | default('postgres') }}
POSTGRESQL_PORT: {{ pg_port }}
PGBOUNCER_AUTH_TYPE: trust
PGBOUNCER_PORT: {{ pgbouncer_port }}
PGBOUNCER_DEFAULT_POOL_SIZE: {{ pgbouncer_max_pool_size }}
# This is the default, but we're being explicit here because it's important:
# pg_notify will NOT work in transaction mode.
PGBOUNCER_POOL_MODE: session
{% endif %}
{% if execution_node_count|int > 0 %}
receptor-hop:
image: {{ receptor_image }}
@@ -233,6 +252,21 @@ services:
privileged: true
{% endfor %}
{% endif %}
{% if enable_vault|bool %}
vault:
image: hashicorp/vault:1.14
container_name: tools_vault_1
command: server
hostname: vault
ports:
- "1234:1234"
environment:
VAULT_LOCAL_CONFIG: '{"storage": {"file": {"path": "/vault/file"}}, "listener": [{"tcp": { "address": "0.0.0.0:1234", "tls_disable": true}}], "default_lease_ttl": "168h", "max_lease_ttl": "720h", "ui": true}'
cap_add:
- IPC_LOCK
volumes:
- 'hashicorp_vault_data:/vault/file'
{% endif %}
volumes:
awx_db:
@@ -247,6 +281,10 @@ volumes:
name: tools_ldap_1
driver: local
{% endif %}
{% if enable_vault|bool %}
hashicorp_vault_data:
name: tools_vault_1
{% endif %}
{% if enable_prometheus|bool %}
prometheus_storage:
name: tools_prometheus_storage

View File

@@ -0,0 +1,2 @@
---
vault_file: "{{ sources_dest }}/secrets/vault_init.yml"

View File

@@ -0,0 +1,62 @@
---
- name: See if vault has been initialized
ansible.builtin.stat:
path: "{{ vault_file }}"
register: vault_secret_file_info
- block:
- name: Start the vault
community.docker.docker_compose:
state: present
services: vault
project_src: "{{ sources_dest }}"
- name: Run the initialization
community.docker.docker_container_exec:
command: vault operator init
container: tools_vault_1
env:
VAULT_ADDR: "http://127.0.0.1:1234"
register: vault_initialization
- name: Write out initialization file
copy:
# lines 1-4 are the keys, 6 is the root token
content: |
{{ vault_initialization.stdout_lines[0] | regex_replace('Unseal Key ', 'Unseal_Key_') }}
{{ vault_initialization.stdout_lines[1] | regex_replace('Unseal Key ', 'Unseal_Key_') }}
{{ vault_initialization.stdout_lines[2] | regex_replace('Unseal Key ', 'Unseal_Key_') }}
{{ vault_initialization.stdout_lines[3] | regex_replace('Unseal Key ', 'Unseal_Key_') }}
{{ vault_initialization.stdout_lines[4] | regex_replace('Unseal Key ', 'Unseal_Key_') }}
{{ vault_initialization.stdout_lines[6] | regex_replace('Initial Root Token', 'Initial_Root_Token') }}
dest: "{{ vault_file }}"
- name: Unlock the vault
include_role:
name: vault
tasks_from: unseal.yml
- name: Create an engine
flowerysong.hvault.engine:
path: "my_engine"
type: "kv"
vault_addr: "http://localhost:1234"
token: "{{ Initial_Root_Token }}"
register: engine
- name: Create a secret
flowerysong.hvault.kv:
mount_point: "my_engine/my_root"
key: "my_folder"
value:
my_key: "this_is_the_secret_value"
vault_addr: "http://localhost:1234"
token: "{{ Initial_Root_Token }}"
always:
- name: Stop the vault
community.docker.docker_compose:
state: absent
project_src: "{{ sources_dest }}"
when: not vault_secret_file_info.stat.exists

View File

@@ -0,0 +1,56 @@
---
- name: Load vault keys
include_vars:
file: "{{ vault_file }}"
- name: Create a HashiCorp Vault Credential
awx.awx.credential:
credential_type: HashiCorp Vault Secret Lookup
name: Vault Lookup Cred
organization: Default
inputs:
api_version: "v1"
cacert: ""
default_auth_path: "approle"
kubernetes_role: ""
namespace: ""
role_id: ""
secret_id: ""
token: "{{ Initial_Root_Token }}"
url: "http://tools_vault_1:1234"
register: vault_cred
- name: Create a custom credential type
awx.awx.credential_type:
name: Vault Custom Cred Type
kind: cloud
injectors:
extra_vars:
the_secret_from_vault: "{{ '{{' }} password {{ '}}' }}"
inputs:
fields:
- type: "string"
id: "password"
label: "Password"
secret: true
register: custom_vault_cred_type
- name: Create a credential of the custom type
awx.awx.credential:
credential_type: "{{ custom_vault_cred_type.id }}"
name: Credential From Vault
inputs: {}
organization: Default
register: custom_credential
- name: Use the Vault Credential For the new credential
awx.awx.credential_input_source:
input_field_name: password
target_credential: "{{ custom_credential.id }}"
source_credential: "{{ vault_cred.id }}"
metadata:
auth_path: ""
secret_backend: "my_engine"
secret_key: "my_key"
secret_path: "/my_root/my_folder"
secret_version: ""

View File

@@ -0,0 +1,14 @@
---
- name: Load vault keys
include_vars:
file: "{{ vault_file }}"
- name: Unseal the vault
flowerysong.hvault.seal:
vault_addr: "http://localhost:1234"
state: unsealed
key: "{{ item }}"
loop:
- "{{ Unseal_Key_1 }}"
- "{{ Unseal_Key_2 }}"
- "{{ Unseal_Key_3 }}"

View File

@@ -0,0 +1,13 @@
---
- name: Run tasks post startup
hosts: localhost
gather_facts: False
tasks:
- name: Unseal the vault
include_role:
name: vault
tasks_from: unseal
- name: Display root token
debug:
var: Initial_Root_Token