mirror of
https://github.com/ansible/awx.git
synced 2026-02-06 12:04:44 -03:30
Compare commits
46 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c5bea2b557 | ||
|
|
6d0c47fdd0 | ||
|
|
54b4acbdfc | ||
|
|
a41766090e | ||
|
|
34fa897dda | ||
|
|
32df114e41 | ||
|
|
018f235a64 | ||
|
|
7e77235d5e | ||
|
|
139d8f0ae2 | ||
|
|
7691365aea | ||
|
|
59f61517d4 | ||
|
|
fa670e2d7f | ||
|
|
a87a044d64 | ||
|
|
381ade1148 | ||
|
|
864a30e3d4 | ||
|
|
5f42db67e6 | ||
|
|
ddf4f288d4 | ||
|
|
e75bc8bc1e | ||
|
|
bb533287b8 | ||
|
|
9979fc659e | ||
|
|
9e5babc093 | ||
|
|
c71e2524ed | ||
|
|
48b4c62186 | ||
|
|
853730acb9 | ||
|
|
f1448fced1 | ||
|
|
7697b6a69b | ||
|
|
22a491c32c | ||
|
|
cbd9dce940 | ||
|
|
a4fdcc1cca | ||
|
|
df95439008 | ||
|
|
acd834df8b | ||
|
|
587f0ecf98 | ||
|
|
5a2091f7bf | ||
|
|
fa7423819a | ||
|
|
fde8af9f11 | ||
|
|
209e7e27b1 | ||
|
|
6c7d29a982 | ||
|
|
282ba36839 | ||
|
|
b727d2c3b3 | ||
|
|
7fc3d5c7c7 | ||
|
|
4e055f46c4 | ||
|
|
f595985b7c | ||
|
|
ea232315bf | ||
|
|
ee251812b5 | ||
|
|
00ba1ea569 | ||
|
|
d91af132c1 |
2
.github/actions/awx_devel_image/action.yml
vendored
2
.github/actions/awx_devel_image/action.yml
vendored
@@ -24,7 +24,7 @@ runs:
|
||||
|
||||
- name: Pre-pull latest devel image to warm cache
|
||||
shell: bash
|
||||
run: docker pull ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
|
||||
run: docker pull -q ghcr.io/${OWNER_LC}/awx_devel:${{ github.base_ref }}
|
||||
|
||||
- name: Build image for current source checkout
|
||||
shell: bash
|
||||
|
||||
10
.github/actions/run_awx_devel/action.yml
vendored
10
.github/actions/run_awx_devel/action.yml
vendored
@@ -57,16 +57,6 @@ runs:
|
||||
awx-manage update_password --username=admin --password=password
|
||||
EOSH
|
||||
|
||||
- name: Build UI
|
||||
# This must be a string comparison in composite actions:
|
||||
# https://github.com/actions/runner/issues/2238
|
||||
if: ${{ inputs.build-ui == 'true' }}
|
||||
shell: bash
|
||||
run: |
|
||||
docker exec -i tools_awx_1 sh <<-EOSH
|
||||
make ui-devel
|
||||
EOSH
|
||||
|
||||
- name: Get instance data
|
||||
id: data
|
||||
shell: bash
|
||||
|
||||
2
.github/triage_replies.md
vendored
2
.github/triage_replies.md
vendored
@@ -1,7 +1,7 @@
|
||||
## General
|
||||
- For the roundup of all the different mailing lists available from AWX, Ansible, and beyond visit: https://docs.ansible.com/ansible/latest/community/communication.html
|
||||
- Hello, we think your question is answered in our FAQ. Does this: https://www.ansible.com/products/awx-project/faq cover your question?
|
||||
- You can find the latest documentation here: https://docs.ansible.com/automation-controller/latest/html/userguide/index.html
|
||||
- You can find the latest documentation here: https://ansible.readthedocs.io/projects/awx/en/latest/userguide/index.html
|
||||
|
||||
|
||||
|
||||
|
||||
26
.github/workflows/ci.yml
vendored
26
.github/workflows/ci.yml
vendored
@@ -38,7 +38,9 @@ jobs:
|
||||
- name: ui-test-general
|
||||
command: make ui-test-general
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Build awx_devel image for running checks
|
||||
uses: ./.github/actions/awx_devel_image
|
||||
@@ -52,7 +54,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- uses: ./.github/actions/run_awx_devel
|
||||
id: awx
|
||||
@@ -70,13 +74,15 @@ jobs:
|
||||
DEBUG_OUTPUT_DIR: /tmp/awx_operator_molecule_test
|
||||
steps:
|
||||
- name: Checkout awx
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
path: awx
|
||||
|
||||
- name: Checkout awx-operator
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false\
|
||||
repository: ansible/awx-operator
|
||||
path: awx-operator
|
||||
|
||||
@@ -130,7 +136,9 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
# The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
|
||||
- name: Upgrade ansible-core
|
||||
@@ -154,7 +162,9 @@ jobs:
|
||||
- name: r-z0-9
|
||||
regex: ^[r-z0-9]
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- uses: ./.github/actions/run_awx_devel
|
||||
id: awx
|
||||
@@ -200,7 +210,9 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Upgrade ansible-core
|
||||
run: python3 -m pip install --upgrade ansible-core
|
||||
|
||||
57
.github/workflows/dab-release.yml
vendored
Normal file
57
.github/workflows/dab-release.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
---
|
||||
name: django-ansible-base requirements update
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 6 * * *' # once an day @ 6 AM
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
jobs:
|
||||
dab-pin-newest:
|
||||
if: (github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')) || github.event_name != 'schedule'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- id: dab-release
|
||||
name: Get current django-ansible-base release version
|
||||
uses: pozetroninc/github-action-get-latest-release@2a61c339ea7ef0a336d1daa35ef0cb1418e7676c # v0.8.0
|
||||
with:
|
||||
owner: ansible
|
||||
repo: django-ansible-base
|
||||
excludes: prerelease, draft
|
||||
|
||||
- name: Check out respository code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- id: dab-pinned
|
||||
name: Get current django-ansible-base pinned version
|
||||
run:
|
||||
echo "version=$(requirements/django-ansible-base-pinned-version.sh)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Update django-ansible-base pinned version to upstream release
|
||||
run:
|
||||
requirements/django-ansible-base-pinned-version.sh -s ${{ steps.dab-release.outputs.release }}
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6
|
||||
with:
|
||||
base: devel
|
||||
branch: bump-django-ansible-base
|
||||
title: Bump django-ansible-base to ${{ steps.dab-release.outputs.release }}
|
||||
body: |
|
||||
##### SUMMARY
|
||||
Automated .github/workflows/dab-release.yml
|
||||
|
||||
django-ansible-base upstream released version == ${{ steps.dab-release.outputs.release }}
|
||||
requirements_git.txt django-ansible-base pinned version == ${{ steps.dab-pinned.outputs.version }}
|
||||
|
||||
##### ISSUE TYPE
|
||||
- Bug, Docs Fix or other nominal change
|
||||
|
||||
##### COMPONENT NAME
|
||||
- API
|
||||
|
||||
commit-message: |
|
||||
Update django-ansible-base version to ${{ steps.dab-pinned.outputs.version }}
|
||||
add-paths:
|
||||
requirements/requirements_git.txt
|
||||
12
.github/workflows/devel_images.yml
vendored
12
.github/workflows/devel_images.yml
vendored
@@ -35,7 +35,9 @@ jobs:
|
||||
exit 0
|
||||
if: matrix.build-targets.image-name == 'awx' && !endsWith(github.repository, '/awx')
|
||||
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
@@ -60,16 +62,14 @@ jobs:
|
||||
run: |
|
||||
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
|
||||
|
||||
- name: Setup node and npm
|
||||
- name: Setup node and npm for the new UI build
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16.13.1'
|
||||
node-version: '18'
|
||||
if: matrix.build-targets.image-name == 'awx'
|
||||
|
||||
- name: Prebuild UI for awx image (to speed up build process)
|
||||
- name: Prebuild new UI for awx image (to speed up build process)
|
||||
run: |
|
||||
sudo apt-get install gettext
|
||||
make ui-release
|
||||
make ui-next
|
||||
if: matrix.build-targets.image-name == 'awx'
|
||||
|
||||
|
||||
4
.github/workflows/docs.yml
vendored
4
.github/workflows/docs.yml
vendored
@@ -8,7 +8,9 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: install tox
|
||||
run: pip install tox
|
||||
|
||||
5
.github/workflows/label_issue.yml
vendored
5
.github/workflows/label_issue.yml
vendored
@@ -30,7 +30,10 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
name: Label Issue - Community
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
- name: Install python requests
|
||||
run: pip install requests
|
||||
|
||||
5
.github/workflows/label_pr.yml
vendored
5
.github/workflows/label_pr.yml
vendored
@@ -29,7 +29,10 @@ jobs:
|
||||
timeout-minutes: 20
|
||||
name: Label PR - Community
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
- name: Install python requests
|
||||
run: pip install requests
|
||||
|
||||
4
.github/workflows/promote.yml
vendored
4
.github/workflows/promote.yml
vendored
@@ -32,7 +32,9 @@ jobs:
|
||||
echo "TAG_NAME=${{ github.event.release.tag_name }}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout awx
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Get python version from Makefile
|
||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||
|
||||
26
.github/workflows/stage.yml
vendored
26
.github/workflows/stage.yml
vendored
@@ -45,19 +45,22 @@ jobs:
|
||||
exit 0
|
||||
|
||||
- name: Checkout awx
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
path: awx
|
||||
|
||||
- name: Checkout awx-operator
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
repository: ${{ github.repository_owner }}/awx-operator
|
||||
path: awx-operator
|
||||
|
||||
- name: Checkout awx-logos
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
repository: ansible/awx-logos
|
||||
path: awx-logos
|
||||
|
||||
@@ -86,17 +89,14 @@ jobs:
|
||||
run: |
|
||||
cp ../awx-logos/awx/ui/client/assets/* awx/ui/public/static/media/
|
||||
|
||||
- name: Setup node and npm
|
||||
- name: Setup node and npm for new UI build
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16.13.1'
|
||||
node-version: '18'
|
||||
|
||||
- name: Prebuild UI for awx image (to speed up build process)
|
||||
- name: Prebuild new UI for awx image (to speed up build process)
|
||||
working-directory: awx
|
||||
run: |
|
||||
sudo apt-get install gettext
|
||||
make ui-release
|
||||
make ui-next
|
||||
run: make ui-next
|
||||
|
||||
- name: Set build env variables
|
||||
run: |
|
||||
@@ -136,9 +136,9 @@ jobs:
|
||||
- name: Pulling images for test deployment with awx-operator
|
||||
# awx operator molecue test expect to kind load image and buildx exports image to registry and not local
|
||||
run: |
|
||||
docker pull ${AWX_OPERATOR_TEST_IMAGE}
|
||||
docker pull ${AWX_EE_TEST_IMAGE}
|
||||
docker pull ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
|
||||
docker pull -q ${AWX_OPERATOR_TEST_IMAGE}
|
||||
docker pull -q ${AWX_EE_TEST_IMAGE}
|
||||
docker pull -q ${AWX_TEST_IMAGE}:${AWX_TEST_VERSION}
|
||||
|
||||
- name: Run test deployment with awx-operator
|
||||
working-directory: awx-operator
|
||||
|
||||
4
.github/workflows/update_dependabot_prs.yml
vendored
4
.github/workflows/update_dependabot_prs.yml
vendored
@@ -13,7 +13,9 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout branch
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Update PR Body
|
||||
env:
|
||||
|
||||
6
.github/workflows/upload_schema.yml
vendored
6
.github/workflows/upload_schema.yml
vendored
@@ -18,7 +18,9 @@ jobs:
|
||||
packages: write
|
||||
contents: read
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
show-progress: false
|
||||
|
||||
- name: Get python version from Makefile
|
||||
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
|
||||
@@ -34,7 +36,7 @@ jobs:
|
||||
|
||||
- name: Pre-pull image to warm build cache
|
||||
run: |
|
||||
docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
||||
docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
|
||||
|
||||
- name: Build image
|
||||
run: |
|
||||
|
||||
@@ -67,7 +67,7 @@ If you're not using Docker for Mac, or Docker for Windows, you may need, or choo
|
||||
|
||||
#### Frontend Development
|
||||
|
||||
See [the ui development documentation](awx/ui/CONTRIBUTING.md).
|
||||
See [the ansible-ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md).
|
||||
|
||||
#### Fork and clone the AWX repo
|
||||
|
||||
@@ -121,7 +121,7 @@ If it has someone assigned to it then that person is the person responsible for
|
||||
|
||||
**NOTES**
|
||||
|
||||
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
|
||||
> Issue assignment will only be done for maintainers of the project. If you decide to work on an issue, please feel free to add a comment in the issue to let others know that you are working on it; but know that we will accept the first pull request from whomever is able to fix an issue. Once your PR is accepted we can add you as an assignee to an issue upon request.
|
||||
|
||||
|
||||
> If you work in a part of the codebase that is going through active development, your changes may be rejected, or you may be asked to `rebase`. A good idea before starting work is to have a discussion with us in the `#ansible-awx` channel on irc.libera.chat, or on the [mailing list](https://groups.google.com/forum/#!forum/awx-project).
|
||||
@@ -132,7 +132,7 @@ If it has someone assigned to it then that person is the person responsible for
|
||||
|
||||
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
|
||||
|
||||
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
|
||||
If you find an issue with an existing translation, please see the [Reporting Issues](#reporting-issues) section to open an issue and our translation team will work with you on a resolution.
|
||||
|
||||
|
||||
## Submitting Pull Requests
|
||||
@@ -161,7 +161,7 @@ Sometimes it might take us a while to fully review your PR. We try to keep the `
|
||||
When your PR is initially submitted the checks will not be run until a maintainer allows them to be. Once a maintainer has done a quick review of your work the PR will have the linter and unit tests run against them via GitHub Actions, and the status reported in the PR.
|
||||
|
||||
## Reporting Issues
|
||||
|
||||
|
||||
We welcome your feedback, and encourage you to file an issue when you run into a problem. But before opening a new issues, we ask that you please view our [Issues guide](./ISSUES.md).
|
||||
|
||||
## Getting Help
|
||||
|
||||
6
Makefile
6
Makefile
@@ -502,13 +502,7 @@ ui-test-general:
|
||||
$(NPM_BIN) run --prefix awx/ui pretest
|
||||
$(NPM_BIN) run --prefix awx/ui/ test-general --runInBand
|
||||
|
||||
# NOTE: The make target ui-next is imported from awx/ui_next/Makefile
|
||||
HEADLESS ?= no
|
||||
ifeq ($(HEADLESS), yes)
|
||||
dist/$(SDIST_TAR_FILE):
|
||||
else
|
||||
dist/$(SDIST_TAR_FILE): $(UI_BUILD_FLAG_FILE) ui-next
|
||||
endif
|
||||
$(PYTHON) -m build -s
|
||||
ln -sf $(SDIST_TAR_FILE) dist/awx.tar.gz
|
||||
|
||||
|
||||
@@ -103,7 +103,7 @@ class Metadata(metadata.SimpleMetadata):
|
||||
default = field.get_default()
|
||||
if type(default) is UUID:
|
||||
default = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
|
||||
if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
|
||||
if field.field_name == 'TOWER_URL_BASE' and default == 'https://platformhost':
|
||||
default = '{}://{}'.format(self.request.scheme, self.request.get_host())
|
||||
field_info['default'] = default
|
||||
except serializers.SkipField:
|
||||
|
||||
@@ -2,6 +2,12 @@
|
||||
- hosts: all
|
||||
become: yes
|
||||
tasks:
|
||||
- name: Create the receptor group
|
||||
group:
|
||||
{% verbatim %}
|
||||
name: "{{ receptor_group }}"
|
||||
{% endverbatim %}
|
||||
state: present
|
||||
- name: Create the receptor user
|
||||
user:
|
||||
{% verbatim %}
|
||||
|
||||
@@ -1400,7 +1400,9 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
||||
|
||||
def filtered_queryset(self):
|
||||
return ExecutionEnvironment.objects.filter(
|
||||
Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role')) | Q(organization__isnull=True)
|
||||
Q(organization__in=Organization.accessible_pk_qs(self.user, 'read_role'))
|
||||
| Q(organization__isnull=True)
|
||||
| Q(id__in=ExecutionEnvironment.access_ids_qs(self.user, 'change'))
|
||||
).distinct()
|
||||
|
||||
@check_superuser
|
||||
@@ -1419,11 +1421,13 @@ class ExecutionEnvironmentAccess(BaseAccess):
|
||||
else:
|
||||
if self.user not in obj.organization.execution_environment_admin_role:
|
||||
raise PermissionDenied
|
||||
if data and 'organization' in data:
|
||||
new_org = get_object_from_data('organization', Organization, data, obj=obj)
|
||||
if not new_org or self.user not in new_org.execution_environment_admin_role:
|
||||
if not self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role'):
|
||||
return False
|
||||
# Special case that check_related does not catch, org users can not remove the organization from the EE
|
||||
if data and ('organization' in data or 'organization_id' in data):
|
||||
if (not data.get('organization')) and (not data.get('organization_id')):
|
||||
return False
|
||||
return self.check_related('organization', Organization, data, obj=obj, role_field='execution_environment_admin_role')
|
||||
return True
|
||||
|
||||
def can_delete(self, obj):
|
||||
if obj.managed:
|
||||
@@ -2100,15 +2104,18 @@ class WorkflowJobTemplateAccess(NotificationAttachMixin, BaseAccess):
|
||||
|
||||
if not self.check_related('organization', Organization, data, role_field='workflow_admin_role', mandatory=True):
|
||||
if data.get('organization', None) is None:
|
||||
self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
|
||||
if self.save_messages:
|
||||
self.messages['organization'] = [_('An organization is required to create a workflow job template for normal user')]
|
||||
return False
|
||||
|
||||
if not self.check_related('inventory', Inventory, data, role_field='use_role'):
|
||||
self.messages['inventory'] = [_('You do not have use_role to the inventory')]
|
||||
if self.save_messages:
|
||||
self.messages['inventory'] = [_('You do not have use_role to the inventory')]
|
||||
return False
|
||||
|
||||
if not self.check_related('execution_environment', ExecutionEnvironment, data, role_field='read_role'):
|
||||
self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
|
||||
if self.save_messages:
|
||||
self.messages['execution_environment'] = [_('You do not have read_role to the execution environment')]
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@@ -66,10 +66,8 @@ class FixedSlidingWindow:
|
||||
|
||||
|
||||
class RelayWebsocketStatsManager:
|
||||
def __init__(self, event_loop, local_hostname):
|
||||
def __init__(self, local_hostname):
|
||||
self._local_hostname = local_hostname
|
||||
|
||||
self._event_loop = event_loop
|
||||
self._stats = dict()
|
||||
self._redis_key = BROADCAST_WEBSOCKET_REDIS_KEY_NAME
|
||||
|
||||
@@ -94,7 +92,10 @@ class RelayWebsocketStatsManager:
|
||||
self.start()
|
||||
|
||||
def start(self):
|
||||
self.async_task = self._event_loop.create_task(self.run_loop())
|
||||
self.async_task = asyncio.get_running_loop().create_task(
|
||||
self.run_loop(),
|
||||
name='RelayWebsocketStatsManager.run_loop',
|
||||
)
|
||||
return self.async_task
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -929,6 +929,16 @@ register(
|
||||
category_slug='debug',
|
||||
)
|
||||
|
||||
register(
|
||||
'RECEPTOR_KEEP_WORK_ON_ERROR',
|
||||
field_class=fields.BooleanField,
|
||||
label=_('Keep receptor work on error'),
|
||||
default=False,
|
||||
help_text=_('Prevent receptor work from being released on when error is detected'),
|
||||
category=('Debug'),
|
||||
category_slug='debug',
|
||||
)
|
||||
|
||||
|
||||
def logging_validate(serializer, attrs):
|
||||
if not serializer.instance or not hasattr(serializer.instance, 'LOG_AGGREGATOR_HOST') or not hasattr(serializer.instance, 'LOG_AGGREGATOR_TYPE'):
|
||||
|
||||
@@ -43,6 +43,7 @@ STANDARD_INVENTORY_UPDATE_ENV = {
|
||||
}
|
||||
CAN_CANCEL = ('new', 'pending', 'waiting', 'running')
|
||||
ACTIVE_STATES = CAN_CANCEL
|
||||
ERROR_STATES = ('error',)
|
||||
MINIMAL_EVENTS = set(['playbook_on_play_start', 'playbook_on_task_start', 'playbook_on_stats', 'EOF'])
|
||||
CENSOR_VALUE = '************'
|
||||
ENV_BLOCKLIST = frozenset(
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
# All Rights Reserved
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
from crum import impersonate
|
||||
from awx.main.models import User, Organization, Project, Inventory, CredentialType, Credential, Host, JobTemplate
|
||||
from awx.main.signals import disable_computed_fields
|
||||
@@ -13,6 +14,12 @@ class Command(BaseCommand):
|
||||
help = 'Creates a preload tower data if there is none.'
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
# Wrap the operation in an atomic block, so we do not on accident
|
||||
# create the organization but not create the project, etc.
|
||||
with transaction.atomic():
|
||||
self._handle()
|
||||
|
||||
def _handle(self):
|
||||
changed = False
|
||||
|
||||
# Create a default organization as the first superuser found.
|
||||
@@ -43,10 +50,11 @@ class Command(BaseCommand):
|
||||
|
||||
ssh_type = CredentialType.objects.filter(namespace='ssh').first()
|
||||
c, _ = Credential.objects.get_or_create(
|
||||
credential_type=ssh_type, name='Demo Credential', inputs={'username': superuser.username}, created_by=superuser
|
||||
credential_type=ssh_type, name='Demo Credential', inputs={'username': getattr(superuser, 'username', 'null')}, created_by=superuser
|
||||
)
|
||||
|
||||
c.admin_role.members.add(superuser)
|
||||
if superuser:
|
||||
c.admin_role.members.add(superuser)
|
||||
|
||||
public_galaxy_credential, _ = Credential.objects.get_or_create(
|
||||
name='Ansible Galaxy',
|
||||
|
||||
151
awx/main/management/commands/job_performance_rollup.py
Normal file
151
awx/main/management/commands/job_performance_rollup.py
Normal file
@@ -0,0 +1,151 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved
|
||||
|
||||
# Django
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
|
||||
import json
|
||||
import re
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""
|
||||
Emits some simple statistics suitable for external monitoring
|
||||
"""
|
||||
|
||||
help = 'Run queries that provide an overview of the performance of the system over a given period of time'
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument('--since', action='store', dest='days', type=str, default="1", help='Max days to look back to for data')
|
||||
parser.add_argument('--limit', action='store', dest='limit', type=str, default="10", help='Max number of records for database queries (LIMIT)')
|
||||
|
||||
def execute_query(self, query):
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(query)
|
||||
rows = cursor.fetchall()
|
||||
return rows
|
||||
|
||||
def jsonify(self, title, keys, values, query):
|
||||
result = []
|
||||
query = re.sub('\n', ' ', query)
|
||||
query = re.sub('\s{2,}', ' ', query)
|
||||
for value in values:
|
||||
result.append(dict(zip(keys, value)))
|
||||
return {title: result, 'count': len(values), 'query': query}
|
||||
|
||||
def jobs_pending_duration(self, days, limit):
|
||||
"""Return list of jobs sorted by time in pending within configured number of days (within limit)"""
|
||||
query = f"""
|
||||
SELECT name, id AS job_id, unified_job_template_id, created, started - created AS pending_duration
|
||||
FROM main_unifiedjob
|
||||
WHERE finished IS NOT null
|
||||
AND started IS NOT null
|
||||
AND cancel_flag IS NOT true
|
||||
AND created > NOW() - INTERVAL '{days} days'
|
||||
AND started - created > INTERVAL '0 seconds'
|
||||
ORDER BY pending_duration DESC
|
||||
LIMIT {limit};"""
|
||||
values = self.execute_query(query)
|
||||
return self.jsonify(
|
||||
title='completed_or_started_jobs_by_pending_duration',
|
||||
keys=('job_name', 'job_id', 'unified_job_template_id', 'job_created', 'pending_duration'),
|
||||
values=values,
|
||||
query=query,
|
||||
)
|
||||
|
||||
def times_of_day_pending_more_than_X_min(self, days, limit, minutes_pending):
|
||||
"""Return list of jobs sorted by time in pending within configured number of days (within limit)"""
|
||||
query = f"""
|
||||
SELECT
|
||||
date_trunc('hour', created) as day_and_hour,
|
||||
COUNT(created) as count_jobs_pending_greater_than_{minutes_pending}_min
|
||||
FROM main_unifiedjob
|
||||
WHERE started IS NOT NULL
|
||||
AND started - created > INTERVAL '{minutes_pending} minutes'
|
||||
AND created > NOW() - INTERVAL '{days} days'
|
||||
GROUP BY date_trunc('hour', created)
|
||||
ORDER BY count_jobs_pending_greater_than_{minutes_pending}_min DESC
|
||||
LIMIT {limit};"""
|
||||
values = self.execute_query(query)
|
||||
return self.jsonify(
|
||||
title=f'times_of_day_pending_more_than_{minutes_pending}',
|
||||
keys=('day_and_hour', f'count_jobs_pending_more_than_{minutes_pending}_min'),
|
||||
values=values,
|
||||
query=query,
|
||||
)
|
||||
|
||||
def pending_jobs_details(self, days, limit):
|
||||
"""Return list of jobs that are in pending and list details such as reasons they may be blocked, within configured number of days and limit."""
|
||||
query = f"""
|
||||
SELECT DISTINCT ON(A.id) A.name, A.id, A.unified_job_template_id, A.created, NOW() - A.created as pending_duration, F.allow_simultaneous, B.current_job_id as current_ujt_job, I.to_unifiedjob_id as dependency_job_id, A.dependencies_processed
|
||||
FROM main_unifiedjob A
|
||||
LEFT JOIN (
|
||||
SELECT C.id, C.current_job_id FROM main_unifiedjobtemplate as C
|
||||
) B
|
||||
ON A.unified_job_template_id = B.id
|
||||
LEFT JOIN main_job F ON A.id = F.unifiedjob_ptr_id
|
||||
LEFT JOIN (
|
||||
SELECT * FROM main_unifiedjob_dependent_jobs as G
|
||||
RIGHT JOIN main_unifiedjob H ON G.to_unifiedjob_id = H.id
|
||||
) I
|
||||
ON A.id = I.from_unifiedjob_id
|
||||
WHERE A.status = 'pending'
|
||||
AND A.created > NOW() - INTERVAL '{days} days'
|
||||
ORDER BY id DESC
|
||||
LIMIT {limit};"""
|
||||
values = self.execute_query(query)
|
||||
return self.jsonify(
|
||||
title='pending_jobs_details',
|
||||
keys=(
|
||||
'job_name',
|
||||
'job_id',
|
||||
'unified_job_template_id',
|
||||
'job_created',
|
||||
'pending_duration',
|
||||
'allow_simultaneous',
|
||||
'current_ujt_job',
|
||||
'dependency_job_id',
|
||||
'dependencies_processed',
|
||||
),
|
||||
values=values,
|
||||
query=query,
|
||||
)
|
||||
|
||||
def jobs_by_FUNC_event_processing_time(self, func, days, limit):
|
||||
"""Return list of jobs sorted by MAX job event procesing time within configured number of days (within limit)"""
|
||||
if func not in ('MAX', 'MIN', 'AVG', 'SUM'):
|
||||
raise RuntimeError('Only able to asses job events grouped by job with MAX, MIN, AVG, SUM functions')
|
||||
|
||||
query = f"""SELECT job_id, {func}(A.modified - A.created) as job_event_processing_delay_{func}, B.name, B.created, B.finished, B.controller_node, B.execution_node
|
||||
FROM main_jobevent A
|
||||
RIGHT JOIN (
|
||||
SELECT id, created, name, finished, controller_node, execution_node FROM
|
||||
main_unifiedjob
|
||||
WHERE created > NOW() - INTERVAL '{days} days'
|
||||
AND created IS NOT null
|
||||
AND finished IS NOT null
|
||||
AND id IS NOT null
|
||||
AND name IS NOT null
|
||||
) B
|
||||
ON A.job_id=B.id
|
||||
WHERE A.job_id is not null
|
||||
GROUP BY job_id, B.name, B.created, B.finished, B.controller_node, B.execution_node
|
||||
ORDER BY job_event_processing_delay_{func} DESC
|
||||
LIMIT {limit};"""
|
||||
values = self.execute_query(query)
|
||||
return self.jsonify(
|
||||
title=f'jobs_by_{func}_event_processing',
|
||||
keys=('job_id', f'{func}_job_event_processing_delay', 'job_name', 'job_created_time', 'job_finished_time', 'controller_node', 'execution_node'),
|
||||
values=values,
|
||||
query=query,
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
items = []
|
||||
for func in ('MAX', 'MIN', 'AVG'):
|
||||
items.append(self.jobs_by_FUNC_event_processing_time(func, options['days'], options['limit']))
|
||||
items.append(self.jobs_pending_duration(options['days'], options['limit']))
|
||||
items.append(self.pending_jobs_details(options['days'], options['limit']))
|
||||
items.append(self.times_of_day_pending_more_than_X_min(options['days'], options['limit'], minutes_pending=10))
|
||||
self.stdout.write(json.dumps(items, indent=4, sort_keys=True, default=str))
|
||||
@@ -277,7 +277,6 @@ def setup_managed_role_definitions(apps, schema_editor):
|
||||
to_create = {
|
||||
'object_admin': '{cls.__name__} Admin',
|
||||
'org_admin': 'Organization Admin',
|
||||
'org_audit': 'Organization Audit',
|
||||
'org_children': 'Organization {cls.__name__} Admin',
|
||||
'special': '{cls.__name__} {action}',
|
||||
}
|
||||
@@ -334,12 +333,19 @@ def setup_managed_role_definitions(apps, schema_editor):
|
||||
for perm in special_perms:
|
||||
action = perm.codename.split('_')[0]
|
||||
view_perm = Permission.objects.get(content_type=ct, codename__startswith='view_')
|
||||
perm_list = [perm, view_perm]
|
||||
# Handle special-case where adhoc role also listed use permission
|
||||
if action == 'adhoc':
|
||||
for other_perm in object_perms:
|
||||
if other_perm.codename == 'use_inventory':
|
||||
perm_list.append(other_perm)
|
||||
break
|
||||
managed_role_definitions.append(
|
||||
get_or_create_managed(
|
||||
to_create['special'].format(cls=cls, action=action.title()),
|
||||
f'Has {action} permissions to a single {cls._meta.verbose_name}',
|
||||
ct,
|
||||
[perm, view_perm],
|
||||
perm_list,
|
||||
RoleDefinition,
|
||||
)
|
||||
)
|
||||
@@ -355,18 +361,40 @@ def setup_managed_role_definitions(apps, schema_editor):
|
||||
)
|
||||
)
|
||||
|
||||
if 'org_audit' in to_create:
|
||||
audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
|
||||
audit_permissions.append(Permission.objects.get(codename='audit_organization'))
|
||||
managed_role_definitions.append(
|
||||
get_or_create_managed(
|
||||
to_create['org_audit'].format(cls=Organization),
|
||||
'Has permission to view all objects inside of a single organization',
|
||||
org_ct,
|
||||
audit_permissions,
|
||||
RoleDefinition,
|
||||
)
|
||||
# Special "organization action" roles
|
||||
audit_permissions = [perm for perm in org_perms if perm.codename.startswith('view_')]
|
||||
audit_permissions.append(Permission.objects.get(codename='audit_organization'))
|
||||
managed_role_definitions.append(
|
||||
get_or_create_managed(
|
||||
'Organization Audit',
|
||||
'Has permission to view all objects inside of a single organization',
|
||||
org_ct,
|
||||
audit_permissions,
|
||||
RoleDefinition,
|
||||
)
|
||||
)
|
||||
|
||||
org_execute_permissions = {'view_jobtemplate', 'execute_jobtemplate', 'view_workflowjobtemplate', 'execute_workflowjobtemplate', 'view_organization'}
|
||||
managed_role_definitions.append(
|
||||
get_or_create_managed(
|
||||
'Organization Execute',
|
||||
'Has permission to execute all runnable objects in the organization',
|
||||
org_ct,
|
||||
[perm for perm in org_perms if perm.codename in org_execute_permissions],
|
||||
RoleDefinition,
|
||||
)
|
||||
)
|
||||
|
||||
org_approval_permissions = {'view_organization', 'view_workflowjobtemplate', 'approve_workflowjobtemplate'}
|
||||
managed_role_definitions.append(
|
||||
get_or_create_managed(
|
||||
'Organization Approval',
|
||||
'Has permission to approve any workflow steps within a single organization',
|
||||
org_ct,
|
||||
[perm for perm in org_perms if perm.codename in org_approval_permissions],
|
||||
RoleDefinition,
|
||||
)
|
||||
)
|
||||
|
||||
unexpected_role_definitions = RoleDefinition.objects.filter(managed=True).exclude(pk__in=[rd.pk for rd in managed_role_definitions])
|
||||
for role_definition in unexpected_role_definitions:
|
||||
|
||||
@@ -321,13 +321,14 @@ class Credential(PasswordFieldsModel, CommonModelNameNotUnique, ResourceMixin):
|
||||
raise ValueError('{} is not a dynamic input field'.format(field_name))
|
||||
|
||||
def validate_role_assignment(self, actor, role_definition):
|
||||
if isinstance(actor, User):
|
||||
if actor.is_superuser or Organization.access_qs(actor, 'change').filter(id=self.organization.id).exists():
|
||||
return
|
||||
if isinstance(actor, Team):
|
||||
if actor.organization == self.organization:
|
||||
return
|
||||
raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")})
|
||||
if self.organization:
|
||||
if isinstance(actor, User):
|
||||
if actor.is_superuser or Organization.access_qs(actor, 'member').filter(id=self.organization.id).exists():
|
||||
return
|
||||
if isinstance(actor, Team):
|
||||
if actor.organization == self.organization:
|
||||
return
|
||||
raise DRFValidationError({'detail': _(f"You cannot grant credential access to a {actor._meta.object_name} not in the credentials' organization")})
|
||||
|
||||
|
||||
class CredentialType(CommonModelNameNotUnique):
|
||||
|
||||
@@ -66,7 +66,3 @@ class ExecutionEnvironment(CommonModel):
|
||||
|
||||
if actor._meta.model_name == 'user' and (not actor.has_obj_perm(self.organization, 'view')):
|
||||
raise ValidationError({'user': _('User must have view permission to Execution Environment organization')})
|
||||
if actor._meta.model_name == 'team':
|
||||
organization_cls = self._meta.get_field('organization').related_model
|
||||
if self.orgaanization not in organization_cls.access_qs(actor, 'view'):
|
||||
raise ValidationError({'team': _('Team must have view permission to Execution Environment organization')})
|
||||
|
||||
@@ -396,11 +396,11 @@ class JobNotificationMixin(object):
|
||||
'verbosity': 0,
|
||||
},
|
||||
'job_friendly_name': 'Job',
|
||||
'url': 'https://towerhost/#/jobs/playbook/1010',
|
||||
'url': 'https://platformhost/#/jobs/playbook/1010',
|
||||
'approval_status': 'approved',
|
||||
'approval_node_name': 'Approve Me',
|
||||
'workflow_url': 'https://towerhost/#/jobs/workflow/1010',
|
||||
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
|
||||
'workflow_url': 'https://platformhost/#/jobs/workflow/1010',
|
||||
'job_metadata': """{'url': 'https://platformhost/$/jobs/playbook/13',
|
||||
'traceback': '',
|
||||
'status': 'running',
|
||||
'started': '2019-08-07T21:46:38.362630+00:00',
|
||||
|
||||
@@ -689,9 +689,15 @@ def sync_parents_to_new_rbac(instance, action, model, pk_set, reverse, **kwargs)
|
||||
|
||||
for role_id in pk_set:
|
||||
if reverse:
|
||||
child_role = Role.objects.get(id=role_id)
|
||||
try:
|
||||
child_role = Role.objects.get(id=role_id)
|
||||
except Role.DoesNotExist:
|
||||
continue
|
||||
else:
|
||||
parent_role = Role.objects.get(id=role_id)
|
||||
try:
|
||||
parent_role = Role.objects.get(id=role_id)
|
||||
except Role.DoesNotExist:
|
||||
continue
|
||||
|
||||
# To a fault, we want to avoid running this if triggered from implicit_parents management
|
||||
# we only want to do anything if we know for sure this is a non-implicit team role
|
||||
|
||||
@@ -405,10 +405,11 @@ class AWXReceptorJob:
|
||||
finally:
|
||||
# Make sure to always release the work unit if we established it
|
||||
if self.unit_id is not None and settings.RECEPTOR_RELEASE_WORK:
|
||||
try:
|
||||
receptor_ctl.simple_command(f"work release {self.unit_id}")
|
||||
except Exception:
|
||||
logger.exception(f"Error releasing work unit {self.unit_id}.")
|
||||
if settings.RECPETOR_KEEP_WORK_ON_ERROR and getattr(res, 'status', 'error') == 'error':
|
||||
try:
|
||||
receptor_ctl.simple_command(f"work release {self.unit_id}")
|
||||
except Exception:
|
||||
logger.exception(f"Error releasing work unit {self.unit_id}.")
|
||||
|
||||
def _run_internal(self, receptor_ctl):
|
||||
# Create a socketpair. Where the left side will be used for writing our payload
|
||||
|
||||
@@ -54,7 +54,7 @@ from awx.main.models import (
|
||||
Job,
|
||||
convert_jsonfields,
|
||||
)
|
||||
from awx.main.constants import ACTIVE_STATES
|
||||
from awx.main.constants import ACTIVE_STATES, ERROR_STATES
|
||||
from awx.main.dispatch.publish import task
|
||||
from awx.main.dispatch import get_task_queuename, reaper
|
||||
from awx.main.utils.common import ignore_inventory_computed_fields, ignore_inventory_group_removal
|
||||
@@ -685,6 +685,8 @@ def awx_receptor_workunit_reaper():
|
||||
|
||||
unit_ids = [id for id in receptor_work_list]
|
||||
jobs_with_unreleased_receptor_units = UnifiedJob.objects.filter(work_unit_id__in=unit_ids).exclude(status__in=ACTIVE_STATES)
|
||||
if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
|
||||
jobs_with_unreleased_receptor_units = jobs_with_unreleased_receptor_units.exclude(status__in=ERROR_STATES)
|
||||
for job in jobs_with_unreleased_receptor_units:
|
||||
logger.debug(f"{job.log_format} is not active, reaping receptor work unit {job.work_unit_id}")
|
||||
receptor_ctl.simple_command(f"work cancel {job.work_unit_id}")
|
||||
@@ -704,7 +706,10 @@ def awx_k8s_reaper():
|
||||
logger.debug("Checking for orphaned k8s pods for {}.".format(group))
|
||||
pods = PodManager.list_active_jobs(group)
|
||||
time_cutoff = now() - timedelta(seconds=settings.K8S_POD_REAPER_GRACE_PERIOD)
|
||||
for job in UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES):
|
||||
reap_job_candidates = UnifiedJob.objects.filter(pk__in=pods.keys(), finished__lte=time_cutoff).exclude(status__in=ACTIVE_STATES)
|
||||
if settings.RECEPTOR_KEEP_WORK_ON_ERROR:
|
||||
reap_job_candidates = reap_job_candidates.exclude(status__in=ERROR_STATES)
|
||||
for job in reap_job_candidates:
|
||||
logger.debug('{} is no longer active, reaping orphaned k8s pod'.format(job.log_format))
|
||||
try:
|
||||
pm = PodManager(job)
|
||||
@@ -980,5 +985,15 @@ def periodic_resource_sync():
|
||||
if acquired is False:
|
||||
logger.debug("Not running periodic_resource_sync, another task holds lock")
|
||||
return
|
||||
logger.debug("Running periodic resource sync")
|
||||
|
||||
SyncExecutor().run()
|
||||
executor = SyncExecutor()
|
||||
executor.run()
|
||||
for key, item_list in executor.results.items():
|
||||
if not item_list or key == 'noop':
|
||||
continue
|
||||
# Log creations and conflicts
|
||||
if len(item_list) > 10 and settings.LOG_AGGREGATOR_LEVEL != 'DEBUG':
|
||||
logger.info(f'Periodic resource sync {key}, first 10 items:\n{item_list[:10]}')
|
||||
else:
|
||||
logger.info(f'Periodic resource sync {key}:\n{item_list}')
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -68,13 +68,17 @@ def test_assign_managed_role(admin_user, alice, rando, inventory, post, setup_ma
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_assign_custom_delete_role(admin_user, rando, inventory, delete, patch):
|
||||
# TODO: just a delete_inventory, without change_inventory
|
||||
rd, _ = RoleDefinition.objects.get_or_create(
|
||||
name='inventory-delete', permissions=['delete_inventory', 'view_inventory'], content_type=ContentType.objects.get_for_model(Inventory)
|
||||
name='inventory-delete',
|
||||
permissions=['delete_inventory', 'view_inventory', 'change_inventory'],
|
||||
content_type=ContentType.objects.get_for_model(Inventory),
|
||||
)
|
||||
rd.give_permission(rando, inventory)
|
||||
inv_id = inventory.pk
|
||||
inv_url = reverse('api:inventory_detail', kwargs={'pk': inv_id})
|
||||
patch(url=inv_url, data={"description": "new"}, user=rando, expect=403)
|
||||
# TODO: eventually this will be valid test, for now ignore
|
||||
# patch(url=inv_url, data={"description": "new"}, user=rando, expect=403)
|
||||
delete(url=inv_url, user=rando, expect=202)
|
||||
assert Inventory.objects.get(id=inv_id).pending_deletion
|
||||
|
||||
@@ -128,7 +132,7 @@ def test_assign_credential_to_user_of_another_org(setup_managed_roles, credentia
|
||||
rd = RoleDefinition.objects.get(name="Credential Admin")
|
||||
credential.organization = organization
|
||||
credential.save(update_fields=['organization'])
|
||||
assert credential.organization not in Organization.access_qs(rando, 'change')
|
||||
assert credential.organization not in Organization.access_qs(rando, 'member')
|
||||
url = django_reverse('roleuserassignment-list')
|
||||
resp = post(url=url, data={"user": rando.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=400)
|
||||
assert "You cannot grant credential access to a User not in the credentials' organization" in str(resp.data)
|
||||
@@ -139,7 +143,7 @@ def test_assign_credential_to_user_of_another_org(setup_managed_roles, credentia
|
||||
post(url=url, data={"user": rando.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201)
|
||||
|
||||
# can assign credential to org_admin
|
||||
assert credential.organization in Organization.access_qs(org_admin, 'change')
|
||||
assert credential.organization in Organization.access_qs(org_admin, 'member')
|
||||
post(url=url, data={"user": org_admin.id, "role_definition": rd.id, "object_id": credential.id}, user=admin_user, expect=201)
|
||||
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from unittest import mock
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -6,11 +7,13 @@ from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
from crum import impersonate
|
||||
|
||||
from awx.main.models.rbac import get_role_from_object_role, give_creator_permissions
|
||||
from awx.main.fields import ImplicitRoleField
|
||||
from awx.main.models.rbac import get_role_from_object_role, give_creator_permissions, get_role_codenames, get_role_definition
|
||||
from awx.main.models import User, Organization, WorkflowJobTemplate, WorkflowJobTemplateNode, Team
|
||||
from awx.api.versioning import reverse
|
||||
|
||||
from ansible_base.rbac.models import RoleUserAssignment, RoleDefinition
|
||||
from ansible_base.rbac import permission_registry
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@@ -24,6 +27,7 @@ from ansible_base.rbac.models import RoleUserAssignment, RoleDefinition
|
||||
'auditor_role',
|
||||
'read_role',
|
||||
'execute_role',
|
||||
'approval_role',
|
||||
'notification_admin_role',
|
||||
],
|
||||
)
|
||||
@@ -39,6 +43,37 @@ def test_round_trip_roles(organization, rando, role_name, setup_managed_roles):
|
||||
assert old_role.id == getattr(organization, role_name).id
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize('model', sorted(permission_registry.all_registered_models, key=lambda cls: cls._meta.model_name))
|
||||
def test_role_migration_matches(request, model, setup_managed_roles):
|
||||
fixture_name = model._meta.verbose_name.replace(' ', '_')
|
||||
obj = request.getfixturevalue(fixture_name)
|
||||
role_ct = 0
|
||||
for field in obj._meta.get_fields():
|
||||
if isinstance(field, ImplicitRoleField):
|
||||
if field.name == 'read_role':
|
||||
continue # intentionally left as "Compat" roles
|
||||
role_ct += 1
|
||||
old_role = getattr(obj, field.name)
|
||||
old_codenames = set(get_role_codenames(old_role))
|
||||
rd = get_role_definition(old_role)
|
||||
new_codenames = set(rd.permissions.values_list('codename', flat=True))
|
||||
# all the old roles should map to a non-Compat role definition
|
||||
if 'Compat' not in rd.name:
|
||||
model_rds = RoleDefinition.objects.filter(content_type=ContentType.objects.get_for_model(obj))
|
||||
rd_data = {}
|
||||
for rd in model_rds:
|
||||
rd_data[rd.name] = list(rd.permissions.values_list('codename', flat=True))
|
||||
assert (
|
||||
'Compat' not in rd.name
|
||||
), f'Permissions for old vs new roles did not match.\nold {field.name}: {old_codenames}\nnew:\n{json.dumps(rd_data, indent=2)}'
|
||||
assert new_codenames == set(old_codenames)
|
||||
|
||||
# In the old system these models did not have object-level roles, all others expect some model roles
|
||||
if model._meta.model_name not in ('notificationtemplate', 'executionenvironment'):
|
||||
assert role_ct > 0
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_role_naming(setup_managed_roles):
|
||||
qs = RoleDefinition.objects.filter(content_type=ContentType.objects.get(model='jobtemplate'), name__endswith='dmin')
|
||||
|
||||
@@ -3,7 +3,7 @@ import pytest
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
from awx.main.access import ExecutionEnvironmentAccess
|
||||
from awx.main.models import ExecutionEnvironment, Organization
|
||||
from awx.main.models import ExecutionEnvironment, Organization, Team
|
||||
from awx.main.models.rbac import get_role_codenames
|
||||
|
||||
from awx.api.versioning import reverse
|
||||
@@ -63,6 +63,11 @@ def check_user_capabilities(get, setup_managed_roles):
|
||||
# ___ begin tests ___
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_any_user_can_view_global_ee(control_plane_execution_environment, rando):
|
||||
assert ExecutionEnvironmentAccess(rando).can_read(control_plane_execution_environment)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_managed_ee_not_assignable(control_plane_execution_environment, ee_rd, rando, admin_user, post):
|
||||
url = django_reverse('roleuserassignment-list')
|
||||
@@ -77,6 +82,21 @@ def test_org_member_required_for_assignment(org_ee, ee_rd, rando, admin_user, po
|
||||
assert 'User must have view permission to Execution Environment organization' in str(r.data)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_team_can_have_permission(org_ee, ee_rd, rando, admin_user, post):
|
||||
org2 = Organization.objects.create(name='a different team')
|
||||
team = Team.objects.create(name='a team', organization=org2)
|
||||
team.member_role.members.add(rando)
|
||||
assert org_ee not in ExecutionEnvironmentAccess(rando).get_queryset() # user can not view the EE
|
||||
|
||||
url = django_reverse('roleteamassignment-list')
|
||||
|
||||
# can give object roles to the team now
|
||||
post(url, {'role_definition': ee_rd.pk, 'team': team.id, 'object_id': org_ee.pk}, user=admin_user, expect=201)
|
||||
assert rando.has_obj_perm(org_ee, 'change')
|
||||
assert org_ee in ExecutionEnvironmentAccess(rando).get_queryset() # user can view the EE now
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_give_object_permission_to_ee(org_ee, ee_rd, org_member, check_user_capabilities):
|
||||
access = ExecutionEnvironmentAccess(org_member)
|
||||
@@ -85,11 +105,29 @@ def test_give_object_permission_to_ee(org_ee, ee_rd, org_member, check_user_capa
|
||||
check_user_capabilities(org_member, org_ee, {'edit': False, 'delete': False, 'copy': False})
|
||||
|
||||
ee_rd.give_permission(org_member, org_ee)
|
||||
assert access.can_change(org_ee, {'name': 'new'})
|
||||
assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization.id})
|
||||
|
||||
check_user_capabilities(org_member, org_ee, {'edit': True, 'delete': True, 'copy': False})
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_need_related_organization_access(org_ee, ee_rd, org_member):
|
||||
org2 = Organization.objects.create(name='another organization')
|
||||
ee_rd.give_permission(org_member, org_ee)
|
||||
org2.member_role.members.add(org_member)
|
||||
access = ExecutionEnvironmentAccess(org_member)
|
||||
assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization})
|
||||
assert access.can_change(org_ee, {'name': 'new', 'organization': org_ee.organization.id})
|
||||
assert not access.can_change(org_ee, {'name': 'new', 'organization': org2.id})
|
||||
assert not access.can_change(org_ee, {'name': 'new', 'organization': org2})
|
||||
|
||||
# User can make the change if they have relevant permission to the new organization
|
||||
org_ee.organization.execution_environment_admin_role.members.add(org_member)
|
||||
org2.execution_environment_admin_role.members.add(org_member)
|
||||
assert access.can_change(org_ee, {'name': 'new', 'organization': org2.id})
|
||||
assert access.can_change(org_ee, {'name': 'new', 'organization': org2})
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize('style', ['new', 'old'])
|
||||
def test_give_org_permission_to_ee(org_ee, organization, org_member, check_user_capabilities, style, org_ee_rd):
|
||||
@@ -103,5 +141,8 @@ def test_give_org_permission_to_ee(org_ee, organization, org_member, check_user_
|
||||
else:
|
||||
organization.execution_environment_admin_role.members.add(org_member)
|
||||
|
||||
assert access.can_change(org_ee, {'name': 'new'})
|
||||
assert access.can_change(org_ee, {'name': 'new', 'organization': organization.id})
|
||||
check_user_capabilities(org_member, org_ee, {'edit': True, 'delete': True, 'copy': True})
|
||||
|
||||
# Extra check, user can not remove the EE from the organization
|
||||
assert not access.can_change(org_ee, {'name': 'new', 'organization': None})
|
||||
|
||||
@@ -48,3 +48,17 @@ def test_org_resource_role(ext_auth, organization, rando, org_admin):
|
||||
assert access.can_attach(organization, rando, 'member_role.members') == ext_auth
|
||||
organization.member_role.members.add(rando)
|
||||
assert access.can_unattach(organization, rando, 'member_role.members') == ext_auth
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_delete_org_while_workflow_active(workflow_job_template):
|
||||
'''
|
||||
Delete org while workflow job is active (i.e. changing status)
|
||||
'''
|
||||
assert workflow_job_template.organization # sanity check
|
||||
wj = workflow_job_template.create_unified_job() # status should be new
|
||||
workflow_job_template.organization.delete()
|
||||
wj.refresh_from_db()
|
||||
assert wj.status != 'pending' # sanity check
|
||||
wj.status = 'pending' # status needs to change in order to trigger workflow_job_template.save()
|
||||
wj.save(update_fields=['status'])
|
||||
|
||||
@@ -17,13 +17,13 @@ def advisory_lock(*args, lock_session_timeout_milliseconds=0, **kwargs):
|
||||
with connection.cursor() as cur:
|
||||
idle_in_transaction_session_timeout = cur.execute('SHOW idle_in_transaction_session_timeout').fetchone()[0]
|
||||
idle_session_timeout = cur.execute('SHOW idle_session_timeout').fetchone()[0]
|
||||
cur.execute(f"SET idle_in_transaction_session_timeout = {lock_session_timeout_milliseconds}")
|
||||
cur.execute(f"SET idle_session_timeout = {lock_session_timeout_milliseconds}")
|
||||
cur.execute(f"SET idle_in_transaction_session_timeout = '{lock_session_timeout_milliseconds}'")
|
||||
cur.execute(f"SET idle_session_timeout = '{lock_session_timeout_milliseconds}'")
|
||||
with django_pglocks_advisory_lock(*args, **kwargs) as internal_lock:
|
||||
yield internal_lock
|
||||
if lock_session_timeout_milliseconds > 0:
|
||||
with connection.cursor() as cur:
|
||||
cur.execute(f"SET idle_in_transaction_session_timeout = {idle_in_transaction_session_timeout}")
|
||||
cur.execute(f"SET idle_session_timeout = {idle_session_timeout}")
|
||||
cur.execute(f"SET idle_in_transaction_session_timeout = '{idle_in_transaction_session_timeout}'")
|
||||
cur.execute(f"SET idle_session_timeout = '{idle_session_timeout}'")
|
||||
else:
|
||||
yield True
|
||||
|
||||
@@ -47,7 +47,6 @@ class WebsocketRelayConnection:
|
||||
verify_ssl: bool = settings.BROADCAST_WEBSOCKET_VERIFY_CERT,
|
||||
):
|
||||
self.name = name
|
||||
self.event_loop = asyncio.get_event_loop()
|
||||
self.stats = stats
|
||||
self.remote_host = remote_host
|
||||
self.remote_port = remote_port
|
||||
@@ -110,7 +109,10 @@ class WebsocketRelayConnection:
|
||||
self.stats.record_connection_lost()
|
||||
|
||||
def start(self):
|
||||
self.async_task = self.event_loop.create_task(self.connect())
|
||||
self.async_task = asyncio.get_running_loop().create_task(
|
||||
self.connect(),
|
||||
name=f"WebsocketRelayConnection.connect.{self.name}",
|
||||
)
|
||||
return self.async_task
|
||||
|
||||
def cancel(self):
|
||||
@@ -121,7 +123,10 @@ class WebsocketRelayConnection:
|
||||
# metrics messages
|
||||
# the "metrics" group is not subscribed to in the typical fashion, so we
|
||||
# just explicitly create it
|
||||
producer = self.event_loop.create_task(self.run_producer("metrics", websocket, "metrics"))
|
||||
producer = asyncio.get_running_loop().create_task(
|
||||
self.run_producer("metrics", websocket, "metrics"),
|
||||
name="WebsocketRelayConnection.run_producer.metrics",
|
||||
)
|
||||
self.producers["metrics"] = {"task": producer, "subscriptions": {"metrics"}}
|
||||
async for msg in websocket:
|
||||
self.stats.record_message_received()
|
||||
@@ -143,7 +148,10 @@ class WebsocketRelayConnection:
|
||||
name = f"{self.remote_host}-{group}"
|
||||
origin_channel = payload['origin_channel']
|
||||
if not self.producers.get(name):
|
||||
producer = self.event_loop.create_task(self.run_producer(name, websocket, group))
|
||||
producer = asyncio.get_running_loop().create_task(
|
||||
self.run_producer(name, websocket, group),
|
||||
name=f"WebsocketRelayConnection.run_producer.{name}",
|
||||
)
|
||||
self.producers[name] = {"task": producer, "subscriptions": {origin_channel}}
|
||||
logger.debug(f"Producer {name} started.")
|
||||
else:
|
||||
@@ -297,9 +305,7 @@ class WebSocketRelayManager(object):
|
||||
pass
|
||||
|
||||
async def run(self):
|
||||
event_loop = asyncio.get_running_loop()
|
||||
|
||||
self.stats_mgr = RelayWebsocketStatsManager(event_loop, self.local_hostname)
|
||||
self.stats_mgr = RelayWebsocketStatsManager(self.local_hostname)
|
||||
self.stats_mgr.start()
|
||||
|
||||
database_conf = deepcopy(settings.DATABASES['default'])
|
||||
@@ -323,7 +329,10 @@ class WebSocketRelayManager(object):
|
||||
)
|
||||
|
||||
await async_conn.set_autocommit(True)
|
||||
on_ws_heartbeat_task = event_loop.create_task(self.on_ws_heartbeat(async_conn))
|
||||
on_ws_heartbeat_task = asyncio.get_running_loop().create_task(
|
||||
self.on_ws_heartbeat(async_conn),
|
||||
name="WebSocketRelayManager.on_ws_heartbeat",
|
||||
)
|
||||
|
||||
# Establishes a websocket connection to /websocket/relay on all API servers
|
||||
while True:
|
||||
|
||||
@@ -828,7 +828,7 @@ MANAGE_ORGANIZATION_AUTH = True
|
||||
DISABLE_LOCAL_AUTH = False
|
||||
|
||||
# Note: This setting may be overridden by database settings.
|
||||
TOWER_URL_BASE = "https://towerhost"
|
||||
TOWER_URL_BASE = "https://platformhost"
|
||||
|
||||
INSIGHTS_URL_BASE = "https://example.org"
|
||||
INSIGHTS_AGENT_MIME = 'application/example'
|
||||
@@ -1009,6 +1009,7 @@ AWX_RUNNER_KEEPALIVE_SECONDS = 0
|
||||
|
||||
# Delete completed work units in receptor
|
||||
RECEPTOR_RELEASE_WORK = True
|
||||
RECPETOR_KEEP_WORK_ON_ERROR = False
|
||||
|
||||
# K8S only. Use receptor_log_level on AWX spec to set this properly
|
||||
RECEPTOR_LOG_LEVEL = 'info'
|
||||
|
||||
@@ -64,7 +64,7 @@
|
||||
<div class="col-sm-6">
|
||||
</div>
|
||||
<div class="col-sm-6 footer-copyright">
|
||||
Copyright © 2021 <a href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc. All Rights Reserved.
|
||||
Copyright © 2024 <a href="http://www.redhat.com" target="_blank">Red Hat</a>, Inc. All Rights Reserved.
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -59,7 +59,7 @@ function ActivityStream() {
|
||||
{
|
||||
page: 1,
|
||||
page_size: 20,
|
||||
order_by: '-timestamp',
|
||||
order_by: '-id',
|
||||
},
|
||||
['id', 'page', 'page_size']
|
||||
);
|
||||
|
||||
@@ -89,7 +89,7 @@
|
||||
"LC_ALL": "en_US.UTF-8",
|
||||
"MFLAGS": "-w",
|
||||
"OLDPWD": "/awx_devel",
|
||||
"AWX_HOST": "https://towerhost",
|
||||
"AWX_HOST": "https://platformhost",
|
||||
"HOSTNAME": "awx",
|
||||
"LANGUAGE": "en_US:en",
|
||||
"SDB_HOST": "0.0.0.0",
|
||||
|
||||
@@ -89,7 +89,7 @@
|
||||
"LC_ALL": "en_US.UTF-8",
|
||||
"MFLAGS": "-w",
|
||||
"OLDPWD": "/awx_devel",
|
||||
"AWX_HOST": "https://towerhost",
|
||||
"AWX_HOST": "https://platformhost",
|
||||
"HOSTNAME": "awx",
|
||||
"LANGUAGE": "en_US:en",
|
||||
"SDB_HOST": "0.0.0.0",
|
||||
|
||||
@@ -164,7 +164,7 @@
|
||||
"ANSIBLE_RETRY_FILES_ENABLED": "False",
|
||||
"MAX_EVENT_RES": "700000",
|
||||
"ANSIBLE_CALLBACK_PLUGINS": "/awx_devel/awx/plugins/callback",
|
||||
"AWX_HOST": "https://towerhost",
|
||||
"AWX_HOST": "https://platformhost",
|
||||
"ANSIBLE_SSH_CONTROL_PATH_DIR": "/tmp/awx_2_a4b1afiw/cp",
|
||||
"ANSIBLE_STDOUT_CALLBACK": "awx_display"
|
||||
},
|
||||
|
||||
@@ -16,7 +16,7 @@ describe('<AzureAD />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/azuread-oauth2/',
|
||||
'https://platformhost/sso/complete/azuread-oauth2/',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_KEY: 'mock key',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: {},
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<AzureADDetail />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/azuread-oauth2/',
|
||||
'https://platformhost/sso/complete/azuread-oauth2/',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_KEY: 'mock key',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: {},
|
||||
@@ -62,7 +62,7 @@ describe('<AzureADDetail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'Azure AD OAuth2 Callback URL',
|
||||
'https://towerhost/sso/complete/azuread-oauth2/'
|
||||
'https://platformhost/sso/complete/azuread-oauth2/'
|
||||
);
|
||||
assertDetail(wrapper, 'Azure AD OAuth2 Key', 'mock key');
|
||||
assertDetail(wrapper, 'Azure AD OAuth2 Secret', 'Encrypted');
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<AzureADEdit />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/azuread-oauth2/',
|
||||
'https://platformhost/sso/complete/azuread-oauth2/',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_KEY: 'mock key',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_AZUREAD_OAUTH2_ORGANIZATION_MAP: {},
|
||||
|
||||
@@ -19,7 +19,7 @@ describe('<GitHub />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValueOnce({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github/',
|
||||
'https://platformhost/sso/complete/github/',
|
||||
SOCIAL_AUTH_GITHUB_KEY: 'mock github key',
|
||||
SOCIAL_AUTH_GITHUB_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP: null,
|
||||
@@ -29,7 +29,7 @@ describe('<GitHub />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValueOnce({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-org/',
|
||||
'https://platformhost/sso/complete/github-org/',
|
||||
SOCIAL_AUTH_GITHUB_ORG_KEY: '',
|
||||
SOCIAL_AUTH_GITHUB_ORG_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GITHUB_ORG_NAME: '',
|
||||
@@ -40,7 +40,7 @@ describe('<GitHub />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValueOnce({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-team/',
|
||||
'https://platformhost/sso/complete/github-team/',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_KEY: 'OAuth2 key (Client ID)',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_ID: 'team_id',
|
||||
@@ -51,7 +51,7 @@ describe('<GitHub />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValueOnce({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise/',
|
||||
'https://platformhost/sso/complete/github-enterprise/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_URL: 'https://localhost/url',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL: 'https://localhost/apiurl',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY: 'ent_key',
|
||||
@@ -63,7 +63,7 @@ describe('<GitHub />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValueOnce({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise-org/',
|
||||
'https://platformhost/sso/complete/github-enterprise-org/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL: 'https://localhost/url',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL: 'https://localhost/apiurl',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY: 'ent_org_key',
|
||||
@@ -76,7 +76,7 @@ describe('<GitHub />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValueOnce({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise-team/',
|
||||
'https://platformhost/sso/complete/github-enterprise-team/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL: 'https://localhost/url',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL: 'https://localhost/apiurl',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY: 'ent_team_key',
|
||||
|
||||
@@ -22,7 +22,8 @@ jest.mock('../../../../api');
|
||||
|
||||
const mockDefault = {
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_CALLBACK_URL: 'https://towerhost/sso/complete/github/',
|
||||
SOCIAL_AUTH_GITHUB_CALLBACK_URL:
|
||||
'https://platformhost/sso/complete/github/',
|
||||
SOCIAL_AUTH_GITHUB_KEY: 'mock github key',
|
||||
SOCIAL_AUTH_GITHUB_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GITHUB_ORGANIZATION_MAP: null,
|
||||
@@ -32,7 +33,7 @@ const mockDefault = {
|
||||
const mockOrg = {
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-org/',
|
||||
'https://platformhost/sso/complete/github-org/',
|
||||
SOCIAL_AUTH_GITHUB_ORG_KEY: '',
|
||||
SOCIAL_AUTH_GITHUB_ORG_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GITHUB_ORG_NAME: '',
|
||||
@@ -43,7 +44,7 @@ const mockOrg = {
|
||||
const mockTeam = {
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-team/',
|
||||
'https://platformhost/sso/complete/github-team/',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_KEY: 'OAuth2 key (Client ID)',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_ID: 'team_id',
|
||||
@@ -54,7 +55,7 @@ const mockTeam = {
|
||||
const mockEnterprise = {
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise/',
|
||||
'https://platformhost/sso/complete/github-enterprise/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_URL: 'https://localhost/enterpriseurl',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL: 'https://localhost/enterpriseapi',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY: 'foobar',
|
||||
@@ -66,7 +67,7 @@ const mockEnterprise = {
|
||||
const mockEnterpriseOrg = {
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise-org/',
|
||||
'https://platformhost/sso/complete/github-enterprise-org/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL: 'https://localhost/orgurl',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL: 'https://localhost/orgapi',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY: 'foobar',
|
||||
@@ -79,7 +80,7 @@ const mockEnterpriseOrg = {
|
||||
const mockEnterpriseTeam = {
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise-team/',
|
||||
'https://platformhost/sso/complete/github-enterprise-team/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL: 'https://localhost/teamurl',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL: 'https://localhost/teamapi',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY: 'foobar',
|
||||
@@ -143,7 +144,7 @@ describe('<GitHubDetail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'GitHub OAuth2 Callback URL',
|
||||
'https://towerhost/sso/complete/github/'
|
||||
'https://platformhost/sso/complete/github/'
|
||||
);
|
||||
assertDetail(wrapper, 'GitHub OAuth2 Key', 'mock github key');
|
||||
assertDetail(wrapper, 'GitHub OAuth2 Secret', 'Encrypted');
|
||||
@@ -218,7 +219,7 @@ describe('<GitHubDetail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'GitHub Organization OAuth2 Callback URL',
|
||||
'https://towerhost/sso/complete/github-org/'
|
||||
'https://platformhost/sso/complete/github-org/'
|
||||
);
|
||||
assertDetail(wrapper, 'GitHub Organization OAuth2 Key', 'Not configured');
|
||||
assertDetail(wrapper, 'GitHub Organization OAuth2 Secret', 'Encrypted');
|
||||
@@ -269,7 +270,7 @@ describe('<GitHubDetail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'GitHub Team OAuth2 Callback URL',
|
||||
'https://towerhost/sso/complete/github-team/'
|
||||
'https://platformhost/sso/complete/github-team/'
|
||||
);
|
||||
assertDetail(wrapper, 'GitHub Team OAuth2 Key', 'OAuth2 key (Client ID)');
|
||||
assertDetail(wrapper, 'GitHub Team OAuth2 Secret', 'Encrypted');
|
||||
@@ -316,7 +317,7 @@ describe('<GitHubDetail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'GitHub Enterprise OAuth2 Callback URL',
|
||||
'https://towerhost/sso/complete/github-enterprise/'
|
||||
'https://platformhost/sso/complete/github-enterprise/'
|
||||
);
|
||||
assertDetail(
|
||||
wrapper,
|
||||
@@ -343,7 +344,7 @@ describe('<GitHubDetail />', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('Enterprise Org', () => {
|
||||
describe('Enterprise Organization', () => {
|
||||
let wrapper;
|
||||
|
||||
beforeAll(async () => {
|
||||
@@ -376,7 +377,7 @@ describe('<GitHubDetail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'GitHub Enterprise Organization OAuth2 Callback URL',
|
||||
'https://towerhost/sso/complete/github-enterprise-org/'
|
||||
'https://platformhost/sso/complete/github-enterprise-org/'
|
||||
);
|
||||
assertDetail(
|
||||
wrapper,
|
||||
@@ -445,7 +446,7 @@ describe('<GitHubDetail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'GitHub Enterprise Team OAuth2 Callback URL',
|
||||
'https://towerhost/sso/complete/github-enterprise-team/'
|
||||
'https://platformhost/sso/complete/github-enterprise-team/'
|
||||
);
|
||||
assertDetail(
|
||||
wrapper,
|
||||
@@ -476,23 +477,4 @@ describe('<GitHubDetail />', () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Redirect', () => {
|
||||
test('should render redirect when user navigates to erroneous category', async () => {
|
||||
let wrapper;
|
||||
useRouteMatch.mockImplementation(() => ({
|
||||
url: '/settings/github/foo/details',
|
||||
path: '/settings/github/:category/details',
|
||||
params: { category: 'foo' },
|
||||
}));
|
||||
await act(async () => {
|
||||
wrapper = mountWithContexts(
|
||||
<SettingsProvider value={mockAllOptions.actions}>
|
||||
<GitHubDetail />
|
||||
</SettingsProvider>
|
||||
);
|
||||
});
|
||||
await waitForElement(wrapper, 'Redirect');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<GitHubEnterpriseEdit />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise/',
|
||||
'https://platformhost/sso/complete/github-enterprise/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_URL: '',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL: '',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY: '',
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<GitHubEnterpriseOrgEdit />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise-org/',
|
||||
'https://platformhost/sso/complete/github-enterprise-org/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_URL: '',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_API_URL: '',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_ORG_KEY: '',
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<GitHubEnterpriseTeamEdit />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-enterprise-team/',
|
||||
'https://platformhost/sso/complete/github-enterprise-team/',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_URL: '',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_API_URL: '',
|
||||
SOCIAL_AUTH_GITHUB_ENTERPRISE_TEAM_KEY: '',
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<GitHubOrgEdit />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_ORG_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-org/',
|
||||
'https://platformhost/sso/complete/github-org/',
|
||||
SOCIAL_AUTH_GITHUB_ORG_KEY: '',
|
||||
SOCIAL_AUTH_GITHUB_ORG_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GITHUB_ORG_NAME: '',
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<GitHubTeamEdit />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/github-team/',
|
||||
'https://platformhost/sso/complete/github-team/',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_KEY: 'OAuth2 key (Client ID)',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GITHUB_TEAM_ID: 'team_id',
|
||||
|
||||
@@ -16,7 +16,7 @@ describe('<GoogleOAuth2 />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/google-oauth2/',
|
||||
'https://platformhost/sso/complete/google-oauth2/',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY: 'mock key',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: [
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<GoogleOAuth2Detail />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/google-oauth2/',
|
||||
'https://platformhost/sso/complete/google-oauth2/',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY: 'mock key',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: [
|
||||
@@ -68,7 +68,7 @@ describe('<GoogleOAuth2Detail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'Google OAuth2 Callback URL',
|
||||
'https://towerhost/sso/complete/google-oauth2/'
|
||||
'https://platformhost/sso/complete/google-oauth2/'
|
||||
);
|
||||
assertDetail(wrapper, 'Google OAuth2 Key', 'mock key');
|
||||
assertDetail(wrapper, 'Google OAuth2 Secret', 'Encrypted');
|
||||
|
||||
@@ -22,7 +22,7 @@ describe('<GoogleOAuth2Edit />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_CALLBACK_URL:
|
||||
'https://towerhost/sso/complete/google-oauth2/',
|
||||
'https://platformhost/sso/complete/google-oauth2/',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY: 'mock key',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET: '$encrypted$',
|
||||
SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS: [
|
||||
|
||||
@@ -26,7 +26,7 @@ describe('<MiscSystemDetail />', () => {
|
||||
ACTIVITY_STREAM_ENABLED_FOR_INVENTORY_SYNC: false,
|
||||
ORG_ADMINS_CAN_SEE_ALL_USERS: true,
|
||||
MANAGE_ORGANIZATION_AUTH: true,
|
||||
TOWER_URL_BASE: 'https://towerhost',
|
||||
TOWER_URL_BASE: 'https://platformhost',
|
||||
REMOTE_HOST_HEADERS: [],
|
||||
PROXY_IP_ALLOWED_LIST: [],
|
||||
CSRF_TRUSTED_ORIGINS: [],
|
||||
@@ -94,7 +94,7 @@ describe('<MiscSystemDetail />', () => {
|
||||
'Automation Analytics upload URL',
|
||||
'https://example.com'
|
||||
);
|
||||
assertDetail(wrapper, 'Base URL of the service', 'https://towerhost');
|
||||
assertDetail(wrapper, 'Base URL of the service', 'https://platformhost');
|
||||
assertDetail(wrapper, 'Gather data for Automation Analytics', 'Off');
|
||||
assertDetail(
|
||||
wrapper,
|
||||
|
||||
@@ -15,8 +15,10 @@ describe('<SAML />', () => {
|
||||
beforeEach(() => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_SAML_CALLBACK_URL: 'https://towerhost/sso/complete/saml/',
|
||||
SOCIAL_AUTH_SAML_METADATA_URL: 'https://towerhost/sso/metadata/saml/',
|
||||
SOCIAL_AUTH_SAML_CALLBACK_URL:
|
||||
'https://platformhost/sso/complete/saml/',
|
||||
SOCIAL_AUTH_SAML_METADATA_URL:
|
||||
'https://platformhost/sso/metadata/saml/',
|
||||
SOCIAL_AUTH_SAML_SP_ENTITY_ID: '',
|
||||
SOCIAL_AUTH_SAML_SP_PUBLIC_CERT: '',
|
||||
SOCIAL_AUTH_SAML_SP_PRIVATE_KEY: '',
|
||||
|
||||
@@ -21,8 +21,10 @@ describe('<SAMLDetail />', () => {
|
||||
beforeEach(() => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SOCIAL_AUTH_SAML_CALLBACK_URL: 'https://towerhost/sso/complete/saml/',
|
||||
SOCIAL_AUTH_SAML_METADATA_URL: 'https://towerhost/sso/metadata/saml/',
|
||||
SOCIAL_AUTH_SAML_CALLBACK_URL:
|
||||
'https://platformhost/sso/complete/saml/',
|
||||
SOCIAL_AUTH_SAML_METADATA_URL:
|
||||
'https://platformhost/sso/metadata/saml/',
|
||||
SOCIAL_AUTH_SAML_SP_ENTITY_ID: 'mock_id',
|
||||
SOCIAL_AUTH_SAML_SP_PUBLIC_CERT: 'mock_cert',
|
||||
SOCIAL_AUTH_SAML_SP_PRIVATE_KEY: '',
|
||||
@@ -71,12 +73,12 @@ describe('<SAMLDetail />', () => {
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'SAML Assertion Consumer Service (ACS) URL',
|
||||
'https://towerhost/sso/complete/saml/'
|
||||
'https://platformhost/sso/complete/saml/'
|
||||
);
|
||||
assertDetail(
|
||||
wrapper,
|
||||
'SAML Service Provider Metadata URL',
|
||||
'https://towerhost/sso/metadata/saml/'
|
||||
'https://platformhost/sso/metadata/saml/'
|
||||
);
|
||||
assertDetail(wrapper, 'SAML Service Provider Entity ID', 'mock_id');
|
||||
assertVariableDetail(
|
||||
|
||||
@@ -22,8 +22,10 @@ describe('<SAMLEdit />', () => {
|
||||
SettingsAPI.readCategory.mockResolvedValue({
|
||||
data: {
|
||||
SAML_AUTO_CREATE_OBJECTS: true,
|
||||
SOCIAL_AUTH_SAML_CALLBACK_URL: 'https://towerhost/sso/complete/saml/',
|
||||
SOCIAL_AUTH_SAML_METADATA_URL: 'https://towerhost/sso/metadata/saml/',
|
||||
SOCIAL_AUTH_SAML_CALLBACK_URL:
|
||||
'https://platformhost/sso/complete/saml/',
|
||||
SOCIAL_AUTH_SAML_METADATA_URL:
|
||||
'https://platformhost/sso/metadata/saml/',
|
||||
SOCIAL_AUTH_SAML_SP_ENTITY_ID: 'mock_id',
|
||||
SOCIAL_AUTH_SAML_SP_PUBLIC_CERT: 'mock_cert',
|
||||
SOCIAL_AUTH_SAML_SP_PRIVATE_KEY: '$encrypted$',
|
||||
|
||||
@@ -117,6 +117,10 @@ function TroubleshootingEdit() {
|
||||
name="RECEPTOR_RELEASE_WORK"
|
||||
config={debug.RECEPTOR_RELEASE_WORK}
|
||||
/>
|
||||
<BooleanField
|
||||
name="RECEPTOR_KEEP_WORK_ON_ERROR"
|
||||
config={debug.RECEPTOR_KEEP_WORK_ON_ERROR}
|
||||
/>
|
||||
{submitError && <FormSubmitError error={submitError} />}
|
||||
{revertError && <FormSubmitError error={revertError} />}
|
||||
</FormColumnLayout>
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"AWX_CLEANUP_PATHS": false,
|
||||
"AWX_REQUEST_PROFILE": false,
|
||||
"RECEPTOR_RELEASE_WORK": false
|
||||
}
|
||||
"RECEPTOR_RELEASE_WORK": false,
|
||||
"RECEPTOR_KEEP_WORK_ON_ERROR": false
|
||||
}
|
||||
|
||||
@@ -830,6 +830,15 @@
|
||||
"category_slug": "debug",
|
||||
"default": true
|
||||
},
|
||||
"RECEPTOR_KEEP_WORK_ON_ERROR": {
|
||||
"type": "boolean",
|
||||
"required": false,
|
||||
"label": "Keep receptor work on error",
|
||||
"help_text": "Prevent receptor work from being released on when error is detected",
|
||||
"category": "Debug",
|
||||
"category_slug": "debug",
|
||||
"default": false
|
||||
},
|
||||
"SESSION_COOKIE_AGE": {
|
||||
"type": "integer",
|
||||
"required": true,
|
||||
@@ -5173,6 +5182,14 @@
|
||||
"category_slug": "debug",
|
||||
"defined_in_file": false
|
||||
},
|
||||
"RECEPTOR_KEEP_WORK_ON_ERROR": {
|
||||
"type": "boolean",
|
||||
"label": "Keep receptor work on error",
|
||||
"help_text": "Prevent receptor work from being released on when error is detected",
|
||||
"category": "Debug",
|
||||
"category_slug": "debug",
|
||||
"defined_in_file": false
|
||||
},
|
||||
"SESSION_COOKIE_AGE": {
|
||||
"type": "integer",
|
||||
"label": "Idle Time Force Log Out",
|
||||
|
||||
@@ -91,6 +91,7 @@
|
||||
"slirp4netns:enable_ipv6=true"
|
||||
],
|
||||
"RECEPTOR_RELEASE_WORK": true,
|
||||
"RECEPTOR_KEEP_WORK_ON_ERROR": false,
|
||||
"SESSION_COOKIE_AGE": 1800,
|
||||
"SESSIONS_PER_USER": -1,
|
||||
"DISABLE_LOCAL_AUTH": false,
|
||||
|
||||
@@ -35,7 +35,7 @@ ui-next/src/build: $(UI_NEXT_DIR)/src/build/awx
|
||||
## True target for ui-next/src/build. Build ui_next from source.
|
||||
$(UI_NEXT_DIR)/src/build/awx: $(UI_NEXT_DIR)/src $(UI_NEXT_DIR)/src/node_modules/webpack
|
||||
@echo "=== Building ui_next ==="
|
||||
@cd $(UI_NEXT_DIR)/src && PRODUCT="$(PRODUCT)" PUBLIC_PATH=/static/awx/ ROUTE_PREFIX=/ui_next npm run build:awx
|
||||
@cd $(UI_NEXT_DIR)/src && PRODUCT="$(PRODUCT)" PUBLIC_PATH=/static/awx/ ROUTE_PREFIX=/ npm run build:awx
|
||||
@mv $(UI_NEXT_DIR)/src/build/awx/index.html $(UI_NEXT_DIR)/src/build/awx/index_awx.html
|
||||
|
||||
.PHONY: ui-next/src
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
from django.conf import settings
|
||||
from django.http import Http404
|
||||
from django.urls import re_path
|
||||
from django.views.generic.base import TemplateView
|
||||
|
||||
@@ -7,12 +5,6 @@ from django.views.generic.base import TemplateView
|
||||
class IndexView(TemplateView):
|
||||
template_name = 'index_awx.html'
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
if settings.UI_NEXT is False:
|
||||
raise Http404()
|
||||
|
||||
return super().get_context_data(**kwargs)
|
||||
|
||||
|
||||
app_name = 'ui_next'
|
||||
|
||||
|
||||
@@ -18,8 +18,6 @@ def get_urlpatterns(prefix=None):
|
||||
prefix = f'/{prefix}/'
|
||||
|
||||
urlpatterns = [
|
||||
re_path(r'', include('awx.ui.urls', namespace='ui')),
|
||||
re_path(r'^ui_next/.*', include('awx.ui_next.urls', namespace='ui_next')),
|
||||
path(f'api{prefix}', include('awx.api.urls', namespace='api')),
|
||||
]
|
||||
|
||||
@@ -36,6 +34,9 @@ def get_urlpatterns(prefix=None):
|
||||
re_path(r'^(?:api/)?500.html$', handle_500),
|
||||
re_path(r'^csp-violation/', handle_csp_violation),
|
||||
re_path(r'^login/', handle_login_redirect),
|
||||
# want api/v2/doesnotexist to return a 404, not match the ui_next urls,
|
||||
# so use a negative lookahead assertion here
|
||||
re_path(r'^(?!api/|sso/).*', include('awx.ui_next.urls', namespace='ui_next')),
|
||||
]
|
||||
|
||||
if settings.SETTINGS_MODULE == 'awx.settings.development':
|
||||
|
||||
@@ -32,7 +32,7 @@ Installing the `tar.gz` involves no special instructions.
|
||||
## Running
|
||||
|
||||
Non-deprecated modules in this collection have no Python requirements, but
|
||||
may require the official [AWX CLI](https://docs.ansible.com/ansible-tower/latest/html/towercli/index.html)
|
||||
may require the official [AWX CLI](https://pypi.org/project/awxkit/)
|
||||
in the future. The `DOCUMENTATION` for each module will report this.
|
||||
|
||||
You can specify authentication by a combination of either:
|
||||
@@ -41,8 +41,7 @@ You can specify authentication by a combination of either:
|
||||
- host, OAuth2 token
|
||||
|
||||
The OAuth2 token is the preferred method. You can obtain a token via the
|
||||
AWX CLI [login](https://docs.ansible.com/ansible-tower/latest/html/towercli/reference.html#awx-login)
|
||||
command.
|
||||
``login`` command with the AWX CLI.
|
||||
|
||||
These can be specified via (from highest to lowest precedence):
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ requirements:
|
||||
- None
|
||||
description:
|
||||
- Returns GET requests from the Automation Platform Controller API. See
|
||||
U(https://docs.ansible.com/ansible-tower/latest/html/towerapi/index.html) for API usage.
|
||||
U(https://docs.ansible.com/automation-controller/latest/html/towerapi/) for API usage.
|
||||
- For use that is cross-compatible between the awx.awx and ansible.controller collection
|
||||
see the controller_meta module
|
||||
options:
|
||||
|
||||
@@ -16,9 +16,9 @@ DOCUMENTATION = '''
|
||||
---
|
||||
module: job_template
|
||||
author: "Wayne Witzel III (@wwitzel3)"
|
||||
short_description: create, update, or destroy Automation Platform Controller job templates.
|
||||
short_description: create, update, or destroy job templates.
|
||||
description:
|
||||
- Create, update, or destroy Automation Platform Controller job templates. See
|
||||
- Create, update, or destroy job templates. See
|
||||
U(https://www.ansible.com/tower) for an overview.
|
||||
options:
|
||||
name:
|
||||
@@ -320,8 +320,8 @@ extends_documentation_fragment: awx.awx.auth
|
||||
|
||||
notes:
|
||||
- JSON for survey_spec can be found in the API Documentation. See
|
||||
U(https://docs.ansible.com/ansible-tower/latest/html/towerapi/api_ref.html#/Job_Templates/Job_Templates_job_templates_survey_spec_create)
|
||||
for POST operation payload example.
|
||||
U(https://docs.ansible.com/automation-controller/latest/html/towerapi)
|
||||
for job template survey creation and POST operation payload example.
|
||||
'''
|
||||
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ This collection should be installed from [Content Hub](https://cloud.redhat.com/
|
||||
## Running
|
||||
|
||||
Non-deprecated modules in this collection have no Python requirements, but
|
||||
may require the official [AWX CLI](https://docs.ansible.com/ansible-tower/latest/html/towercli/index.html)
|
||||
may require the AWX CLI
|
||||
in the future. The `DOCUMENTATION` for each module will report this.
|
||||
|
||||
You can specify authentication by a combination of either:
|
||||
@@ -46,8 +46,7 @@ You can specify authentication by a combination of either:
|
||||
- host, OAuth2 token
|
||||
|
||||
The OAuth2 token is the preferred method. You can obtain a token via the
|
||||
AWX CLI [login](https://docs.ansible.com/ansible-tower/latest/html/towercli/reference.html#awx-login)
|
||||
command.
|
||||
``login`` command with the AWX CLI.
|
||||
|
||||
These can be specified via (from highest to lowest precedence):
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ The Lightweight Directory Access Protocol (LDAP) is an open, vendor-neutral, ind
|
||||
|
||||
# Configure LDAP Authentication
|
||||
|
||||
Please see the [Tower documentation](https://docs.ansible.com/ansible-tower/latest/html/administration/ldap_auth.html) as well as [Ansible blog post](https://www.ansible.com/blog/getting-started-ldap-authentication-in-ansible-tower) for basic LDAP configuration.
|
||||
Please see the [AWX documentation](https://ansible.readthedocs.io/projects/awx/en/latest/administration/ldap_auth.html) for basic LDAP configuration.
|
||||
|
||||
LDAP Authentication provides duplicate sets of configuration fields for authentication with up to six different LDAP servers.
|
||||
The default set of configuration fields take the form `AUTH_LDAP_<field name>`. Configuration fields for additional LDAP servers are numbered `AUTH_LDAP_<n>_<field name>`.
|
||||
|
||||
@@ -3,7 +3,7 @@ Security Assertion Markup Language, or SAML, is an open standard for exchanging
|
||||
|
||||
|
||||
# Configure SAML Authentication
|
||||
Please see the [Tower documentation](https://docs.ansible.com/ansible-tower/latest/html/administration/ent_auth.html#saml-authentication-settings) as well as the [Ansible blog post](https://www.ansible.com/blog/using-saml-with-red-hat-ansible-tower) for basic SAML configuration. Note that AWX's SAML implementation relies on `python-social-auth` which uses `python-saml`. AWX exposes three fields which are directly passed to the lower libraries:
|
||||
Please see the [AWX documentation](https://ansible.readthedocs.io/projects/awx/en/latest/administration/ent_auth.html#saml-settings) for basic SAML configuration. Note that AWX's SAML implementation relies on `python-social-auth` which uses `python-saml`. AWX exposes three fields which are directly passed to the lower libraries:
|
||||
* `SOCIAL_AUTH_SAML_SP_EXTRA` is passed to the `python-saml` library configuration's `sp` setting.
|
||||
* `SOCIAL_AUTH_SAML_SECURITY_CONFIG` is passed to the `python-saml` library configuration's `security` setting.
|
||||
* `SOCIAL_AUTH_SAML_EXTRA_DATA`
|
||||
|
||||
@@ -71,8 +71,8 @@ rst_epilog = """
|
||||
.. |aap| replace:: Ansible Automation Platform
|
||||
.. |ab| replace:: ansible-builder
|
||||
.. |ap| replace:: Automation Platform
|
||||
.. |at| replace:: automation controller
|
||||
.. |At| replace:: Automation controller
|
||||
.. |at| replace:: AWX
|
||||
.. |At| replace:: AWX
|
||||
.. |ah| replace:: Automation Hub
|
||||
.. |EE| replace:: Execution Environment
|
||||
.. |EEs| replace:: Execution Environments
|
||||
|
||||
@@ -23,7 +23,6 @@ Authentication
|
||||
.. index::
|
||||
single: social authentication
|
||||
single: authentication
|
||||
single: enterprise authentication
|
||||
pair: configuration; authentication
|
||||
|
||||
.. include:: ./configure_awx_authentication.rst
|
||||
|
||||
@@ -320,6 +320,43 @@ Items surrounded by ``{}`` will be substituted when the log error is generated.
|
||||
- **error**: The error message returned by the API or, if no error is specified, the HTTP status as text
|
||||
|
||||
|
||||
.. _logging-api-otel:
|
||||
|
||||
OTel configuration with AWX
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
You can integrate OTel with AWX by configuring logging manually to point to your OTel collector. To do this, add the following codeblock in your `settings file <https://github.com/ansible/awx/blob/devel/tools/docker-compose/ansible/roles/sources/templates/local_settings.py.j2#L50>`_ (``local_settings.py.j2``):
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
LOGGING['handlers']['otel'] |= {
|
||||
'class': 'awx.main.utils.handlers.OTLPHandler',
|
||||
'endpoint': 'http://otel:4317',
|
||||
}
|
||||
# Add otel log handler to all log handlers where propagate is False
|
||||
for name in LOGGING['loggers'].keys():
|
||||
if not LOGGING['loggers'][name].get('propagate', True):
|
||||
handler = LOGGING['loggers'][name].get('handlers', [])
|
||||
if 'otel' not in handler:
|
||||
LOGGING['loggers'][name].get('handlers', []).append('otel')
|
||||
|
||||
# Everything without explicit propagate=False ends up logging to 'awx' so add it
|
||||
handler = LOGGING['loggers']['awx'].get('handlers', [])
|
||||
if 'otel' not in handler:
|
||||
LOGGING['loggers']['awx'].get('handlers', []).append('otel')
|
||||
|
||||
Edit ``'endpoint': 'http://otel:4317',`` to point to your OTel collector.
|
||||
|
||||
To see it working in the dev environment, set the following:
|
||||
|
||||
::
|
||||
|
||||
OTEL=true GRAFANA=true LOKI=true PROMETHEUS=true make docker-compose
|
||||
|
||||
Then go to `http://localhost:3001 <http://localhost:3001>`_ to access Grafana and see the logs.
|
||||
|
||||
|
||||
|
||||
Troubleshoot Logging
|
||||
---------------------
|
||||
|
||||
|
||||
@@ -18,7 +18,7 @@ For example, if you uploaded a specific logo, and added the following text:
|
||||
:alt: Edit User Interface Settings form populated with custom text and logo.
|
||||
|
||||
|
||||
The Tower login dialog would look like this:
|
||||
The AWX login dialog would look like this:
|
||||
|
||||
.. image:: ../common/images/configure-awx-ui-angry-spud-login.png
|
||||
:alt: AWX login screen with custom text and logo.
|
||||
|
||||
@@ -40,6 +40,4 @@ Things to know prior to submitting revisions
|
||||
Translations
|
||||
-------------
|
||||
|
||||
At this time we do not accept PRs for adding additional language translations as we have an automated process for generating our translations. This is because translations require constant care as new strings are added and changed in the code base. Because of this the .po files are overwritten during every translation release cycle. We also can't support a lot of translations on AWX as its an open source project and each language adds time and cost to maintain. If you would like to see AWX translated into a new language please create an issue and ask others you know to upvote the issue. Our translation team will review the needs of the community and see what they can do around supporting additional language.
|
||||
|
||||
If you find an issue with an existing translation, please see the `Reporting Issues <https://github.com/ansible/awx/blob/devel/CONTRIBUTING.md#reporting-issues>`_ section to open an issue and our translation team will work with you on a resolution.
|
||||
At this time we do not accept PRs for language translations.
|
||||
|
||||
@@ -9,7 +9,7 @@ An organization is a logical collection of users, teams, projects, and inventori
|
||||
From the left navigation bar, click **Organizations**.
|
||||
|
||||
.. note::
|
||||
AWX creates a default organization automatically. Users of Tower with a Self-support level license only have the
|
||||
AWX creates a default organization automatically.
|
||||
|
||||
|Organizations - default view|
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ You can manage playbooks and playbook directories by either placing them manuall
|
||||
|
||||
.. note::
|
||||
|
||||
It is recommended that, whenever possible, you use source control to manage your playbooks. This type of best practice provides the ability to treat your infrastructure as code and is in line with DevOps ideals. While this Quick Start Guide uses lightweight examples to get you up and running, we suggest using source control to manage playbook for production purposes.
|
||||
It is recommended that, whenever possible, you use source control to manage your playbooks. This type of best practice provides the ability to treat your infrastructure as code and is in line with DevOps ideals. While this Quick Start Guide uses lightweight examples to get you up and running, we suggest using source control to manage your actual playbooks.
|
||||
|
||||
To review existing projects, click **Projects** from the left navigation bar.
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@ Examine the AWX Dashboard
|
||||
pair: settings menu; help about
|
||||
|
||||
|
||||
The Dashboard offers a friendly graphical framework for your IT orchestration needs. Along the left side of the Dashboard is the navigation menu, where you can quickly display different views, navigate to your resources, grant access, and administer certain AWX features in the UI.
|
||||
The Dashboard offers a friendly graphical framework for your Ansible needs. Along the left side of the Dashboard is the navigation menu, where you can quickly display different views, navigate to your resources, grant access, and administer certain AWX features in the UI.
|
||||
|
||||
.. note::
|
||||
|
||||
@@ -40,7 +40,7 @@ On the main Dashboard view, a summary appears listing your current **Job Status*
|
||||
|
||||
The very last item in the navigation bar is **Settings**, which provides access to the AWX configuration Settings.
|
||||
|
||||
The Settings page allows administrators to configure authentication, jobs, system-level attributes, customize the user interface, and product license information. Refer to :ref:`ag_configure_awx` section for more detail.
|
||||
The Settings page allows administrators to configure authentication, jobs, system-level attributes, and customize the user interface. Refer to :ref:`ag_configure_awx` section for more detail.
|
||||
|
||||
.. image:: ../common/images/ug-settings-menu-screen.png
|
||||
|
||||
@@ -50,4 +50,4 @@ Regardless of the window or action you're performing, the very top of each page
|
||||
.. |about| image:: ../common/images/help-about-icon.png
|
||||
|
||||
.. note::
|
||||
Keep in mind that the goal of this Quick Start is to launch a simple playbook. To do this, a number of configuration options must be setup. Completing the quick start configuration tasks now ensures that tAWX is configured properly and allows for easier executions of more involved playbooks later on.
|
||||
Keep in mind that the goal of this Quick Start is to launch a simple playbook. To do this, a number of configuration options must be setup. Completing the quick start configuration tasks now ensures that AWX is configured properly and allows for easier executions of more involved playbooks later on.
|
||||
@@ -8,7 +8,7 @@ Authentication Methods Using the API
|
||||
pair: OAuth 2 Token; authentication
|
||||
pair: SSO; authentication
|
||||
|
||||
This chapter describes the numerous enterprise authentication methods, the best use case for each, and examples:
|
||||
This chapter describes different authentication methods, the best use case for each, and examples:
|
||||
|
||||
.. contents::
|
||||
:local:
|
||||
|
||||
@@ -94,7 +94,7 @@ Field lookups may also be used for more advanced queries, by appending the looku
|
||||
|
||||
The following field lookups are supported:
|
||||
|
||||
- ``exact``: Exact match (default lookup if not specified).
|
||||
- ``exact``: Exact match (default lookup if not specified, refer to the following note for more information).
|
||||
- ``iexact``: Case-insensitive version of exact.
|
||||
- ``contains``: Field contains value.
|
||||
- ``icontains``: Case-insensitive version of contains.
|
||||
@@ -122,3 +122,18 @@ Filtering based on the requesting user's level of access by query string paramet
|
||||
|
||||
- ``role_level``: Level of role to filter on, such as ``admin_role``
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
Previous releases of AWX returned queries with **__exact** results by default, but you may find that the latest versions are returning a larger subset instead. As a workaround, set the ``limit`` to ``?limit__exact`` for the default filter. For example, ``/api/v2/jobs/?limit__exact=example.domain.com`` results in:
|
||||
|
||||
::
|
||||
|
||||
{
|
||||
"count": 1,
|
||||
"next": null,
|
||||
"previous": null,
|
||||
"results": [
|
||||
...
|
||||
|
||||
.. this note is generically written for AWX. For downstream, the change started in AAP 2.0 so we can be more specific if necessary.
|
||||
|
||||
@@ -170,10 +170,13 @@ To use ``ansible_facts`` to define the host filter when creating Smart Inventori
|
||||
1. In the *Create new smart inventory screen*, click the |search| button next to the **Smart host filter** field to open a pop-up window to filter hosts for this inventory.
|
||||
|
||||
.. image:: ../common/images/inventories-smart-create-filter-highlighted.png
|
||||
:alt: Create smart inventory window with Smart host filter highlighted
|
||||
|
||||
|
||||
2. In the search pop-up window, change the search criteria from **Name** to **Advanced** and select **ansible_facts** from the **Key** field.
|
||||
|
||||
.. image:: ../common/images/inventories-smart-define-host-filter.png
|
||||
:alt: Host filter with drop-down menu options
|
||||
|
||||
|
||||
If you wanted to add an ansible fact of
|
||||
@@ -186,24 +189,29 @@ If you wanted to add an ansible fact of
|
||||
In the search field, enter ``ansible_processor[]="GenuineIntel"`` (no extra spaces or ``__`` before the value) and press **[Enter]**.
|
||||
|
||||
.. image:: ../common/images/inventories-smart-define-host-filter-facts.png
|
||||
:alt: Example of an advanced search for host filter Ansible facts
|
||||
|
||||
The resulting search criteria for the specified ansible fact populates in the lower part of the window.
|
||||
|
||||
.. image:: ../common/images/inventories-smart-define-host-filter-facts2.png
|
||||
:alt: Selected search criteria for host filter Ansible facts listed below search field
|
||||
|
||||
3. Click **Select** to add it to the **Smart host filter** field.
|
||||
|
||||
.. image:: ../common/images/inventories-smart-create-filter-added.png
|
||||
:alt: Specified search criteria for host filter Ansible facts shown in the Smart host filter field of the Create new smart inventory window
|
||||
|
||||
4. Click **Save** to save the new Smart Inventory.
|
||||
|
||||
The Details tab of the new Smart Inventory opens and displays the specified ansible facts in the **Smart host filter** field.
|
||||
|
||||
.. image:: ../common/images/inventories-smart-create-details.png
|
||||
:alt: Details tab of the new Smart Inventory displaying the specified Ansible facts in the Smart host filter field
|
||||
|
||||
5. From the Details view, you can edit the **Smart host filter** field by clicking **Edit** and delete existing filter(s), clear all existing filters, or add new ones.
|
||||
|
||||
.. image:: ../common/images/inventories-smart-define-host-filter-facts-group.png
|
||||
:alt: Specified search criteria consisting of host filter Ansible facts and groups listed below search field
|
||||
|
||||
|
||||
.. _ug_inventories_constructed:
|
||||
@@ -246,6 +254,7 @@ The hosts inside the input inventory will fit one condition, the other condition
|
||||
neither, or both. This results in four hosts total for demonstration purposes.
|
||||
|
||||
.. image:: ../common/images/inventories-constructed-inventory-venn.png
|
||||
:alt: Venn diagram describing the input inventory content for a constructed inventory
|
||||
|
||||
|
||||
This folder defines the inventory as an ini type named ``two_conditions.ini``:
|
||||
@@ -459,7 +468,7 @@ Follow the procedure described in the subsequent section, :ref:`ug_inventories_a
|
||||
Example of a constructed inventory details view:
|
||||
|
||||
.. image:: ../common/images/inventories-constructed-inventory-details.png
|
||||
|
||||
:alt: Constructed inventory details
|
||||
|
||||
|
||||
.. _ug_inventories_plugins:
|
||||
@@ -517,6 +526,7 @@ The type of inventory is identified at the top of the create form.
|
||||
|Inventories_create_new - create new inventory|
|
||||
|
||||
.. |Inventories_create_new - create new inventory| image:: ../common/images/inventories-create-new-inventory.png
|
||||
:alt: Create new inventory form
|
||||
|
||||
2. Enter the appropriate details into the following fields:
|
||||
|
||||
@@ -530,6 +540,7 @@ The type of inventory is identified at the top of the create form.
|
||||
- **Instance Groups**: Click the |search| button to open a separate window. Choose the instance group(s) for this inventory to run on. If the list is extensive, use the search to narrow the options. You may select multiple instance groups and sort them in the order you want them run.
|
||||
|
||||
.. image:: ../common/images/select-instance-groups-modal.png
|
||||
:alt: Select instance groups dialog
|
||||
|
||||
- **Labels**: Optionally supply labels that describe this inventory, so they can be used to group and filter inventories and jobs.
|
||||
|
||||
@@ -561,6 +572,7 @@ The type of inventory is identified at the top of the create form.
|
||||
|Inventories_create_new_saved - create new inventory|
|
||||
|
||||
.. |Inventories_create_new_saved - create new inventory| image:: ../common/images/inventories-create-new-saved-inventory.png
|
||||
:alt: Example Create new inventory form filled out
|
||||
|
||||
3. Click **Save** when done.
|
||||
|
||||
@@ -604,6 +616,7 @@ To create a new group for an inventory:
|
||||
|Inventories_manage_group_add|
|
||||
|
||||
.. |Inventories_manage_group_add| image:: ../common/images/inventories-add-group-new.png
|
||||
:alt: Create new group form for inventories
|
||||
|
||||
2. Enter the appropriate details into the required and optional fields:
|
||||
|
||||
@@ -636,12 +649,16 @@ The **Create Group** window closes and the newly created group displays as an en
|
||||
|Inventories add group subgroup|
|
||||
|
||||
.. |Inventories add group subgroup| image:: ../common/images/inventories-add-group-subgroup-added.png
|
||||
:alt: Related Groups tab of the Groups form for inventories
|
||||
|
||||
|
||||
If you chose to add an existing group, available groups will appear in a separate selection window.
|
||||
|
||||
|Inventories add group existing subgroup|
|
||||
|
||||
.. |Inventories add group existing subgroup| image:: ../common/images/inventories-add-group-existing-subgroup.png
|
||||
:alt: Existing group appearing in a separate selection window
|
||||
|
||||
|
||||
Once a group is selected, it displays as an entry in the list of groups associated with the group.
|
||||
|
||||
@@ -657,6 +674,7 @@ The list view displays all your inventory groups at once, or you can filter it t
|
||||
You may be able to delete a subgroup without concern for dependencies, since AWX will look for dependencies such as any child groups or hosts. If any exist, a confirmation dialog displays for you to choose whether to delete the root group and all of its subgroups and hosts; or promote the subgroup(s) so they become the top-level inventory group(s), along with their host(s).
|
||||
|
||||
.. image:: ../common/images/inventories-groups-delete-root-with-children.png
|
||||
:alt: Delete group confirmation dialog box with a prompt to select whether to delete all groups and hosts or promote child groups and hosts
|
||||
|
||||
.. _ug_inventories_add_host:
|
||||
|
||||
@@ -672,6 +690,8 @@ You can configure hosts for the inventory as well as for groups and groups withi
|
||||
3. If creating a new host, select the |toggle button| button to specify whether or not to include this host while running jobs.
|
||||
|
||||
.. |toggle button| image:: ../common/images/on-off-toggle-button.png
|
||||
:alt: Toggle button to include this host while running jobs
|
||||
|
||||
|
||||
4. Enter the appropriate details into the required and optional fields:
|
||||
|
||||
@@ -686,12 +706,16 @@ The **Create Host** window closes and the newly created host displays as an entr
|
||||
|Inventories add group host|
|
||||
|
||||
.. |Inventories add group host| image:: ../common/images/inventories-add-group-host-added.png
|
||||
:alt: Hosts tab of the Groups window showing available hosts
|
||||
|
||||
|
||||
If you chose to add an existing host, available hosts will appear in a separate selection window.
|
||||
|
||||
|Inventories add existing host|
|
||||
|
||||
.. |Inventories add existing host| image:: ../common/images/inventories-add-existing-host.png
|
||||
:alt: Existing host appearing in a separate selection window
|
||||
|
||||
|
||||
Once a host is selected, it displays as an entry in the list of hosts associated with the group. You can disassociate a host from this screen by selecting the host and clicking the **Disassociate** button.
|
||||
|
||||
@@ -706,12 +730,16 @@ list of hosts.
|
||||
|Inventories add group host emphasized|
|
||||
|
||||
.. |Inventories add group host emphasized| image:: ../common/images/inventories-add-group-host-added-emphasized.png
|
||||
:alt: Inventories add host emphasized
|
||||
|
||||
|
||||
This opens the Details tab of the selected host.
|
||||
|
||||
|Inventories add group host details|
|
||||
|
||||
.. |Inventories add group host details| image:: ../common/images/inventories-add-group-host-details.png
|
||||
:alt: Host details tab for the selected inventory
|
||||
|
||||
|
||||
7. Click the **Groups** tab to configure groups for the host.
|
||||
|
||||
@@ -720,6 +748,7 @@ This opens the Details tab of the selected host.
|
||||
Available groups appear in a separate selection window.
|
||||
|
||||
.. image:: ../common/images/inventories-add-group-hosts-add-groups.png
|
||||
:alt: Select Groups dialog showing two available groups
|
||||
|
||||
b. Click to select the group(s) to associate with the host and click **Save**.
|
||||
|
||||
@@ -728,13 +757,14 @@ This opens the Details tab of the selected host.
|
||||
8. If a host was used to run a job, you can view details about those jobs in the **Completed Jobs** tab of the host and click **Expanded** to view details about each job.
|
||||
|
||||
.. image:: ../common/images/inventories-add-host-view-completed-jobs.png
|
||||
:alt: Jobs tab showing list of completed jobs associated with the selected host
|
||||
|
||||
|
||||
.. _ug_inventories_add_host_bulk_api:
|
||||
|
||||
.. note::
|
||||
|
||||
You may create hosts in bulk using the newly added endpoint in the API, ``/api/v2/bulk/host_create``. This endpoint accepts JSON and you can specify the target inventory and a list of hosts to add to the inventory. These hosts must be unique within the inventory. Either all hosts are added, or an error is returned indicating why the operation was not able to complete. Use the **OPTIONS** request to return relevant schema. For more information, see the `Bulk endpoint <https://docs.ansible.com/automation-controller/latest/html/controllerapi/api_ref.html#/Bulk>`_ of the *Reference* section of the |atapi|.
|
||||
You may create hosts in bulk using the newly added endpoint in the API, ``/api/v2/bulk/host_create``. This endpoint accepts JSON and you can specify the target inventory and a list of hosts to add to the inventory. These hosts must be unique within the inventory. Either all hosts are added, or an error is returned indicating why the operation was not able to complete. Use the **OPTIONS** request to return relevant schema. For more information, see the `Bulk endpoint <https://ansible.readthedocs.io/projects/awx/en/latest/rest_api/api_ref.html#/Bulk>`_ of the *Reference* section of the |atapi|.
|
||||
|
||||
.. _ug_inventories_add_source:
|
||||
|
||||
@@ -752,6 +782,7 @@ This opens the Create Source window.
|
||||
|Inventories create source|
|
||||
|
||||
.. |Inventories create source| image:: ../common/images/inventories-create-source.png
|
||||
:alt: Create new source form for inventory source
|
||||
|
||||
|
||||
3. Enter the appropriate details into the required and optional fields:
|
||||
@@ -818,6 +849,7 @@ This opens the Create Source window.
|
||||
The **Notifications** tab is only present after you save the newly-created source.
|
||||
|
||||
.. image:: ../common/images/inventories-create-source-with-notifications-tab.png
|
||||
:alt: Notification tab for the inventory source
|
||||
|
||||
12. To configure notifications for the source, click the **Notifications** tab.
|
||||
|
||||
@@ -833,6 +865,7 @@ Once a source is defined, it displays as an entry in the list of sources associa
|
||||
|Inventories view sources|
|
||||
|
||||
.. |Inventories view sources| image:: ../common/images/inventories-view-sources.png
|
||||
:alt: Sources tab of the inventory showing one inventory source
|
||||
|
||||
|
||||
.. _ug_inventory_sources:
|
||||
@@ -862,12 +895,14 @@ An inventory that is sourced from a project means that it uses the SCM type from
|
||||
This field only displays if the sourced project has the **Allow Branch Override** option checked:
|
||||
|
||||
.. image:: ../common/images/projects-create-scm-project-branch-override-checked.png
|
||||
:alt: Inventory sourced from a project with SCM project branch override checked
|
||||
|
||||
- **Credential**: Optionally specify the credential to use for this source.
|
||||
- **Project**: Required. Pre-populates with a default project, otherwise, specify the project this inventory is using as its source. Click the |search| button to choose from a list of projects. If the list is extensive, use the search to narrow the options.
|
||||
- **Inventory File**: Required. Select an inventory file associated with the sourced project. If not already populated, you can type it into the text field within the drop down menu to filter the extraneous file types. In addition to a flat file inventory, you can point to a directory or an inventory script.
|
||||
|
||||
.. image:: ../common/images/inventories-create-source-sourced-from-project-filter.png
|
||||
:alt: Inventory file field of the Sourced from a project inventory type
|
||||
|
||||
3. You can optionally specify the verbosity, host filter, enabled variable/value, and update options as described in the main procedure for :ref:`adding a source <ug_add_inv_common_fields>`.
|
||||
|
||||
@@ -903,6 +938,8 @@ Amazon Web Services EC2
|
||||
|Inventories - create source - AWS EC2 example|
|
||||
|
||||
.. |Inventories - create source - AWS EC2 example| image:: ../common/images/inventories-create-source-AWS-example.png
|
||||
:alt: Inventories create source AWS example
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
@@ -923,6 +960,7 @@ Google Compute Engine
|
||||
|Inventories - create source - GCE example|
|
||||
|
||||
.. |Inventories - create source - GCE example| image:: ../common/images/inventories-create-source-GCE-example.png
|
||||
:alt: Inventories create source Google compute engine example
|
||||
|
||||
3. You can optionally specify the verbosity, host filter, enabled variable/value, and update options as described in the main procedure for :ref:`adding a source <ug_add_inv_common_fields>`.
|
||||
|
||||
@@ -948,7 +986,7 @@ Microsoft Azure Resource Manager
|
||||
|Inventories - create source - Azure RM example|
|
||||
|
||||
.. |Inventories - create source - Azure RM example| image:: ../common/images/inventories-create-source-azurerm-example.png
|
||||
|
||||
:alt: Inventories create source Azure example
|
||||
|
||||
.. _ug_source_vmvcenter:
|
||||
|
||||
@@ -973,7 +1011,7 @@ VMware vCenter
|
||||
|Inventories - create source - VMware example|
|
||||
|
||||
.. |Inventories - create source - VMware example| image:: ../common/images/inventories-create-source-vmware-example.png
|
||||
|
||||
:alt: Inventories create source VMWare example
|
||||
|
||||
.. _ug_source_satellite:
|
||||
|
||||
@@ -995,6 +1033,7 @@ Red Hat Satellite 6
|
||||
|Inventories - create source - RH Satellite example|
|
||||
|
||||
.. |Inventories - create source - RH Satellite example| image:: ../common/images/inventories-create-source-rhsat6-example.png
|
||||
:alt: Inventories create source Red Hat Satellite example
|
||||
|
||||
If you encounter an issue with AWX inventory not having the "related groups" from Satellite, you might need to define these variables in the inventory source. See the inventory plugins template example for :ref:`ir_plugin_satellite` in the |atir| for detail.
|
||||
|
||||
@@ -1019,7 +1058,7 @@ Red Hat Insights
|
||||
|Inventories - create source - RH Insights example|
|
||||
|
||||
.. |Inventories - create source - RH Insights example| image:: ../common/images/inventories-create-source-insights-example.png
|
||||
|
||||
:alt: Inventories create source Red Hat Insights example
|
||||
|
||||
.. _ug_source_openstack:
|
||||
|
||||
@@ -1041,7 +1080,7 @@ OpenStack
|
||||
|Inventories - create source - OpenStack example|
|
||||
|
||||
.. |Inventories - create source - OpenStack example| image:: ../common/images/inventories-create-source-openstack-example.png
|
||||
|
||||
:alt: Inventories create source OpenStack example
|
||||
|
||||
.. _ug_source_rhv:
|
||||
|
||||
@@ -1062,6 +1101,8 @@ Red Hat Virtualization
|
||||
|Inventories - create source - RHV example|
|
||||
|
||||
.. |Inventories - create source - RHV example| image:: ../common/images/inventories-create-source-rhv-example.png
|
||||
:alt: Inventories create source Red Hat Virtualization example
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
@@ -1083,6 +1124,7 @@ Red Hat Ansible Automation Platform
|
||||
3. You can optionally specify the verbosity, host filter, enabled variable/value, and update options as described in the main procedure for :ref:`adding a source <ug_add_inv_common_fields>`.
|
||||
|
||||
.. image:: ../common/images/inventories-create-source-rhaap-example.png
|
||||
:alt: Inventories create source Red Hat Ansible Automation Platform example
|
||||
|
||||
4. Use the **Source Variables** field to override variables used by the ``controller`` inventory plugin. Enter variables using either JSON or YAML syntax. Use the radio button to toggle between the two.
|
||||
|
||||
@@ -1117,6 +1159,7 @@ This inventory source uses the `terraform_state <https://github.com/ansible-coll
|
||||
5. Enter an |ee| in the **Execution Environment** field that contains a Terraform binary. This is required for the inventory plugin to run the Terraform commands that read inventory data from the Terraform state file. Refer to the `Terraform EE readme <https://github.com/ansible-cloud/terraform_ee>`_ that contains an example |ee| configuration with a Terraform binary.
|
||||
|
||||
.. image:: ../common/images/inventories-create-source-terraform-example.png
|
||||
:alt: Inventories create source Terraform example
|
||||
|
||||
6. To add hosts for AWS EC2, GCE, and Azure instances, the Terraform state file in the backend must contain state for resources already deployed to EC2, GCE, or Azure. Refer to each of the Terraform providers' respective documentation to provision instances.
|
||||
|
||||
@@ -1152,6 +1195,7 @@ This inventory source uses a cluster that is able to deploy OpenShift (OCP) virt
|
||||
|
||||
|
||||
.. image:: ../common/images/inventories-create-source-ocpvirt-example.png
|
||||
:alt: Inventories create source OpenShift virtualization example
|
||||
|
||||
5. Save the configuration and click the **Sync** button to sync the inventory.
|
||||
|
||||
@@ -1233,10 +1277,7 @@ If an inventory was used to run a job, you can view details about those jobs in
|
||||
|Inventories view completed jobs|
|
||||
|
||||
.. |Inventories view completed jobs| image:: ../common/images/inventories-view-completed-jobs.png
|
||||
|
||||
|
||||
|
||||
|
||||
:alt: Inventories view completed jobs
|
||||
|
||||
.. _ug_inventories_run_ad_hoc:
|
||||
|
||||
@@ -1255,12 +1296,15 @@ To run an ad hoc command:
|
||||
|ad hoc-commands-inventory-home|
|
||||
|
||||
.. |ad hoc-commands-inventory-home| image:: ../common/images/inventories-add-group-host-added.png
|
||||
:alt: Ad hoc commands inventory home
|
||||
|
||||
|
||||
2. Click the **Run Command** button.
|
||||
|
||||
The Run command window opens.
|
||||
|
||||
.. image:: ../common/images/ad-hoc-run-execute-command.png
|
||||
:alt: Ad hoc run execute command
|
||||
|
||||
3. Enter the details for the following fields:
|
||||
|
||||
@@ -1290,10 +1334,13 @@ The Run command window opens.
|
||||
|ad hoc-commands-inventory-run-command|
|
||||
|
||||
.. |ad hoc-commands-inventory-run-command| image:: ../common/images/ad-hoc-commands-inventory-run-command.png
|
||||
:alt: Ad hoc commands inventory run command
|
||||
|
||||
|
||||
4. Click **Next** to choose the |ee| you want the ad-hoc command to be run against.
|
||||
|
||||
.. image:: ../common/images/ad-hoc-commands-inventory-run-command-ee.png
|
||||
:alt: Ad hoc run command dialog prompting for Execution Environments with two listed to choose from
|
||||
|
||||
5. Click **Next** to choose the credential you want to use and click the **Launch** button.
|
||||
|
||||
@@ -1303,3 +1350,5 @@ The results display in the **Output** tab of the module's job window.
|
||||
|ad hoc-commands-inventory-results-example|
|
||||
|
||||
.. |ad hoc-commands-inventory-results-example| image:: ../common/images/ad-hoc-commands-inventory-results-example.png
|
||||
:alt: Ad hoc commands inventory results example
|
||||
|
||||
|
||||
@@ -473,7 +473,7 @@ When slice jobs are running, job lists display the workflow and job slices, as w
|
||||
|
||||
.. note::
|
||||
|
||||
You may launch jobs in bulk using the newly added endpoint in the API, ``/api/v2/bulk/job_launch``. This endpoint accepts JSON and you can specify a list of unified job templates (such as job templates, project updates, etc) to launch. The user must have the appropriate permission to launch all the jobs. Either all jobs are launched, or an error is returned indicating why the operation was not able to complete. Use the **OPTIONS** request to return relevant schema. For more information, see the `Bulk endpoint <https://docs.ansible.com/automation-controller/latest/html/controllerapi/api_ref.html#/Bulk>`_ of the *Reference* section of the |atapi|.
|
||||
You may launch jobs in bulk using the newly added endpoint in the API, ``/api/v2/bulk/job_launch``. This endpoint accepts JSON and you can specify a list of unified job templates (such as job templates, project updates, etc) to launch. The user must have the appropriate permission to launch all the jobs. Either all jobs are launched, or an error is returned indicating why the operation was not able to complete. Use the **OPTIONS** request to return relevant schema. For more information, see the `Bulk endpoint <https://ansible.readthedocs.io/projects/awx/en/latest/rest_api/api_ref.html#/Bulk>`_ of the *Reference* section of the |atapi|.
|
||||
|
||||
|
||||
Copy a Job Template
|
||||
|
||||
@@ -189,7 +189,7 @@ Authentication Enhancements
|
||||
pair: features; authentication
|
||||
pair: features; OAuth 2 token
|
||||
|
||||
AWX supports LDAP, SAML, token-based authentication. Enhanced LDAP and SAML support allows you to integrate your enterprise account information in a more flexible manner. Token-based Authentication allows for easy authentication of third-party tools and services with AWX via integrated OAuth 2 token support.
|
||||
AWX supports LDAP, SAML, token-based authentication. Enhanced LDAP and SAML support allows you to integrate your account information in a more flexible manner. Token-based Authentication allows for easy authentication of third-party tools and services with AWX via integrated OAuth 2 token support.
|
||||
|
||||
Cluster Management
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
@@ -240,9 +240,8 @@ Job Distribution
|
||||
pair: features; jobs, slicing
|
||||
pair: features; jobs, distribution
|
||||
|
||||
As automation moves enterprise-wide, the need to automate at scale grows. AWX offers the ability to take a fact gathering or
|
||||
configuration job running across thousands of machines and slice it into individual job slices that can be distributed across your AWX cluster for increased reliability, faster job completion, and better cluster utilization. If you need to change a parameter across 15,000 switches at
|
||||
scale, or gather information across your multi-thousand-node RHEL estate, you can now do so easily.
|
||||
AWX offers the ability to take a fact gathering or configuration job running across thousands of machines and slice it into individual job slices that can be distributed across your AWX cluster for increased reliability, faster job completion, and better cluster utilization.
|
||||
If you need to change a parameter across 15,000 switches at scale, or gather information across your multi-thousand-node RHEL estate, you can now do so easily.
|
||||
|
||||
|
||||
Support for deployment in a FIPS-enabled environment
|
||||
|
||||
@@ -21,7 +21,7 @@ DAB RBAC
|
||||
single: roles
|
||||
pair: DAB; RBAC
|
||||
|
||||
This section describes the latest changes to RBAC, involving use of the ``django-ansible-base`` (DAB) library, to enhance existing roles, provide a uniformed model that is compatible with platform (enterprise) components, and allow creation of custom roles. However, the internals of the system in the backend have changes implemented, but they are not reflected yet in the AWX UI. The change to the backend maintains a compatibility layer so the “old” roles in the API still exist temporarily, until a fully-functional compatible UI replaces the existing roles.
|
||||
This section describes the latest changes to RBAC, involving use of the ``django-ansible-base`` (DAB) library, to enhance existing roles, and allow creation of custom roles. However, the internals of the system in the backend have changes implemented, but they are not reflected yet in the AWX UI. The change to the backend maintains a compatibility layer so the “old” roles in the API still exist temporarily, until a fully-functional compatible UI replaces the existing roles.
|
||||
|
||||
New functionality, specifically custom roles, is possible through direct API clients or the API browser, but the presentation in the AWX UI might not reflect the changes made in the API.
|
||||
|
||||
|
||||
@@ -264,7 +264,7 @@ This will create an `httpbin` service reachable from the AWX container at `http:
|
||||
The Grafana notification type allows you to create Grafana annotations. Details about this feature of Grafana are available at http://docs.grafana.org/reference/annotations/. In order to allow AWX to add annotations, an API Key needs to be created in Grafana. Note that the created annotations are region events with start and end time of the associated AWX Job. The annotation description is also provided by the subject of the associated AWX Job, for example:
|
||||
|
||||
```
|
||||
Job #1 'Ping Macbook' succeeded: https://towerhost/#/jobs/playbook/1
|
||||
Job #1 'Ping Macbook' succeeded: https://platformhost/#/jobs/playbook/1
|
||||
```
|
||||
|
||||
The configurable options of the Grafana notification type are:
|
||||
|
||||
@@ -187,14 +187,14 @@ This task spawns an `ansible` process, which then runs a command using Ansible.
|
||||
- Build a command line argument list for running Ansible, optionally using `ssh-agent` for public/private key authentication.
|
||||
- Return whether the task should use process isolation.
|
||||
|
||||
For more information on ad hoc commands, read the [Running Ad Hoc Commands section](https://docs.ansible.com/ansible-tower/latest/html/userguide/inventories.html#running-ad-hoc-commands) of the Inventories page of the Ansible Tower User Guide.
|
||||
For more information on ad hoc commands, read the [Running Ad Hoc Commands section](https://ansible.readthedocs.io/projects/awx/en/latest/userguide/inventories.html#running-ad-hoc-commands) of the Inventories chapter of _Automating with AWX_ guide.
|
||||
|
||||
|
||||
#### Run Job
|
||||
|
||||
This task is a definition and set of parameters for running `ansible-playbook` via a Job Template. It defines metadata about a given playbook run, such as a named identifier, an associated inventory to run against, the project and `.yml` playbook file to run, etc.
|
||||
|
||||
For more information, visit the [Jobs page](https://docs.ansible.com/ansible-tower/latest/html/userguide/jobs.html) of the Ansible Tower User Guide.
|
||||
For more information, visit the [Jobs chapter](https://ansible.readthedocs.io/projects/awx/en/latest/userguide/jobs.html) of the _Automating with AWX_ guide.
|
||||
|
||||
|
||||
#### Run Project Update
|
||||
@@ -203,7 +203,7 @@ When a Project Update is run in AWX, an `ansible-playbook` command is composed a
|
||||
|
||||
This task also includes a helper method to build SCM url and extra vars with parameters needed for authentication, as well as a method for returning search/replace strings to prevent output URLs from showing sensitive passwords.
|
||||
|
||||
To read more about this topic, visit the [Projects page](https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html) of the Ansible Tower User Guide.
|
||||
To read more about this topic, visit the [Projects chapter](https://ansible.readthedocs.io/projects/awx/en/latest/userguide/projects.html) of the _Automating with AWX_ guide.
|
||||
|
||||
|
||||
#### Run Inventory Update
|
||||
@@ -214,14 +214,14 @@ In older versions of AWX, the `INI` files were not exclusive for either specific
|
||||
|
||||
Additionally, inventory imports are run through a management command. Inventory in `args` get passed to that command, which results in it not being considered to be an Ansible inventory by Runner even though it is.
|
||||
|
||||
To read more about inventories, visit the [Inventories page](https://docs.ansible.com/ansible-tower/latest/html/userguide/inventories.html) of the Ansible Tower User Guide. For more detail about Runner, visit the [Ansible Runner Integration Overview](https://github.com/ansible/awx/blob/devel/docs/ansible_runner_integration.md) AWX documentation page.
|
||||
To read more about inventories, visit the [Inventories chapter](https://ansible.readthedocs.io/projects/awx/en/latest/userguide/inventories.html) of the _Automating with AWX_ guide. For more detail about Runner, visit the [Ansible Runner Integration Overview](https://github.com/ansible/awx/blob/devel/docs/ansible_runner_integration.md) AWX documentation page.
|
||||
|
||||
|
||||
#### System Jobs
|
||||
|
||||
The main distinctive feature of a System Job (as compared to all other Unified Jobs) is that a system job runs management commands, which are given the highest priority for execution hierarchy purposes. They also implement a database lock while running, _i.e._, no other jobs can be run during that time on the same node. Additionally, they have a fixed fork impact of 5 vs 1.
|
||||
|
||||
You can read more about [Ansible Tower Capacity Determination and Job Impact](https://docs.ansible.com/ansible-tower/latest/html/userguide/jobs.html#at-capacity-determination-and-job-impact) in the Jobs section of the Ansible Tower User Guide.
|
||||
You can read more about [Ansible Tower Capacity Determination and Job Impact](https://ansible.readthedocs.io/projects/awx/en/latest/userguide/jobs.html#awx-capacity-determination-and-job-impact) in the Jobs chapter of the _Automating with AWX_ guide.
|
||||
|
||||
|
||||
### Periodic Background Tasks
|
||||
@@ -247,7 +247,7 @@ The `smart_inventories` field in AWX uses a membership lookup table that identif
|
||||
|
||||
An important thing to note is that this task is only run if the `AWX_REBUILD_SMART_MEMBERSHIP` is set to `True` (default is `False`).
|
||||
|
||||
For more information, visit the [Smart Inventories section](https://docs.ansible.com/ansible-tower/latest/html/userguide/inventories.html#smart-inventories) of the Tower User Guide's "Inventory" page or the AWX documentation page [Inventory Refresh Overview page](https://github.com/ansible/awx/blob/devel/docs/inventory_refresh.md#inventory-changes) in this repo.
|
||||
For more information, visit the [Smart Inventories section](https://ansible.readthedocs.io/projects/awx/en/latest/userguide/inventories.html#smart-inventories) of the Inventories chapter of the _Automating with AWX_ guide.
|
||||
|
||||
|
||||
#### Deep Copy Model Object
|
||||
@@ -277,7 +277,7 @@ This task allows the user to turn on a global profiler in their system, so that
|
||||
|
||||
The analytics collection `gather()` and `ship()` functions are called by an `awx-manage gather_analytics --ship` command, which runs on whichever instance it is invoked on. When these functions are called by Celery beat (currently at midnight local time), it is run on one `execution_node` by the Python in the AWX virtualenv.
|
||||
|
||||
For more details about analytics, please visit the [Usability Analytics and Data Collection](https://docs.ansible.com/ansible-tower/latest/html/administration/usability_data_collection.html) page.
|
||||
For more details about analytics, please visit the [Analytics gathering](https://ansible.readthedocs.io/projects/awx/en/latest/administration/awx-manage.html#analytics-gathering) section of the _Administering AWX Deployments_ guide.
|
||||
|
||||
|
||||
#### Run Administrative Checks
|
||||
@@ -308,4 +308,4 @@ When a user creates a notification template in `/api/v2/notification_templates`,
|
||||
|
||||
Notifications assigned at certain levels will inherit traits defined on parent objects in different ways. For example, ad hoc commands will use notifications defined on the Organization that the inventory is associated with.
|
||||
|
||||
For more details on notifications, visit the [Notifications page](https://docs.ansible.com/ansible-tower/3.4.3/html/userguide/notifications.html) of the Tower user guide, or the AWX documentation on [Notification System Overview](https://github.com/ansible/awx/blob/devel/docs/notification_system.md) in this repository.
|
||||
For more details on notifications, visit the [Notifications chapter](hhttps://ansible.readthedocs.io/projects/awx/en/latest/userguide/notifications.html) of the _Automating with AWX_ guide.
|
||||
|
||||
@@ -18,7 +18,7 @@ X-API-Query-Time: 0.004s
|
||||
X-API-Time: 0.026s
|
||||
|
||||
{
|
||||
"SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL": "https://towerhost/sso/complete/github-team/",
|
||||
"SOCIAL_AUTH_GITHUB_TEAM_CALLBACK_URL": "https://platformhost/sso/complete/github-team/",
|
||||
"SOCIAL_AUTH_GITHUB_TEAM_KEY": "",
|
||||
"SOCIAL_AUTH_GITHUB_TEAM_SECRET": "",
|
||||
"SOCIAL_AUTH_GITHUB_TEAM_ID": "",
|
||||
|
||||
77
requirements/django-ansible-base-pinned-version.sh
Executable file
77
requirements/django-ansible-base-pinned-version.sh
Executable file
@@ -0,0 +1,77 @@
|
||||
#!/bin/bash
|
||||
set +x
|
||||
|
||||
# CONSTANTS
|
||||
export REGEX_LEFT='https://github.com/ansible/django-ansible-base@'
|
||||
export REGEX_RIGHT='#egg=django-ansible-base'
|
||||
|
||||
# GLOBALS
|
||||
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
||||
REQ_FILE=$SCRIPT_DIR/requirements_git.txt
|
||||
|
||||
# Pin Function
|
||||
DESIRED_VERSION=''
|
||||
Pin()
|
||||
{
|
||||
export DESIRED_VERSION
|
||||
perl -p -i -e 's/\Q$ENV{REGEX_LEFT}\E(.*?)\Q$ENV{REGEX_RIGHT}\E/$ENV{REGEX_LEFT}$ENV{DESIRED_VERSION}$ENV{REGEX_RIGHT}/g' $REQ_FILE
|
||||
}
|
||||
|
||||
# Current Function
|
||||
Current()
|
||||
{
|
||||
REQUIREMENTS_LINE=$(grep django-ansible-base $REQ_FILE)
|
||||
|
||||
echo "$REQUIREMENTS_LINE" | perl -nE 'say $1 if /\Q$ENV{REGEX_LEFT}\E(.*?)\Q$ENV{REGEX_RIGHT}\E/'
|
||||
}
|
||||
|
||||
|
||||
Help()
|
||||
{
|
||||
# Display Help
|
||||
echo ""
|
||||
echo "Help:"
|
||||
echo ""
|
||||
echo "Interact with django-ansible-base in $REQ_FILE."
|
||||
echo "By default, output the current django-ansible-base pinned version."
|
||||
echo
|
||||
echo "Syntax: scriptTemplate [-s|h|v]"
|
||||
echo "options:"
|
||||
echo "s Set django-ansible-base version to pin to."
|
||||
echo "h Print this Help."
|
||||
echo "v Verbose mode."
|
||||
echo
|
||||
}
|
||||
|
||||
if [ $# -eq 0 ]; then
|
||||
Current
|
||||
exit
|
||||
fi
|
||||
|
||||
|
||||
while getopts ":hs:" option; do
|
||||
case $option in
|
||||
h) # display Help
|
||||
Help
|
||||
exit
|
||||
;;
|
||||
s)
|
||||
DESIRED_VERSION=$OPTARG;;
|
||||
:)
|
||||
echo "Option -${OPTARG} requires an argument."
|
||||
Help
|
||||
exit 1
|
||||
;;
|
||||
\?) # Invalid option
|
||||
echo "Error: Invalid option"
|
||||
echo ""
|
||||
Help
|
||||
exit;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ -n "$DESIRED_VERSION" ]; then
|
||||
Pin
|
||||
Current
|
||||
fi
|
||||
|
||||
@@ -2,4 +2,4 @@ git+https://github.com/ansible/system-certifi.git@devel#egg=certifi
|
||||
# Remove pbr from requirements.in when moving ansible-runner to requirements.in
|
||||
git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner
|
||||
git+https://github.com/ansible/python3-saml.git@devel#egg=python3-saml
|
||||
django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest_filters,jwt_consumer,resource_registry,rbac]
|
||||
django-ansible-base @ git+https://github.com/ansible/django-ansible-base@2024.7.17#egg=django-ansible-base[rest_filters,jwt_consumer,resource_registry,rbac]
|
||||
|
||||
@@ -4,8 +4,20 @@
|
||||
### DO NOT EDIT
|
||||
###
|
||||
|
||||
{% if not headless|bool %}
|
||||
# UI_next build contaienr
|
||||
FROM quay.io/centos/centos:stream9 AS ui-next-builder
|
||||
USER root
|
||||
RUN dnf -y update && dnf install -y nodejs make git
|
||||
RUN npm install -g n && n 18
|
||||
|
||||
COPY . /tmp/src/
|
||||
WORKDIR /tmp/src/
|
||||
RUN make ui-next
|
||||
{% endif %}
|
||||
|
||||
# Build container
|
||||
FROM quay.io/centos/centos:stream9 as builder
|
||||
FROM quay.io/centos/centos:stream9 AS builder
|
||||
|
||||
ENV LANG en_US.UTF-8
|
||||
ENV LANGUAGE en_US:en
|
||||
@@ -31,9 +43,6 @@ RUN dnf -y update && dnf install -y 'dnf-command(config-manager)' && \
|
||||
libffi-devel \
|
||||
libtool-ltdl-devel \
|
||||
make \
|
||||
{% if not headless|bool %}
|
||||
nodejs \
|
||||
{% endif %}
|
||||
nss \
|
||||
openldap-devel \
|
||||
# pin to older openssl, see jira AAP-23449
|
||||
@@ -74,21 +83,20 @@ RUN cd /tmp && make requirements_awx
|
||||
|
||||
ARG VERSION
|
||||
ARG SETUPTOOLS_SCM_PRETEND_VERSION
|
||||
ARG HEADLESS
|
||||
|
||||
{% if (build_dev|bool) or (kube_dev|bool) %}
|
||||
ADD requirements/requirements_dev.txt /tmp/requirements
|
||||
RUN cd /tmp && make requirements_awx_dev
|
||||
{% else %}
|
||||
# Use the distro provided npm to bootstrap our required version of node
|
||||
|
||||
{% if not headless|bool %}
|
||||
RUN npm install -g n && n 16.13.1
|
||||
{% endif %}
|
||||
|
||||
# Copy source into builder, build sdist, install it into awx venv
|
||||
COPY . /tmp/src/
|
||||
WORKDIR /tmp/src/
|
||||
|
||||
{% if not headless|bool %}
|
||||
COPY --from=ui-next-builder /tmp/src/awx/ui_next/build /tmp/src/awx/ui_next/build
|
||||
{% endif %}
|
||||
|
||||
RUN make sdist && /var/lib/awx/venv/awx/bin/pip install dist/awx.tar.gz
|
||||
|
||||
{% if not headless|bool %}
|
||||
@@ -189,7 +197,7 @@ COPY --from=builder /var/lib/awx /var/lib/awx
|
||||
|
||||
RUN ln -s /var/lib/awx/venv/awx/bin/awx-manage /usr/bin/awx-manage
|
||||
|
||||
{%if build_dev|bool %}
|
||||
{% if build_dev|bool %}
|
||||
COPY --from={{ receptor_image }} /usr/bin/receptor /usr/bin/receptor
|
||||
|
||||
RUN openssl req -nodes -newkey rsa:2048 -keyout /etc/nginx/nginx.key -out /etc/nginx/nginx.csr \
|
||||
|
||||
@@ -208,13 +208,25 @@ awx_1 | Applying auth.0001_initial... OK
|
||||
|
||||
##### Clean and build the UI
|
||||
|
||||
Prerequisites (on your local machine)
|
||||
- npm
|
||||
- nodejs
|
||||
|
||||
Required versions listed here https://github.com/ansible/ansible-ui/blob/main/README.md
|
||||
|
||||
On your local machine (not in awx container)
|
||||
|
||||
```bash
|
||||
$ docker exec tools_awx_1 make clean-ui ui-devel
|
||||
make clean/ui-next ui-next
|
||||
```
|
||||
|
||||
See [the ui development documentation](../../awx/ui/README.md) for more information on using the frontend development, build, and test tooling.
|
||||
This will clone the ansible-ui into the `awx/ui_next/src` directory and build the static files. Then when the containers come up, awx-manage collectstatic will copy those files into the proper place.
|
||||
|
||||
Once migrations are completed and the UI is built, you can begin using AWX. The UI can be reached in your browser at `https://localhost:8043/#/home`, and the API can be found at `https://localhost:8043/api/v2`.
|
||||
You can also use `UI_NEXT_LOCAL` to build from a locally cloned ansible-ui repo.
|
||||
|
||||
See [the ui development documentation](https://github.com/ansible/ansible-ui/blob/main/CONTRIBUTING.md) for more information on using the frontend development, build, and test tooling.
|
||||
|
||||
Once migrations are completed and the UI is built, you can begin using AWX. The UI can be reached in your browser at `https://localhost:8043/`, and the API can be found at `https://localhost:8043/api/v2`.
|
||||
|
||||
##### Create an admin user
|
||||
|
||||
@@ -561,11 +573,11 @@ If you have a playbook like:
|
||||
var: the_secret_from_vault
|
||||
```
|
||||
|
||||
And run it through AWX with the credential `Credential From Vault via Token Auth` tied to it, the debug should result in `this_is_the_secret_value`. If you run it through AWX with the credential `Credential From Vault via Userpass Auth`, the debug should result in `this_is_the_userpass_secret_value`.
|
||||
And run it through AWX with the credential `Credential From Vault via Token Auth` tied to it, the debug should result in `this_is_the_secret_value`. If you run it through AWX with the credential `Credential From Vault via Userpass Auth`, the debug should result in `this_is_the_userpass_secret_value`.
|
||||
|
||||
### HashiVault with LDAP
|
||||
|
||||
If you wish to have your OpenLDAP container connected to the Vault container, you will first need to have the OpenLDAP container running alongside AWX and Vault.
|
||||
If you wish to have your OpenLDAP container connected to the Vault container, you will first need to have the OpenLDAP container running alongside AWX and Vault.
|
||||
|
||||
|
||||
```bash
|
||||
@@ -574,7 +586,7 @@ VAULT=true LDAP=true make docker-compose
|
||||
|
||||
```
|
||||
|
||||
Similar to the above, you will need to unseal the vault before we can run the other needed playbooks.
|
||||
Similar to the above, you will need to unseal the vault before we can run the other needed playbooks.
|
||||
|
||||
```bash
|
||||
|
||||
@@ -582,7 +594,7 @@ ansible-playbook tools/docker-compose/ansible/unseal_vault.yml
|
||||
|
||||
```
|
||||
|
||||
Now that the vault is unsealed, we can plumb the vault container now while passing true to enable_ldap extra var.
|
||||
Now that the vault is unsealed, we can plumb the vault container now while passing true to enable_ldap extra var.
|
||||
|
||||
|
||||
```bash
|
||||
@@ -595,7 +607,7 @@ ansible-playbook tools/docker-compose/ansible/plumb_vault.yml -e enable_ldap=tru
|
||||
|
||||
```
|
||||
|
||||
This will populate your AWX instance with LDAP specific items.
|
||||
This will populate your AWX instance with LDAP specific items.
|
||||
|
||||
- A vault LDAP Lookup Cred tied to the LDAP `awx_ldap_vault` user called `Vault LDAP Lookup Cred`
|
||||
- A credential called `Credential From HashiCorp Vault via LDAP Auth` which is of the created type using the `Vault LDAP Lookup Cred` to get the secret.
|
||||
|
||||
@@ -47,9 +47,6 @@ services:
|
||||
{% if minikube_container_group|bool %}
|
||||
MINIKUBE_CONTAINER_GROUP: "true"
|
||||
{% endif %}
|
||||
links:
|
||||
- postgres
|
||||
- redis_{{ container_postfix }}
|
||||
networks:
|
||||
- awx
|
||||
- service-mesh
|
||||
@@ -80,8 +77,12 @@ services:
|
||||
- "~/.kube/config:/var/lib/awx/.kube/config"
|
||||
- "redis_socket_{{ container_postfix }}:/var/run/redis/:rw"
|
||||
privileged: true
|
||||
{% if editable_dependencies | length > 0 %}
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_started
|
||||
redis_{{ container_postfix }}:
|
||||
condition: service_started
|
||||
{% if editable_dependencies | length > 0 %}
|
||||
init_awx:
|
||||
condition: service_completed_successfully
|
||||
{% endif %}
|
||||
@@ -197,10 +198,6 @@ services:
|
||||
volumes:
|
||||
- "../../docker-compose/_sources/prometheus.yml:/etc/prometheus/prometheus.yml"
|
||||
- "prometheus_storage:/prometheus:rw"
|
||||
links:
|
||||
{% for i in range(control_plane_node_count|int) %}
|
||||
- awx_{{ loop.index }}:awx{{ loop.index }} # because underscores are not valid in hostnames
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if enable_grafana|bool %}
|
||||
grafana:
|
||||
@@ -214,8 +211,6 @@ services:
|
||||
volumes:
|
||||
- "../../grafana:/etc/grafana/provisioning"
|
||||
- "grafana_storage:/var/lib/grafana:rw"
|
||||
links:
|
||||
- prometheus
|
||||
depends_on:
|
||||
- prometheus
|
||||
{% endif %}
|
||||
@@ -318,8 +313,6 @@ services:
|
||||
container_name: tools_receptor_hop
|
||||
hostname: receptor-hop
|
||||
command: 'receptor --config /etc/receptor/receptor.conf'
|
||||
links:
|
||||
- awx_1
|
||||
networks:
|
||||
- awx
|
||||
ports:
|
||||
@@ -335,8 +328,6 @@ services:
|
||||
command: 'receptor --config /etc/receptor/receptor.conf'
|
||||
environment:
|
||||
RECEPTORCTL_SOCKET: {{ receptor_socket_file }}
|
||||
links:
|
||||
- receptor-hop
|
||||
networks:
|
||||
- awx
|
||||
volumes:
|
||||
|
||||
@@ -23,8 +23,6 @@ else
|
||||
wait-for-migrations
|
||||
fi
|
||||
|
||||
# Make sure that the UI static file directory exists, Django complains otherwise.
|
||||
mkdir -p /awx_devel/awx/ui/build/static
|
||||
|
||||
# Make sure that the UI_NEXT statifc file directory exists, if UI_NEXT is not built yet put a placeholder file in it.
|
||||
if [ ! -d "/awx_devel/awx/ui_next/build/awx" ]; then
|
||||
|
||||
@@ -37,7 +37,7 @@ def post_webhook(file, webhook_key, url, verbose, event_type, insecure):
|
||||
|
||||
\b
|
||||
For setting up webhooks in AWX see:
|
||||
https://docs.ansible.com/ansible-tower/latest/html/userguide/webhooks.html
|
||||
https://ansible.readthedocs.io/projects/awx/en/latest/userguide/webhooks.html
|
||||
|
||||
\b
|
||||
Example usage for GitHub:
|
||||
|
||||
@@ -30,13 +30,13 @@ Request server configuration from Ansible Tower
|
||||
|
||||
Usage:
|
||||
Execution using positional parameters:
|
||||
$($MyInvocation.MyCommand.Name) https://example.towerhost.net 44d7507f2ead49af5fca80aa18fd24bc 38
|
||||
$($MyInvocation.MyCommand.Name) https://example.platformhost.net 44d7507f2ead49af5fca80aa18fd24bc 38
|
||||
|
||||
Ignore self-signed certificates using named parameters:
|
||||
$($MyInvocation.MyCommand.Name) -k -s https://example.towerhost.local -c 44d7507f2ead49af5fca80aa18fd24bc -t 38
|
||||
$($MyInvocation.MyCommand.Name) -k -s https://example.platformhost.local -c 44d7507f2ead49af5fca80aa18fd24bc -t 38
|
||||
|
||||
Execution using optional extra_vars:
|
||||
$($MyInvocation.MyCommand.Name) https://example.towerhost.net 44d7507f2ead49af5fca80aa18fd24bc 38 '{ key: value, dict: { key: value }}'
|
||||
$($MyInvocation.MyCommand.Name) https://example.platformhost.net 44d7507f2ead49af5fca80aa18fd24bc 38 '{ key: value, dict: { key: value }}'
|
||||
|
||||
Options:
|
||||
-help, -h Show this message
|
||||
|
||||
Reference in New Issue
Block a user