Compare commits


1 Commit

Author SHA1 Message Date
Alex Corey | d87c091eea | Refactors the project form | 2022-10-05 15:43:01 -04:00
973 changed files with 21109 additions and 25781 deletions

View File

@@ -53,16 +53,6 @@ https://github.com/ansible/awx/#get-involved \
 Thank you once again for this and your interest in AWX!
-### Red Hat Support Team
-Hi! \
-\
-It appears that you are using an RPM build for RHEL. Please reach out to the Red Hat support team and submit a ticket. \
-\
-Here is the link to do so: \
-\
-https://access.redhat.com/support \
-\
-Thank you for your submission and for supporting AWX!
 ## Common
@@ -106,13 +96,6 @@ The Ansible Community is looking at building an EE that corresponds to all of th
 ### Oracle AWX
 We'd be happy to help if you can reproduce this with AWX since we do not have Oracle's Linux Automation Manager. If you need help with this specific version of Oracle's Linux Automation Manager, you will need to contact Oracle for support.
-### Community Resolved
-Hi,
-We are happy to see that it appears a fix has been provided for your issue, so we will go ahead and close this ticket. Please feel free to reopen if any other problems arise.
-<name of community member who helped> thanks so much for taking the time to write a thoughtful and helpful response to this issue!
 ### AWX Release
 Subject: Announcing AWX Xa.Ya.za and AWX-Operator Xb.Yb.zb

View File

@@ -1,10 +1,7 @@
 ---
 name: CI
 env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-  CI_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  DEV_DOCKER_TAG_BASE: ghcr.io/${{ github.repository_owner }}
-  COMPOSE_TAG: ${{ github.base_ref || 'devel' }}
+  BRANCH: ${{ github.base_ref || 'devel' }}
 on:
   pull_request:
 jobs:
@@ -20,33 +17,85 @@ jobs:
         tests:
           - name: api-test
             command: /start_tests.sh
+            label: Run API Tests
          - name: api-lint
             command: /var/lib/awx/venv/awx/bin/tox -e linters
+            label: Run API Linters
           - name: api-swagger
             command: /start_tests.sh swagger
+            label: Generate API Reference
           - name: awx-collection
             command: /start_tests.sh test_collection_all
+            label: Run Collection Tests
           - name: api-schema
+            label: Check API Schema
             command: /start_tests.sh detect-schema-change SCHEMA_DIFF_BASE_BRANCH=${{ github.event.pull_request.base.ref }}
           - name: ui-lint
+            label: Run UI Linters
             command: make ui-lint
           - name: ui-test-screens
+            label: Run UI Screens Tests
             command: make ui-test-screens
           - name: ui-test-general
+            label: Run UI General Tests
             command: make ui-test-general
     steps:
       - uses: actions/checkout@v2
-      - name: Run check ${{ matrix.tests.name }}
-        run: AWX_DOCKER_CMD='${{ matrix.tests.command }}' make github_ci_runner
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
+      - name: ${{ matrix.tests.label }}
+        run: |
+          docker run -u $(id -u) --rm -v ${{ github.workspace }}:/awx_devel/:Z \
+            --workdir=/awx_devel ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} ${{ matrix.tests.command }}
   dev-env:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
+      - name: Get python version from Makefile
+        run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
+      - name: Install python ${{ env.py_version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ env.py_version }}
+      - name: Log in to registry
+        run: |
+          echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
+      - name: Pre-pull image to warm build cache
+        run: |
+          docker pull ghcr.io/${{ github.repository_owner }}/awx_devel:${{ env.BRANCH }} || :
+      - name: Build image
+        run: |
+          DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }} COMPOSE_TAG=${{ env.BRANCH }} make docker-compose-build
       - name: Run smoke test
-        run: make github_ci_setup && ansible-playbook tools/docker-compose/ansible/smoke-test.yml -v
+        run: |
+          export DEV_DOCKER_TAG_BASE=ghcr.io/${{ github.repository_owner }}
+          export COMPOSE_TAG=${{ env.BRANCH }}
+          ansible-playbook tools/docker-compose/ansible/smoke-test.yml -e repo_dir=$(pwd) -v
   awx-operator:
     runs-on: ubuntu-latest
@@ -95,22 +144,3 @@ jobs:
         env:
           AWX_TEST_IMAGE: awx
           AWX_TEST_VERSION: ci
-  collection-sanity:
-    name: awx_collection sanity
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: false
-    steps:
-      - uses: actions/checkout@v2
-      # The containers that GitHub Actions use have Ansible installed, so upgrade to make sure we have the latest version.
-      - name: Upgrade ansible-core
-        run: python3 -m pip install --upgrade ansible-core
-      - name: Run sanity tests
-        run: make test_collection_sanity
-        env:
-          # needed due to cgroupsv2. This is fixed, but a stable release
-          # with the fix has not been made yet.
-          ANSIBLE_TEST_PREFER_PODMAN: 1
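Both jobs above repeat the same warm-cache pattern: log in to GHCR, pre-pull the devel image so Docker can reuse its layers, then rebuild. The `|| :` is what keeps the pre-pull from failing the job when the tag does not exist yet. A minimal shell sketch of the idiom, with `OWNER` standing in for `${{ github.repository_owner }}` and `devel` for `${{ env.BRANCH }}`:

    # Pre-pull to warm the build cache; ':' is a no-op that always exits 0,
    # so a missing tag (first build on a new branch) is not fatal.
    docker pull ghcr.io/OWNER/awx_devel:devel || :

    # The build then reuses the pulled layers where the Dockerfile allows.
    DEV_DOCKER_TAG_BASE=ghcr.io/OWNER COMPOSE_TAG=devel make docker-compose-build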

View File

@@ -1,7 +1,5 @@
 ---
 name: Build/Push Development Images
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   push:
     branches:

View File

@@ -1,12 +1,9 @@
 ---
 name: E2E Tests
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   pull_request_target:
     types: [labeled]
 jobs:
   e2e-test:
     if: contains(github.event.pull_request.labels.*.name, 'qe:e2e')
     runs-on: ubuntu-latest
@@ -107,3 +104,5 @@ jobs:
       with:
         name: AWX-logs-${{ matrix.job }}
         path: make-docker-compose-output.log

View File

@@ -1,26 +0,0 @@
----
-name: Feature branch deletion cleanup
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
-on:
-  delete:
-    branches:
-      - feature_**
-jobs:
-  push:
-    runs-on: ubuntu-latest
-    permissions:
-      packages: write
-      contents: read
-    steps:
-      - name: Delete API Schema
-        env:
-          AWS_ACCESS_KEY: ${{ secrets.AWS_ACCESS_KEY }}
-          AWS_SECRET_KEY: ${{ secrets.AWS_SECRET_KEY }}
-          AWS_REGION: 'us-east-1'
-        run: |
-          ansible localhost -c local -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
-          ansible localhost -c local -m aws_s3 \
-            -a "bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=delete permission=public-read"

View File

@@ -13,13 +13,21 @@ jobs:
       packages: write
       contents: read
     steps:
-      - name: Check for each of the lines
-        env:
-          PR_BODY: ${{ github.event.pull_request.body }}
+      - name: Write PR body to a file
         run: |
-          echo "$PR_BODY" | grep "Bug, Docs Fix or other nominal change" > Z
-          echo "$PR_BODY" | grep "New or Enhanced Feature" > Y
-          echo "$PR_BODY" | grep "Breaking Change" > X
+          cat >> pr.body << __SOME_RANDOM_PR_EOF__
+          ${{ github.event.pull_request.body }}
+          __SOME_RANDOM_PR_EOF__
+      - name: Display the received body for troubleshooting
+        run: cat pr.body
+      # We want to write these out individually just in case the options were joined on a single line
+      - name: Check for each of the lines
+        run: |
+          grep "Bug, Docs Fix or other nominal change" pr.body > Z
+          grep "New or Enhanced Feature" pr.body > Y
+          grep "Breaking Change" pr.body > X
           exit 0
       # We exit 0 and set the shell to prevent the returns from the greps from failing this step
       # See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#exit-codes-and-error-action-preference
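The rewritten check above first snapshots the PR body into `pr.body` with a heredoc, then greps for each checklist phrase separately, since the phrases could have been joined onto one line. A standalone sketch of the same technique (the sample body is invented; the file names `pr.body`, `Z`, `Y`, `X` follow the workflow):

    #!/usr/bin/env bash
    # Quoted delimiter: the body is written verbatim, nothing is expanded
    # by the shell.
    cat > pr.body << '__SOME_RANDOM_PR_EOF__'
    [x] Bug, Docs Fix or other nominal change
    [ ] New or Enhanced Feature
    __SOME_RANDOM_PR_EOF__

    # One grep per marker; each match (if any) lands in its own file.
    grep "Bug, Docs Fix or other nominal change" pr.body > Z
    grep "New or Enhanced Feature" pr.body > Y
    grep "Breaking Change" pr.body > X
    # grep exits 1 on no match, so end with exit 0 (and, in Actions, a
    # non-default shell) to keep the step green; later steps can test
    # which of Z/Y/X are non-empty.
    exit 0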

View File

@@ -1,9 +1,5 @@
 ---
 name: Promote Release
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   release:
     types: [published]
@@ -38,13 +34,9 @@ jobs:
       - name: Build collection and publish to galaxy
         run: |
           COLLECTION_TEMPLATE_VERSION=true COLLECTION_NAMESPACE=${{ env.collection_namespace }} make build_collection
-          if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
-            echo "Galaxy release already done"; \
-          else \
-            ansible-galaxy collection publish \
-              --token=${{ secrets.GALAXY_TOKEN }} \
-              awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz; \
-          fi
+          ansible-galaxy collection publish \
+            --token=${{ secrets.GALAXY_TOKEN }} \
+            awx_collection_build/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz
       - name: Set official pypi info
         run: echo pypi_repo=pypi >> $GITHUB_ENV
@@ -56,7 +48,6 @@ jobs:
       - name: Build awxkit and upload to pypi
         run: |
-          git reset --hard
           cd awxkit && python3 setup.py bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
@@ -79,6 +70,4 @@ jobs:
           docker tag ghcr.io/${{ github.repository }}:${{ github.event.release.tag_name }} quay.io/${{ github.repository }}:latest
           docker push quay.io/${{ github.repository }}:${{ github.event.release.tag_name }}
           docker push quay.io/${{ github.repository }}:latest
-          docker pull ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-          docker tag ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }} quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
-          docker push quay.io/${{ github.repository_owner }}/awx-ee:${{ github.event.release.tag_name }}
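The `if`/`else` removed above made re-running the release workflow safe: a HEAD request against the collection's download URL answers 302 when that version is already on Galaxy, in which case publishing is skipped. A rough shell equivalent of that idempotency check (namespace `awx` and version `21.8.0` are placeholders, and `GALAXY_TOKEN` stands in for the repository secret):

    #!/usr/bin/env bash
    TARBALL="awx-awx-21.8.0.tar.gz"   # hypothetical namespace-name-version
    URL="https://galaxy.ansible.com/download/$TARBALL"

    # -sw '%{http_code}' appends the status code; tail -1 isolates it.
    if [ "$(curl --head -sw '%{http_code}' "$URL" | tail -1)" == "302" ]; then
        echo "Galaxy release already done"
    else
        ansible-galaxy collection publish \
            --token="$GALAXY_TOKEN" \
            "awx_collection_build/$TARBALL"
    fi

After this change the publish runs unconditionally, so a re-run of the workflow would presumably fail at this step instead of short-circuiting.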

View File

@@ -1,9 +1,5 @@
 ---
 name: Stage Release
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   workflow_dispatch:
     inputs:
@@ -84,20 +80,6 @@ jobs:
           -e push=yes \
           -e awx_official=yes
-      - name: Log in to GHCR
-        run: |
-          echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
-      - name: Log in to Quay
-        run: |
-          echo ${{ secrets.QUAY_TOKEN }} | docker login quay.io -u ${{ secrets.QUAY_USER }} --password-stdin
-      - name: tag awx-ee:latest with version input
-        run: |
-          docker pull quay.io/ansible/awx-ee:latest
-          docker tag quay.io/ansible/awx-ee:latest ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
-          docker push ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
       - name: Build and stage awx-operator
         working-directory: awx-operator
         run: |
@@ -117,7 +99,6 @@ jobs:
         env:
           AWX_TEST_IMAGE: ${{ github.repository }}
           AWX_TEST_VERSION: ${{ github.event.inputs.version }}
-          AWX_EE_TEST_IMAGE: ghcr.io/${{ github.repository_owner }}/awx-ee:${{ github.event.inputs.version }}
       - name: Create draft release for AWX
         working-directory: awx

View File

@@ -1,15 +1,10 @@
 ---
 name: Upload API Schema
-env:
-  LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting
 on:
   push:
     branches:
       - devel
       - release_**
-      - feature_**
 jobs:
   push:
     runs-on: ubuntu-latest

View File

@@ -12,7 +12,7 @@ recursive-include awx/plugins *.ps1
 recursive-include requirements *.txt
 recursive-include requirements *.yml
 recursive-include config *
-recursive-include licenses *
+recursive-include docs/licenses *
 recursive-exclude awx devonly.py*
 recursive-exclude awx/api/tests *
 recursive-exclude awx/main/tests *

View File

@@ -6,20 +6,7 @@ CHROMIUM_BIN=/tmp/chrome-linux/chrome
 GIT_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
 MANAGEMENT_COMMAND ?= awx-manage
 VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py)
-# ansible-test requires a semver compatible version, so we allow overrides to hack it
-COLLECTION_VERSION ?= $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
-# args for the ansible-test sanity command
-COLLECTION_SANITY_ARGS ?= --docker
-# collection unit testing directories
-COLLECTION_TEST_DIRS ?= awx_collection/test/awx
-# collection integration test directories (defaults to all)
-COLLECTION_TEST_TARGET ?=
-# args for collection install
-COLLECTION_PACKAGE ?= awx
-COLLECTION_NAMESPACE ?= awx
-COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
-COLLECTION_TEMPLATE_VERSION ?= false
+COLLECTION_VERSION := $(shell $(PYTHON) tools/scripts/scm_version.py | cut -d . -f 1-3)
 # NOTE: This defaults the container image version to the branch that's active
 COMPOSE_TAG ?= $(GIT_BRANCH)
@@ -47,7 +34,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg2,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==58.2.0 setuptools_scm[toml]==6.4.2 wheel==0.36.2
 NAME ?= awx
@@ -65,7 +52,7 @@ I18N_FLAG_FILE = .i18n_built
     sdist \
     ui-release ui-devel \
     VERSION PYTHON_VERSION docker-compose-sources \
-    .git/hooks/pre-commit github_ci_setup github_ci_runner
+    .git/hooks/pre-commit
 clean-tmp:
     rm -rf tmp/
@@ -98,7 +85,6 @@ clean: clean-ui clean-api clean-awxkit clean-dist
 clean-api:
     rm -rf build $(NAME)-$(VERSION) *.egg-info
-    rm -rf .tox
    find . -type f -regex ".*\.py[co]$$" -delete
     find . -type d -name "__pycache__" -delete
     rm -f awx/awx_test.sqlite3*
@@ -131,7 +117,7 @@ virtualenv_awx:
     fi; \
     fi
-## Install third-party requirements needed for AWX's environment.
+# this does not use system site packages intentionally
 requirements_awx: virtualenv_awx
     if [[ "$(PIP_OPTIONS)" == *"--no-index"* ]]; then \
@@ -195,7 +181,7 @@ collectstatic:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
+    mkdir -p awx/public/static && $(PYTHON) manage.py collectstatic --clear --noinput > /dev/null 2>&1
 DEV_RELOAD_COMMAND ?= supervisorctl restart tower-processes:*
@@ -301,28 +287,19 @@ test:
     cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
     awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
-## Login to Github container image registry, pull image, then build image.
-github_ci_setup:
-    # GITHUB_ACTOR is automatic github actions env var
-    # CI_GITHUB_TOKEN is defined in .github files
-    echo $(CI_GITHUB_TOKEN) | docker login ghcr.io -u $(GITHUB_ACTOR) --password-stdin
-    docker pull $(DEVEL_IMAGE_NAME) || : # Pre-pull image to warm build cache
-    make docker-compose-build
-## Runs AWX_DOCKER_CMD inside a new docker container.
-docker-runner:
-    docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
-## Builds image and runs AWX_DOCKER_CMD in it, mainly for .github checks.
-github_ci_runner: github_ci_setup docker-runner
+COLLECTION_TEST_DIRS ?= awx_collection/test/awx
+COLLECTION_TEST_TARGET ?=
+COLLECTION_PACKAGE ?= awx
+COLLECTION_NAMESPACE ?= awx
+COLLECTION_INSTALL = ~/.ansible/collections/ansible_collections/$(COLLECTION_NAMESPACE)/$(COLLECTION_PACKAGE)
+COLLECTION_TEMPLATE_VERSION ?= false
 test_collection:
     rm -f $(shell ls -d $(VENV_BASE)/awx/lib/python* | head -n 1)/no-global-site-packages.txt
     if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi && \
-    if ! [ -x "$(shell command -v ansible-playbook)" ]; then pip install ansible-core; fi
+    pip install ansible-core && \
+    ansible --version
     py.test $(COLLECTION_TEST_DIRS) -v
 # The python path needs to be modified so that the tests can find Ansible within the container
 # First we will use anything explicitly set as PYTHONPATH
@@ -352,13 +329,8 @@ install_collection: build_collection
     rm -rf $(COLLECTION_INSTALL)
     ansible-galaxy collection install awx_collection_build/$(COLLECTION_NAMESPACE)-$(COLLECTION_PACKAGE)-$(COLLECTION_VERSION).tar.gz
-test_collection_sanity:
-    rm -rf awx_collection_build/
-    rm -rf $(COLLECTION_INSTALL)
-    if ! [ -x "$(shell command -v ansible-test)" ]; then pip install ansible-core; fi
-    ansible --version
-    COLLECTION_VERSION=1.0.0 make install_collection
-    cd $(COLLECTION_INSTALL) && ansible-test sanity $(COLLECTION_SANITY_ARGS)
+test_collection_sanity: install_collection
+    cd $(COLLECTION_INSTALL) && ansible-test sanity
 test_collection_integration: install_collection
     cd $(COLLECTION_INSTALL) && ansible-test integration $(COLLECTION_TEST_TARGET)
@@ -405,8 +377,6 @@ clean-ui:
     rm -rf awx/ui/build
     rm -rf awx/ui/src/locales/_build
     rm -rf $(UI_BUILD_FLAG_FILE)
-    # the collectstatic command doesn't like it if this dir doesn't exist.
-    mkdir -p awx/ui/build/static
 awx/ui/node_modules:
     NODE_OPTIONS=--max-old-space-size=6144 $(NPM_BIN) --prefix awx/ui --loglevel warn --force ci
@@ -416,18 +386,20 @@ $(UI_BUILD_FLAG_FILE):
     $(PYTHON) tools/scripts/compilemessages.py
     $(NPM_BIN) --prefix awx/ui --loglevel warn run compile-strings
     $(NPM_BIN) --prefix awx/ui --loglevel warn run build
-    mkdir -p awx/public/static/css
-    mkdir -p awx/public/static/js
-    mkdir -p awx/public/static/media
-    cp -r awx/ui/build/static/css/* awx/public/static/css
-    cp -r awx/ui/build/static/js/* awx/public/static/js
-    cp -r awx/ui/build/static/media/* awx/public/static/media
     touch $@
 ui-release: $(UI_BUILD_FLAG_FILE)
 ui-devel: awx/ui/node_modules
     @$(MAKE) -B $(UI_BUILD_FLAG_FILE)
+    mkdir -p /var/lib/awx/public/static/css
+    mkdir -p /var/lib/awx/public/static/js
+    mkdir -p /var/lib/awx/public/static/media
+    cp -r awx/ui/build/static/css/* /var/lib/awx/public/static/css
+    cp -r awx/ui/build/static/js/* /var/lib/awx/public/static/js
+    cp -r awx/ui/build/static/media/* /var/lib/awx/public/static/media
 ui-devel-instrumented: awx/ui/node_modules
     $(NPM_BIN) --prefix awx/ui --loglevel warn run start-instrumented
@@ -479,9 +451,8 @@ awx/projects:
 COMPOSE_UP_OPTS ?=
 COMPOSE_OPTS ?=
 CONTROL_PLANE_NODE_COUNT ?= 1
-EXECUTION_NODE_COUNT ?= 0
+EXECUTION_NODE_COUNT ?= 2
 MINIKUBE_CONTAINER_GROUP ?= false
-MINIKUBE_SETUP ?= false # if false, run minikube separately
 EXTRA_SOURCES_ANSIBLE_OPTS ?=
 ifneq ($(ADMIN_PASSWORD),)
@@ -490,7 +461,7 @@ endif
 docker-compose-sources: .git/hooks/pre-commit
     @if [ $(MINIKUBE_CONTAINER_GROUP) = true ]; then\
-        ansible-playbook -i tools/docker-compose/inventory -e minikube_setup=$(MINIKUBE_SETUP) tools/docker-compose-minikube/deploy.yml; \
+        ansible-playbook -i tools/docker-compose/inventory tools/docker-compose-minikube/deploy.yml; \
     fi;
     ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/sources.yml \
@@ -620,12 +591,13 @@ pot: $(UI_BUILD_FLAG_FILE)
 po: $(UI_BUILD_FLAG_FILE)
     $(NPM_BIN) --prefix awx/ui --loglevel warn run extract-strings -- --clean
+LANG = "en_us"
 ## generate API django .pot .po
 messages:
     @if [ "$(VENV_BASE)" ]; then \
         . $(VENV_BASE)/awx/bin/activate; \
     fi; \
-    $(PYTHON) manage.py makemessages -l en_us --keep-pot
+    $(PYTHON) manage.py makemessages -l $(LANG) --keep-pot
 print-%:
     @echo $($*)
@@ -663,4 +635,4 @@ help/generate:
     } \
     } \
     { lastLine = $$0 }' $(MAKEFILE_LIST) | sort -u
     @printf "\n"
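One target this Makefile keeps is the `print-%` pattern rule visible near the end of the section, a stock trick for inspecting the value of any make variable. Usage from a shell, with variable names taken from the surrounding diff:

    # `make print-FOO` matches the pattern rule `print-%`, whose recipe is
    # `@echo $($*)` -- $* is the stem (FOO), so $(FOO) gets echoed.
    make print-COMPOSE_TAG        # e.g. prints the branch-derived image tag
    make print-VENV_BOOTSTRAP     # prints the pinned pip/setuptools/wheel set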

View File

@@ -67,6 +67,7 @@ else:
     from django.db import connection
 if HAS_DJANGO is True:
     # See upgrade blocker note in requirements/README.md
     try:
+        names_digest('foo', 'bar', 'baz', length=8)

View File

@@ -96,15 +96,6 @@ register(
     category=_('Authentication'),
     category_slug='authentication',
 )
-register(
-    'ALLOW_METRICS_FOR_ANONYMOUS_USERS',
-    field_class=fields.BooleanField,
-    default=False,
-    label=_('Allow anonymous users to poll metrics'),
-    help_text=_('If true, anonymous users are allowed to poll metrics.'),
-    category=_('Authentication'),
-    category_slug='authentication',
-)
 def authentication_validate(serializer, attrs):

View File

@@ -80,6 +80,7 @@ class VerbatimField(serializers.Field):
 class OAuth2ProviderField(fields.DictField):
     default_error_messages = {'invalid_key_names': _('Invalid key names: {invalid_key_names}')}
     valid_key_names = {'ACCESS_TOKEN_EXPIRE_SECONDS', 'AUTHORIZATION_CODE_EXPIRE_SECONDS', 'REFRESH_TOKEN_EXPIRE_SECONDS'}
+    child = fields.IntegerField(min_value=1)

View File

@@ -160,6 +160,7 @@ class FieldLookupBackend(BaseFilterBackend):
     NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)
+
     def get_fields_from_lookup(self, model, lookup):
         if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
             path, suffix = lookup.rsplit('__', 1)
         else:

View File

@@ -6,6 +6,7 @@ import inspect
 import logging
 import time
 import uuid
+import urllib.parse
 # Django
 from django.conf import settings
@@ -13,7 +14,7 @@ from django.contrib.auth import views as auth_views
 from django.contrib.contenttypes.models import ContentType
 from django.core.cache import cache
 from django.core.exceptions import FieldDoesNotExist
-from django.db import connection, transaction
+from django.db import connection
 from django.db.models.fields.related import OneToOneRel
 from django.http import QueryDict
 from django.shortcuts import get_object_or_404
@@ -29,7 +30,7 @@ from rest_framework.response import Response
 from rest_framework import status
 from rest_framework import views
 from rest_framework.permissions import AllowAny
-from rest_framework.renderers import StaticHTMLRenderer
+from rest_framework.renderers import StaticHTMLRenderer, JSONRenderer
 from rest_framework.negotiation import DefaultContentNegotiation
 # AWX
@@ -40,7 +41,7 @@ from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd,
 from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
 from awx.main.views import ApiErrorView
-from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
+from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer, UserSerializer
 from awx.api.versioning import URLPathVersioning
 from awx.api.metadata import SublistAttachDetatchMetadata, Metadata
 from awx.conf import settings_registry
@@ -64,7 +65,6 @@ __all__ = [
     'ParentMixin',
     'SubListAttachDetachAPIView',
     'CopyAPIView',
-    'GenericCancelView',
     'BaseUsersList',
 ]
@@ -90,9 +90,13 @@ class LoggedLoginView(auth_views.LoginView):
     def post(self, request, *args, **kwargs):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
+        current_user = getattr(request, 'user', None)
         if request.user.is_authenticated:
             logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
             ret.set_cookie('userLoggedIn', 'true')
+            current_user = UserSerializer(self.request.user)
+            current_user = smart_str(JSONRenderer().render(current_user.data))
+            current_user = urllib.parse.quote('%s' % current_user, '')
             ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
         return ret
@@ -135,6 +139,7 @@ def get_default_schema():
 class APIView(views.APIView):
+
     schema = get_default_schema()
     versioning_class = URLPathVersioning
@@ -248,7 +253,7 @@ class APIView(views.APIView):
         response['X-API-Query-Time'] = '%0.3fs' % sum(q_times)
         if getattr(self, 'deprecated', False):
-            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'
+            response['Warning'] = '299 awx "This resource has been deprecated and will be removed in a future release."'  # noqa
         return response
@@ -799,6 +804,7 @@ class RetrieveUpdateDestroyAPIView(RetrieveUpdateAPIView, DestroyAPIView):
 class ResourceAccessList(ParentMixin, ListAPIView):
+
     serializer_class = ResourceAccessListElementSerializer
     ordering = ('username',)
@@ -821,6 +827,7 @@ def trigger_delayed_deep_copy(*args, **kwargs):
 class CopyAPIView(GenericAPIView):
+
     serializer_class = CopySerializer
     permission_classes = (AllowAny,)
     copy_return_serializer_class = None
@@ -983,23 +990,6 @@ class CopyAPIView(GenericAPIView):
         return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
-class GenericCancelView(RetrieveAPIView):
-    # In subclass set model, serializer_class
-    obj_permission_type = 'cancel'
-    @transaction.non_atomic_requests
-    def dispatch(self, *args, **kwargs):
-        return super(GenericCancelView, self).dispatch(*args, **kwargs)
-    def post(self, request, *args, **kwargs):
-        obj = self.get_object()
-        if obj.can_cancel:
-            obj.cancel()
-            return Response(status=status.HTTP_202_ACCEPTED)
-        else:
-            return self.http_method_not_allowed(request, *args, **kwargs)
 class BaseUsersList(SubListCreateAttachDetachAPIView):
     def post(self, request, *args, **kwargs):
         ret = super(BaseUsersList, self).post(request, *args, **kwargs)

View File

@@ -128,7 +128,7 @@ class Metadata(metadata.SimpleMetadata):
         # Special handling of notification configuration where the required properties
         # are conditional on the type selected.
         if field.field_name == 'notification_configuration':
-            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
+            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
                 field_info[notification_type_name] = notification_type_class.init_parameters
         # Special handling of notification messages where the required properties
@@ -138,7 +138,7 @@ class Metadata(metadata.SimpleMetadata):
         except (AttributeError, KeyError):
             view_model = None
         if view_model == NotificationTemplate and field.field_name == 'messages':
-            for notification_type_name, notification_tr_name, notification_type_class in NotificationTemplate.NOTIFICATION_TYPES:
+            for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
                 field_info[notification_type_name] = notification_type_class.default_messages
         # Update type of fields returned...

View File

@@ -24,6 +24,7 @@ class DisabledPaginator(DjangoPaginator):
 class Pagination(pagination.PageNumberPagination):
     page_size_query_param = 'page_size'
     max_page_size = settings.MAX_PAGE_SIZE
+    count_disabled = False

View File

@@ -24,6 +24,7 @@ __all__ = [
     'InventoryInventorySourcesUpdatePermission',
     'UserPermission',
     'IsSystemAdminOrAuditor',
+    'InstanceGroupTowerPermission',
     'WorkflowApprovalPermission',
 ]

View File

@@ -22,6 +22,7 @@ class SurrogateEncoder(encoders.JSONEncoder):
 class DefaultJSONRenderer(renderers.JSONRenderer):
+    encoder_class = SurrogateEncoder
@@ -94,6 +95,7 @@ class BrowsableAPIRenderer(renderers.BrowsableAPIRenderer):
 class PlainTextRenderer(renderers.BaseRenderer):
+
     media_type = 'text/plain'
     format = 'txt'
@@ -104,15 +106,18 @@ class PlainTextRenderer(renderers.BaseRenderer):
 class DownloadTextRenderer(PlainTextRenderer):
+
     format = "txt_download"
 class AnsiTextRenderer(PlainTextRenderer):
+
     media_type = 'text/plain'
     format = 'ansi'
 class AnsiDownloadRenderer(PlainTextRenderer):
+
     format = "ansi_download"

View File

@@ -29,7 +29,6 @@ from django.utils.translation import gettext_lazy as _
from django.utils.encoding import force_str from django.utils.encoding import force_str
from django.utils.text import capfirst from django.utils.text import capfirst
from django.utils.timezone import now from django.utils.timezone import now
from django.core.validators import RegexValidator, MaxLengthValidator
# Django REST Framework # Django REST Framework
from rest_framework.exceptions import ValidationError, PermissionDenied from rest_framework.exceptions import ValidationError, PermissionDenied
@@ -113,7 +112,7 @@ from awx.main.utils import (
) )
from awx.main.utils.filters import SmartFilter from awx.main.utils.filters import SmartFilter
from awx.main.utils.named_url_graph import reset_counters from awx.main.utils.named_url_graph import reset_counters
from awx.main.scheduler.task_manager_models import TaskManagerModels from awx.main.scheduler.task_manager_models import TaskManagerInstanceGroups, TaskManagerInstances
from awx.main.redact import UriCleaner, REPLACE_STR from awx.main.redact import UriCleaner, REPLACE_STR
from awx.main.validators import vars_validate_or_raise from awx.main.validators import vars_validate_or_raise
@@ -121,9 +120,6 @@ from awx.main.validators import vars_validate_or_raise
from awx.api.versioning import reverse from awx.api.versioning import reverse
from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, VerbatimField, DeprecatedCredentialField from awx.api.fields import BooleanNullField, CharNullField, ChoiceNullField, VerbatimField, DeprecatedCredentialField
# AWX Utils
from awx.api.validators import HostnameRegexValidator
logger = logging.getLogger('awx.api.serializers') logger = logging.getLogger('awx.api.serializers')
# Fields that should be summarized regardless of object type. # Fields that should be summarized regardless of object type.
@@ -200,6 +196,7 @@ def reverse_gfk(content_object, request):
class CopySerializer(serializers.Serializer): class CopySerializer(serializers.Serializer):
name = serializers.CharField() name = serializers.CharField()
def validate(self, attrs): def validate(self, attrs):
@@ -431,6 +428,7 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
continue continue
summary_fields[fk] = OrderedDict() summary_fields[fk] = OrderedDict()
for field in related_fields: for field in related_fields:
fval = getattr(fkval, field, None) fval = getattr(fkval, field, None)
if fval is None and field == 'type': if fval is None and field == 'type':
@@ -928,6 +926,7 @@ class UnifiedJobListSerializer(UnifiedJobSerializer):
class UnifiedJobStdoutSerializer(UnifiedJobSerializer): class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
result_stdout = serializers.SerializerMethodField() result_stdout = serializers.SerializerMethodField()
class Meta: class Meta:
@@ -941,6 +940,7 @@ class UnifiedJobStdoutSerializer(UnifiedJobSerializer):
class UserSerializer(BaseSerializer): class UserSerializer(BaseSerializer):
password = serializers.CharField(required=False, default='', write_only=True, help_text=_('Write-only field used to change the password.')) password = serializers.CharField(required=False, default='', write_only=True, help_text=_('Write-only field used to change the password.'))
ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True) ldap_dn = serializers.CharField(source='profile.ldap_dn', read_only=True)
external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service')) external_account = serializers.SerializerMethodField(help_text=_('Set if the account is managed by an external service'))
@@ -1100,6 +1100,7 @@ class UserActivityStreamSerializer(UserSerializer):
class BaseOAuth2TokenSerializer(BaseSerializer): class BaseOAuth2TokenSerializer(BaseSerializer):
refresh_token = serializers.SerializerMethodField() refresh_token = serializers.SerializerMethodField()
token = serializers.SerializerMethodField() token = serializers.SerializerMethodField()
ALLOWED_SCOPES = ['read', 'write'] ALLOWED_SCOPES = ['read', 'write']
@@ -1217,6 +1218,7 @@ class UserPersonalTokenSerializer(BaseOAuth2TokenSerializer):
class OAuth2ApplicationSerializer(BaseSerializer): class OAuth2ApplicationSerializer(BaseSerializer):
show_capabilities = ['edit', 'delete'] show_capabilities = ['edit', 'delete']
class Meta: class Meta:
@@ -1451,6 +1453,7 @@ class ExecutionEnvironmentSerializer(BaseSerializer):
class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer): class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
status = serializers.ChoiceField(choices=Project.PROJECT_STATUS_CHOICES, read_only=True) status = serializers.ChoiceField(choices=Project.PROJECT_STATUS_CHOICES, read_only=True)
last_update_failed = serializers.BooleanField(read_only=True) last_update_failed = serializers.BooleanField(read_only=True)
last_updated = serializers.DateTimeField(read_only=True) last_updated = serializers.DateTimeField(read_only=True)
@@ -1541,6 +1544,7 @@ class ProjectSerializer(UnifiedJobTemplateSerializer, ProjectOptionsSerializer):
class ProjectPlaybooksSerializer(ProjectSerializer): class ProjectPlaybooksSerializer(ProjectSerializer):
playbooks = serializers.SerializerMethodField(help_text=_('Array of playbooks available within this project.')) playbooks = serializers.SerializerMethodField(help_text=_('Array of playbooks available within this project.'))
class Meta: class Meta:
@@ -1558,6 +1562,7 @@ class ProjectPlaybooksSerializer(ProjectSerializer):
class ProjectInventoriesSerializer(ProjectSerializer): class ProjectInventoriesSerializer(ProjectSerializer):
inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, ' 'not comprehensive.')) inventory_files = serializers.ReadOnlyField(help_text=_('Array of inventory files and directories available within this project, ' 'not comprehensive.'))
class Meta: class Meta:
@@ -1572,6 +1577,7 @@ class ProjectInventoriesSerializer(ProjectSerializer):
class ProjectUpdateViewSerializer(ProjectSerializer): class ProjectUpdateViewSerializer(ProjectSerializer):
can_update = serializers.BooleanField(read_only=True) can_update = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@@ -1601,6 +1607,7 @@ class ProjectUpdateSerializer(UnifiedJobSerializer, ProjectOptionsSerializer):
class ProjectUpdateDetailSerializer(ProjectUpdateSerializer): class ProjectUpdateDetailSerializer(ProjectUpdateSerializer):
playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.')) playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))
class Meta: class Meta:
@@ -1623,6 +1630,7 @@ class ProjectUpdateListSerializer(ProjectUpdateSerializer, UnifiedJobListSeriali
class ProjectUpdateCancelSerializer(ProjectUpdateSerializer): class ProjectUpdateCancelSerializer(ProjectUpdateSerializer):
can_cancel = serializers.BooleanField(read_only=True) can_cancel = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@@ -1960,6 +1968,7 @@ class GroupSerializer(BaseSerializerWithVariables):
class GroupTreeSerializer(GroupSerializer): class GroupTreeSerializer(GroupSerializer):
children = serializers.SerializerMethodField() children = serializers.SerializerMethodField()
class Meta: class Meta:
@@ -2057,6 +2066,7 @@ class InventorySourceOptionsSerializer(BaseSerializer):
class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOptionsSerializer): class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOptionsSerializer):
status = serializers.ChoiceField(choices=InventorySource.INVENTORY_SOURCE_STATUS_CHOICES, read_only=True) status = serializers.ChoiceField(choices=InventorySource.INVENTORY_SOURCE_STATUS_CHOICES, read_only=True)
last_update_failed = serializers.BooleanField(read_only=True) last_update_failed = serializers.BooleanField(read_only=True)
last_updated = serializers.DateTimeField(read_only=True) last_updated = serializers.DateTimeField(read_only=True)
@@ -2201,22 +2211,15 @@ class InventorySourceSerializer(UnifiedJobTemplateSerializer, InventorySourceOpt
class InventorySourceUpdateSerializer(InventorySourceSerializer): class InventorySourceUpdateSerializer(InventorySourceSerializer):
can_update = serializers.BooleanField(read_only=True) can_update = serializers.BooleanField(read_only=True)
class Meta: class Meta:
fields = ('can_update',) fields = ('can_update',)
def validate(self, attrs):
project = self.instance.source_project
if project:
failed_reason = project.get_reason_if_failed()
if failed_reason:
raise serializers.ValidationError(failed_reason)
return super(InventorySourceUpdateSerializer, self).validate(attrs)
class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer): class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSerializer):
custom_virtualenv = serializers.ReadOnlyField() custom_virtualenv = serializers.ReadOnlyField()
class Meta: class Meta:
@@ -2257,6 +2260,7 @@ class InventoryUpdateSerializer(UnifiedJobSerializer, InventorySourceOptionsSeri
class InventoryUpdateDetailSerializer(InventoryUpdateSerializer): class InventoryUpdateDetailSerializer(InventoryUpdateSerializer):
source_project = serializers.SerializerMethodField(help_text=_('The project used for this job.'), method_name='get_source_project_id') source_project = serializers.SerializerMethodField(help_text=_('The project used for this job.'), method_name='get_source_project_id')
class Meta: class Meta:
@@ -2307,6 +2311,7 @@ class InventoryUpdateListSerializer(InventoryUpdateSerializer, UnifiedJobListSer
class InventoryUpdateCancelSerializer(InventoryUpdateSerializer): class InventoryUpdateCancelSerializer(InventoryUpdateSerializer):
can_cancel = serializers.BooleanField(read_only=True) can_cancel = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@@ -2664,6 +2669,7 @@ class CredentialSerializer(BaseSerializer):
class CredentialSerializerCreate(CredentialSerializer): class CredentialSerializerCreate(CredentialSerializer):
user = serializers.PrimaryKeyRelatedField( user = serializers.PrimaryKeyRelatedField(
queryset=User.objects.all(), queryset=User.objects.all(),
required=False, required=False,
@@ -3018,6 +3024,7 @@ class JobTemplateWithSpecSerializer(JobTemplateSerializer):
class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer): class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
passwords_needed_to_start = serializers.ReadOnlyField() passwords_needed_to_start = serializers.ReadOnlyField()
artifacts = serializers.SerializerMethodField() artifacts = serializers.SerializerMethodField()
@@ -3100,6 +3107,7 @@ class JobSerializer(UnifiedJobSerializer, JobOptionsSerializer):
class JobDetailSerializer(JobSerializer): class JobDetailSerializer(JobSerializer):
playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.')) playbook_counts = serializers.SerializerMethodField(help_text=_('A count of all plays and tasks for the job run.'))
custom_virtualenv = serializers.ReadOnlyField() custom_virtualenv = serializers.ReadOnlyField()
@@ -3117,6 +3125,7 @@ class JobDetailSerializer(JobSerializer):
class JobCancelSerializer(BaseSerializer): class JobCancelSerializer(BaseSerializer):
can_cancel = serializers.BooleanField(read_only=True) can_cancel = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@@ -3125,6 +3134,7 @@ class JobCancelSerializer(BaseSerializer):
class JobRelaunchSerializer(BaseSerializer): class JobRelaunchSerializer(BaseSerializer):
passwords_needed_to_start = serializers.SerializerMethodField() passwords_needed_to_start = serializers.SerializerMethodField()
retry_counts = serializers.SerializerMethodField() retry_counts = serializers.SerializerMethodField()
hosts = serializers.ChoiceField( hosts = serializers.ChoiceField(
@@ -3184,6 +3194,7 @@ class JobRelaunchSerializer(BaseSerializer):
class JobCreateScheduleSerializer(LabelsListMixin, BaseSerializer): class JobCreateScheduleSerializer(LabelsListMixin, BaseSerializer):
can_schedule = serializers.SerializerMethodField() can_schedule = serializers.SerializerMethodField()
prompts = serializers.SerializerMethodField() prompts = serializers.SerializerMethodField()
@@ -3309,6 +3320,7 @@ class AdHocCommandDetailSerializer(AdHocCommandSerializer):
class AdHocCommandCancelSerializer(AdHocCommandSerializer): class AdHocCommandCancelSerializer(AdHocCommandSerializer):
can_cancel = serializers.BooleanField(read_only=True) can_cancel = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@@ -3347,6 +3359,7 @@ class SystemJobTemplateSerializer(UnifiedJobTemplateSerializer):
class SystemJobSerializer(UnifiedJobSerializer): class SystemJobSerializer(UnifiedJobSerializer):
result_stdout = serializers.SerializerMethodField() result_stdout = serializers.SerializerMethodField()
class Meta: class Meta:
@@ -3373,6 +3386,7 @@ class SystemJobSerializer(UnifiedJobSerializer):
class SystemJobCancelSerializer(SystemJobSerializer): class SystemJobCancelSerializer(SystemJobSerializer):
can_cancel = serializers.BooleanField(read_only=True) can_cancel = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@@ -3537,6 +3551,7 @@ class WorkflowJobListSerializer(WorkflowJobSerializer, UnifiedJobListSerializer)
class WorkflowJobCancelSerializer(WorkflowJobSerializer): class WorkflowJobCancelSerializer(WorkflowJobSerializer):
can_cancel = serializers.BooleanField(read_only=True) can_cancel = serializers.BooleanField(read_only=True)
class Meta: class Meta:
@@ -3550,6 +3565,7 @@ class WorkflowApprovalViewSerializer(UnifiedJobSerializer):
class WorkflowApprovalSerializer(UnifiedJobSerializer): class WorkflowApprovalSerializer(UnifiedJobSerializer):
can_approve_or_deny = serializers.SerializerMethodField() can_approve_or_deny = serializers.SerializerMethodField()
approval_expiration = serializers.SerializerMethodField() approval_expiration = serializers.SerializerMethodField()
timed_out = serializers.ReadOnlyField() timed_out = serializers.ReadOnlyField()
@@ -3730,11 +3746,7 @@ class LaunchConfigurationBaseSerializer(BaseSerializer):
# Build unsaved version of this config, use it to detect prompts errors # Build unsaved version of this config, use it to detect prompts errors
mock_obj = self._build_mock_obj(attrs) mock_obj = self._build_mock_obj(attrs)
if set(list(ujt.get_ask_mapping().keys()) + ['extra_data']) & set(attrs.keys()): accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
accepted, rejected, errors = ujt._accept_or_ignore_job_kwargs(_exclude_errors=self.exclude_errors, **mock_obj.prompts_dict())
else:
# Only perform validation of prompts if prompts fields are provided
errors = {}
# Remove all unprocessed $encrypted$ strings, indicating default usage # Remove all unprocessed $encrypted$ strings, indicating default usage
if 'extra_data' in attrs and password_dict: if 'extra_data' in attrs and password_dict:
@@ -3944,6 +3956,7 @@ class JobHostSummarySerializer(BaseSerializer):
class JobEventSerializer(BaseSerializer): class JobEventSerializer(BaseSerializer):
event_display = serializers.CharField(source='get_event_display2', read_only=True) event_display = serializers.CharField(source='get_event_display2', read_only=True)
event_level = serializers.IntegerField(read_only=True) event_level = serializers.IntegerField(read_only=True)
@@ -4039,6 +4052,7 @@ class ProjectUpdateEventSerializer(JobEventSerializer):
class AdHocCommandEventSerializer(BaseSerializer): class AdHocCommandEventSerializer(BaseSerializer):
event_display = serializers.CharField(source='get_event_display', read_only=True) event_display = serializers.CharField(source='get_event_display', read_only=True)
class Meta: class Meta:
@@ -4250,10 +4264,17 @@ class JobLaunchSerializer(BaseSerializer):
# Basic validation - cannot run a playbook without a playbook # Basic validation - cannot run a playbook without a playbook
if not template.project: if not template.project:
errors['project'] = _("A project is required to run a job.") errors['project'] = _("A project is required to run a job.")
else: elif template.project.status in ('error', 'failed'):
failure_reason = template.project.get_reason_if_failed() errors['playbook'] = _("Missing a revision to run due to failed project update.")
if failure_reason:
errors['playbook'] = failure_reason latest_update = template.project.project_updates.last()
if latest_update is not None and latest_update.failed:
failed_validation_tasks = latest_update.project_update_events.filter(
event='runner_on_failed',
play="Perform project signature/checksum verification",
)
if failed_validation_tasks:
errors['playbook'] = _("Last project update failed due to signature validation failure.")
# cannot run a playbook without an inventory # cannot run a playbook without an inventory
if template.inventory and template.inventory.pending_deletion is True: if template.inventory and template.inventory.pending_deletion is True:
@@ -4320,6 +4341,7 @@ class JobLaunchSerializer(BaseSerializer):
class WorkflowJobLaunchSerializer(BaseSerializer): class WorkflowJobLaunchSerializer(BaseSerializer):
can_start_without_user_input = serializers.BooleanField(read_only=True) can_start_without_user_input = serializers.BooleanField(read_only=True)
defaults = serializers.SerializerMethodField() defaults = serializers.SerializerMethodField()
variables_needed_to_start = serializers.ReadOnlyField() variables_needed_to_start = serializers.ReadOnlyField()
@@ -4376,6 +4398,7 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
return False return False
def get_defaults(self, obj): def get_defaults(self, obj):
defaults_dict = {} defaults_dict = {}
for field_name in WorkflowJobTemplate.get_ask_mapping().keys(): for field_name in WorkflowJobTemplate.get_ask_mapping().keys():
if field_name == 'inventory': if field_name == 'inventory':
@@ -4392,6 +4415,7 @@ class WorkflowJobLaunchSerializer(BaseSerializer):
return dict(name=obj.name, id=obj.id, description=obj.description) return dict(name=obj.name, id=obj.id, description=obj.description)
def validate(self, attrs): def validate(self, attrs):
template = self.instance template = self.instance
accepted, rejected, errors = template._accept_or_ignore_job_kwargs(**attrs) accepted, rejected, errors = template._accept_or_ignore_job_kwargs(**attrs)
@@ -4632,6 +4656,7 @@ class NotificationTemplateSerializer(BaseSerializer):
class NotificationSerializer(BaseSerializer): class NotificationSerializer(BaseSerializer):
body = serializers.SerializerMethodField(help_text=_('Notification body')) body = serializers.SerializerMethodField(help_text=_('Notification body'))
class Meta: class Meta:
@@ -4896,19 +4921,6 @@ class InstanceSerializer(BaseSerializer):
extra_kwargs = { extra_kwargs = {
'node_type': {'initial': Instance.Types.EXECUTION, 'default': Instance.Types.EXECUTION}, 'node_type': {'initial': Instance.Types.EXECUTION, 'default': Instance.Types.EXECUTION},
'node_state': {'initial': Instance.States.INSTALLED, 'default': Instance.States.INSTALLED}, 'node_state': {'initial': Instance.States.INSTALLED, 'default': Instance.States.INSTALLED},
'hostname': {
'validators': [
MaxLengthValidator(limit_value=250),
validators.UniqueValidator(queryset=Instance.objects.all()),
RegexValidator(
regex=r'^localhost$|^127(?:\.[0-9]+){0,2}\.[0-9]+$|^(?:0*\:)*?:?0*1$',
flags=re.IGNORECASE,
inverse_match=True,
message="hostname cannot be localhost or 127.0.0.1",
),
HostnameRegexValidator(),
],
},
} }
def get_related(self, obj): def get_related(self, obj):
@@ -4919,7 +4931,7 @@ class InstanceSerializer(BaseSerializer):
  res['install_bundle'] = self.reverse('api:instance_install_bundle', kwargs={'pk': obj.pk})
  res['peers'] = self.reverse('api:instance_peers_list', kwargs={"pk": obj.pk})
  if self.context['request'].user.is_superuser or self.context['request'].user.is_system_auditor:
- if obj.node_type == 'execution':
+ if obj.node_type != 'hop':
  res['health_check'] = self.reverse('api:instance_health_check', kwargs={'pk': obj.pk})
  return res
@@ -4979,10 +4991,6 @@ class InstanceSerializer(BaseSerializer):
  return value
  def validate_hostname(self, value):
- """
- - Hostname cannot be "localhost" - but can be something like localhost.domain
- - Cannot change the hostname of an-already instantiated & initialized Instance object
- """
  if self.instance and self.instance.hostname != value:
  raise serializers.ValidationError("Cannot change hostname.")
@@ -5003,11 +5011,14 @@ class InstanceHealthCheckSerializer(BaseSerializer):
  class InstanceGroupSerializer(BaseSerializer):
  show_capabilities = ['edit', 'delete']
- capacity = serializers.SerializerMethodField()
  consumed_capacity = serializers.SerializerMethodField()
  percent_capacity_remaining = serializers.SerializerMethodField()
- jobs_running = serializers.SerializerMethodField()
+ jobs_running = serializers.IntegerField(
+ help_text=_('Count of jobs in the running or waiting state that ' 'are targeted for this instance group'), read_only=True
+ )
  jobs_total = serializers.IntegerField(help_text=_('Count of all jobs that target this instance group'), read_only=True)
  instances = serializers.SerializerMethodField()
  is_container_group = serializers.BooleanField(
@@ -5033,22 +5044,6 @@ class InstanceGroupSerializer(BaseSerializer):
  label=_('Policy Instance Minimum'),
  help_text=_("Static minimum number of Instances that will be automatically assign to " "this group when new instances come online."),
  )
- max_concurrent_jobs = serializers.IntegerField(
- default=0,
- min_value=0,
- required=False,
- initial=0,
- label=_('Max Concurrent Jobs'),
- help_text=_("Maximum number of concurrent jobs to run on a group. When set to zero, no maximum is enforced."),
- )
- max_forks = serializers.IntegerField(
- default=0,
- min_value=0,
- required=False,
- initial=0,
- label=_('Max Forks'),
- help_text=_("Maximum number of forks to execute concurrently on a group. When set to zero, no maximum is enforced."),
- )
  policy_instance_list = serializers.ListField(
  child=serializers.CharField(),
  required=False,
@@ -5070,8 +5065,6 @@ class InstanceGroupSerializer(BaseSerializer):
"consumed_capacity", "consumed_capacity",
"percent_capacity_remaining", "percent_capacity_remaining",
"jobs_running", "jobs_running",
"max_concurrent_jobs",
"max_forks",
"jobs_total", "jobs_total",
"instances", "instances",
"is_container_group", "is_container_group",
@@ -5153,42 +5146,32 @@ class InstanceGroupSerializer(BaseSerializer):
  # Store capacity values (globally computed) in the context
  if 'task_manager_igs' not in self.context:
  instance_groups_queryset = None
+ jobs_qs = UnifiedJob.objects.filter(status__in=('running', 'waiting'))
  if self.parent:  # Is ListView:
  instance_groups_queryset = self.parent.instance
- tm_models = TaskManagerModels.init_with_consumed_capacity(
- instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'],
- instance_groups_queryset=instance_groups_queryset,
- )
- self.context['task_manager_igs'] = tm_models.instance_groups
+ instances = TaskManagerInstances(jobs_qs)
+ instance_groups = TaskManagerInstanceGroups(instances_by_hostname=instances, instance_groups_queryset=instance_groups_queryset)
+ self.context['task_manager_igs'] = instance_groups
  return self.context['task_manager_igs']
  def get_consumed_capacity(self, obj):
  ig_mgr = self.get_ig_mgr()
  return ig_mgr.get_consumed_capacity(obj.name)
- def get_capacity(self, obj):
- ig_mgr = self.get_ig_mgr()
- return ig_mgr.get_capacity(obj.name)
  def get_percent_capacity_remaining(self, obj):
- capacity = self.get_capacity(obj)
- if not capacity:
+ if not obj.capacity:
  return 0.0
- consumed_capacity = self.get_consumed_capacity(obj)
- return float("{0:.2f}".format(((float(capacity) - float(consumed_capacity)) / (float(capacity))) * 100))
+ ig_mgr = self.get_ig_mgr()
+ return float("{0:.2f}".format((float(ig_mgr.get_remaining_capacity(obj.name)) / (float(obj.capacity))) * 100))
  def get_instances(self, obj):
- ig_mgr = self.get_ig_mgr()
- return len(ig_mgr.get_instances(obj.name))
+ return obj.instances.count()
- def get_jobs_running(self, obj):
- ig_mgr = self.get_ig_mgr()
- return ig_mgr.get_jobs_running(obj.name)
  class ActivityStreamSerializer(BaseSerializer):
  changes = serializers.SerializerMethodField()
  object_association = serializers.SerializerMethodField(help_text=_("When present, shows the field name of the role or relationship that changed."))
  object_type = serializers.SerializerMethodField(help_text=_("When present, shows the model on which the role or relationship was defined."))
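Note that the two `get_percent_capacity_remaining` bodies above are arithmetically equivalent whenever remaining capacity equals capacity minus consumed capacity; a quick check with made-up numbers:

```python
capacity, consumed = 57, 14
remaining = capacity - consumed  # what get_remaining_capacity() would report

old_style = float("{0:.2f}".format(((float(capacity) - float(consumed)) / float(capacity)) * 100))
new_style = float("{0:.2f}".format((float(remaining) / float(capacity)) * 100))
assert old_style == new_style == 75.44
```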
View File
@@ -1,5 +1,5 @@
  Launch a Job Template:
- {% ifmeth GET %}
  Make a GET request to this resource to determine if the job_template can be
  launched and whether any passwords are required to launch the job_template.
  The response will include the following fields:
@@ -29,8 +29,8 @@ The response will include the following fields:
  * `inventory_needed_to_start`: Flag indicating the presence of an inventory
  associated with the job template. If not then one should be supplied when
  launching the job (boolean, read-only)
- {% endifmeth %}
- {% ifmeth POST %}Make a POST request to this resource to launch the job_template. If any
+ Make a POST request to this resource to launch the job_template. If any
  passwords, inventory, or extra variables (extra_vars) are required, they must
  be passed via POST data, with extra_vars given as a YAML or JSON string and
  escaped parentheses. If the `inventory_needed_to_start` is `True` then the
@@ -41,4 +41,3 @@ are not provided, a 400 status code will be returned. If the job cannot be
  launched, a 405 status code will be returned. If the provided credential or
  inventory are not allowed to be used by the user, then a 403 status code will
  be returned.
- {% endifmeth %}
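For context, a hedged sketch of the launch flow this page describes, using `requests` against a hypothetical AWX host (URL, credentials, and IDs are all illustrative):

```python
import requests

AWX = "https://awx.example.org"
auth = ("admin", "secret")  # illustrative credentials

# GET first to see what the job template needs before launching.
info = requests.get(f"{AWX}/api/v2/job_templates/7/launch/", auth=auth).json()
print(info.get("variables_needed_to_start"), info.get("inventory_needed_to_start"))

# POST to launch, supplying extra_vars and an inventory if required.
resp = requests.post(
    f"{AWX}/api/v2/job_templates/7/launch/",
    auth=auth,
    json={"extra_vars": {"version": "1.2.3"}, "inventory": 1},
)
print(resp.status_code)  # 201 on success; 400/403/405 map to the cases above
```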
View File
@@ -1,5 +1,3 @@
- receptor_user: awx
- receptor_group: awx
  receptor_verify: true
  receptor_tls: true
  receptor_work_commands:
@@ -12,12 +10,12 @@ custom_worksign_public_keyfile: receptor/work-public-key.pem
  custom_tls_certfile: receptor/tls/receptor.crt
  custom_tls_keyfile: receptor/tls/receptor.key
  custom_ca_certfile: receptor/tls/ca/receptor-ca.crt
+ receptor_user: awx
+ receptor_group: awx
  receptor_protocol: 'tcp'
  receptor_listener: true
  receptor_port: {{ instance.listener_port }}
  receptor_dependencies:
+   - podman
+   - crun
    - python39-pip
- {% verbatim %}
- podman_user: "{{ receptor_user }}"
- podman_group: "{{ receptor_group }}"
- {% endverbatim %}
View File
@@ -9,12 +9,10 @@
      shell: /bin/bash
    - name: Enable Copr repo for Receptor
      command: dnf copr enable ansible-awx/receptor -y
-   - import_role:
-       name: ansible.receptor.podman
    - import_role:
        name: ansible.receptor.setup
    - name: Install ansible-runner
      pip:
        name: ansible-runner
        executable: pip3.9
  {% endverbatim %}
View File
@@ -1,4 +1,6 @@
  ---
  collections:
    - name: ansible.receptor
-     version: 1.1.0
+     source: https://github.com/ansible/receptor-collection/
+     type: git
+     version: 0.1.1
View File
@@ -9,9 +9,9 @@ from awx.api.views import (
  InstanceUnifiedJobsList,
  InstanceInstanceGroupsList,
  InstanceHealthCheck,
- InstanceInstallBundle,
  InstancePeersList,
  )
+ from awx.api.views.instance_install_bundle import InstanceInstallBundle
  urls = [
View File
@@ -3,28 +3,26 @@
  from django.urls import re_path
- from awx.api.views.inventory import (
+ from awx.api.views import (
  InventoryList,
  InventoryDetail,
+ InventoryHostsList,
+ InventoryGroupsList,
+ InventoryRootGroupsList,
+ InventoryVariableData,
+ InventoryScriptView,
+ InventoryTreeView,
+ InventoryInventorySourcesList,
+ InventoryInventorySourcesUpdate,
  InventoryActivityStreamList,
  InventoryJobTemplateList,
+ InventoryAdHocCommandsList,
  InventoryAccessList,
  InventoryObjectRolesList,
  InventoryInstanceGroupsList,
  InventoryLabelList,
  InventoryCopy,
  )
- from awx.api.views import (
- InventoryHostsList,
- InventoryGroupsList,
- InventoryInventorySourcesList,
- InventoryInventorySourcesUpdate,
- InventoryAdHocCommandsList,
- InventoryRootGroupsList,
- InventoryScriptView,
- InventoryTreeView,
- InventoryVariableData,
- )
  urls = [
View File
@@ -3,9 +3,6 @@
  from django.urls import re_path
- from awx.api.views.inventory import (
- InventoryUpdateEventsList,
- )
  from awx.api.views import (
  InventoryUpdateList,
  InventoryUpdateDetail,
@@ -13,6 +10,7 @@ from awx.api.views import (
  InventoryUpdateStdout,
  InventoryUpdateNotificationsList,
  InventoryUpdateCredentialsList,
+ InventoryUpdateEventsList,
  )
View File
@@ -10,7 +10,7 @@ from oauthlib import oauth2
  from oauth2_provider import views
  from awx.main.models import RefreshToken
- from awx.api.views.root import ApiOAuthAuthorizationRootView
+ from awx.api.views import ApiOAuthAuthorizationRootView
  class TokenView(views.TokenView):
View File
@@ -3,7 +3,7 @@
  from django.urls import re_path
- from awx.api.views.organization import (
+ from awx.api.views import (
  OrganizationList,
  OrganizationDetail,
  OrganizationUsersList,
@@ -14,6 +14,7 @@ from awx.api.views.organization import (
  OrganizationJobTemplatesList,
  OrganizationWorkflowJobTemplatesList,
  OrganizationTeamsList,
+ OrganizationCredentialList,
  OrganizationActivityStreamList,
  OrganizationNotificationTemplatesList,
  OrganizationNotificationTemplatesErrorList,
@@ -24,8 +25,8 @@ from awx.api.views.organization import (
  OrganizationGalaxyCredentialsList,
  OrganizationObjectRolesList,
  OrganizationAccessList,
+ OrganizationApplicationList,
  )
- from awx.api.views import OrganizationCredentialList, OrganizationApplicationList
  urls = [
View File
@@ -6,15 +6,13 @@ from django.urls import include, re_path
  from awx import MODE
  from awx.api.generics import LoggedLoginView, LoggedLogoutView
- from awx.api.views.root import (
+ from awx.api.views import (
  ApiRootView,
  ApiV2RootView,
  ApiV2PingView,
  ApiV2ConfigView,
  ApiV2SubscriptionView,
  ApiV2AttachView,
- )
- from awx.api.views import (
  AuthView,
  UserMeList,
  DashboardView,
@@ -30,8 +28,8 @@ from awx.api.views import (
  OAuth2TokenList,
  ApplicationOAuth2TokenList,
  OAuth2ApplicationDetail,
+ MeshVisualizer,
  )
- from awx.api.views.mesh_visualizer import MeshVisualizer
  from awx.api.views.metrics import MetricsView
View File
@@ -1,6 +1,6 @@
  from django.urls import re_path
- from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
+ from awx.api.views import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
  urlpatterns = [
View File
@@ -1,54 +0,0 @@
- import re
- from django.core.validators import RegexValidator, validate_ipv46_address
- from django.core.exceptions import ValidationError
- class HostnameRegexValidator(RegexValidator):
- """
- Fully validates a domain name that is compliant with norms in Linux/RHEL
- - Cannot start with a hyphen
- - Cannot begin with, or end with a "."
- - Cannot contain any whitespaces
- - Entire hostname is max 255 chars (including dots)
- - Each domain/label is between 1 and 63 characters, except top level domain, which must be at least 2 characters
- - Supports ipv4, ipv6, simple hostnames and FQDNs
- - Follows RFC 9210 (modern RFC 1123, 1178) requirements
- Accepts an IP Address or Hostname as the argument
- """
- regex = '^[a-z0-9][-a-z0-9]*$|^([a-z0-9][-a-z0-9]{0,62}[.])*[a-z0-9][-a-z0-9]{1,62}$'
- flags = re.IGNORECASE
- def __call__(self, value):
- regex_matches, err = self.__validate(value)
- invalid_input = regex_matches if self.inverse_match else not regex_matches
- if invalid_input:
- if err is None:
- err = ValidationError(self.message, code=self.code, params={"value": value})
- raise err
- def __str__(self):
- return f"regex={self.regex}, message={self.message}, code={self.code}, inverse_match={self.inverse_match}, flags={self.flags}"
- def __validate(self, value):
- if ' ' in value:
- return False, ValidationError("whitespaces in hostnames are illegal")
- """
- If we have an IP address, try and validate it.
- """
- try:
- validate_ipv46_address(value)
- return True, None
- except ValidationError:
- pass
- """
- By this point in the code, we probably have a simple hostname, FQDN or a strange hostname like "192.localhost.domain.101"
- """
- if not self.regex.match(value):
- return False, ValidationError(f"illegal characters detected in hostname={value}. Please verify.")
- return True, None
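Since this file is deleted on only one side of the compare, a minimal sketch of how the validator behaves, assuming the `HostnameRegexValidator` class above is in scope and a configured Django environment (e.g. `awx-manage shell`); sample values are invented:

```python
from django.core.exceptions import ValidationError

validator = HostnameRegexValidator()  # the class removed above

for value in ("node1.example.org", "10.0.0.5", "bad name", "-leading-hyphen"):
    try:
        validator(value)  # raises ValidationError on illegal hostnames
        print(f"{value!r}: accepted")
    except ValidationError:
        print(f"{value!r}: rejected")
```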
File diff suppressed because it is too large

View File
@@ -25,7 +25,6 @@ from rest_framework import status
  # Red Hat has an OID namespace (RHANANA). Receptor has its own designation under that.
  RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"
- # generate install bundle for the instance
  # install bundle directory structure
  # ├── install_receptor.yml (playbook)
@@ -41,6 +40,7 @@ RECEPTOR_OID = "1.3.6.1.4.1.2312.19.1"
  # │ └── work-public-key.pem
  # └── requirements.yml
  class InstanceInstallBundle(GenericAPIView):
  name = _('Install Bundle')
  model = models.Instance
  serializer_class = serializers.InstanceSerializer
@@ -178,7 +178,7 @@ def generate_receptor_tls(instance_obj):
  .public_key(csr.public_key())
  .serial_number(x509.random_serial_number())
  .not_valid_before(datetime.datetime.utcnow())
- .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=3650))
+ .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=10))
  .add_extension(
  csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).value,
  critical=csr.extensions.get_extension_for_class(x509.SubjectAlternativeName).critical,
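One way to see which validity window a generated bundle carries (the 3650-day versus 10-day difference above) is to inspect the shipped certificate with the same `cryptography` library; the path is illustrative:

```python
import datetime
from cryptography import x509

# Hypothetical path inside an unpacked install bundle.
with open("receptor/tls/receptor.crt", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read())

remaining = cert.not_valid_after - datetime.datetime.utcnow()
print(f"expires {cert.not_valid_after:%Y-%m-%d} ({remaining.days} days left)")
```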
View File
@@ -46,6 +46,7 @@ logger = logging.getLogger('awx.api.views.organization')
class InventoryUpdateEventsList(SubListAPIView):
model = InventoryUpdateEvent
serializer_class = InventoryUpdateEventSerializer
parent_model = InventoryUpdate
@@ -65,11 +66,13 @@ class InventoryUpdateEventsList(SubListAPIView):
class InventoryList(ListCreateAPIView):
model = Inventory
serializer_class = InventorySerializer
class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
model = Inventory
serializer_class = InventorySerializer
@@ -95,6 +98,7 @@ class InventoryDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIVie
class InventoryActivityStreamList(SubListAPIView):
model = ActivityStream
serializer_class = ActivityStreamSerializer
parent_model = Inventory
@@ -109,6 +113,7 @@ class InventoryActivityStreamList(SubListAPIView):
class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
model = InstanceGroup
serializer_class = InstanceGroupSerializer
parent_model = Inventory
@@ -116,11 +121,13 @@ class InventoryInstanceGroupsList(SubListAttachDetachAPIView):
class InventoryAccessList(ResourceAccessList):
model = User # needs to be User for AccessLists's
parent_model = Inventory
class InventoryObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = Inventory
@@ -133,6 +140,7 @@ class InventoryObjectRolesList(SubListAPIView):
class InventoryJobTemplateList(SubListAPIView):
model = JobTemplate
serializer_class = JobTemplateSerializer
parent_model = Inventory
@@ -146,9 +154,11 @@ class InventoryJobTemplateList(SubListAPIView):
class InventoryLabelList(LabelSubListCreateAttachDetachView):
parent_model = Inventory
class InventoryCopy(CopyAPIView):
model = Inventory
copy_return_serializer_class = InventorySerializer
View File
@@ -59,11 +59,13 @@ class LabelSubListCreateAttachDetachView(SubListCreateAttachDetachAPIView):
class LabelDetail(RetrieveUpdateAPIView):
model = Label
serializer_class = LabelSerializer
class LabelList(ListCreateAPIView):
name = _("Labels")
model = Label
serializer_class = LabelSerializer
View File
@@ -10,11 +10,13 @@ from awx.main.models import InstanceLink, Instance
class MeshVisualizer(APIView):
name = _("Mesh Visualizer")
permission_classes = (IsSystemAdminOrAuditor,)
swagger_topic = "System Configuration"
def get(self, request, format=None):
data = {
'nodes': InstanceNodeSerializer(Instance.objects.all(), many=True).data,
'links': InstanceLinkSerializer(InstanceLink.objects.select_related('target', 'source'), many=True).data,
View File
@@ -5,11 +5,9 @@
  import logging
  # Django
- from django.conf import settings
  from django.utils.translation import gettext_lazy as _
  # Django REST Framework
- from rest_framework.permissions import AllowAny
  from rest_framework.response import Response
  from rest_framework.exceptions import PermissionDenied
@@ -27,19 +25,15 @@ logger = logging.getLogger('awx.analytics')
  class MetricsView(APIView):
  name = _('Metrics')
  swagger_topic = 'Metrics'
  renderer_classes = [renderers.PlainTextRenderer, renderers.PrometheusJSONRenderer, renderers.BrowsableAPIRenderer]
- def initialize_request(self, request, *args, **kwargs):
- if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS:
- self.permission_classes = (AllowAny,)
- return super(APIView, self).initialize_request(request, *args, **kwargs)
  def get(self, request):
  '''Show Metrics Details'''
- if settings.ALLOW_METRICS_FOR_ANONYMOUS_USERS or request.user.is_superuser or request.user.is_system_auditor:
+ if request.user.is_superuser or request.user.is_system_auditor:
  metrics_to_show = ''
  if not request.query_params.get('subsystemonly', "0") == "1":
  metrics_to_show += metrics().decode('UTF-8')
View File
@@ -16,7 +16,7 @@ from rest_framework import status
  from awx.main.constants import ACTIVE_STATES
  from awx.main.utils import get_object_or_400
- from awx.main.models.ha import Instance, InstanceGroup, schedule_policy_task
+ from awx.main.models.ha import Instance, InstanceGroup
  from awx.main.models.organization import Team
  from awx.main.models.projects import Project
  from awx.main.models.inventory import Inventory
@@ -107,11 +107,6 @@ class InstanceGroupMembershipMixin(object):
  if inst_name in ig_obj.policy_instance_list:
  ig_obj.policy_instance_list.pop(ig_obj.policy_instance_list.index(inst_name))
  ig_obj.save(update_fields=['policy_instance_list'])
- # sometimes removing an instance has a non-obvious consequence
- # this is almost always true if policy_instance_percentage or _minimum is non-zero
- # after removing a single instance, the other memberships need to be re-balanced
- schedule_policy_task()
  return response
View File
@@ -58,6 +58,7 @@ logger = logging.getLogger('awx.api.views.organization')
class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
model = Organization
serializer_class = OrganizationSerializer
@@ -69,6 +70,7 @@ class OrganizationList(OrganizationCountsMixin, ListCreateAPIView):
class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPIView):
model = Organization
serializer_class = OrganizationSerializer
@@ -104,6 +106,7 @@ class OrganizationDetail(RelatedJobsPreventDeleteMixin, RetrieveUpdateDestroyAPI
class OrganizationInventoriesList(SubListAPIView):
model = Inventory
serializer_class = InventorySerializer
parent_model = Organization
@@ -111,6 +114,7 @@ class OrganizationInventoriesList(SubListAPIView):
class OrganizationUsersList(BaseUsersList):
model = User
serializer_class = UserSerializer
parent_model = Organization
@@ -119,6 +123,7 @@ class OrganizationUsersList(BaseUsersList):
class OrganizationAdminsList(BaseUsersList):
model = User
serializer_class = UserSerializer
parent_model = Organization
@@ -127,6 +132,7 @@ class OrganizationAdminsList(BaseUsersList):
class OrganizationProjectsList(SubListCreateAPIView):
model = Project
serializer_class = ProjectSerializer
parent_model = Organization
@@ -134,6 +140,7 @@ class OrganizationProjectsList(SubListCreateAPIView):
class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
model = ExecutionEnvironment
serializer_class = ExecutionEnvironmentSerializer
parent_model = Organization
@@ -143,6 +150,7 @@ class OrganizationExecutionEnvironmentsList(SubListCreateAttachDetachAPIView):
class OrganizationJobTemplatesList(SubListCreateAPIView):
model = JobTemplate
serializer_class = JobTemplateSerializer
parent_model = Organization
@@ -150,6 +158,7 @@ class OrganizationJobTemplatesList(SubListCreateAPIView):
class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
model = WorkflowJobTemplate
serializer_class = WorkflowJobTemplateSerializer
parent_model = Organization
@@ -157,6 +166,7 @@ class OrganizationWorkflowJobTemplatesList(SubListCreateAPIView):
class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
model = Team
serializer_class = TeamSerializer
parent_model = Organization
@@ -165,6 +175,7 @@ class OrganizationTeamsList(SubListCreateAttachDetachAPIView):
class OrganizationActivityStreamList(SubListAPIView):
model = ActivityStream
serializer_class = ActivityStreamSerializer
parent_model = Organization
@@ -173,6 +184,7 @@ class OrganizationActivityStreamList(SubListAPIView):
class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
model = NotificationTemplate
serializer_class = NotificationTemplateSerializer
parent_model = Organization
@@ -181,28 +193,34 @@ class OrganizationNotificationTemplatesList(SubListCreateAttachDetachAPIView):
class OrganizationNotificationTemplatesAnyList(SubListCreateAttachDetachAPIView):
model = NotificationTemplate
serializer_class = NotificationTemplateSerializer
parent_model = Organization
class OrganizationNotificationTemplatesStartedList(OrganizationNotificationTemplatesAnyList):
relationship = 'notification_templates_started'
class OrganizationNotificationTemplatesErrorList(OrganizationNotificationTemplatesAnyList):
relationship = 'notification_templates_error'
class OrganizationNotificationTemplatesSuccessList(OrganizationNotificationTemplatesAnyList):
relationship = 'notification_templates_success'
class OrganizationNotificationTemplatesApprovalList(OrganizationNotificationTemplatesAnyList):
relationship = 'notification_templates_approvals'
class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
model = InstanceGroup
serializer_class = InstanceGroupSerializer
parent_model = Organization
@@ -210,6 +228,7 @@ class OrganizationInstanceGroupsList(SubListAttachDetachAPIView):
class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
model = Credential
serializer_class = CredentialSerializer
parent_model = Organization
@@ -221,11 +240,13 @@ class OrganizationGalaxyCredentialsList(SubListAttachDetachAPIView):
class OrganizationAccessList(ResourceAccessList):
model = User # needs to be User for AccessLists's
parent_model = Organization
class OrganizationObjectRolesList(SubListAPIView):
model = Role
serializer_class = RoleSerializer
parent_model = Organization
View File
@@ -36,6 +36,7 @@ logger = logging.getLogger('awx.api.views.root')
class ApiRootView(APIView):
permission_classes = (AllowAny,)
name = _('REST API')
versioning_class = None
@@ -58,6 +59,7 @@ class ApiRootView(APIView):
class ApiOAuthAuthorizationRootView(APIView):
permission_classes = (AllowAny,)
name = _("API OAuth 2 Authorization Root")
versioning_class = None
@@ -72,6 +74,7 @@ class ApiOAuthAuthorizationRootView(APIView):
class ApiVersionRootView(APIView):
permission_classes = (AllowAny,)
swagger_topic = 'Versioning'
@@ -169,6 +172,7 @@ class ApiV2PingView(APIView):
class ApiV2SubscriptionView(APIView):
permission_classes = (IsAuthenticated,)
name = _('Subscriptions')
swagger_topic = 'System Configuration'
@@ -208,6 +212,7 @@ class ApiV2SubscriptionView(APIView):
class ApiV2AttachView(APIView):
permission_classes = (IsAuthenticated,)
name = _('Attach Subscription')
swagger_topic = 'System Configuration'
@@ -225,6 +230,7 @@ class ApiV2AttachView(APIView):
user = getattr(settings, 'SUBSCRIPTIONS_USERNAME', None)
pw = getattr(settings, 'SUBSCRIPTIONS_PASSWORD', None)
if pool_id and user and pw:
data = request.data.copy()
try:
with set_environ(**settings.AWX_TASK_ENV):
@@ -252,6 +258,7 @@ class ApiV2AttachView(APIView):
class ApiV2ConfigView(APIView):
permission_classes = (IsAuthenticated,)
name = _('Configuration')
swagger_topic = 'System Configuration'
View File
@@ -8,6 +8,7 @@ from django.utils.translation import gettext_lazy as _
class ConfConfig(AppConfig):
name = 'awx.conf'
verbose_name = _('Configuration')
@@ -15,6 +16,7 @@ class ConfConfig(AppConfig):
self.module.autodiscover()
if not set(sys.argv) & {'migrate', 'check_migrations'}:
from .settings import SettingsWrapper
SettingsWrapper.initialize()
View File
@@ -47,6 +47,7 @@ class IntegerField(IntegerField):
class StringListField(ListField):
child = CharField()
def to_representation(self, value):
@@ -56,6 +57,7 @@ class StringListField(ListField):
class StringListBooleanField(ListField):
default_error_messages = {'type_error': _('Expected None, True, False, a string or list of strings but got {input_type} instead.')}
child = CharField()
@@ -94,6 +96,7 @@ class StringListBooleanField(ListField):
class StringListPathField(StringListField):
default_error_messages = {'type_error': _('Expected list of strings but got {input_type} instead.'), 'path_error': _('{path} is not a valid path choice.')}
def to_internal_value(self, paths):
@@ -123,6 +126,7 @@ class StringListIsolatedPathField(StringListField):
}
def to_internal_value(self, paths):
if isinstance(paths, (list, tuple)):
for p in paths:
if not isinstance(p, str):
View File
@@ -8,6 +8,7 @@ import awx.main.fields
class Migration(migrations.Migration):
dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
operations = [
View File
@@ -48,6 +48,7 @@ def revert_tower_settings(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [('conf', '0001_initial'), ('main', '0004_squashed_v310_release')]
run_before = [('main', '0005_squashed_v310_v313_updates')]
View File
@@ -7,6 +7,7 @@ import awx.main.fields
class Migration(migrations.Migration):
dependencies = [('conf', '0002_v310_copy_tower_settings')]
operations = [migrations.AlterField(model_name='setting', name='value', field=awx.main.fields.JSONBlob(null=True))]
View File
@@ -5,6 +5,7 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('conf', '0003_v310_JSONField_changes')]
operations = [
View File
@@ -15,6 +15,7 @@ def reverse_copy_session_settings(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [('conf', '0004_v320_reencrypt')]
operations = [migrations.RunPython(copy_session_settings, reverse_copy_session_settings)]
View File
@@ -8,6 +8,7 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [('conf', '0005_v330_rename_two_session_settings')]
operations = [migrations.RunPython(fill_ldap_group_type_params)]
View File
@@ -9,6 +9,7 @@ def copy_allowed_ips(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [('conf', '0006_v331_ldap_group_type')]
operations = [migrations.RunPython(copy_allowed_ips)]
View File
@@ -14,6 +14,7 @@ def _noop(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [('conf', '0007_v380_rename_more_settings')]
operations = [migrations.RunPython(clear_old_license, _noop), migrations.RunPython(prefill_rh_credentials, _noop)]
View File
@@ -10,6 +10,7 @@ def rename_proot_settings(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [('conf', '0008_subscriptions')]
operations = [migrations.RunPython(rename_proot_settings)]
View File
@@ -10,6 +10,7 @@ __all__ = ['rename_setting']
def rename_setting(apps, schema_editor, old_key, new_key):
old_setting = None
Setting = apps.get_model('conf', 'Setting')
if Setting.objects.filter(key=new_key).exists() or hasattr(settings, new_key):
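For reference, a hedged sketch of how a helper with this signature is typically wired into a data migration via `functools.partial` (the migration name and setting keys are invented):

```python
from functools import partial
from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [('conf', '0009_example_previous')]  # hypothetical
    operations = [
        # RunPython supplies (apps, schema_editor); partial binds the keys.
        migrations.RunPython(partial(rename_setting, old_key='OLD_KEY', new_key='NEW_KEY')),
    ]
```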
View File
@@ -17,6 +17,7 @@ __all__ = ['Setting']
class Setting(CreatedModifiedModel):
key = models.CharField(max_length=255)
value = JSONBlob(null=True)
user = prevent_search(models.ForeignKey('auth.User', related_name='settings', default=None, null=True, editable=False, on_delete=models.CASCADE))
View File
@@ -104,6 +104,7 @@ def filter_sensitive(registry, key, value):
class TransientSetting(object):
__slots__ = ('pk', 'value')
def __init__(self, pk, value):
View File
@@ -5,6 +5,7 @@ from awx.conf.fields import StringListBooleanField, StringListPathField, ListTup
class TestStringListBooleanField:
FIELD_VALUES = [
("hello", "hello"),
(("a", "b"), ["a", "b"]),
@@ -52,6 +53,7 @@ class TestStringListBooleanField:
class TestListTuplesField:
FIELD_VALUES = [([('a', 'b'), ('abc', '123')], [("a", "b"), ("abc", "123")])]
FIELD_VALUES_INVALID = [("abc", type("abc")), ([('a', 'b', 'c'), ('abc', '123', '456')], type(('a',))), (['a', 'b'], type('a')), (123, type(123))]
@@ -71,6 +73,7 @@ class TestListTuplesField:
class TestStringListPathField:
FIELD_VALUES = [
((".", "..", "/"), [".", "..", "/"]),
(("/home",), ["/home"]),
View File
@@ -36,6 +36,7 @@ SettingCategory = collections.namedtuple('SettingCategory', ('url', 'slug', 'nam
class SettingCategoryList(ListAPIView):
model = Setting # Not exactly, but needed for the view.
serializer_class = SettingCategorySerializer
filter_backends = []
@@ -57,6 +58,7 @@ class SettingCategoryList(ListAPIView):
class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
model = Setting # Not exactly, but needed for the view.
serializer_class = SettingSingletonSerializer
filter_backends = []
@@ -144,6 +146,7 @@ class SettingSingletonDetail(RetrieveUpdateDestroyAPIView):
class SettingLoggingTest(GenericAPIView):
name = _('Logging Connectivity Test')
model = Setting
serializer_class = SettingSingletonSerializer
View File
@@ -6237,5 +6237,4 @@ msgstr "%s se está actualizando."
#: awx/ui/urls.py:24
msgid "This page will refresh when complete."
msgstr "Esta página se actualizará cuando se complete."
View File
@@ -721,7 +721,7 @@ msgstr "DTSTART valide obligatoire dans rrule. La valeur doit commencer par : DT
  #: awx/api/serializers.py:4657
  msgid ""
  "DTSTART cannot be a naive datetime. Specify ;TZINFO= or YYYYMMDDTHHMMSSZZ."
- msgstr "DTSTART ne peut correspondre à une date-heure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ."
+ msgstr "DTSTART ne peut correspondre à une DateHeure naïve. Spécifier ;TZINFO= ou YYYYMMDDTHHMMSSZZ."
  #: awx/api/serializers.py:4659
  msgid "Multiple DTSTART is not supported."
@@ -6239,5 +6239,4 @@ msgstr "%s est en cours de mise à niveau."
  #: awx/ui/urls.py:24
  msgid "This page will refresh when complete."
  msgstr "Cette page sera rafraîchie une fois terminée."
View File
@@ -6237,5 +6237,4 @@ msgstr "Er wordt momenteel een upgrade van%s geïnstalleerd."
#: awx/ui/urls.py:24
msgid "This page will refresh when complete."
msgstr "Deze pagina wordt vernieuwd als hij klaar is."
View File
@@ -561,6 +561,7 @@ class NotificationAttachMixin(BaseAccess):
  class InstanceAccess(BaseAccess):
  model = Instance
  prefetch_related = ('rampart_groups',)
@@ -578,6 +579,7 @@ class InstanceAccess(BaseAccess):
  return super(InstanceAccess, self).can_unattach(obj, sub_obj, relationship, relationship, data=data)
  def can_add(self, data):
  return self.user.is_superuser
  def can_change(self, obj, data):
@@ -588,6 +590,7 @@ class InstanceAccess(BaseAccess):
  class InstanceGroupAccess(BaseAccess):
  model = InstanceGroup
  prefetch_related = ('instances',)
@@ -990,6 +993,9 @@ class HostAccess(BaseAccess):
  if data and 'name' in data:
  self.check_license(add_host_name=data['name'])
+ # Check the per-org limit
+ self.check_org_host_limit({'inventory': obj.inventory}, add_host_name=data['name'])
  # Checks for admin or change permission on inventory, controls whether
  # the user can edit variable data.
  return obj and self.user in obj.inventory.admin_role
@@ -1027,9 +1033,7 @@ class GroupAccess(BaseAccess):
  return Group.objects.filter(inventory__in=Inventory.accessible_pk_qs(self.user, 'read_role'))
  def can_add(self, data):
- if not data:  # So the browseable API will work
- return Inventory.accessible_objects(self.user, 'admin_role').exists()
- if 'inventory' not in data:
+ if not data or 'inventory' not in data:
  return False
  # Checks for admin or change permission on inventory.
  return self.check_related('inventory', Inventory, data)
@@ -2351,6 +2355,7 @@ class JobEventAccess(BaseAccess):
  class UnpartitionedJobEventAccess(JobEventAccess):
  model = UnpartitionedJobEvent
@@ -2695,66 +2700,46 @@ class ActivityStreamAccess(BaseAccess):
  # 'job_template', 'job', 'project', 'project_update', 'workflow_job',
  # 'inventory_source', 'workflow_job_template'
- q = Q(user=self.user)
- inventory_set = Inventory.accessible_pk_qs(self.user, 'read_role')
- if inventory_set:
- q |= (
- Q(ad_hoc_command__inventory__in=inventory_set)
- | Q(inventory__in=inventory_set)
- | Q(host__inventory__in=inventory_set)
- | Q(group__inventory__in=inventory_set)
- | Q(inventory_source__inventory__in=inventory_set)
- | Q(inventory_update__inventory_source__inventory__in=inventory_set)
- )
- credential_set = Credential.accessible_pk_qs(self.user, 'read_role')
- if credential_set:
- q |= Q(credential__in=credential_set)
+ inventory_set = Inventory.accessible_objects(self.user, 'read_role')
+ credential_set = Credential.accessible_objects(self.user, 'read_role')
  auditing_orgs = (
  (Organization.accessible_objects(self.user, 'admin_role') | Organization.accessible_objects(self.user, 'auditor_role'))
  .distinct()
  .values_list('id', flat=True)
  )
- if auditing_orgs:
- q |= (
- Q(user__in=auditing_orgs.values('member_role__members'))
- | Q(organization__in=auditing_orgs)
- | Q(notification_template__organization__in=auditing_orgs)
- | Q(notification__notification_template__organization__in=auditing_orgs)
- | Q(label__organization__in=auditing_orgs)
- | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
- )
- project_set = Project.accessible_pk_qs(self.user, 'read_role')
- if project_set:
- q |= Q(project__in=project_set) | Q(project_update__project__in=project_set)
- jt_set = JobTemplate.accessible_pk_qs(self.user, 'read_role')
- if jt_set:
- q |= Q(job_template__in=jt_set) | Q(job__job_template__in=jt_set)
- wfjt_set = WorkflowJobTemplate.accessible_pk_qs(self.user, 'read_role')
- if wfjt_set:
- q |= (
- Q(workflow_job_template__in=wfjt_set)
- | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
- | Q(workflow_job__workflow_job_template__in=wfjt_set)
- )
- team_set = Team.accessible_pk_qs(self.user, 'read_role')
- if team_set:
- q |= Q(team__in=team_set)
+ project_set = Project.accessible_objects(self.user, 'read_role')
+ jt_set = JobTemplate.accessible_objects(self.user, 'read_role')
+ team_set = Team.accessible_objects(self.user, 'read_role')
+ wfjt_set = WorkflowJobTemplate.accessible_objects(self.user, 'read_role')
  app_set = OAuth2ApplicationAccess(self.user).filtered_queryset()
- if app_set:
- q |= Q(o_auth2_application__in=app_set)
  token_set = OAuth2TokenAccess(self.user).filtered_queryset()
- if token_set:
- q |= Q(o_auth2_access_token__in=token_set)
- return qs.filter(q).distinct()
+ return qs.filter(
+ Q(ad_hoc_command__inventory__in=inventory_set)
+ | Q(o_auth2_application__in=app_set)
+ | Q(o_auth2_access_token__in=token_set)
+ | Q(user__in=auditing_orgs.values('member_role__members'))
+ | Q(user=self.user)
+ | Q(organization__in=auditing_orgs)
+ | Q(inventory__in=inventory_set)
+ | Q(host__inventory__in=inventory_set)
+ | Q(group__inventory__in=inventory_set)
+ | Q(inventory_source__inventory__in=inventory_set)
+ | Q(inventory_update__inventory_source__inventory__in=inventory_set)
+ | Q(credential__in=credential_set)
+ | Q(team__in=team_set)
+ | Q(project__in=project_set)
+ | Q(project_update__project__in=project_set)
+ | Q(job_template__in=jt_set)
+ | Q(job__job_template__in=jt_set)
+ | Q(workflow_job_template__in=wfjt_set)
+ | Q(workflow_job_template_node__workflow_job_template__in=wfjt_set)
+ | Q(workflow_job__workflow_job_template__in=wfjt_set)
+ | Q(notification_template__organization__in=auditing_orgs)
+ | Q(notification__notification_template__organization__in=auditing_orgs)
+ | Q(label__organization__in=auditing_orgs)
+ | Q(role__in=Role.objects.filter(ancestors__in=self.user.roles.all()) if auditing_orgs else [])
+ ).distinct()
  def can_add(self, data):
  return False
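The removed side builds the filter incrementally and skips clauses whose accessible-pk sets are empty; ORing in an `__in=<empty>` clause cannot match anything, yet it still drags extra joins into the generated SQL. A minimal illustration with plain Django `Q` objects (no AWX imports; values invented):

```python
from django.db.models import Q

q = Q(user=42)  # illustrative pk

credential_pks = []  # imagine accessible_pk_qs(...) came back empty
if credential_pks:   # guard: skip the clause entirely
    q |= Q(credential__in=credential_pks)

print(q)  # (AND: ('user', 42)) -- no credential clause was added
```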
View File
@@ -1,8 +1,8 @@
  import datetime
  import asyncio
  import logging
+ import aioredis
- import redis
- import redis.asyncio
  import re
  from prometheus_client import (
@@ -82,7 +82,7 @@ class BroadcastWebsocketStatsManager:
  async def run_loop(self):
  try:
- redis_conn = await redis.asyncio.Redis.from_url(settings.BROKER_URL)
+ redis_conn = await aioredis.create_redis_pool(settings.BROKER_URL)
  while True:
  stats_data_str = ''.join(stat.serialize() for stat in self._stats.values())
  await redis_conn.set(self._redis_key, stats_data_str)
@@ -122,8 +122,8 @@ class BroadcastWebsocketStats:
  'Number of messages received, to be forwarded, by the broadcast websocket system',
  registry=self._registry,
  )
- self._messages_received_current_conn = Gauge(
- f'awx_{self.remote_name}_messages_received_currrent_conn',
+ self._messages_received = Gauge(
+ f'awx_{self.remote_name}_messages_received',
  'Number forwarded messages received by the broadcast websocket system, for the duration of the current connection',
  registry=self._registry,
  )
@@ -144,13 +144,13 @@ class BroadcastWebsocketStats:
  def record_message_received(self):
  self._internal_messages_received_per_minute.record()
- self._messages_received_current_conn.inc()
+ self._messages_received.inc()
  self._messages_received_total.inc()
  def record_connection_established(self):
  self._connection.state('connected')
  self._connection_start.set_to_current_time()
- self._messages_received_current_conn.set(0)
+ self._messages_received.set(0)
  def record_connection_lost(self):
  self._connection.state('disconnected')
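The import swap above is the aioredis-to-redis-py migration; a hedged sketch of the equivalent connection setup in the two libraries, with an illustrative URL (note that redis-py's `from_url` is itself synchronous):

```python
import asyncio

BROKER_URL = "redis://localhost:6379/0"  # illustrative; requires a reachable Redis

async def with_aioredis():  # aioredis 1.x
    import aioredis
    conn = await aioredis.create_redis_pool(BROKER_URL)
    await conn.set("k", "v")

async def with_redis_py():  # redis-py >= 4.2
    import redis.asyncio
    conn = redis.asyncio.Redis.from_url(BROKER_URL)  # no await needed here
    await conn.set("k", "v")

asyncio.run(with_redis_py())
```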
View File
@@ -16,7 +16,7 @@ from awx.conf.license import get_license
from awx.main.utils import get_awx_version, camelcase_to_underscore, datetime_hook from awx.main.utils import get_awx_version, camelcase_to_underscore, datetime_hook
from awx.main import models from awx.main import models
from awx.main.analytics import register from awx.main.analytics import register
from awx.main.scheduler.task_manager_models import TaskManagerModels from awx.main.scheduler.task_manager_models import TaskManagerInstances
""" """
This module is used to define metrics collected by awx.main.analytics.gather() This module is used to define metrics collected by awx.main.analytics.gather()
@@ -237,8 +237,9 @@ def projects_by_scm_type(since, **kwargs):
def instance_info(since, include_hostnames=False, **kwargs):
    info = {}
    # Use same method that the TaskManager does to compute consumed capacity without querying all running jobs for each Instance
-    tm_models = TaskManagerModels.init_with_consumed_capacity(instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'])
-    for tm_instance in tm_models.instances.instances_by_hostname.values():
+    active_tasks = models.UnifiedJob.objects.filter(status__in=['running', 'waiting']).only('task_impact', 'controller_node', 'execution_node')
+    tm_instances = TaskManagerInstances(active_tasks, instance_fields=['uuid', 'version', 'capacity', 'cpu', 'memory', 'managed_by_policy', 'enabled'])
+    for tm_instance in tm_instances.instances_by_hostname.values():
        instance = tm_instance.obj
        instance_info = {
            'uuid': instance.uuid,
@@ -250,7 +251,6 @@ def instance_info(since, include_hostnames=False, **kwargs):
            'enabled': instance.enabled,
            'consumed_capacity': tm_instance.consumed_capacity,
            'remaining_capacity': instance.capacity - tm_instance.consumed_capacity,
-            'node_type': instance.node_type,
        }
        if include_hostnames is True:
            instance_info['hostname'] = instance.hostname

View File

@@ -57,7 +57,6 @@ def metrics():
        [
            'hostname',
            'instance_uuid',
-            'node_type',
        ],
        registry=REGISTRY,
    )
@@ -85,7 +84,6 @@ def metrics():
        [
            'hostname',
            'instance_uuid',
-            'node_type',
        ],
        registry=REGISTRY,
    )
@@ -113,7 +111,6 @@ def metrics():
        [
            'hostname',
            'instance_uuid',
-            'node_type',
        ],
        registry=REGISTRY,
    )
@@ -123,7 +120,6 @@ def metrics():
        [
            'hostname',
            'instance_uuid',
-            'node_type',
        ],
        registry=REGISTRY,
    )
@@ -184,13 +180,12 @@ def metrics():
    instance_data = instance_info(None, include_hostnames=True)
    for uuid, info in instance_data.items():
        hostname = info['hostname']
-        node_type = info['node_type']
-        INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['capacity'])
+        INSTANCE_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['capacity'])
        INSTANCE_CPU.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['cpu'])
        INSTANCE_MEMORY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['memory'])
-        INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['consumed_capacity'])
-        INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).set(instance_data[uuid]['remaining_capacity'])
-        INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid, node_type=node_type).info(
+        INSTANCE_CONSUMED_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['consumed_capacity'])
+        INSTANCE_REMAINING_CAPACITY.labels(hostname=hostname, instance_uuid=uuid).set(instance_data[uuid]['remaining_capacity'])
+        INSTANCE_INFO.labels(hostname=hostname, instance_uuid=uuid).info(
            {
                'enabled': str(instance_data[uuid]['enabled']),
                'managed_by_policy': str(instance_data[uuid]['managed_by_policy']),

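For context on the hunks above: `node_type` is being dropped from the label set of these `prometheus_client` gauges. A minimal, self-contained sketch of the pattern (metric names and values here are illustrative, not from this diff) shows why the declared label list and the `.labels()` calls must change together — every declared label must be supplied on each call, and each distinct combination of values creates a separate series:

```python
# Minimal sketch of the labeled-Gauge pattern above (illustrative names/values).
from prometheus_client import CollectorRegistry, Gauge, generate_latest

REGISTRY = CollectorRegistry()

# Every label declared here must be passed to .labels(); each distinct
# combination of label values becomes its own time series.
INSTANCE_CAPACITY = Gauge(
    'awx_instance_capacity',
    'Capacity of each node in the AWX system',
    ['hostname', 'instance_uuid', 'node_type'],
    registry=REGISTRY,
)

INSTANCE_CAPACITY.labels(hostname='node1', instance_uuid='abc-123', node_type='hybrid').set(57)
print(generate_latest(REGISTRY).decode())
```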
View File

@@ -5,9 +5,7 @@ import logging
from django.conf import settings
from django.apps import apps

from awx.main.consumers import emit_channel_notification
-from awx.main.utils import is_testing

root_key = 'awx_metrics'
logger = logging.getLogger('awx.main.analytics')
@@ -165,10 +163,14 @@ class Metrics:
        Instance = apps.get_model('main', 'Instance')
        if instance_name:
            self.instance_name = instance_name
-        elif is_testing():
+        elif settings.IS_TESTING():
            self.instance_name = "awx_testing"
        else:
-            self.instance_name = Instance.objects.my_hostname()
+            try:
+                self.instance_name = Instance.objects.me().hostname
+            except Exception as e:
+                self.instance_name = settings.CLUSTER_HOST_ID
+                logger.info(f'Instance {self.instance_name} seems to be unregistered, error: {e}')

        # metric name, help_text
        METRICSLIST = [

View File

@@ -3,5 +3,6 @@ from django.utils.translation import gettext_lazy as _
class MainConfig(AppConfig):
    name = 'awx.main'
    verbose_name = _('Main')

View File

@@ -569,7 +569,7 @@ register(
register(
    'LOG_AGGREGATOR_LOGGERS',
    field_class=fields.StringListField,
-    default=['awx', 'activity_stream', 'job_events', 'system_tracking', 'broadcast_websocket'],
+    default=['awx', 'activity_stream', 'job_events', 'system_tracking'],
    label=_('Loggers Sending Data to Log Aggregator Form'),
    help_text=_(
        'List of loggers that will send HTTP logs to the collector, these can '
@@ -577,8 +577,7 @@ register(
        'awx - service logs\n'
        'activity_stream - activity stream records\n'
        'job_events - callback data from Ansible job events\n'
-        'system_tracking - facts gathered from scan jobs\n'
-        'broadcast_websocket - errors pertaining to websockets broadcast metrics\n'
+        'system_tracking - facts gathered from scan jobs.'
    ),
    category=_('Logging'),
    category_slug='logging',

View File

@@ -9,16 +9,10 @@ aim_inputs = {
    'fields': [
        {
            'id': 'url',
-            'label': _('CyberArk CCP URL'),
+            'label': _('CyberArk AIM URL'),
            'type': 'string',
            'format': 'url',
        },
-        {
-            'id': 'webservice_id',
-            'label': _('Web Service ID'),
-            'type': 'string',
-            'help_text': _('The CCP Web Service ID. Leave blank to default to AIMWebService.'),
-        },
        {
            'id': 'app_id',
            'label': _('Application ID'),
@@ -70,13 +64,10 @@ def aim_backend(**kwargs):
    client_cert = kwargs.get('client_cert', None)
    client_key = kwargs.get('client_key', None)
    verify = kwargs['verify']
-    webservice_id = kwargs['webservice_id']
    app_id = kwargs['app_id']
    object_query = kwargs['object_query']
    object_query_format = kwargs['object_query_format']
    reason = kwargs.get('reason', None)
-    if webservice_id == '':
-        webservice_id = 'AIMWebService'

    query_params = {
        'AppId': app_id,
@@ -87,7 +78,7 @@ def aim_backend(**kwargs):
        query_params['reason'] = reason

    request_qs = '?' + urlencode(query_params, quote_via=quote)
-    request_url = urljoin(url, '/'.join([webservice_id, 'api', 'Accounts']))
+    request_url = urljoin(url, '/'.join(['AIMWebService', 'api', 'Accounts']))

    with CertFiles(client_cert, client_key) as cert:
        res = requests.get(
@@ -101,4 +92,4 @@ def aim_backend(**kwargs):
    return res.json()['Content']

-aim_plugin = CredentialPlugin('CyberArk Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)
+aim_plugin = CredentialPlugin('CyberArk AIM Central Credential Provider Lookup', inputs=aim_inputs, backend=aim_backend)

View File

@@ -1,5 +1,6 @@
from .plugin import CredentialPlugin, CertFiles, raise_for_status

+import base64
from urllib.parse import urljoin, quote

from django.utils.translation import gettext_lazy as _
@@ -60,7 +61,7 @@ def conjur_backend(**kwargs):
    cacert = kwargs.get('cacert', None)

    auth_kwargs = {
-        'headers': {'Content-Type': 'text/plain', 'Accept-Encoding': 'base64'},
+        'headers': {'Content-Type': 'text/plain'},
        'data': api_key,
        'allow_redirects': False,
    }
@@ -68,13 +69,9 @@ def conjur_backend(**kwargs):
    with CertFiles(cacert) as cert:
        # https://www.conjur.org/api.html#authentication-authenticate-post
        auth_kwargs['verify'] = cert
-        try:
-            resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
-            resp.raise_for_status()
-        except requests.exceptions.HTTPError:
-            resp = requests.post(urljoin(url, '/'.join(['api', 'authn', account, username, 'authenticate'])), **auth_kwargs)
+        resp = requests.post(urljoin(url, '/'.join(['authn', account, username, 'authenticate'])), **auth_kwargs)
    raise_for_status(resp)
-    token = resp.content.decode('utf-8')
+    token = base64.b64encode(resp.content).decode('utf-8')

    lookup_kwargs = {
        'headers': {'Authorization': 'Token token="{}"'.format(token)},
@@ -83,21 +80,14 @@ def conjur_backend(**kwargs):
    # https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
    path = urljoin(url, '/'.join(['secrets', account, 'variable', secret_path]))
-    path_conjurcloud = urljoin(url, '/'.join(['api', 'secrets', account, 'variable', secret_path]))
    if version:
-        ver = "version={}".format(version)
-        path = '?'.join([path, ver])
-        path_conjurcloud = '?'.join([path_conjurcloud, ver])
+        path = '?'.join([path, version])

    with CertFiles(cacert) as cert:
        lookup_kwargs['verify'] = cert
-        try:
-            resp = requests.get(path, timeout=30, **lookup_kwargs)
-            resp.raise_for_status()
-        except requests.exceptions.HTTPError:
-            resp = requests.get(path_conjurcloud, timeout=30, **lookup_kwargs)
+        resp = requests.get(path, timeout=30, **lookup_kwargs)
    raise_for_status(resp)
    return resp.text

-conjur_plugin = CredentialPlugin('CyberArk Conjur Secrets Manager Lookup', inputs=conjur_inputs, backend=conjur_backend)
+conjur_plugin = CredentialPlugin('CyberArk Conjur Secret Lookup', inputs=conjur_inputs, backend=conjur_backend)

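The Conjur hunk above reverts a multi-endpoint fallback back to the plain two-step flow: authenticate with the API key, then fetch the secret with the resulting short-lived token. A hedged, standalone sketch of that flow (host, account, username, and secret path are placeholders):

```python
# Hypothetical standalone sketch of the Conjur lookup flow shown above: POST an
# API key to the authenticate endpoint, then GET the secret with the token.
import base64
from urllib.parse import quote, urljoin

import requests

url = 'https://conjur.example.com'  # placeholder
account, username, api_key = 'myaccount', 'host/demo', 'REDACTED'

# https://www.conjur.org/api.html#authentication-authenticate-post
resp = requests.post(
    urljoin(url, '/'.join(['authn', account, quote(username, safe=''), 'authenticate'])),
    headers={'Content-Type': 'text/plain'},
    data=api_key,
    allow_redirects=False,
)
resp.raise_for_status()
token = base64.b64encode(resp.content).decode('utf-8')  # encoding used on the branch side of this hunk

# https://www.conjur.org/api.html#secrets-retrieve-a-secret-get
secret = requests.get(
    urljoin(url, '/'.join(['secrets', account, 'variable', quote('my/secret', safe='')])),
    headers={'Authorization': 'Token token="{}"'.format(token)},
    timeout=30,
)
secret.raise_for_status()
print(secret.text)
```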
View File

@@ -1,7 +1,6 @@
import copy
import os
import pathlib
-import time
from urllib.parse import urljoin

from .plugin import CredentialPlugin, CertFiles, raise_for_status
@@ -248,15 +247,7 @@ def kv_backend(**kwargs):
    request_url = urljoin(url, '/'.join(['v1'] + path_segments)).rstrip('/')

    with CertFiles(cacert) as cert:
        request_kwargs['verify'] = cert
-        request_retries = 0
-        while request_retries < 5:
-            response = sess.get(request_url, **request_kwargs)
-            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
-            if response.status_code == 412:
-                request_retries += 1
-                time.sleep(1)
-            else:
-                break
+        response = sess.get(request_url, **request_kwargs)
    raise_for_status(response)

    json = response.json()
@@ -298,15 +289,8 @@ def ssh_backend(**kwargs):
    with CertFiles(cacert) as cert:
        request_kwargs['verify'] = cert
-        request_retries = 0
-        while request_retries < 5:
-            resp = sess.post(request_url, **request_kwargs)
-            # https://developer.hashicorp.com/vault/docs/enterprise/consistency
-            if resp.status_code == 412:
-                request_retries += 1
-                time.sleep(1)
-            else:
-                break
+        resp = sess.post(request_url, **request_kwargs)
    raise_for_status(resp)
    return resp.json()['data']['signed_key']

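The lines removed in both Vault hunks are a bounded retry on HTTP 412, which Vault Enterprise replicas return until their consistency index catches up (see the linked HashiCorp doc). A small sketch of that pattern in isolation, assuming a plain `requests.Session` (URL and token are placeholders):

```python
# Hedged sketch of the retry-on-412 pattern: Vault Enterprise replicas can
# answer 412 Precondition Failed until an index is consistent, so retry a
# bounded number of times before giving up.
# https://developer.hashicorp.com/vault/docs/enterprise/consistency
import time

import requests

def get_with_consistency_retries(sess: requests.Session, url: str, retries: int = 5, **kwargs) -> requests.Response:
    for _attempt in range(retries):
        response = sess.get(url, **kwargs)
        if response.status_code != 412:
            break
        time.sleep(1)  # give the replica a moment to catch up
    return response

# usage (placeholder URL/token):
# sess = requests.Session()
# sess.headers['X-Vault-Token'] = 's.xxxx'
# resp = get_with_consistency_retries(sess, 'https://vault.example.com/v1/secret/data/app')
```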
View File

@@ -3,7 +3,6 @@ import uuid
import json

from django.conf import settings
-from django.db import connection

import redis

from awx.main.dispatch import get_local_queuename
@@ -14,6 +13,7 @@ logger = logging.getLogger('awx.main.dispatch')
class Control(object):
    services = ('dispatcher', 'callback_receiver')
    result = None
@@ -49,10 +49,7 @@ class Control(object):
        reply_queue = Control.generate_reply_queue_name()
        self.result = None
-        if not connection.get_autocommit():
-            raise RuntimeError('Control-with-reply messages can only be done in autocommit mode')
-        with pg_bus_conn() as conn:
+        with pg_bus_conn(new_connection=True) as conn:
            conn.listen(reply_queue)
            send_data = {'control': command, 'reply_to': reply_queue}
            if extra_data:

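The control path above rides PostgreSQL LISTEN/NOTIFY: the caller listens on a throwaway reply channel, notifies the service's channel with a payload naming `reply_to`, and waits for the answer. A rough sketch of the same round trip with bare `psycopg2` (the DSN and channel names are placeholders, not AWX's actual ones):

```python
# Minimal sketch (assuming psycopg2) of the control-with-reply pattern above:
# LISTEN on a throwaway reply channel, NOTIFY the service's channel with a
# payload naming that reply channel, then block until a reply arrives.
import json
import select
import uuid

import psycopg2

conn = psycopg2.connect('dbname=awx')  # placeholder DSN
conn.set_session(autocommit=True)      # NOTIFY is held until commit otherwise
cur = conn.cursor()

reply_queue = 'reply_to_' + str(uuid.uuid4()).replace('-', '_')
cur.execute(f'LISTEN {reply_queue};')
cur.execute('SELECT pg_notify(%s, %s);', ('dispatcher_channel', json.dumps({'control': 'status', 'reply_to': reply_queue})))

# wait up to 5 seconds for the service to answer on our reply channel
if select.select([conn], [], [], 5) != ([], [], []):
    conn.poll()
    for notify in conn.notifies:
        print(notify.channel, notify.payload)
```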
View File

@@ -192,6 +192,7 @@ class PoolWorker(object):
class StatefulPoolWorker(PoolWorker):
    track_managed_tasks = True
@@ -386,8 +387,6 @@ class AutoscalePool(WorkerPool):
                        reaper.reap_job(j, 'failed')
                    except Exception:
                        logger.exception('failed to reap job UUID {}'.format(w.current_task['uuid']))
-                else:
-                    logger.warning(f'Worker was told to quit but has not, pid={w.pid}')
                orphaned.extend(w.orphaned_tasks)
                self.workers.remove(w)
            elif w.idle and len(self.workers) > self.min_workers:
@@ -451,6 +450,9 @@ class AutoscalePool(WorkerPool):
        try:
            if isinstance(body, dict) and body.get('bind_kwargs'):
                self.add_bind_kwargs(body)
+            # when the cluster heartbeat occurs, clean up internally
+            if isinstance(body, dict) and 'cluster_node_heartbeat' in body['task']:
+                self.cleanup()
            if self.should_grow:
                self.up()
            # we don't care about "preferred queue" round robin distribution, just
@@ -465,7 +467,7 @@ class AutoscalePool(WorkerPool):
                task_name = 'unknown'
                if isinstance(body, dict):
                    task_name = body.get('task')
-                logger.warning(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
+                logger.warn(f'Workers maxed, queuing {task_name}, load: {sum(len(w.managed_tasks) for w in self.workers)} / {len(self.workers)}')
            return super(AutoscalePool, self).write(preferred_queue, body)
        except Exception:
            for conn in connections.all():

View File

@@ -1,13 +1,14 @@
import inspect
import logging
+import sys
import json
import time
from uuid import uuid4

+from django.conf import settings
from django_guid import get_guid

from . import pg_bus_conn
-from awx.main.utils import is_testing

logger = logging.getLogger('awx.main.dispatch')
@@ -66,6 +67,7 @@ class task:
            bind_kwargs = self.bind_kwargs

        class PublisherMixin(object):
            queue = None

            @classmethod
@@ -91,7 +93,7 @@ class task:
                obj.update(**kw)
                if callable(queue):
                    queue = queue()
-                if not is_testing():
+                if not settings.IS_TESTING(sys.argv):
                    with pg_bus_conn() as conn:
                        conn.notify(queue, json.dumps(obj))
                return (obj, queue)

View File

@@ -16,7 +16,12 @@ def startup_reaping():
    If this particular instance is starting, then we know that any running jobs are invalid
    so we will reap those jobs as a special action here
    """
-    jobs = UnifiedJob.objects.filter(status='running', controller_node=Instance.objects.my_hostname())
+    try:
+        me = Instance.objects.me()
+    except RuntimeError as e:
+        logger.warning(f'Local instance is not registered, not running startup reaper: {e}')
+        return
+    jobs = UnifiedJob.objects.filter(status='running', controller_node=me.hostname)
    job_ids = []
    for j in jobs:
        job_ids.append(j.id)
@@ -57,13 +62,16 @@ def reap_waiting(instance=None, status='failed', job_explanation=None, grace_per
    if grace_period is None:
        grace_period = settings.JOB_WAITING_GRACE_PERIOD + settings.TASK_MANAGER_TIMEOUT
-    if instance is None:
-        hostname = Instance.objects.my_hostname()
-    else:
-        hostname = instance.hostname
+    me = instance
+    if me is None:
+        try:
+            me = Instance.objects.me()
+        except RuntimeError as e:
+            logger.warning(f'Local instance is not registered, not running reaper: {e}')
+            return
    if ref_time is None:
        ref_time = tz_now()
-    jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=hostname)
+    jobs = UnifiedJob.objects.filter(status='waiting', modified__lte=ref_time - timedelta(seconds=grace_period), controller_node=me.hostname)
    if excluded_uuids:
        jobs = jobs.exclude(celery_task_id__in=excluded_uuids)
    for j in jobs:
@@ -74,13 +82,16 @@ def reap(instance=None, status='failed', job_explanation=None, excluded_uuids=No
""" """
Reap all jobs in running for this instance. Reap all jobs in running for this instance.
""" """
if instance is None: me = instance
hostname = Instance.objects.my_hostname() if me is None:
else: try:
hostname = instance.hostname me = Instance.objects.me()
except RuntimeError as e:
logger.warning(f'Local instance is not registered, not running reaper: {e}')
return
workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id workflow_ctype_id = ContentType.objects.get_for_model(WorkflowJob).id
jobs = UnifiedJob.objects.filter( jobs = UnifiedJob.objects.filter(
Q(status='running') & (Q(execution_node=hostname) | Q(controller_node=hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id) Q(status='running') & (Q(execution_node=me.hostname) | Q(controller_node=me.hostname)) & ~Q(polymorphic_ctype_id=workflow_ctype_id)
) )
if excluded_uuids: if excluded_uuids:
jobs = jobs.exclude(celery_task_id__in=excluded_uuids) jobs = jobs.exclude(celery_task_id__in=excluded_uuids)

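Stripped of the instance-resolution changes, both sides of `reap_waiting` reduce to the same cutoff query: find 'waiting' jobs owned by this controller that have sat past the grace period. An illustrative reduction using the names from the hunk above (not the full function):

```python
# Illustrative only: the core of reap_waiting() is a cutoff query like this,
# selecting 'waiting' jobs on this controller that are older than the grace period.
from datetime import timedelta

from django.utils.timezone import now as tz_now

from awx.main.models import UnifiedJob

def waiting_jobs_to_reap(hostname: str, grace_period: int):
    cutoff = tz_now() - timedelta(seconds=grace_period)
    return UnifiedJob.objects.filter(
        status='waiting',
        modified__lte=cutoff,
        controller_node=hostname,
    )
```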
View File

@@ -40,6 +40,7 @@ class WorkerSignalHandler:
class AWXConsumerBase(object):
    last_stats = time.time()

    def __init__(self, name, worker, queues=[], pool=None):
@@ -113,6 +114,7 @@ class AWXConsumerBase(object):
            queue = 0
        self.pool.write(queue, body)
        self.total_messages += 1
+        self.record_statistics()

    @log_excess_runtime(logger)
    def record_statistics(self):
@@ -154,16 +156,6 @@ class AWXConsumerPG(AWXConsumerBase):
            # if no successful loops have ran since startup, then we should fail right away
            self.pg_is_down = True  # set so that we fail if we get database errors on startup
            self.pg_down_time = time.time() - self.pg_max_wait  # allow no grace period
-        self.last_cleanup = time.time()
-
-    def run_periodic_tasks(self):
-        self.record_statistics()  # maintains time buffer in method
-
-        if time.time() - self.last_cleanup > 60:  # same as cluster_node_heartbeat
-            # NOTE: if we run out of database connections, it is important to still run cleanup
-            # so that we scale down workers and free up connections
-            self.pool.cleanup()
-            self.last_cleanup = time.time()

    def run(self, *args, **kwargs):
        super(AWXConsumerPG, self).run(*args, **kwargs)
@@ -179,10 +171,8 @@ class AWXConsumerPG(AWXConsumerBase):
                    if init is False:
                        self.worker.on_start()
                        init = True
-                    for e in conn.events(yield_timeouts=True):
-                        if e is not None:
-                            self.process_task(json.loads(e.payload))
-                        self.run_periodic_tasks()
+                    for e in conn.events():
+                        self.process_task(json.loads(e.payload))
                self.pg_is_down = False
                if self.should_stop:
                    return
@@ -239,8 +229,6 @@ class BaseWorker(object):
                    # so we can establish a new connection
                    conn.close_if_unusable_or_obsolete()
                self.perform_work(body, *args)
-            except Exception:
-                logger.exception(f'Unhandled exception in perform_work in worker pid={os.getpid()}')
            finally:
                if 'uuid' in body:
                    uuid = body['uuid']

View File

@@ -3,12 +3,14 @@ import logging
import os
import signal
import time
+import traceback
import datetime

from django.conf import settings
from django.utils.functional import cached_property
from django.utils.timezone import now as tz_now
-from django.db import transaction, connection as django_connection
+from django.db import DatabaseError, OperationalError, transaction, connection as django_connection
+from django.db.utils import InterfaceError, InternalError
from django_guid import set_guid
import psutil
@@ -62,7 +64,6 @@ class CallbackBrokerWorker(BaseWorker):
""" """
MAX_RETRIES = 2 MAX_RETRIES = 2
INDIVIDUAL_EVENT_RETRIES = 3
last_stats = time.time() last_stats = time.time()
last_flush = time.time() last_flush = time.time()
total = 0 total = 0
@@ -154,8 +155,6 @@ class CallbackBrokerWorker(BaseWorker):
        metrics_events_missing_created = 0
        metrics_total_job_event_processing_seconds = datetime.timedelta(seconds=0)
        for cls, events in self.buff.items():
-            if not events:
-                continue
            logger.debug(f'{cls.__name__}.objects.bulk_create({len(events)})')
            for e in events:
                e.modified = now  # this can be set before created because now is set above on line 149
@@ -165,48 +164,38 @@ class CallbackBrokerWorker(BaseWorker):
                else:  # only calculate the seconds if the created time already has been set
                    metrics_total_job_event_processing_seconds += e.modified - e.created
            metrics_duration_to_save = time.perf_counter()
-            saved_events = []
            try:
                cls.objects.bulk_create(events)
                metrics_bulk_events_saved += len(events)
-                saved_events = events
-                self.buff[cls] = []
            except Exception as exc:
-                # If the database is flaking, let ensure_connection throw a general exception
-                # will be caught by the outer loop, which goes into a proper sleep and retry loop
-                django_connection.ensure_connection()
-                logger.warning(f'Error in events bulk_create, will try indiviually, error: {str(exc)}')
+                logger.warning(f'Error in events bulk_create, will try indiviually up to 5 errors, error {str(exc)}')
                # if an exception occurs, we should re-attempt to save the
                # events one-by-one, because something in the list is
                # broken/stale
+                consecutive_errors = 0
+                events_saved = 0
                metrics_events_batch_save_errors += 1
-                for e in events.copy():
+                for e in events:
                    try:
                        e.save()
-                        metrics_singular_events_saved += 1
-                        events.remove(e)
-                        saved_events.append(e)  # Importantly, remove successfully saved events from the buffer
+                        events_saved += 1
+                        consecutive_errors = 0
                    except Exception as exc_indv:
-                        retry_count = getattr(e, '_retry_count', 0) + 1
-                        e._retry_count = retry_count
-
-                        # special sanitization logic for postgres treatment of NUL 0x00 char
-                        if (retry_count == 1) and isinstance(exc_indv, ValueError) and ("\x00" in e.stdout):
-                            e.stdout = e.stdout.replace("\x00", "")
-
-                        if retry_count >= self.INDIVIDUAL_EVENT_RETRIES:
-                            logger.error(f'Hit max retries ({retry_count}) saving individual Event error: {str(exc_indv)}\ndata:\n{e.__dict__}')
-                            events.remove(e)
-                        else:
-                            logger.info(f'Database Error Saving individual Event uuid={e.uuid} try={retry_count}, error: {str(exc_indv)}')
+                        consecutive_errors += 1
+                        logger.info(f'Database Error Saving individual Job Event, error {str(exc_indv)}')
+                        if consecutive_errors >= 5:
+                            raise
+                metrics_singular_events_saved += events_saved
+                if events_saved == 0:
+                    raise
            metrics_duration_to_save = time.perf_counter() - metrics_duration_to_save
-            for e in saved_events:
+            for e in events:
                if not getattr(e, '_skip_websocket_message', False):
                    metrics_events_broadcast += 1
                    emit_event_detail(e)
                if getattr(e, '_notification_trigger_event', False):
                    job_stats_wrapup(getattr(e, e.JOB_REFERENCE), event=e)
+        self.buff = {}
        self.last_flush = time.time()
        # only update metrics if we saved events
        if (metrics_bulk_events_saved + metrics_singular_events_saved) > 0:
@@ -278,16 +267,20 @@ class CallbackBrokerWorker(BaseWorker):
            try:
                self.flush(force=flush)
                break
-            except Exception as exc:
-                # Aside form bugs, exceptions here are assumed to be due to database flake
+            except (OperationalError, InterfaceError, InternalError) as exc:
                if retries >= self.MAX_RETRIES:
                    logger.exception('Worker could not re-establish database connectivity, giving up on one or more events.')
-                    self.buff = {}
                    return
                delay = 60 * retries
                logger.warning(f'Database Error Flushing Job Events, retry #{retries + 1} in {delay} seconds: {str(exc)}')
                django_connection.close()
                time.sleep(delay)
                retries += 1
-            except Exception:
-                logger.exception(f'Callback Task Processor Raised Unexpected Exception processing event data:\n{body}')
+            except DatabaseError:
+                logger.exception('Database Error Flushing Job Events')
+                django_connection.close()
+                break
+            except Exception as exc:
+                tb = traceback.format_exc()
+                logger.error('Callback Task Processor Raised Exception: %r', exc)
+                logger.error('Detail: {}'.format(tb))

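Both sides of the flush logic above implement the same save strategy: one `bulk_create` for throughput, then a row-by-row fallback so a single poison event cannot sink the whole batch. A generic sketch of that strategy (the model, logger, and error limit are illustrative, not AWX's exact code):

```python
# Generic sketch of the batch-then-fallback strategy in the hunk above: try one
# bulk INSERT for throughput; if anything in the batch is broken, fall back to
# saving rows individually so the good events still land.
import logging

logger = logging.getLogger(__name__)

def flush_events(model, events, max_consecutive_errors=5):
    try:
        model.objects.bulk_create(events)
        return len(events)
    except Exception as exc:
        logger.warning('bulk_create failed, saving individually: %s', exc)
    saved = consecutive = 0
    for event in events:
        try:
            event.save()
            saved += 1
            consecutive = 0
        except Exception:
            consecutive += 1
            if consecutive >= max_consecutive_errors:
                raise  # the database is probably down; let the caller retry later
    return saved
```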
View File

@@ -232,6 +232,7 @@ class ImplicitRoleField(models.ForeignKey):
            field_names = [field_names]

        for field_name in field_names:
            if field_name.startswith('singleton:'):
                continue
@@ -243,6 +244,7 @@ class ImplicitRoleField(models.ForeignKey):
            field = getattr(cls, field_name, None)
            if field and type(field) is ReverseManyToOneDescriptor or type(field) is ManyToManyDescriptor:
                if '.' in field_attr:
                    raise Exception('Referencing deep roles through ManyToMany fields is unsupported.')
@@ -627,6 +629,7 @@ class CredentialInputField(JSONSchemaField):
        # `ssh_key_unlock` requirements are very specific and can't be
        # represented without complicated JSON schema
        if model_instance.credential_type.managed is True and 'ssh_key_unlock' in defined_fields:
            # in order to properly test the necessity of `ssh_key_unlock`, we
            # need to know the real value of `ssh_key_data`; for a payload like:
            # {
@@ -788,8 +791,7 @@ class CredentialTypeInjectorField(JSONSchemaField):
                'type': 'object',
                'patternProperties': {
                    # http://docs.ansible.com/ansible/playbooks_variables.html#what-makes-a-valid-variable-name
-                    # plus, add ability to template
-                    r'^[a-zA-Z_\{\}]+[a-zA-Z0-9_\{\}]*$': {"anyOf": [{'type': 'string'}, {'type': 'array'}, {'$ref': '#/properties/extra_vars'}]}
+                    '^[a-zA-Z_]+[a-zA-Z0-9_]*$': {'type': 'string'},
                },
                'additionalProperties': False,
            },
@@ -856,44 +858,27 @@ class CredentialTypeInjectorField(JSONSchemaField):
            template_name = template_name.split('.')[1]
            setattr(valid_namespace['tower'].filename, template_name, 'EXAMPLE_FILENAME')

-        def validate_template_string(type_, key, tmpl):
-            try:
-                sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
-            except UndefinedError as e:
-                raise django_exceptions.ValidationError(
-                    _('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
-                    code='invalid',
-                    params={'value': value},
-                )
-            except SecurityError as e:
-                raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
-            except TemplateSyntaxError as e:
-                raise django_exceptions.ValidationError(
-                    _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
-                    code='invalid',
-                    params={'value': value},
-                )
-
-        def validate_extra_vars(key, node):
-            if isinstance(node, dict):
-                for k, v in node.items():
-                    validate_template_string("extra_vars", 'a key' if key is None else key, k)
-                    validate_extra_vars(k if key is None else "{key}.{k}".format(key=key, k=k), v)
-            elif isinstance(node, list):
-                for i, x in enumerate(node):
-                    validate_extra_vars("{key}[{i}]".format(key=key, i=i), x)
-            else:
-                validate_template_string("extra_vars", key, node)
-
        for type_, injector in value.items():
            if type_ == 'env':
                for key in injector.keys():
                    self.validate_env_var_allowed(key)
-            if type_ == 'extra_vars':
-                validate_extra_vars(None, injector)
-            else:
-                for key, tmpl in injector.items():
-                    validate_template_string(type_, key, tmpl)
+            for key, tmpl in injector.items():
+                try:
+                    sandbox.ImmutableSandboxedEnvironment(undefined=StrictUndefined).from_string(tmpl).render(valid_namespace)
+                except UndefinedError as e:
+                    raise django_exceptions.ValidationError(
+                        _('{sub_key} uses an undefined field ({error_msg})').format(sub_key=key, error_msg=e),
+                        code='invalid',
+                        params={'value': value},
+                    )
+                except SecurityError as e:
+                    raise django_exceptions.ValidationError(_('Encountered unsafe code execution: {}').format(e))
+                except TemplateSyntaxError as e:
+                    raise django_exceptions.ValidationError(
+                        _('Syntax error rendering template for {sub_key} inside of {type} ({error_msg})').format(sub_key=key, type=type_, error_msg=e),
+                        code='invalid',
+                        params={'value': value},
+                    )


class AskForField(models.BooleanField):

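Both versions of the injector validation lean on the same Jinja2 idiom: render the user-supplied template against a namespace of dummy values inside `ImmutableSandboxedEnvironment`, with `StrictUndefined` promoting any unknown variable to an error. A standalone sketch of that idiom (field names and error handling simplified relative to the code above):

```python
# Standalone sketch of the validation idiom above: render a user template
# against dummy values in Jinja2's immutable sandbox; StrictUndefined turns
# any unknown variable into an error instead of silently rendering ''.
from jinja2 import StrictUndefined
from jinja2.exceptions import SecurityError, TemplateSyntaxError, UndefinedError
from jinja2.sandbox import ImmutableSandboxedEnvironment

valid_namespace = {'username': 'EXAMPLE', 'password': 'EXAMPLE'}  # dummy field values

def validate_template(tmpl: str) -> None:
    env = ImmutableSandboxedEnvironment(undefined=StrictUndefined)
    try:
        env.from_string(tmpl).render(valid_namespace)
    except UndefinedError as e:
        raise ValueError(f'template uses an undefined field ({e})')
    except SecurityError as e:
        raise ValueError(f'unsafe code execution: {e}')
    except TemplateSyntaxError as e:
        raise ValueError(f'syntax error ({e})')

validate_template('{{ username }}')      # ok
# validate_template('{{ not_a_field }}') # raises: undefined field
```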
View File

@@ -25,7 +25,7 @@ class Command(BaseCommand):
        with connection.cursor() as cursor:
            cursor.execute(
                f'''
                SELECT
                    b.id, b.job_id, b.host_name, b.created - a.created delta,
                    b.task task,
                    b.event_data::json->'task_action' task_action,

View File

@@ -9,6 +9,7 @@ class Command(BaseCommand):
"""Checks connection to the database, and prints out connection info if not connected""" """Checks connection to the database, and prints out connection info if not connected"""
def handle(self, *args, **options): def handle(self, *args, **options):
with connection.cursor() as cursor: with connection.cursor() as cursor:
cursor.execute("SELECT version()") cursor.execute("SELECT version()")
version = str(cursor.fetchone()[0]) version = str(cursor.fetchone()[0])

View File

@@ -82,6 +82,7 @@ class DeleteMeta:
        part_drop = {}
        for pk, status, created in self.jobs_qs:
            part_key = partition_table_name(self.job_class, created)
            if status in ['pending', 'waiting', 'running']:
                part_drop[part_key] = False

View File

@@ -17,6 +17,7 @@ class Command(BaseCommand):
    def handle(self, *args, **options):
        if not options['user']:
            raise CommandError('Username not supplied. Usage: awx-manage create_oauth2_token --user=username.')
        try:
            user = User.objects.get(username=options['user'])

View File

@@ -1,35 +0,0 @@
-from django.core.management.base import BaseCommand, CommandError
-from django.conf import settings
-
-
-class Command(BaseCommand):
-    """enable or disable authentication system"""
-
-    def add_arguments(self, parser):
-        """
-        This adds the --enable --disable functionalities to the command using mutally_exclusive to avoid situations in which users pass both flags
-        """
-        group = parser.add_mutually_exclusive_group()
-        group.add_argument('--enable', dest='enable', action='store_true', help='Pass --enable to enable local authentication')
-        group.add_argument('--disable', dest='disable', action='store_true', help='Pass --disable to disable local authentication')
-
-    def _enable_disable_auth(self, enable, disable):
-        """
-        this method allows the disabling or enabling of local authenication based on the argument passed into the parser
-        if no arguments throw a command error, if --enable set the DISABLE_LOCAL_AUTH to False
-        if --disable it's set to True. Realizing that the flag is counterintuitive to what is expected.
-        """
-        if enable:
-            settings.DISABLE_LOCAL_AUTH = False
-            print("Setting has changed to {} allowing local authentication".format(settings.DISABLE_LOCAL_AUTH))
-        elif disable:
-            settings.DISABLE_LOCAL_AUTH = True
-            print("Setting has changed to {} disallowing local authentication".format(settings.DISABLE_LOCAL_AUTH))
-        else:
-            raise CommandError('Please pass --enable flag to allow local auth or --disable flag to disable local auth')
-
-    def handle(self, **options):
-        self._enable_disable_auth(options.get('enable'), options.get('disable'))

View File

@@ -10,6 +10,7 @@ from django.utils.text import slugify
class Command(BaseCommand):
    help = 'Export custom inventory scripts into a tarfile.'

    def add_arguments(self, parser):
@@ -20,6 +21,7 @@ class Command(BaseCommand):
        with tempfile.TemporaryDirectory() as tmpdirname:
            with tarfile.open(tar_filename, "w") as tar:
                for cis in CustomInventoryScript.objects.all():
                    # naming convention similar to project paths
                    slug_name = slugify(str(cis.name)).replace(u'-', u'_')

View File

@@ -6,6 +6,7 @@ import json
class Command(BaseCommand):
    help = 'This is for offline licensing usage'

    def add_arguments(self, parser):

View File

@@ -934,6 +934,7 @@ class Command(BaseCommand):
        # (even though inventory_import.Command.handle -- which calls
        # perform_update -- has its own lock, inventory_ID_import)
        with advisory_lock('inventory_{}_perform_update'.format(self.inventory.id)):
            try:
                self.check_license()
            except PermissionDenied as e:

View File

@@ -6,6 +6,7 @@ from django.core.management.base import BaseCommand
class Ungrouped(object):
    name = 'ungrouped'
    policy_instance_percentage = None
    policy_instance_minimum = None

View File

@@ -38,14 +38,7 @@ class Command(BaseCommand):
        (changed, instance) = Instance.objects.register(ip_address=os.environ.get('MY_POD_IP'), node_type='control', uuid=settings.SYSTEM_UUID)
        RegisterQueue(settings.DEFAULT_CONTROL_PLANE_QUEUE_NAME, 100, 0, [], is_container_group=False).register()
        RegisterQueue(
-            settings.DEFAULT_EXECUTION_QUEUE_NAME,
-            100,
-            0,
-            [],
-            is_container_group=True,
-            pod_spec_override=settings.DEFAULT_EXECUTION_QUEUE_POD_SPEC_OVERRIDE,
-            max_forks=settings.DEFAULT_EXECUTION_QUEUE_MAX_FORKS,
-            max_concurrent_jobs=settings.DEFAULT_EXECUTION_QUEUE_MAX_CONCURRENT_JOBS,
+            settings.DEFAULT_EXECUTION_QUEUE_NAME, 100, 0, [], is_container_group=True, pod_spec_override=settings.DEFAULT_EXECUTION_QUEUE_POD_SPEC_OVERRIDE
        ).register()
    else:
        (changed, instance) = Instance.objects.register(hostname=hostname, node_type=node_type, uuid=uuid)

View File

@@ -32,14 +32,8 @@ class Command(BaseCommand):
    def handle(self, **options):
        self.old_key = settings.SECRET_KEY
        custom_key = os.environ.get("TOWER_SECRET_KEY")
-        if options.get("use_custom_key"):
-            if custom_key:
-                self.new_key = custom_key
-            else:
-                print("Use custom key was specified but the env var TOWER_SECRET_KEY was not available")
-                import sys
-
-                sys.exit(1)
+        if options.get("use_custom_key") and custom_key:
+            self.new_key = custom_key
        else:
            self.new_key = base64.encodebytes(os.urandom(33)).decode().rstrip()
        self._notification_templates()

View File

@@ -17,9 +17,7 @@ class InstanceNotFound(Exception):
class RegisterQueue:
-    def __init__(
-        self, queuename, instance_percent, inst_min, hostname_list, is_container_group=None, pod_spec_override=None, max_forks=None, max_concurrent_jobs=None
-    ):
+    def __init__(self, queuename, instance_percent, inst_min, hostname_list, is_container_group=None, pod_spec_override=None):
        self.instance_not_found_err = None
        self.queuename = queuename
        self.instance_percent = instance_percent
@@ -27,8 +25,6 @@ class RegisterQueue:
        self.hostname_list = hostname_list
        self.is_container_group = is_container_group
        self.pod_spec_override = pod_spec_override
-        self.max_forks = max_forks
-        self.max_concurrent_jobs = max_concurrent_jobs

    def get_create_update_instance_group(self):
        created = False
@@ -49,14 +45,6 @@ class RegisterQueue:
            ig.pod_spec_override = self.pod_spec_override
            changed = True

-        if self.max_forks and (ig.max_forks != self.max_forks):
-            ig.max_forks = self.max_forks
-            changed = True
-
-        if self.max_concurrent_jobs and (ig.max_concurrent_jobs != self.max_concurrent_jobs):
-            ig.max_concurrent_jobs = self.max_concurrent_jobs
-            changed = True
-
        if changed:
            ig.save()

View File

@@ -7,6 +7,7 @@ from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand): class Command(BaseCommand):
    help = (
        "Remove an instance (specified by --hostname) from the specified queue (instance group).\n"
        "In order remove the queue, use the `unregister_queue` command."

View File

@@ -28,6 +28,7 @@ class JobStatusLifeCycle:
class ReplayJobEvents(JobStatusLifeCycle):
    recording_start = None
    replay_start = None
@@ -189,6 +190,7 @@ class ReplayJobEvents(JobStatusLifeCycle):
class Command(BaseCommand):
    help = 'Replay job events over websockets ordered by created on date.'

    def _parse_slice_range(self, slice_arg):

View File

@@ -53,7 +53,7 @@ class Command(BaseCommand):
        return lines

    @classmethod
-    def get_connection_status(cls, hostnames, data):
+    def get_connection_status(cls, me, hostnames, data):
        host_stats = [('hostname', 'state', 'start time', 'duration (sec)')]
        for h in hostnames:
            connection_color = '91'  # red
@@ -78,7 +78,7 @@ class Command(BaseCommand):
        return host_stats

    @classmethod
-    def get_connection_stats(cls, hostnames, data):
+    def get_connection_stats(cls, me, hostnames, data):
        host_stats = [('hostname', 'total', 'per minute')]
        for h in hostnames:
            h_safe = safe_name(h)
@@ -119,8 +119,8 @@ class Command(BaseCommand):
            return

        try:
-            my_hostname = Instance.objects.my_hostname()
-            logger.info('Active instance with hostname {} is registered.'.format(my_hostname))
+            me = Instance.objects.me()
+            logger.info('Active instance with hostname {} is registered.'.format(me.hostname))
        except RuntimeError as e:
            # the CLUSTER_HOST_ID in the task, and web instance must match and
            # ensure network connectivity between the task and web instance
@@ -145,19 +145,19 @@ class Command(BaseCommand):
            else:
                data[family.name] = family.samples[0].value

-        my_hostname = Instance.objects.my_hostname()
-        hostnames = [i.hostname for i in Instance.objects.exclude(hostname=my_hostname)]
+        me = Instance.objects.me()
+        hostnames = [i.hostname for i in Instance.objects.exclude(hostname=me.hostname)]

-        host_stats = Command.get_connection_status(hostnames, data)
+        host_stats = Command.get_connection_status(me, hostnames, data)
        lines = Command._format_lines(host_stats)

-        print(f'Broadcast websocket connection status from "{my_hostname}" to:')
+        print(f'Broadcast websocket connection status from "{me.hostname}" to:')
        print('\n'.join(lines))

-        host_stats = Command.get_connection_stats(hostnames, data)
+        host_stats = Command.get_connection_stats(me, hostnames, data)
        lines = Command._format_lines(host_stats)

-        print(f'\nBroadcast websocket connection stats from "{my_hostname}" to:')
+        print(f'\nBroadcast websocket connection stats from "{me.hostname}" to:')
        print('\n'.join(lines))

        return

View File

@@ -7,6 +7,7 @@ from awx.main.models import CredentialType
class Command(BaseCommand):
    help = 'Load default managed credential types.'

    def handle(self, *args, **options):

View File

@@ -10,6 +10,7 @@ from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    help = (
        "Remove specified queue (instance group) from database.\n"
        "Instances inside of queue will continue to exist, \n"

View File

@@ -99,12 +99,9 @@ class InstanceManager(models.Manager):
    instance or role.
    """

-    def my_hostname(self):
-        return settings.CLUSTER_HOST_ID
-
    def me(self):
        """Return the currently active instance."""
-        node = self.filter(hostname=self.my_hostname())
+        node = self.filter(hostname=settings.CLUSTER_HOST_ID)
        if node.exists():
            return node[0]
        raise RuntimeError("No instance found with the current cluster host id")
@@ -158,11 +155,7 @@ class InstanceManager(models.Manager):
            return (False, instance)

        # Create new instance, and fill in default values
-        create_defaults = {
-            'node_state': Instance.States.INSTALLED,
-            'capacity': 0,
-            'listener_port': 27199,
-        }
+        create_defaults = {'node_state': Instance.States.INSTALLED, 'capacity': 0}
        if defaults is not None:
            create_defaults.update(defaults)
        uuid_option = {}

View File

@@ -38,6 +38,7 @@ class SettingsCacheMiddleware(MiddlewareMixin):
class TimingMiddleware(threading.local, MiddlewareMixin):
    dest = '/var/log/tower/profile'

    def __init__(self, *args, **kwargs):

View File

@@ -14,6 +14,7 @@ import awx.main.fields
class Migration(migrations.Migration):
    dependencies = [
        ('taggit', '0002_auto_20150616_2121'),
        ('contenttypes', '0002_remove_content_type_name'),

View File

@@ -12,6 +12,7 @@ from ._squashed_30 import SQUASHED_30
class Migration(migrations.Migration):
    dependencies = [
        ('main', '0003_squashed_v300_v303_updates'),
    ]

View File

@@ -7,6 +7,7 @@ from ._squashed_31 import SQUASHED_31
class Migration(migrations.Migration):
    dependencies = [
        ('main', '0004_squashed_v310_release'),
    ]

Some files were not shown because too many files have changed in this diff.