Mirror of https://github.com/ansible/awx.git (synced 2026-02-10 22:24:45 -03:30)

Compare commits: 23.4.0 ... stale-acti — 65 commits
| SHA1 |
|---|
| 8369c43123 |
| 65655f84de |
| 9aa3d5584a |
| 266e31d71a |
| a1bbe75aed |
| 695f1cf892 |
| 0ab103d8c4 |
| 9ac1c0f6c2 |
| 2e168d8177 |
| d4f7bfef18 |
| 985a8d499d |
| e3b52f0169 |
| f69f600cff |
| 74cd23be5c |
| 209747d88e |
| d91da39f81 |
| 5cd029df96 |
| 5a93a519f6 |
| 5f5cd960d5 |
| 42701f32fe |
| 30d4df788f |
| 1bcd71a8ac |
| 43be90f051 |
| bb1922cdbb |
| 403f545071 |
| a06a2a883c |
| 2529fdcfd7 |
| 19dff9c2d1 |
| 2a6cf032f8 |
| 6119b33a50 |
| aacf9653c5 |
| 325f5250db |
| b14518c1e5 |
| 6440e3cb55 |
| b5f6aac3aa |
| 6e5e1c8fff |
| bf42c63c12 |
| df24cb692b |
| 0d825a744b |
| 5e48bf091b |
| 1294cec92c |
| dae12ee1b8 |
| b091f6cf79 |
| fe564c5fad |
| eb3bc84461 |
| 6aa2997dce |
| dd00bbba42 |
| fe6bac6d9e |
| 87abbd4b10 |
| fb04e5d9f6 |
| 478e2cb28d |
| 2ac304d289 |
| 3e5851f3af |
| adb1b12074 |
| 8fae20c48a |
| ec364cc60e |
| 1cfd51764e |
| 0b8fedfd04 |
| 72a8173462 |
| 873b1fbe07 |
| 1f36e84b45 |
| 8c4bff2b86 |
| 14f636af84 |
| 0057c8daf6 |
| d8a28b3c06 |
.github/actions/run_awx_devel/action.yml (vendored) — 14 changes
```diff
@@ -43,10 +43,14 @@ runs:
   - name: Update default AWX password
     shell: bash
     run: |
-      while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]
-      do
-        echo "Waiting for AWX..."
-        sleep 5
+      SECONDS=0
+      while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' -k https://localhost:8043/api/v2/ping/)" != "200" ]]; do
+        if [[ $SECONDS -gt 600 ]]; then
+          echo "Timing out, AWX never came up"
+          exit 1
+        fi
+        echo "Waiting for AWX..."
+        sleep 5
       done
       echo "AWX is up, updating the password..."
       docker exec -i tools_awx_1 sh <<-EOSH
```
```diff
@@ -67,7 +71,7 @@ runs:
     id: data
     shell: bash
     run: |
-      AWX_IP=$(docker inspect -f '{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}' tools_awx_1)
+      AWX_IP=$(docker inspect -f '{{.NetworkSettings.Networks._sources_awx.IPAddress}}' tools_awx_1)
       ADMIN_TOKEN=$(docker exec -i tools_awx_1 awx-manage create_oauth2_token --user admin)
       echo "ip=$AWX_IP" >> $GITHUB_OUTPUT
       echo "admin_token=$ADMIN_TOKEN" >> $GITHUB_OUTPUT
```
.github/actions/stale/stale.yml (vendored, new file) — 31 lines
@@ -0,0 +1,31 @@ (new file)
```yaml
name: 'Close stale issues and PRs'
on:
  schedule:
    - cron: '30 1 * * *'

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v9
        with:

          debug-only: true
          operations-per-run: 30

          days-before-stale: 30
          days-before-close: 5

          days-before-issue-stale: 180
          days-before-issue-close: 14
          stale-issue-message: 'This issue is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 5 days.'
          close-issue-message: 'This issue was closed because it has been stalled for 5 days with no activity.'
          exempt-issue-labels: 'needs_triage'
          stale-issue-label: 'no_issue_activity'

          days-before-pr-stale: 90
          days-before-pr-close: 14
          stale-pr-message: 'This PR is stale because it has been open 45 days with no activity. Remove stale label or comment or this will be closed in 10 days.'
          close-pr-message: 'This PR was closed because it has been stalled for 10 days with no activity.'
          exempt-pr-labels: 'needs_triage'
          stale-pr-label: 'no_pr_activity'
```
.github/dependabot.yml (vendored, new file) — 10 lines
@@ -0,0 +1,10 @@ (new file)
```yaml
version: 2
updates:
  - package-ecosystem: "pip"
    directory: "docs/docsite/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 2
    labels:
      - "docs"
      - "dependencies"
```
.github/workflows/ci.yml (vendored) — 8 changes
```diff
@@ -11,6 +11,7 @@ jobs:
   common-tests:
     name: ${{ matrix.tests.name }}
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
@@ -20,6 +21,8 @@ jobs:
       tests:
         - name: api-test
           command: /start_tests.sh
+        - name: api-migrations
+          command: /start_tests.sh test_migrations
         - name: api-lint
           command: /var/lib/awx/venv/awx/bin/tox -e linters
         - name: api-swagger
@@ -47,6 +50,7 @@ jobs:
 
   dev-env:
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     steps:
       - uses: actions/checkout@v3
 
@@ -61,6 +65,7 @@ jobs:
 
   awx-operator:
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     steps:
       - name: Checkout awx
         uses: actions/checkout@v3
@@ -110,6 +115,7 @@ jobs:
   collection-sanity:
     name: awx_collection sanity
     runs-on: ubuntu-latest
+    timeout-minutes: 30
     strategy:
       fail-fast: false
     steps:
@@ -129,6 +135,7 @@ jobs:
   collection-integration:
     name: awx_collection integration
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     strategy:
       fail-fast: false
       matrix:
@@ -180,6 +187,7 @@ jobs:
   collection-integration-coverage-combine:
     name: combine awx_collection integration coverage
     runs-on: ubuntu-latest
+    timeout-minutes: 10
     needs:
       - collection-integration
     strategy:
```
.github/workflows/devel_images.yml (vendored) — 1 change
```diff
@@ -12,6 +12,7 @@ jobs:
   push:
     if: endsWith(github.repository, '/awx') || startsWith(github.ref, 'refs/heads/release_')
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
```
.github/workflows/docs.yml (vendored) — 1 change
```diff
@@ -6,6 +6,7 @@ jobs:
   docsite-build:
     name: docsite test build
     runs-on: ubuntu-latest
+    timeout-minutes: 30
     steps:
      - uses: actions/checkout@v3
 
```

```diff
@@ -9,6 +9,7 @@ on:
 jobs:
   push:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     permissions:
       packages: write
       contents: read
```
.github/workflows/label_issue.yml (vendored) — 2 changes
```diff
@@ -13,6 +13,7 @@ permissions:
 jobs:
   triage:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     name: Label Issue
 
     steps:
@@ -26,6 +27,7 @@ jobs:
 
   community:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     name: Label Issue - Community
     steps:
       - uses: actions/checkout@v3
```
.github/workflows/label_pr.yml (vendored) — 2 changes
```diff
@@ -14,6 +14,7 @@ permissions:
 jobs:
   triage:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     name: Label PR
 
     steps:
@@ -25,6 +26,7 @@ jobs:
 
   community:
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     name: Label PR - Community
     steps:
       - uses: actions/checkout@v3
```
.github/workflows/pr_body_check.yml (vendored) — 1 change
```diff
@@ -10,6 +10,7 @@ jobs:
     if: github.repository_owner == 'ansible' && endsWith(github.repository, 'awx')
     name: Scan PR description for semantic versioning keywords
     runs-on: ubuntu-latest
+    timeout-minutes: 20
     permissions:
       packages: write
       contents: read
```
.github/workflows/promote.yml (vendored) — 7 changes
```diff
@@ -13,8 +13,9 @@ permissions:
 
 jobs:
   promote:
-    if: endsWith(github.repository, '/awx')
+    if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
+    timeout-minutes: 90
     steps:
       - name: Checkout awx
         uses: actions/checkout@v3
@@ -46,7 +47,7 @@ jobs:
           COLLECTION_TEMPLATE_VERSION: true
         run: |
           make build_collection
-          if [ "$(curl --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
+          if [ "$(curl -L --head -sw '%{http_code}' https://galaxy.ansible.com/download/${{ env.collection_namespace }}-awx-${{ github.event.release.tag_name }}.tar.gz | tail -1)" == "302" ] ; then \
             echo "Galaxy release already done"; \
           else \
             ansible-galaxy collection publish \
@@ -65,7 +66,7 @@ jobs:
       - name: Build awxkit and upload to pypi
         run: |
           git reset --hard
-          cd awxkit && python3 setup.py bdist_wheel
+          cd awxkit && python3 setup.py sdist bdist_wheel
           twine upload \
             -r ${{ env.pypi_repo }} \
             -u ${{ secrets.PYPI_USERNAME }} \
```
.github/workflows/stage.yml (vendored) — 1 change
```diff
@@ -23,6 +23,7 @@ jobs:
   stage:
     if: endsWith(github.repository, '/awx')
     runs-on: ubuntu-latest
+    timeout-minutes: 90
     permissions:
       packages: write
       contents: write
```
.github/workflows/update_dependabot_prs.yml (vendored) — 1 change
```diff
@@ -9,6 +9,7 @@ jobs:
     name: Update Dependabot Prs
     if: contains(github.event.pull_request.labels.*.name, 'dependencies') && contains(github.event.pull_request.labels.*.name, 'component:ui')
     runs-on: ubuntu-latest
+    timeout-minutes: 20
 
     steps:
       - name: Checkout branch
```
.github/workflows/upload_schema.yml (vendored) — 1 change
```diff
@@ -13,6 +13,7 @@ on:
 jobs:
   push:
     runs-on: ubuntu-latest
+    timeout-minutes: 60
     permissions:
       packages: write
       contents: read
```

```diff
@@ -10,6 +10,7 @@ build:
       3.11
   commands:
     - pip install --user tox
-    - python3 -m tox -e docs
+    - python3 -m tox -e docs --notest -v
+    - python3 -m tox -e docs --skip-pkg-install -q
     - mkdir -p _readthedocs/html/
     - mv docs/docsite/build/html/* _readthedocs/html/
```
```diff
@@ -22,7 +22,7 @@ recursive-exclude awx/settings local_settings.py*
 include tools/scripts/request_tower_configuration.sh
 include tools/scripts/request_tower_configuration.ps1
 include tools/scripts/automation-controller-service
-include tools/scripts/failure-event-handler
+include tools/scripts/rsyslog-4xx-recovery
 include tools/scripts/awx-python
 include awx/playbooks/library/mkfifo.py
 include tools/sosreport/*
```
Makefile — 14 changes
```diff
@@ -43,6 +43,8 @@ PROMETHEUS ?= false
 GRAFANA ?= false
 # If set to true docker-compose will also start a hashicorp vault instance
 VAULT ?= false
+# If set to true docker-compose will also start a hashicorp vault instance with TLS enabled
+VAULT_TLS ?= false
 # If set to true docker-compose will also start a tacacs+ instance
 TACACS ?= false
 
@@ -61,7 +63,7 @@ RECEPTOR_IMAGE ?= quay.io/ansible/receptor:devel
 SRC_ONLY_PKGS ?= cffi,pycparser,psycopg,twilio
 # These should be upgraded in the AWX and Ansible venv before attempting
 # to install the actual requirements
-VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==7.0.5 wheel==0.38.4
+VENV_BOOTSTRAP ?= pip==21.2.4 setuptools==65.6.3 setuptools_scm[toml]==8.0.4 wheel==0.38.4
 
 NAME ?= awx
 
@@ -324,6 +326,12 @@ test:
 	cd awxkit && $(VENV_BASE)/awx/bin/tox -re py3
 	awx-manage check_migrations --dry-run --check -n 'missing_migration_file'
 
+test_migrations:
+	if [ "$(VENV_BASE)" ]; then \
+		. $(VENV_BASE)/awx/bin/activate; \
+	fi; \
+	PYTHONDONTWRITEBYTECODE=1 py.test -p no:cacheprovider --migrations -m migration_test $(PYTEST_ARGS) $(TEST_DIRS)
+
 ## Runs AWX_DOCKER_CMD inside a new docker container.
 docker-runner:
 	docker run -u $(shell id -u) --rm -v $(shell pwd):/awx_devel/:Z --workdir=/awx_devel $(DEVEL_IMAGE_NAME) $(AWX_DOCKER_CMD)
@@ -522,13 +530,15 @@ docker-compose-sources: .git/hooks/pre-commit
 		-e enable_prometheus=$(PROMETHEUS) \
 		-e enable_grafana=$(GRAFANA) \
 		-e enable_vault=$(VAULT) \
+		-e vault_tls=$(VAULT_TLS) \
 		-e enable_tacacs=$(TACACS) \
 		$(EXTRA_SOURCES_ANSIBLE_OPTS)
 
 docker-compose: awx/projects docker-compose-sources
 	ansible-galaxy install --ignore-certs -r tools/docker-compose/ansible/requirements.yml;
 	ansible-playbook -i tools/docker-compose/inventory tools/docker-compose/ansible/initialize_containers.yml \
-		-e enable_vault=$(VAULT);
+		-e enable_vault=$(VAULT) \
+		-e vault_tls=$(VAULT_TLS);
 	$(DOCKER_COMPOSE) -f tools/docker-compose/_sources/docker-compose.yml $(COMPOSE_OPTS) up $(COMPOSE_UP_OPTS) --remove-orphans
 
 docker-compose-credential-plugins: awx/projects docker-compose-sources
```
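Assuming the new flags behave as the Makefile targets above suggest, a development stack with the TLS-enabled Vault container could be brought up like this (illustrative invocation, not part of the diff):

```sh
# Start the docker-compose dev environment with Vault and Vault TLS enabled;
# both variables flow into docker-compose-sources and the initialize playbook.
make docker-compose VAULT=true VAULT_TLS=true
```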
```diff
@@ -7,7 +7,7 @@ AWX provides a web-based user interface, REST API, and task engine built on top
 
 To install AWX, please view the [Install guide](./INSTALL.md).
 
-To learn more about using AWX, and Tower, view the [Tower docs site](http://docs.ansible.com/ansible-tower/index.html).
+To learn more about using AWX, view the [AWX docs site](https://ansible.readthedocs.io/projects/awx/en/latest/).
 
 The AWX Project Frequently Asked Questions can be found [here](https://www.ansible.com/awx-project-faq).
 
```
@@ -1,450 +0,0 @@ (file deleted)
```python
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.

# Python
import re
import json
from functools import reduce

# Django
from django.core.exceptions import FieldError, ValidationError, FieldDoesNotExist
from django.db import models
from django.db.models import Q, CharField, IntegerField, BooleanField, TextField, JSONField
from django.db.models.fields.related import ForeignObjectRel, ManyToManyField, ForeignKey
from django.db.models.functions import Cast
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _

# Django REST Framework
from rest_framework.exceptions import ParseError, PermissionDenied
from rest_framework.filters import BaseFilterBackend

# AWX
from awx.main.utils import get_type_for_model, to_python_boolean
from awx.main.utils.db import get_all_field_names


class TypeFilterBackend(BaseFilterBackend):
    """
    Filter on type field now returned with all objects.
    """

    def filter_queryset(self, request, queryset, view):
        try:
            types = None
            for key, value in request.query_params.items():
                if key == 'type':
                    if ',' in value:
                        types = value.split(',')
                    else:
                        types = (value,)
            if types:
                types_map = {}
                for ct in ContentType.objects.filter(Q(app_label='main') | Q(app_label='auth', model='user')):
                    ct_model = ct.model_class()
                    if not ct_model:
                        continue
                    ct_type = get_type_for_model(ct_model)
                    types_map[ct_type] = ct.pk
                model = queryset.model
                model_type = get_type_for_model(model)
                if 'polymorphic_ctype' in get_all_field_names(model):
                    types_pks = set([v for k, v in types_map.items() if k in types])
                    queryset = queryset.filter(polymorphic_ctype_id__in=types_pks)
                elif model_type in types:
                    queryset = queryset
                else:
                    queryset = queryset.none()
            return queryset
        except FieldError as e:
            # Return a 400 for invalid field names.
            raise ParseError(*e.args)


def get_fields_from_path(model, path):
    """
    Given a Django ORM lookup path (possibly over multiple models)
    Returns the fields in the line, and also the revised lookup path
    ex., given
        model=Organization
        path='project__timeout'
    returns tuple of fields traversed as well and a corrected path,
    for special cases we do substitutions
        ([<IntegerField for timeout>], 'project__timeout')
    """
    # Store of all the fields used to detect repeats
    field_list = []
    new_parts = []
    for name in path.split('__'):
        if model is None:
            raise ParseError(_('No related model for field {}.').format(name))
        # HACK: Make project and inventory source filtering by old field names work for backwards compatibility.
        if model._meta.object_name in ('Project', 'InventorySource'):
            name = {'current_update': 'current_job', 'last_update': 'last_job', 'last_update_failed': 'last_job_failed', 'last_updated': 'last_job_run'}.get(
                name, name
            )

        if name == 'type' and 'polymorphic_ctype' in get_all_field_names(model):
            name = 'polymorphic_ctype'
            new_parts.append('polymorphic_ctype__model')
        else:
            new_parts.append(name)

        if name in getattr(model, 'PASSWORD_FIELDS', ()):
            raise PermissionDenied(_('Filtering on password fields is not allowed.'))
        elif name == 'pk':
            field = model._meta.pk
        else:
            name_alt = name.replace("_", "")
            if name_alt in model._meta.fields_map.keys():
                field = model._meta.fields_map[name_alt]
                new_parts.pop()
                new_parts.append(name_alt)
            else:
                field = model._meta.get_field(name)
            if isinstance(field, ForeignObjectRel) and getattr(field.field, '__prevent_search__', False):
                raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
            elif getattr(field, '__prevent_search__', False):
                raise PermissionDenied(_('Filtering on %s is not allowed.' % name))
        if field in field_list:
            # Field traversed twice, could create infinite JOINs, DoSing Tower
            raise ParseError(_('Loops not allowed in filters, detected on field {}.').format(field.name))
        field_list.append(field)
        model = getattr(field, 'related_model', None)

    return field_list, '__'.join(new_parts)


def get_field_from_path(model, path):
    """
    Given a Django ORM lookup path (possibly over multiple models)
    Returns the last field in the line, and the revised lookup path
    ex.
        (<IntegerField for timeout>, 'project__timeout')
    """
    field_list, new_path = get_fields_from_path(model, path)
    return (field_list[-1], new_path)


class FieldLookupBackend(BaseFilterBackend):
    """
    Filter using field lookups provided via query string parameters.
    """

    RESERVED_NAMES = ('page', 'page_size', 'format', 'order', 'order_by', 'search', 'type', 'host_filter', 'count_disabled', 'no_truncate', 'limit')

    SUPPORTED_LOOKUPS = (
        'exact',
        'iexact',
        'contains',
        'icontains',
        'startswith',
        'istartswith',
        'endswith',
        'iendswith',
        'regex',
        'iregex',
        'gt',
        'gte',
        'lt',
        'lte',
        'in',
        'isnull',
        'search',
    )

    # A list of fields that we know can be filtered on without the possibility
    # of introducing duplicates
    NO_DUPLICATES_ALLOW_LIST = (CharField, IntegerField, BooleanField, TextField)

    def get_fields_from_lookup(self, model, lookup):
        if '__' in lookup and lookup.rsplit('__', 1)[-1] in self.SUPPORTED_LOOKUPS:
            path, suffix = lookup.rsplit('__', 1)
        else:
            path = lookup
            suffix = 'exact'

        if not path:
            raise ParseError(_('Query string field name not provided.'))

        # FIXME: Could build up a list of models used across relationships, use
        # those lookups combined with request.user.get_queryset(Model) to make
        # sure user cannot query using objects he could not view.
        field_list, new_path = get_fields_from_path(model, path)

        new_lookup = new_path
        new_lookup = '__'.join([new_path, suffix])
        return field_list, new_lookup

    def get_field_from_lookup(self, model, lookup):
        '''Method to match return type of single field, if needed.'''
        field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
        return (field_list[-1], new_lookup)

    def to_python_related(self, value):
        value = force_str(value)
        if value.lower() in ('none', 'null'):
            return None
        else:
            return int(value)

    def value_to_python_for_field(self, field, value):
        if isinstance(field, models.BooleanField):
            return to_python_boolean(value)
        elif isinstance(field, (ForeignObjectRel, ManyToManyField, GenericForeignKey, ForeignKey)):
            try:
                return self.to_python_related(value)
            except ValueError:
                raise ParseError(_('Invalid {field_name} id: {field_id}').format(field_name=getattr(field, 'name', 'related field'), field_id=value))
        else:
            return field.to_python(value)

    def value_to_python(self, model, lookup, value):
        try:
            lookup.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("%r is not an allowed field name. Must be ascii encodable." % lookup)

        field_list, new_lookup = self.get_fields_from_lookup(model, lookup)
        field = field_list[-1]

        needs_distinct = not all(isinstance(f, self.NO_DUPLICATES_ALLOW_LIST) for f in field_list)

        # Type names are stored without underscores internally, but are presented and
        # and serialized over the API containing underscores so we remove `_`
        # for polymorphic_ctype__model lookups.
        if new_lookup.startswith('polymorphic_ctype__model'):
            value = value.replace('_', '')
        elif new_lookup.endswith('__isnull'):
            value = to_python_boolean(value)
        elif new_lookup.endswith('__in'):
            items = []
            if not value:
                raise ValueError('cannot provide empty value for __in')
            for item in value.split(','):
                items.append(self.value_to_python_for_field(field, item))
            value = items
        elif new_lookup.endswith('__regex') or new_lookup.endswith('__iregex'):
            try:
                re.compile(value)
            except re.error as e:
                raise ValueError(e.args[0])
        elif new_lookup.endswith('__iexact'):
            if not isinstance(field, (CharField, TextField)):
                raise ValueError(f'{field.name} is not a text field and cannot be filtered by case-insensitive search')
        elif new_lookup.endswith('__search'):
            related_model = getattr(field, 'related_model', None)
            if not related_model:
                raise ValueError('%s is not searchable' % new_lookup[:-8])
            new_lookups = []
            for rm_field in related_model._meta.fields:
                if rm_field.name in ('username', 'first_name', 'last_name', 'email', 'name', 'description', 'playbook'):
                    new_lookups.append('{}__{}__icontains'.format(new_lookup[:-8], rm_field.name))
            return value, new_lookups, needs_distinct
        else:
            if isinstance(field, JSONField):
                new_lookup = new_lookup.replace(field.name, f'{field.name}_as_txt')
            value = self.value_to_python_for_field(field, value)
        return value, new_lookup, needs_distinct

    def filter_queryset(self, request, queryset, view):
        try:
            # Apply filters specified via query_params. Each entry in the lists
            # below is (negate, field, value).
            and_filters = []
            or_filters = []
            chain_filters = []
            role_filters = []
            search_filters = {}
            needs_distinct = False
            # Can only have two values: 'AND', 'OR'
            # If 'AND' is used, an item must satisfy all conditions to show up in the results.
            # If 'OR' is used, an item just needs to satisfy one condition to appear in results.
            search_filter_relation = 'OR'
            for key, values in request.query_params.lists():
                if key in self.RESERVED_NAMES:
                    continue

                # HACK: make `created` available via API for the Django User ORM model
                # so it keep compatibility with other objects which exposes the `created` attr.
                if queryset.model._meta.object_name == 'User' and key.startswith('created'):
                    key = key.replace('created', 'date_joined')

                # HACK: Make job event filtering by host name mostly work even
                # when not capturing job event hosts M2M.
                if queryset.model._meta.object_name == 'JobEvent' and key.startswith('hosts__name'):
                    key = key.replace('hosts__name', 'or__host__name')
                    or_filters.append((False, 'host__name__isnull', True))

                # Custom __int filter suffix (internal use only).
                q_int = False
                if key.endswith('__int'):
                    key = key[:-5]
                    q_int = True

                # RBAC filtering
                if key == 'role_level':
                    role_filters.append(values[0])
                    continue

                # Search across related objects.
                if key.endswith('__search'):
                    if values and ',' in values[0]:
                        search_filter_relation = 'AND'
                        values = reduce(lambda list1, list2: list1 + list2, [i.split(',') for i in values])
                    for value in values:
                        search_value, new_keys, _ = self.value_to_python(queryset.model, key, force_str(value))
                        assert isinstance(new_keys, list)
                        search_filters[search_value] = new_keys
                    # by definition, search *only* joins across relations,
                    # so it _always_ needs a .distinct()
                    needs_distinct = True
                    continue

                # Custom chain__ and or__ filters, mutually exclusive (both can
                # precede not__).
                q_chain = False
                q_or = False
                if key.startswith('chain__'):
                    key = key[7:]
                    q_chain = True
                elif key.startswith('or__'):
                    key = key[4:]
                    q_or = True

                # Custom not__ filter prefix.
                q_not = False
                if key.startswith('not__'):
                    key = key[5:]
                    q_not = True

                # Convert value(s) to python and add to the appropriate list.
                for value in values:
                    if q_int:
                        value = int(value)
                    value, new_key, distinct = self.value_to_python(queryset.model, key, value)
                    if distinct:
                        needs_distinct = True
                    if '_as_txt' in new_key:
                        fname = next(item for item in new_key.split('__') if item.endswith('_as_txt'))
                        queryset = queryset.annotate(**{fname: Cast(fname[:-7], output_field=TextField())})
                    if q_chain:
                        chain_filters.append((q_not, new_key, value))
                    elif q_or:
                        or_filters.append((q_not, new_key, value))
                    else:
                        and_filters.append((q_not, new_key, value))

            # Now build Q objects for database query filter.
            if and_filters or or_filters or chain_filters or role_filters or search_filters:
                args = []
                for n, k, v in and_filters:
                    if n:
                        args.append(~Q(**{k: v}))
                    else:
                        args.append(Q(**{k: v}))
                for role_name in role_filters:
                    if not hasattr(queryset.model, 'accessible_pk_qs'):
                        raise ParseError(_('Cannot apply role_level filter to this list because its model does not use roles for access control.'))
                    args.append(Q(pk__in=queryset.model.accessible_pk_qs(request.user, role_name)))
                if or_filters:
                    q = Q()
                    for n, k, v in or_filters:
                        if n:
                            q |= ~Q(**{k: v})
                        else:
                            q |= Q(**{k: v})
                    args.append(q)
                if search_filters and search_filter_relation == 'OR':
                    q = Q()
                    for term, constrains in search_filters.items():
                        for constrain in constrains:
                            q |= Q(**{constrain: term})
                    args.append(q)
                elif search_filters and search_filter_relation == 'AND':
                    for term, constrains in search_filters.items():
                        q_chain = Q()
                        for constrain in constrains:
                            q_chain |= Q(**{constrain: term})
                        queryset = queryset.filter(q_chain)
                for n, k, v in chain_filters:
                    if n:
                        q = ~Q(**{k: v})
                    else:
                        q = Q(**{k: v})
                    queryset = queryset.filter(q)
                queryset = queryset.filter(*args)
                if needs_distinct:
                    queryset = queryset.distinct()
            return queryset
        except (FieldError, FieldDoesNotExist, ValueError, TypeError) as e:
            raise ParseError(e.args[0])
        except ValidationError as e:
            raise ParseError(json.dumps(e.messages, ensure_ascii=False))


class OrderByBackend(BaseFilterBackend):
    """
    Filter to apply ordering based on query string parameters.
    """

    def filter_queryset(self, request, queryset, view):
        try:
            order_by = None
            for key, value in request.query_params.items():
                if key in ('order', 'order_by'):
                    order_by = value
                    if ',' in value:
                        order_by = value.split(',')
                    else:
                        order_by = (value,)
            default_order_by = self.get_default_ordering(view)
            # glue the order by and default order by together so that the default is the backup option
            order_by = list(order_by or []) + list(default_order_by or [])
            if order_by:
                order_by = self._validate_ordering_fields(queryset.model, order_by)
                # Special handling of the type field for ordering. In this
                # case, we're not sorting exactly on the type field, but
                # given the limited number of views with multiple types,
                # sorting on polymorphic_ctype.model is effectively the same.
                new_order_by = []
                if 'polymorphic_ctype' in get_all_field_names(queryset.model):
                    for field in order_by:
                        if field == 'type':
                            new_order_by.append('polymorphic_ctype__model')
                        elif field == '-type':
                            new_order_by.append('-polymorphic_ctype__model')
                        else:
                            new_order_by.append(field)
                else:
                    for field in order_by:
                        if field not in ('type', '-type'):
                            new_order_by.append(field)
                queryset = queryset.order_by(*new_order_by)
            return queryset
        except FieldError as e:
            # Return a 400 for invalid field names.
            raise ParseError(*e.args)

    def get_default_ordering(self, view):
        ordering = getattr(view, 'ordering', None)
        if isinstance(ordering, str):
            return (ordering,)
        return ordering

    def _validate_ordering_fields(self, model, order_by):
        for field_name in order_by:
            # strip off the negation prefix `-` if it exists
            prefix = ''
            path = field_name
            if field_name[0] == '-':
                prefix = field_name[0]
                path = field_name[1:]
            try:
                field, new_path = get_field_from_path(model, path)
                new_path = '{}{}'.format(prefix, new_path)
            except (FieldError, FieldDoesNotExist) as e:
                raise ParseError(e.args[0])
            yield new_path
```
```diff
@@ -30,12 +30,13 @@ from rest_framework.permissions import IsAuthenticated
 from rest_framework.renderers import StaticHTMLRenderer
 from rest_framework.negotiation import DefaultContentNegotiation
 
+from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend
+from ansible_base.lib.utils.models import get_all_field_names
+
 # AWX
-from awx.api.filters import FieldLookupBackend
 from awx.main.models import UnifiedJob, UnifiedJobTemplate, User, Role, Credential, WorkflowJobTemplateNode, WorkflowApprovalTemplate
 from awx.main.access import optimize_queryset
 from awx.main.utils import camelcase_to_underscore, get_search_fields, getattrd, get_object_or_400, decrypt_field, get_awx_version
-from awx.main.utils.db import get_all_field_names
 from awx.main.utils.licensing import server_product_name
 from awx.main.views import ApiErrorView
 from awx.api.serializers import ResourceAccessListElementSerializer, CopySerializer
@@ -90,7 +91,7 @@ class LoggedLoginView(auth_views.LoginView):
         ret = super(LoggedLoginView, self).post(request, *args, **kwargs)
         if request.user.is_authenticated:
             logger.info(smart_str(u"User {} logged in from {}".format(self.request.user.username, request.META.get('REMOTE_ADDR', None))))
-            ret.set_cookie('userLoggedIn', 'true')
+            ret.set_cookie('userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
             ret.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
 
         return ret
@@ -106,7 +107,7 @@ class LoggedLogoutView(auth_views.LogoutView):
         original_user = getattr(request, 'user', None)
         ret = super(LoggedLogoutView, self).dispatch(request, *args, **kwargs)
         current_user = getattr(request, 'user', None)
-        ret.set_cookie('userLoggedIn', 'false')
+        ret.set_cookie('userLoggedIn', 'false', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
         if (not current_user or not getattr(current_user, 'pk', True)) and current_user != original_user:
             logger.info("User {} logged out.".format(original_user.username))
         return ret
```
```diff
@@ -43,6 +43,8 @@ from rest_framework.utils.serializer_helpers import ReturnList
 # Django-Polymorphic
 from polymorphic.models import PolymorphicModel
 
+from ansible_base.lib.utils.models import get_type_for_model
+
 # AWX
 from awx.main.access import get_user_capabilities
 from awx.main.constants import ACTIVE_STATES, CENSOR_VALUE
@@ -99,10 +101,9 @@ from awx.main.models import (
     CLOUD_INVENTORY_SOURCES,
 )
 from awx.main.models.base import VERBOSITY_CHOICES, NEW_JOB_TYPE_CHOICES
-from awx.main.models.rbac import get_roles_on_resource, role_summary_fields_generator
+from awx.main.models.rbac import role_summary_fields_generator, RoleAncestorEntry
 from awx.main.fields import ImplicitRoleField
 from awx.main.utils import (
-    get_type_for_model,
     get_model_for_type,
     camelcase_to_underscore,
     getattrd,
@@ -2201,6 +2202,99 @@ class BulkHostCreateSerializer(serializers.Serializer):
         return return_data
 
 
+class BulkHostDeleteSerializer(serializers.Serializer):
+    hosts = serializers.ListField(
+        allow_empty=False,
+        max_length=100000,
+        write_only=True,
+        help_text=_('List of hosts ids to be deleted, e.g. [105, 130, 131, 200]'),
+    )
+
+    class Meta:
+        model = Host
+        fields = ('hosts',)
+
+    def validate(self, attrs):
+        request = self.context.get('request', None)
+        max_hosts = settings.BULK_HOST_MAX_DELETE
+        # Validating the number of hosts to be deleted
+        if len(attrs['hosts']) > max_hosts:
+            raise serializers.ValidationError(
+                {
+                    "ERROR": 'Number of hosts exceeds system setting BULK_HOST_MAX_DELETE',
+                    "BULK_HOST_MAX_DELETE": max_hosts,
+                    "Hosts_count": len(attrs['hosts']),
+                }
+            )
+
+        # Getting list of all host objects, filtered by the list of the hosts to delete
+        attrs['host_qs'] = Host.objects.get_queryset().filter(pk__in=attrs['hosts']).only('id', 'inventory_id', 'name')
+
+        # Converting the queryset data in a dict. to reduce the number of queries when
+        # manipulating the data
+        attrs['hosts_data'] = attrs['host_qs'].values()
+
+        if len(attrs['host_qs']) == 0:
+            error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in attrs['hosts']}
+            raise serializers.ValidationError({'hosts': error_hosts})
+
+        if len(attrs['host_qs']) < len(attrs['hosts']):
+            hosts_exists = [host['id'] for host in attrs['hosts_data']]
+            failed_hosts = list(set(attrs['hosts']).difference(hosts_exists))
+            error_hosts = {host: "Hosts do not exist or you lack permission to delete it" for host in failed_hosts}
+            raise serializers.ValidationError({'hosts': error_hosts})
+
+        # Getting all inventories that the hosts can be in
+        inv_list = list(set([host['inventory_id'] for host in attrs['hosts_data']]))
+
+        # Checking that the user have permission to all inventories
+        errors = dict()
+        for inv in Inventory.objects.get_queryset().filter(pk__in=inv_list):
+            if request and not request.user.is_superuser:
+                if request.user not in inv.admin_role:
+                    errors[inv.name] = "Lack permissions to delete hosts from this inventory."
+        if errors != {}:
+            raise PermissionDenied({"inventories": errors})
+
+        # check the inventory type only if the user have permission to it.
+        errors = dict()
+        for inv in Inventory.objects.get_queryset().filter(pk__in=inv_list):
+            if inv.kind != '':
+                errors[inv.name] = "Hosts can only be deleted from manual inventories."
+        if errors != {}:
+            raise serializers.ValidationError({"inventories": errors})
+        attrs['inventories'] = inv_list
+        return attrs
+
+    def delete(self, validated_data):
+        result = {"hosts": dict()}
+        changes = {'deleted_hosts': dict()}
+        for inventory in validated_data['inventories']:
+            changes['deleted_hosts'][inventory] = list()
+
+        for host in validated_data['hosts_data']:
+            result["hosts"][host["id"]] = f"The host {host['name']} was deleted"
+            changes['deleted_hosts'][host["inventory_id"]].append({"host_id": host["id"], "host_name": host["name"]})
+
+        try:
+            validated_data['host_qs'].delete()
+        except Exception as e:
+            raise serializers.ValidationError({"detail": _(f"cannot delete hosts, host deletion error {e}")})
+
+        request = self.context.get('request', None)
+
+        for inventory in validated_data['inventories']:
+            activity_entry = ActivityStream.objects.create(
+                operation='update',
+                object1='inventory',
+                changes=json.dumps(changes['deleted_hosts'][inventory]),
+                actor=request.user,
+            )
+            activity_entry.inventory.add(inventory)
+
+        return result
+
+
 class GroupTreeSerializer(GroupSerializer):
     children = serializers.SerializerMethodField()
 
@@ -2664,6 +2758,17 @@ class ResourceAccessListElementSerializer(UserSerializer):
         if 'summary_fields' not in ret:
             ret['summary_fields'] = {}
 
+        team_content_type = ContentType.objects.get_for_model(Team)
+        content_type = ContentType.objects.get_for_model(obj)
+
+        def get_roles_on_resource(parent_role):
+            "Returns a string list of the roles a parent_role has for current obj."
+            return list(
+                RoleAncestorEntry.objects.filter(ancestor=parent_role, content_type_id=content_type.id, object_id=obj.id)
+                .values_list('role_field', flat=True)
+                .distinct()
+            )
+
         def format_role_perm(role):
             role_dict = {'id': role.id, 'name': role.name, 'description': role.description}
             try:
@@ -2679,7 +2784,7 @@ class ResourceAccessListElementSerializer(UserSerializer):
             else:
                 # Singleton roles should not be managed from this view, as per copy/edit rework spec
                 role_dict['user_capabilities'] = {'unattach': False}
-            return {'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, role)}
+            return {'role': role_dict, 'descendant_roles': get_roles_on_resource(role)}
 
         def format_team_role_perm(naive_team_role, permissive_role_ids):
             ret = []
@@ -2705,12 +2810,9 @@ class ResourceAccessListElementSerializer(UserSerializer):
             else:
                 # Singleton roles should not be managed from this view, as per copy/edit rework spec
                 role_dict['user_capabilities'] = {'unattach': False}
-            ret.append({'role': role_dict, 'descendant_roles': get_roles_on_resource(obj, team_role)})
+            ret.append({'role': role_dict, 'descendant_roles': get_roles_on_resource(team_role)})
             return ret
 
-        team_content_type = ContentType.objects.get_for_model(Team)
-        content_type = ContentType.objects.get_for_model(obj)
-
         direct_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('id', flat=True)
         all_permissive_role_ids = Role.objects.filter(content_type=content_type, object_id=obj.id).values_list('ancestors__id', flat=True)
```
awx/api/templates/api/bulk_host_delete_view.md (new file) — 22 lines
@@ -0,0 +1,22 @@ (new file)
```markdown
# Bulk Host Delete

This endpoint allows the client to delete multiple hosts from inventories.
They may do this by providing a list of host IDs to be deleted.

Example:

    {
        "hosts": [1, 2, 3, 4, 5]
    }

Return data:

    {
        "hosts": {
            "1": "The host a1 was deleted",
            "2": "The host a2 was deleted",
            "3": "The host a3 was deleted",
            "4": "The host a4 was deleted",
            "5": "The host a5 was deleted"
        }
    }
```
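A sketch of how the new endpoint might be exercised once the `/api/v2/bulk/host_delete/` route is registered later in this diff; the token and host IDs are placeholders:

```sh
# Illustrative call (not from the diff): delete five hosts by ID in one request.
# $ADMIN_TOKEN is assumed to come from `awx-manage create_oauth2_token --user admin`.
curl -k -X POST https://localhost:8043/api/v2/bulk/host_delete/ \
  -H "Authorization: Bearer $ADMIN_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"hosts": [1, 2, 3, 4, 5]}'
```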
```diff
@@ -36,6 +36,7 @@ from awx.api.views import (
 from awx.api.views.bulk import (
     BulkView,
     BulkHostCreateView,
+    BulkHostDeleteView,
     BulkJobLaunchView,
 )
 
@@ -152,6 +153,7 @@ v2_urls = [
     re_path(r'^workflow_approvals/', include(workflow_approval_urls)),
     re_path(r'^bulk/$', BulkView.as_view(), name='bulk'),
     re_path(r'^bulk/host_create/$', BulkHostCreateView.as_view(), name='bulk_host_create'),
+    re_path(r'^bulk/host_delete/$', BulkHostDeleteView.as_view(), name='bulk_host_delete'),
     re_path(r'^bulk/job_launch/$', BulkJobLaunchView.as_view(), name='bulk_job_launch'),
 ]
 
```
```diff
@@ -1,10 +1,11 @@
 from django.urls import re_path
 
-from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver
+from awx.api.views.webhooks import WebhookKeyView, GithubWebhookReceiver, GitlabWebhookReceiver, BitbucketDcWebhookReceiver
 
 
 urlpatterns = [
     re_path(r'^webhook_key/$', WebhookKeyView.as_view(), name='webhook_key'),
     re_path(r'^github/$', GithubWebhookReceiver.as_view(), name='webhook_receiver_github'),
     re_path(r'^gitlab/$', GitlabWebhookReceiver.as_view(), name='webhook_receiver_gitlab'),
+    re_path(r'^bitbucket_dc/$', BitbucketDcWebhookReceiver.as_view(), name='webhook_receiver_bitbucket_dc'),
 ]
```
```diff
@@ -128,6 +128,10 @@ logger = logging.getLogger('awx.api.views')
 
 
 def unpartitioned_event_horizon(cls):
+    with connection.cursor() as cursor:
+        cursor.execute(f"SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE table_name = '_unpartitioned_{cls._meta.db_table}';")
+        if not cursor.fetchone():
+            return 0
     with connection.cursor() as cursor:
         try:
             cursor.execute(f'SELECT MAX(id) FROM _unpartitioned_{cls._meta.db_table}')
@@ -738,8 +742,8 @@ class TeamActivityStreamList(SubListAPIView):
         qs = self.request.user.get_queryset(self.model)
         return qs.filter(
             Q(team=parent)
-            | Q(project__in=models.Project.accessible_objects(parent, 'read_role'))
-            | Q(credential__in=models.Credential.accessible_objects(parent, 'read_role'))
+            | Q(project__in=models.Project.accessible_objects(parent.member_role, 'read_role'))
+            | Q(credential__in=models.Credential.accessible_objects(parent.member_role, 'read_role'))
         )
 
 
@@ -1393,7 +1397,7 @@ class OrganizationCredentialList(SubListCreateAPIView):
         self.check_parent_access(organization)
 
         user_visible = models.Credential.accessible_objects(self.request.user, 'read_role').all()
-        org_set = models.Credential.accessible_objects(organization.admin_role, 'read_role').all()
+        org_set = models.Credential.objects.filter(organization=organization)
 
         if self.request.user.is_superuser or self.request.user.is_system_auditor:
             return org_set
 
@@ -34,6 +34,7 @@ class BulkView(APIView):
         '''List top level resources'''
         data = OrderedDict()
         data['host_create'] = reverse('api:bulk_host_create', request=request)
+        data['host_delete'] = reverse('api:bulk_host_delete', request=request)
         data['job_launch'] = reverse('api:bulk_job_launch', request=request)
         return Response(data)
 
@@ -72,3 +73,20 @@ class BulkHostCreateView(GenericAPIView):
             result = serializer.create(serializer.validated_data)
             return Response(result, status=status.HTTP_201_CREATED)
         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+
+class BulkHostDeleteView(GenericAPIView):
+    permission_classes = [IsAuthenticated]
+    model = Host
+    serializer_class = serializers.BulkHostDeleteSerializer
+    allowed_methods = ['GET', 'POST', 'OPTIONS']
+
+    def get(self, request):
+        return Response({"detail": "Bulk delete hosts with this endpoint"}, status=status.HTTP_200_OK)
+
+    def post(self, request):
+        serializer = serializers.BulkHostDeleteSerializer(data=request.data, context={'request': request})
+        if serializer.is_valid():
+            result = serializer.delete(serializer.validated_data)
+            return Response(result, status=status.HTTP_201_CREATED)
+        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
```diff
@@ -1,4 +1,4 @@
-from hashlib import sha1
+from hashlib import sha1, sha256
 import hmac
 import logging
 import urllib.parse
@@ -99,14 +99,31 @@ class WebhookReceiverBase(APIView):
     def get_signature(self):
         raise NotImplementedError
 
+    def must_check_signature(self):
+        return True
+
+    def is_ignored_request(self):
+        return False
+
     def check_signature(self, obj):
         if not obj.webhook_key:
             raise PermissionDenied
+        if not self.must_check_signature():
+            logger.debug("skipping signature validation")
+            return
 
-        mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
-        logger.debug("header signature: %s", self.get_signature())
+        hash_alg, expected_digest = self.get_signature()
+        if hash_alg == 'sha1':
+            mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha1)
+        elif hash_alg == 'sha256':
+            mac = hmac.new(force_bytes(obj.webhook_key), msg=force_bytes(self.request.body), digestmod=sha256)
+        else:
+            logger.debug("Unsupported signature type, supported: sha1, sha256, received: {}".format(hash_alg))
+            raise PermissionDenied
+
+        logger.debug("header signature: %s", expected_digest)
         logger.debug("calculated signature: %s", force_bytes(mac.hexdigest()))
-        if not hmac.compare_digest(force_bytes(mac.hexdigest()), self.get_signature()):
+        if not hmac.compare_digest(force_bytes(mac.hexdigest()), expected_digest):
             raise PermissionDenied
 
     @csrf_exempt
@@ -118,6 +135,10 @@ class WebhookReceiverBase(APIView):
         obj = self.get_object()
         self.check_signature(obj)
 
+        if self.is_ignored_request():
+            # This was an ignored request type (e.g. ping), don't act on it
+            return Response({'message': _("Webhook ignored")}, status=status.HTTP_200_OK)
+
         event_type = self.get_event_type()
         event_guid = self.get_event_guid()
         event_ref = self.get_event_ref()
@@ -186,7 +207,7 @@ class GithubWebhookReceiver(WebhookReceiverBase):
         if hash_alg != 'sha1':
             logger.debug("Unsupported signature type, expected: sha1, received: {}".format(hash_alg))
             raise PermissionDenied
-        return force_bytes(signature)
+        return hash_alg, force_bytes(signature)
 
 
 class GitlabWebhookReceiver(WebhookReceiverBase):
@@ -214,15 +235,73 @@ class GitlabWebhookReceiver(WebhookReceiverBase):
 
         return "{}://{}/api/v4/projects/{}/statuses/{}".format(parsed.scheme, parsed.netloc, project['id'], self.get_event_ref())
 
-    def get_signature(self):
-        return force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
-
     def check_signature(self, obj):
         if not obj.webhook_key:
             raise PermissionDenied
 
+        token_from_request = force_bytes(self.request.META.get('HTTP_X_GITLAB_TOKEN') or '')
+
         # GitLab only returns the secret token, not an hmac hash. Use
         # the hmac `compare_digest` helper function to prevent timing
         # analysis by attackers.
-        if not hmac.compare_digest(force_bytes(obj.webhook_key), self.get_signature()):
+        if not hmac.compare_digest(force_bytes(obj.webhook_key), token_from_request):
             raise PermissionDenied
+
+
+class BitbucketDcWebhookReceiver(WebhookReceiverBase):
+    service = 'bitbucket_dc'
+
+    ref_keys = {
+        'repo:refs_changed': 'changes.0.toHash',
+        'mirror:repo_synchronized': 'changes.0.toHash',
+        'pr:opened': 'pullRequest.toRef.latestCommit',
+        'pr:from_ref_updated': 'pullRequest.toRef.latestCommit',
+        'pr:modified': 'pullRequest.toRef.latestCommit',
+    }
+
+    def get_event_type(self):
+        return self.request.META.get('HTTP_X_EVENT_KEY')
+
+    def get_event_guid(self):
+        return self.request.META.get('HTTP_X_REQUEST_ID')
+
+    def get_event_status_api(self):
+        # https://<bitbucket-base-url>/rest/build-status/1.0/commits/<commit-hash>
+        if self.get_event_type() not in self.ref_keys.keys():
+            return
+        if self.get_event_ref() is None:
+            return
+        any_url = None
+        if 'actor' in self.request.data:
+            any_url = self.request.data['actor'].get('links', {}).get('self')
+        if any_url is None and 'repository' in self.request.data:
+            any_url = self.request.data['repository'].get('links', {}).get('self')
+        if any_url is None:
+            return
+        any_url = any_url[0].get('href')
+        if any_url is None:
+            return
+        parsed = urllib.parse.urlparse(any_url)
+
+        return "{}://{}/rest/build-status/1.0/commits/{}".format(parsed.scheme, parsed.netloc, self.get_event_ref())
+
+    def is_ignored_request(self):
+        return self.get_event_type() not in [
+            'repo:refs_changed',
+            'mirror:repo_synchronized',
+            'pr:opened',
+            'pr:from_ref_updated',
+            'pr:modified',
+        ]
+
+    def must_check_signature(self):
+        # Bitbucket does not sign ping requests...
+        return self.get_event_type() != 'diagnostics:ping'
+
+    def get_signature(self):
+        header_sig = self.request.META.get('HTTP_X_HUB_SIGNATURE')
+        if not header_sig:
+            logger.debug("Expected signature missing from header key HTTP_X_HUB_SIGNATURE")
+            raise PermissionDenied
+        hash_alg, signature = header_sig.split('=')
+        return hash_alg, force_bytes(signature)
```
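For local testing of the sha256 path above, the digest the receiver computes could presumably be reproduced with openssl (the key and payload file are placeholders, not from the diff):

```sh
# Compute the HMAC-SHA256 hexdigest of a saved webhook payload with the
# configured webhook key; the X-Hub-Signature header sent to AWX would then
# be "sha256=<hexdigest>", matching what hmac.compare_digest checks.
openssl dgst -sha256 -hmac "$WEBHOOK_KEY" < "$PAYLOAD_FILE"
```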
```diff
@@ -7,8 +7,10 @@ import json
 # Django
 from django.db import models
 
+from ansible_base.lib.utils.models import prevent_search
+
 # AWX
-from awx.main.models.base import CreatedModifiedModel, prevent_search
+from awx.main.models.base import CreatedModifiedModel
 from awx.main.utils import encrypt_field
 from awx.conf import settings_registry
```
```diff
@@ -20,11 +20,12 @@ from rest_framework.exceptions import ParseError, PermissionDenied
 # Django OAuth Toolkit
 from awx.main.models.oauth import OAuth2Application, OAuth2AccessToken
 
+from ansible_base.lib.utils.validation import to_python_boolean
+
 # AWX
 from awx.main.utils import (
     get_object_or_400,
     get_pk_from_dict,
-    to_python_boolean,
     get_licenser,
 )
 from awx.main.models import (
@@ -79,7 +80,6 @@ __all__ = [
     'get_user_queryset',
     'check_user_access',
     'check_user_access_with_errors',
-    'user_accessible_objects',
     'consumer_access',
 ]
 
@@ -136,10 +136,6 @@ def register_access(model_class, access_class):
     access_registry[model_class] = access_class
 
 
-def user_accessible_objects(user, role_name):
-    return ResourceMixin._accessible_objects(User, user, role_name)
-
-
 def get_user_queryset(user, model_class):
     """
     Return a queryset for the given model_class containing only the instances
```
```diff
@@ -827,6 +827,16 @@ register(
     category_slug='bulk',
 )
 
+register(
+    'BULK_HOST_MAX_DELETE',
+    field_class=fields.IntegerField,
+    default=250,
+    label=_('Max number of hosts to allow to be deleted in a single bulk action'),
+    help_text=_('Max number of hosts to allow to be deleted in a single bulk action'),
+    category=_('Bulk Actions'),
+    category_slug='bulk',
+)
+
 register(
     'UI_NEXT',
     field_class=fields.BooleanField,
```
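Assuming the standard AWX settings API shape (`/api/v2/settings/<slug>/`, here the `bulk` slug registered above), the new limit could be raised with an illustrative call like this:

```sh
# Hypothetical adjustment of the new setting; default is 250 per the register()
# call above. Token and host are placeholders.
curl -k -X PATCH https://localhost:8043/api/v2/settings/bulk/ \
  -H "Authorization: Bearer $ADMIN_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"BULK_HOST_MAX_DELETE": 500}'
```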
```diff
@@ -58,7 +58,7 @@ aim_inputs = {
             'id': 'object_property',
             'label': _('Object Property'),
             'type': 'string',
-            'help_text': _('The property of the object to return. Default: Content Ex: Username, Address, etc.'),
+            'help_text': _('The property of the object to return. Available properties: Username, Password and Address.'),
         },
         {
             'id': 'reason',
@@ -111,8 +111,12 @@ def aim_backend(**kwargs):
         object_property = 'Content'
     elif object_property.lower() == 'username':
         object_property = 'UserName'
+    elif object_property.lower() == 'password':
+        object_property = 'Content'
+    elif object_property.lower() == 'address':
+        object_property = 'Address'
     elif object_property not in res:
-        raise KeyError('Property {} not found in object'.format(object_property))
+        raise KeyError('Property {} not found in object, available properties: Username, Password and Address'.format(object_property))
     else:
         object_property = object_property.capitalize()
 
```
@@ -3,6 +3,7 @@ from .plugin import CredentialPlugin
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from delinea.secrets.vault import PasswordGrantAuthorizer, SecretsVault
|
||||
from base64 import b64decode
|
||||
|
||||
dsv_inputs = {
|
||||
'fields': [
|
||||
@@ -44,8 +45,16 @@ dsv_inputs = {
|
||||
'help_text': _('The field to extract from the secret'),
|
||||
'type': 'string',
|
||||
},
|
||||
{
|
||||
'id': 'secret_decoding',
|
||||
'label': _('Should the secret be base64 decoded?'),
|
||||
'help_text': _('Specify whether the secret should be base64 decoded, typically used for storing files, such as SSH keys'),
|
||||
'choices': ['No Decoding', 'Decode Base64'],
|
||||
'type': 'string',
|
||||
'default': 'No Decoding',
|
||||
},
|
||||
],
|
||||
'required': ['tenant', 'client_id', 'client_secret', 'path', 'secret_field'],
|
||||
'required': ['tenant', 'client_id', 'client_secret', 'path', 'secret_field', 'secret_decoding'],
|
||||
}
|
||||
|
||||
if settings.DEBUG:
|
||||
@@ -67,12 +76,18 @@ def dsv_backend(**kwargs):
    client_secret = kwargs['client_secret']
    secret_path = kwargs['path']
    secret_field = kwargs['secret_field']
+    # provide a default value to remain backward compatible with secrets that have not specified this option
+    secret_decoding = kwargs.get('secret_decoding', 'No Decoding')

    tenant_url = tenant_url_template.format(tenant_name, tenant_tld.strip("."))

    authorizer = PasswordGrantAuthorizer(tenant_url, client_id, client_secret)
    dsv_secret = SecretsVault(tenant_url, authorizer).get_secret(secret_path)

+    # files are uploaded to DSV base64 encoded, so decode only when asked to
+    if secret_decoding == 'Decode Base64':
+        return b64decode(dsv_secret['data'][secret_field]).decode()
+
    return dsv_secret['data'][secret_field]
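A quick sketch of what the new option changes at retrieval time, assuming a secret whose `ssh_key` field was uploaded base64-encoded (names and values here are illustrative):

```python
from base64 import b64encode, b64decode

# illustrative DSV secret: a private key stored base64-encoded in the 'ssh_key' field
dsv_secret = {'data': {'ssh_key': b64encode(b'-----BEGIN OPENSSH PRIVATE KEY-----\n...').decode()}}

# with secret_decoding == 'Decode Base64' the plugin returns the decoded text;
# with the default 'No Decoding' it returns the stored string unchanged
decoded = b64decode(dsv_secret['data']['ssh_key']).decode()
assert decoded.startswith('-----BEGIN OPENSSH PRIVATE KEY-----')
```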
@@ -41,6 +41,34 @@ base_inputs = {
            'secret': True,
            'help_text': _('The Secret ID for AppRole Authentication'),
        },
+        {
+            'id': 'client_cert_public',
+            'label': _('Client Certificate'),
+            'type': 'string',
+            'multiline': True,
+            'help_text': _(
+                'The PEM-encoded client certificate used for TLS client authentication.'
+                ' This should include the certificate and any intermediate certificates.'
+            ),
+        },
+        {
+            'id': 'client_cert_private',
+            'label': _('Client Certificate Key'),
+            'type': 'string',
+            'multiline': True,
+            'secret': True,
+            'help_text': _('The certificate private key used for TLS client authentication.'),
+        },
+        {
+            'id': 'client_cert_role',
+            'label': _('TLS Authentication Role'),
+            'type': 'string',
+            'multiline': False,
+            'help_text': _(
+                'The role configured in Hashicorp Vault for TLS client authentication.'
+                ' If not provided, Hashicorp Vault may assign roles based on the certificate used.'
+            ),
+        },
        {
            'id': 'namespace',
            'label': _('Namespace name (Vault Enterprise only)'),
@@ -59,6 +87,20 @@ base_inputs = {
                ' see https://www.vaultproject.io/docs/auth/kubernetes#configuration'
            ),
        },
+        {
+            'id': 'username',
+            'label': _('Username'),
+            'type': 'string',
+            'secret': False,
+            'help_text': _('Username for user authentication.'),
+        },
+        {
+            'id': 'password',
+            'label': _('Password'),
+            'type': 'string',
+            'secret': True,
+            'help_text': _('Password for user authentication.'),
+        },
        {
            'id': 'default_auth_path',
            'label': _('Path to Auth'),
@@ -157,19 +199,25 @@ hashi_ssh_inputs['required'].extend(['public_key', 'role'])

def handle_auth(**kwargs):
    token = None

    if kwargs.get('token'):
        token = kwargs['token']
+    elif kwargs.get('username') and kwargs.get('password'):
+        token = method_auth(**kwargs, auth_param=userpass_auth(**kwargs))
    elif kwargs.get('role_id') and kwargs.get('secret_id'):
        token = method_auth(**kwargs, auth_param=approle_auth(**kwargs))
    elif kwargs.get('kubernetes_role'):
        token = method_auth(**kwargs, auth_param=kubernetes_auth(**kwargs))
+    elif kwargs.get('client_cert_public') and kwargs.get('client_cert_private'):
+        token = method_auth(**kwargs, auth_param=client_cert_auth(**kwargs))
    else:
-        raise Exception('Either token or AppRole/Kubernetes authentication parameters must be set')
+        raise Exception('Token, Username/Password, AppRole, Kubernetes, or TLS authentication parameters must be set')

    return token


+def userpass_auth(**kwargs):
+    return {'username': kwargs['username'], 'password': kwargs['password']}
+
+
def approle_auth(**kwargs):
    return {'role_id': kwargs['role_id'], 'secret_id': kwargs['secret_id']}
@@ -181,6 +229,10 @@ def kubernetes_auth(**kwargs):
    return {'role': kwargs['kubernetes_role'], 'jwt': jwt}


+def client_cert_auth(**kwargs):
+    return {'name': kwargs.get('client_cert_role')}
+
+
def method_auth(**kwargs):
    # get auth method specific params
    request_kwargs = {'json': kwargs['auth_param'], 'timeout': 30}
@@ -193,13 +245,25 @@ def method_auth(**kwargs):
    cacert = kwargs.get('cacert', None)

    sess = requests.Session()
+    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))

    # Namespace support
    if kwargs.get('namespace'):
        sess.headers['X-Vault-Namespace'] = kwargs['namespace']
    request_url = '/'.join([url, 'auth', auth_path, 'login']).rstrip('/')
+    if kwargs['auth_param'].get('username'):
+        request_url = request_url + '/' + (kwargs['username'])
    with CertFiles(cacert) as cert:
        request_kwargs['verify'] = cert
-        resp = sess.post(request_url, **request_kwargs)
+        # TLS client certificate support
+        if kwargs.get('client_cert_public') and kwargs.get('client_cert_private'):
+            # Add client cert to requests Session before making call
+            with CertFiles(kwargs['client_cert_public'], key=kwargs['client_cert_private']) as client_cert:
+                sess.cert = client_cert
+                resp = sess.post(request_url, **request_kwargs)
+        else:
+            # Make call without client certificate
+            resp = sess.post(request_url, **request_kwargs)
    resp.raise_for_status()
    token = resp.json()['auth']['client_token']
    return token
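For background, `requests` takes the client certificate on a session either as the path to a single PEM bundle or as a `(cert, key)` tuple, which is what the `CertFiles` context manager hands back here; a minimal standalone sketch (URL, paths, and role name are illustrative):

```python
import requests

sess = requests.Session()
# either a single PEM file containing both certificate and key...
sess.cert = '/tmp/client-bundle.pem'
# ...or a separate certificate/key pair
sess.cert = ('/tmp/client.crt', '/tmp/client.key')
resp = sess.post(
    'https://vault.example.com/v1/auth/cert/login',  # illustrative Vault TLS auth endpoint
    json={'name': 'my-role'},
    timeout=30,
)
```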
@@ -220,6 +284,7 @@ def kv_backend(**kwargs):
    }

    sess = requests.Session()
+    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))
    sess.headers['Authorization'] = 'Bearer {}'.format(token)
    # Compatibility header for older installs of Hashicorp Vault
    sess.headers['X-Vault-Token'] = token
@@ -290,6 +355,7 @@ def ssh_backend(**kwargs):
        request_kwargs['json']['valid_principals'] = kwargs['valid_principals']

    sess = requests.Session()
+    sess.mount(url, requests.adapters.HTTPAdapter(max_retries=5))
    sess.headers['Authorization'] = 'Bearer {}'.format(token)
    if kwargs.get('namespace'):
        sess.headers['X-Vault-Namespace'] = kwargs['namespace']
@@ -93,6 +93,22 @@ class PubSub(object):
        self.conn.close()


+def create_listener_connection():
+    conf = settings.DATABASES['default'].copy()
+    conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
+    # Modify the application name to distinguish from other connections the process might use
+    conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
+
+    # Apply overrides specifically for the listener connection
+    for k, v in settings.LISTENER_DATABASES.get('default', {}).items():
+        conf[k] = v
+    for k, v in settings.LISTENER_DATABASES.get('default', {}).get('OPTIONS', {}).items():
+        conf['OPTIONS'][k] = v
+
+    connection_data = f"dbname={conf['NAME']} host={conf['HOST']} user={conf['USER']} password={conf['PASSWORD']} port={conf['PORT']}"
+    return psycopg.connect(connection_data, autocommit=True, **conf['OPTIONS'])
+
+
@contextmanager
def pg_bus_conn(new_connection=False, select_timeout=None):
    '''
@@ -106,12 +122,7 @@ def pg_bus_conn(new_connection=False, select_timeout=None):
    '''

    if new_connection:
-        conf = settings.DATABASES['default'].copy()
-        conf['OPTIONS'] = conf.get('OPTIONS', {}).copy()
-        # Modify the application name to distinguish from other connections the process might use
-        conf['OPTIONS']['application_name'] = get_application_name(settings.CLUSTER_HOST_ID, function='listener')
-        connection_data = f"dbname={conf['NAME']} host={conf['HOST']} user={conf['USER']} password={conf['PASSWORD']} port={conf['PORT']}"
-        conn = psycopg.connect(connection_data, autocommit=True, **conf['OPTIONS'])
+        conn = create_listener_connection()
    else:
        if pg_connection.connection is None:
            pg_connection.connect()
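With the listener connection factored out, its settings can diverge from the web/task connections; presumably a `LISTENER_DATABASES` override would look something like the following (the values are illustrative libpq keepalive options):

```python
# settings snippet: tune only the listener's connection without touching DATABASES
LISTENER_DATABASES = {
    'default': {
        'OPTIONS': {
            'keepalives': 1,
            'keepalives_idle': 5,
            'keepalives_interval': 5,
            'keepalives_count': 5,
        },
    },
}
```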
@@ -214,7 +214,10 @@ class AWXConsumerPG(AWXConsumerBase):
                    # bypasses pg_notify for scheduled tasks
                    self.dispatch_task(body)

-            self.pg_is_down = False
+            if self.pg_is_down:
+                logger.info('Dispatcher listener connection established')
+                self.pg_is_down = False

            self.listen_start = time.time()

        return self.scheduler.time_until_next_run()
@@ -9,6 +9,7 @@ from django.conf import settings

# AWX
import awx.main.fields
from awx.main.models import Host
+from ._sqlite_helper import dbawaremigrations


def replaces():
@@ -131,9 +132,11 @@ class Migration(migrations.Migration):
                help_text='If enabled, Tower will act as an Ansible Fact Cache Plugin; persisting facts at the end of a playbook run to the database and caching facts for use by Ansible.',
            ),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            sql="CREATE INDEX host_ansible_facts_default_gin ON {} USING gin(ansible_facts jsonb_path_ops);".format(Host._meta.db_table),
            reverse_sql='DROP INDEX host_ansible_facts_default_gin;',
+            sqlite_sql=dbawaremigrations.RunSQL.noop,
+            sqlite_reverse_sql=dbawaremigrations.RunSQL.noop,
        ),
        # SCM file-based inventories
        migrations.AddField(
@@ -3,24 +3,27 @@ from __future__ import unicode_literals

from django.db import migrations

+from ._sqlite_helper import dbawaremigrations
+
+tables_to_drop = [
+    'celery_taskmeta',
+    'celery_tasksetmeta',
+    'djcelery_crontabschedule',
+    'djcelery_intervalschedule',
+    'djcelery_periodictask',
+    'djcelery_periodictasks',
+    'djcelery_taskstate',
+    'djcelery_workerstate',
+    'djkombu_message',
+    'djkombu_queue',
+]
+postgres_sql = ([("DROP TABLE IF EXISTS {} CASCADE;".format(table))] for table in tables_to_drop)
+sqlite_sql = ([("DROP TABLE IF EXISTS {};".format(table))] for table in tables_to_drop)
+

class Migration(migrations.Migration):
    dependencies = [
        ('main', '0049_v330_validate_instance_capacity_adjustment'),
    ]

-    operations = [
-        migrations.RunSQL([("DROP TABLE IF EXISTS {} CASCADE;".format(table))])
-        for table in (
-            'celery_taskmeta',
-            'celery_tasksetmeta',
-            'djcelery_crontabschedule',
-            'djcelery_intervalschedule',
-            'djcelery_periodictask',
-            'djcelery_periodictasks',
-            'djcelery_taskstate',
-            'djcelery_workerstate',
-            'djkombu_message',
-            'djkombu_queue',
-        )
-    ]
+    operations = [dbawaremigrations.RunSQL(p, sqlite_sql=s) for p, s in zip(postgres_sql, sqlite_sql)]
@@ -2,6 +2,8 @@

from django.db import migrations, models, connection

+from ._sqlite_helper import dbawaremigrations
+

def migrate_event_data(apps, schema_editor):
    # see: https://github.com/ansible/awx/issues/6010
@@ -24,6 +26,11 @@ def migrate_event_data(apps, schema_editor):
            cursor.execute(f'ALTER TABLE {tblname} ALTER COLUMN id TYPE bigint USING id::bigint;')


+def migrate_event_data_sqlite(apps, schema_editor):
+    # TODO: cmeyers fill this in
+    return
+
+
class FakeAlterField(migrations.AlterField):
    def database_forwards(self, *args):
        # this is intentionally left blank, because we're
@@ -37,7 +44,7 @@ class Migration(migrations.Migration):
    ]

    operations = [
-        migrations.RunPython(migrate_event_data),
+        dbawaremigrations.RunPython(migrate_event_data, sqlite_code=migrate_event_data_sqlite),
        FakeAlterField(
            model_name='adhoccommandevent',
            name='id',
@@ -1,5 +1,7 @@
from django.db import migrations, models, connection

+from ._sqlite_helper import dbawaremigrations
+

def migrate_event_data(apps, schema_editor):
    # see: https://github.com/ansible/awx/issues/9039
@@ -59,6 +61,10 @@ def migrate_event_data(apps, schema_editor):
            cursor.execute('DROP INDEX IF EXISTS main_jobevent_job_id_idx')


+def migrate_event_data_sqlite(apps, schema_editor):
+    return None
+
+
class FakeAddField(migrations.AddField):
    def database_forwards(self, *args):
        # this is intentionally left blank, because we're
@@ -72,7 +78,7 @@ class Migration(migrations.Migration):
    ]

    operations = [
-        migrations.RunPython(migrate_event_data),
+        dbawaremigrations.RunPython(migrate_event_data, sqlite_code=migrate_event_data_sqlite),
        FakeAddField(
            model_name='jobevent',
            name='job_created',
@@ -3,6 +3,8 @@

import awx.main.models.notifications
from django.db import migrations, models

+from ._sqlite_helper import dbawaremigrations
+

class Migration(migrations.Migration):
    dependencies = [
@@ -104,11 +106,12 @@ class Migration(migrations.Migration):
            name='deleted_actor',
            field=models.JSONField(null=True),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_activitystream RENAME setting TO setting_old;
            ALTER TABLE main_activitystream ALTER COLUMN setting_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_activitystream RENAME setting TO setting_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='activitystream',
@@ -121,11 +124,12 @@ class Migration(migrations.Migration):
            name='setting',
            field=models.JSONField(blank=True, default=dict),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_job RENAME survey_passwords TO survey_passwords_old;
            ALTER TABLE main_job ALTER COLUMN survey_passwords_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_job RENAME survey_passwords TO survey_passwords_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='job',
@@ -138,11 +142,12 @@ class Migration(migrations.Migration):
            name='survey_passwords',
            field=models.JSONField(blank=True, default=dict, editable=False),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_joblaunchconfig RENAME char_prompts TO char_prompts_old;
            ALTER TABLE main_joblaunchconfig ALTER COLUMN char_prompts_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_joblaunchconfig RENAME char_prompts TO char_prompts_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='joblaunchconfig',
@@ -155,11 +160,12 @@ class Migration(migrations.Migration):
            name='char_prompts',
            field=models.JSONField(blank=True, default=dict),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_joblaunchconfig RENAME survey_passwords TO survey_passwords_old;
            ALTER TABLE main_joblaunchconfig ALTER COLUMN survey_passwords_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_joblaunchconfig RENAME survey_passwords TO survey_passwords_old;",
            state_operations=[
                migrations.RemoveField(
                    model_name='joblaunchconfig',
@@ -172,11 +178,12 @@ class Migration(migrations.Migration):
            name='survey_passwords',
            field=models.JSONField(blank=True, default=dict, editable=False),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_notification RENAME body TO body_old;
            ALTER TABLE main_notification ALTER COLUMN body_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_notification RENAME body TO body_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='notification',
@@ -189,11 +196,12 @@ class Migration(migrations.Migration):
            name='body',
            field=models.JSONField(blank=True, default=dict),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_unifiedjob RENAME job_env TO job_env_old;
            ALTER TABLE main_unifiedjob ALTER COLUMN job_env_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_unifiedjob RENAME job_env TO job_env_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='unifiedjob',
@@ -206,11 +214,12 @@ class Migration(migrations.Migration):
            name='job_env',
            field=models.JSONField(blank=True, default=dict, editable=False),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_workflowjob RENAME char_prompts TO char_prompts_old;
            ALTER TABLE main_workflowjob ALTER COLUMN char_prompts_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_workflowjob RENAME char_prompts TO char_prompts_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='workflowjob',
@@ -223,11 +232,12 @@ class Migration(migrations.Migration):
            name='char_prompts',
            field=models.JSONField(blank=True, default=dict),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_workflowjob RENAME survey_passwords TO survey_passwords_old;
            ALTER TABLE main_workflowjob ALTER COLUMN survey_passwords_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_workflowjob RENAME survey_passwords TO survey_passwords_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='workflowjob',
@@ -240,11 +250,12 @@ class Migration(migrations.Migration):
            name='survey_passwords',
            field=models.JSONField(blank=True, default=dict, editable=False),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_workflowjobnode RENAME char_prompts TO char_prompts_old;
            ALTER TABLE main_workflowjobnode ALTER COLUMN char_prompts_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_workflowjobnode RENAME char_prompts TO char_prompts_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='workflowjobnode',
@@ -257,11 +268,12 @@ class Migration(migrations.Migration):
            name='char_prompts',
            field=models.JSONField(blank=True, default=dict),
        ),
-        migrations.RunSQL(
+        dbawaremigrations.RunSQL(
            """
            ALTER TABLE main_workflowjobnode RENAME survey_passwords TO survey_passwords_old;
            ALTER TABLE main_workflowjobnode ALTER COLUMN survey_passwords_old DROP NOT NULL;
            """,
+            sqlite_sql="ALTER TABLE main_workflowjobnode RENAME survey_passwords TO survey_passwords_old",
            state_operations=[
                migrations.RemoveField(
                    model_name='workflowjobnode',
@@ -3,6 +3,8 @@ from __future__ import unicode_literals

from django.db import migrations

+from ._sqlite_helper import dbawaremigrations
+

def delete_taggit_contenttypes(apps, schema_editor):
    ContentType = apps.get_model('contenttypes', 'ContentType')
@@ -20,8 +22,8 @@ class Migration(migrations.Migration):
    ]

    operations = [
-        migrations.RunSQL("DROP TABLE IF EXISTS taggit_tag CASCADE;"),
-        migrations.RunSQL("DROP TABLE IF EXISTS taggit_taggeditem CASCADE;"),
+        dbawaremigrations.RunSQL("DROP TABLE IF EXISTS taggit_tag CASCADE;", sqlite_sql="DROP TABLE IF EXISTS taggit_tag;"),
+        dbawaremigrations.RunSQL("DROP TABLE IF EXISTS taggit_taggeditem CASCADE;", sqlite_sql="DROP TABLE IF EXISTS taggit_taggeditem;"),
        migrations.RunPython(delete_taggit_contenttypes),
        migrations.RunPython(delete_taggit_migration_records),
    ]
awx/main/migrations/0188_add_bitbucket_dc_webhook.py (new file, 52 lines)
@@ -0,0 +1,52 @@
# Generated by Django 4.2.6 on 2023-11-16 21:00

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ('main', '0187_hop_nodes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='job',
            name='webhook_service',
            field=models.CharField(
                blank=True,
                choices=[('github', 'GitHub'), ('gitlab', 'GitLab'), ('bitbucket_dc', 'BitBucket DataCenter')],
                help_text='Service that webhook requests will be accepted from',
                max_length=16,
            ),
        ),
        migrations.AlterField(
            model_name='jobtemplate',
            name='webhook_service',
            field=models.CharField(
                blank=True,
                choices=[('github', 'GitHub'), ('gitlab', 'GitLab'), ('bitbucket_dc', 'BitBucket DataCenter')],
                help_text='Service that webhook requests will be accepted from',
                max_length=16,
            ),
        ),
        migrations.AlterField(
            model_name='workflowjob',
            name='webhook_service',
            field=models.CharField(
                blank=True,
                choices=[('github', 'GitHub'), ('gitlab', 'GitLab'), ('bitbucket_dc', 'BitBucket DataCenter')],
                help_text='Service that webhook requests will be accepted from',
                max_length=16,
            ),
        ),
        migrations.AlterField(
            model_name='workflowjobtemplate',
            name='webhook_service',
            field=models.CharField(
                blank=True,
                choices=[('github', 'GitHub'), ('gitlab', 'GitLab'), ('bitbucket_dc', 'BitBucket DataCenter')],
                help_text='Service that webhook requests will be accepted from',
                max_length=16,
            ),
        ),
    ]
@@ -76,7 +76,7 @@ class azure_rm(PluginFileInjector):
        user_filters = []
        old_filterables = [
            ('resource_groups', 'resource_group'),
-            ('tags', 'tags')
+            ('tags', 'tags'),
+            # locations / location would be an entry
+            # but this would conflict with source_regions
        ]
awx/main/migrations/_sqlite_helper.py (new file, 61 lines)
@@ -0,0 +1,61 @@
from django.db import migrations


class RunSQL(migrations.operations.special.RunSQL):
    """
    Bit of a hack here. Django actually wants this decision made in the router
    and we can pass **hints.
    """

    def __init__(self, *args, **kwargs):
        if 'sqlite_sql' not in kwargs:
            raise ValueError("sqlite_sql parameter required")
        sqlite_sql = kwargs.pop('sqlite_sql')

        self.sqlite_sql = sqlite_sql
        self.sqlite_reverse_sql = kwargs.pop('sqlite_reverse_sql', None)
        super().__init__(*args, **kwargs)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        if not schema_editor.connection.vendor.startswith('postgres'):
            self.sql = self.sqlite_sql or migrations.RunSQL.noop
        super().database_forwards(app_label, schema_editor, from_state, to_state)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        if not schema_editor.connection.vendor.startswith('postgres'):
            self.reverse_sql = self.sqlite_reverse_sql or migrations.RunSQL.noop
        super().database_backwards(app_label, schema_editor, from_state, to_state)


class RunPython(migrations.operations.special.RunPython):
    """
    Bit of a hack here. Django actually wants this decision made in the router
    and we can pass **hints.
    """

    def __init__(self, *args, **kwargs):
        if 'sqlite_code' not in kwargs:
            raise ValueError("sqlite_code parameter required")
        sqlite_code = kwargs.pop('sqlite_code')

        self.sqlite_code = sqlite_code
        self.sqlite_reverse_code = kwargs.pop('sqlite_reverse_code', None)
        super().__init__(*args, **kwargs)

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        if not schema_editor.connection.vendor.startswith('postgres'):
            self.code = self.sqlite_code or migrations.RunPython.noop
        super().database_forwards(app_label, schema_editor, from_state, to_state)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        if not schema_editor.connection.vendor.startswith('postgres'):
            self.reverse_code = self.sqlite_reverse_code or migrations.RunPython.noop
        super().database_backwards(app_label, schema_editor, from_state, to_state)


class _sqlitemigrations:
    RunPython = RunPython
    RunSQL = RunSQL


dbawaremigrations = _sqlitemigrations()
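To make the pattern concrete, a migration written against this helper might look like the sketch below (the table name and dependency are illustrative; the structure mirrors the migrations changed elsewhere in this diff):

```python
from django.db import migrations

from ._sqlite_helper import dbawaremigrations


class Migration(migrations.Migration):
    dependencies = [('main', '0187_hop_nodes')]  # illustrative dependency

    operations = [
        # postgres gets the CASCADE form; sqlite runs the plain DROP
        dbawaremigrations.RunSQL(
            "DROP TABLE IF EXISTS example_table CASCADE;",
            sqlite_sql="DROP TABLE IF EXISTS example_table;",
        ),
    ]
```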
@@ -6,8 +6,10 @@ from django.conf import settings  # noqa
from django.db import connection
from django.db.models.signals import pre_delete  # noqa

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
-from awx.main.models.base import BaseModel, PrimordialModel, prevent_search, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES  # noqa
+from awx.main.models.base import BaseModel, PrimordialModel, accepts_json, CLOUD_INVENTORY_SOURCES, VERBOSITY_CHOICES  # noqa
from awx.main.models.unified_jobs import UnifiedJob, UnifiedJobTemplate, StdoutMaxBytesExceeded  # noqa
from awx.main.models.organization import Organization, Profile, Team, UserSessionMembership  # noqa
from awx.main.models.credential import Credential, CredentialType, CredentialInputSource, ManagedCredentialType, build_safe_env  # noqa
@@ -57,7 +59,6 @@ from awx.main.models.ha import (  # noqa
from awx.main.models.rbac import (  # noqa
    Role,
    batch_role_ancestor_rebuilding,
-    get_roles_on_resource,
    role_summary_fields_generator,
    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    ROLE_SINGLETON_SYSTEM_AUDITOR,
@@ -91,13 +92,12 @@ from oauth2_provider.models import Grant, RefreshToken  # noqa -- needed django-

# Add custom methods to User model for permissions checks.
from django.contrib.auth.models import User  # noqa
-from awx.main.access import get_user_queryset, check_user_access, check_user_access_with_errors, user_accessible_objects  # noqa
+from awx.main.access import get_user_queryset, check_user_access, check_user_access_with_errors  # noqa


User.add_to_class('get_queryset', get_user_queryset)
User.add_to_class('can_access', check_user_access)
User.add_to_class('can_access_with_errors', check_user_access_with_errors)
-User.add_to_class('accessible_objects', user_accessible_objects)


def convert_jsonfields():
@@ -12,9 +12,11 @@ from django.utils.text import Truncator
from django.utils.translation import gettext_lazy as _
from django.core.exceptions import ValidationError

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
from awx.api.versioning import reverse
-from awx.main.models.base import prevent_search, AD_HOC_JOB_TYPE_CHOICES, VERBOSITY_CHOICES, VarsDictProperty
+from awx.main.models.base import AD_HOC_JOB_TYPE_CHOICES, VERBOSITY_CHOICES, VarsDictProperty
from awx.main.models.events import AdHocCommandEvent, UnpartitionedAdHocCommandEvent
from awx.main.models.unified_jobs import UnifiedJob
from awx.main.models.notifications import JobNotificationMixin, NotificationTemplate
@@ -15,7 +15,6 @@ from awx.main.utils import encrypt_field, parse_yaml_or_json
from awx.main.constants import CLOUD_PROVIDERS

__all__ = [
-    'prevent_search',
    'VarsDictProperty',
    'BaseModel',
    'CreatedModifiedModel',
@@ -384,23 +383,6 @@ class NotificationFieldsModel(BaseModel):
    notification_templates_started = models.ManyToManyField("NotificationTemplate", blank=True, related_name='%(class)s_notification_templates_for_started')


-def prevent_search(relation):
-    """
-    Used to mark a model field or relation as "restricted from filtering"
-    e.g.,
-
-        class AuthToken(BaseModel):
-            user = prevent_search(models.ForeignKey(...))
-            sensitive_data = prevent_search(models.CharField(...))
-
-    The flag set by this function is used by
-    `awx.api.filters.FieldLookupBackend` to block fields and relations that
-    should not be searchable/filterable via search query params
-    """
-    setattr(relation, '__prevent_search__', True)
-    return relation
-
-
def accepts_json(relation):
    """
    Used to mark a model field as allowing JSON, e.g., JobTemplate.extra_vars
@@ -953,6 +953,25 @@ ManagedCredentialType(
    },
)

+ManagedCredentialType(
+    namespace='bitbucket_dc_token',
+    kind='token',
+    name=gettext_noop('Bitbucket Data Center HTTP Access Token'),
+    managed=True,
+    inputs={
+        'fields': [
+            {
+                'id': 'token',
+                'label': gettext_noop('Token'),
+                'type': 'string',
+                'secret': True,
+                'help_text': gettext_noop('This token needs to come from your user settings in Bitbucket'),
+            }
+        ],
+        'required': ['token'],
+    },
+)
+
ManagedCredentialType(
    namespace='insights',
    kind='insights',
@@ -17,6 +17,8 @@ from django.db.models import Sum, Q
import redis
from solo.models import SingletonModel

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
from awx import __version__ as awx_application_version
from awx.main.utils import is_testing
@@ -24,7 +26,7 @@ from awx.api.versioning import reverse
from awx.main.fields import ImplicitRoleField
from awx.main.managers import InstanceManager, UUID_DEFAULT
from awx.main.constants import JOB_FOLDER_PREFIX
-from awx.main.models.base import BaseModel, HasEditsMixin, prevent_search
+from awx.main.models.base import BaseModel, HasEditsMixin
from awx.main.models.rbac import (
    ROLE_SINGLETON_SYSTEM_ADMINISTRATOR,
    ROLE_SINGLETON_SYSTEM_AUDITOR,
@@ -25,6 +25,8 @@ from django.db.models import Q
# REST Framework
from rest_framework.exceptions import ParseError

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
from awx.api.versioning import reverse
from awx.main.constants import CLOUD_PROVIDERS
@@ -35,7 +37,7 @@ from awx.main.fields import (
    OrderedManyToManyField,
)
from awx.main.managers import HostManager, HostMetricActiveManager
-from awx.main.models.base import BaseModel, CommonModelNameNotUnique, VarsDictProperty, CLOUD_INVENTORY_SOURCES, prevent_search, accepts_json
+from awx.main.models.base import BaseModel, CommonModelNameNotUnique, VarsDictProperty, CLOUD_INVENTORY_SOURCES, accepts_json
from awx.main.models.events import InventoryUpdateEvent, UnpartitionedInventoryUpdateEvent
from awx.main.models.unified_jobs import UnifiedJob, UnifiedJobTemplate
from awx.main.models.mixins import (
@@ -20,13 +20,14 @@ from django.core.exceptions import FieldDoesNotExist
# REST Framework
from rest_framework.exceptions import ParseError

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
from awx.api.versioning import reverse
from awx.main.constants import HOST_FACTS_FIELDS
from awx.main.models.base import (
    BaseModel,
    CreatedModifiedModel,
-    prevent_search,
    accepts_json,
    JOB_TYPE_CHOICES,
    NEW_JOB_TYPE_CHOICES,
@@ -9,7 +9,6 @@ import requests
# Django
from django.apps import apps
from django.conf import settings
-from django.contrib.auth.models import User  # noqa
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
@@ -17,9 +16,10 @@ from django.db.models.query import QuerySet
from django.utils.crypto import get_random_string
from django.utils.translation import gettext_lazy as _

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
-from awx.main.models.base import prevent_search
-from awx.main.models.rbac import Role, RoleAncestorEntry, get_roles_on_resource
+from awx.main.models.rbac import Role, RoleAncestorEntry
from awx.main.utils import parse_yaml_or_json, get_custom_venv_choices, get_licenser, polymorphic
from awx.main.utils.execution_environments import get_default_execution_environment
from awx.main.utils.encryption import decrypt_value, get_encryption_key, is_encrypted
@@ -54,10 +54,7 @@ class ResourceMixin(models.Model):
        Use instead of `MyModel.objects` when you want to only consider
        resources that a user has specific permissions for. For example:
            MyModel.accessible_objects(user, 'read_role').filter(name__istartswith='bar');
-        NOTE: This should only be used for list type things. If you have a
-        specific resource you want to check permissions on, it is more
-        performant to resolve the resource in question then call
-        `myresource.get_permissions(user)`.
+        NOTE: This should only be used for list type things.
        """
        return ResourceMixin._accessible_objects(cls, accessor, role_field)
@@ -67,13 +64,12 @@ class ResourceMixin(models.Model):

    @staticmethod
    def _accessible_pk_qs(cls, accessor, role_field, content_types=None):
-        if type(accessor) == User:
+        if accessor._meta.model_name == 'user':
            ancestor_roles = accessor.roles.all()
        elif type(accessor) == Role:
            ancestor_roles = [accessor]
        else:
-            accessor_type = ContentType.objects.get_for_model(accessor)
-            ancestor_roles = Role.objects.filter(content_type__pk=accessor_type.id, object_id=accessor.id)
+            raise RuntimeError(f'Role filters only valid for users and ancestor role, received {accessor}')

        if content_types is None:
            ct_kwarg = dict(content_type_id=ContentType.objects.get_for_model(cls).id)
@@ -86,15 +82,6 @@ class ResourceMixin(models.Model):
    def _accessible_objects(cls, accessor, role_field):
        return cls.objects.filter(pk__in=ResourceMixin._accessible_pk_qs(cls, accessor, role_field))

-    def get_permissions(self, accessor):
-        """
-        Returns a string list of the roles an accessor has for a given resource.
-        An accessor can be either a User, Role, or an arbitrary resource that
-        contains one or more Roles associated with it.
-        """
-
-        return get_roles_on_resource(self, accessor)
-

class SurveyJobTemplateMixin(models.Model):
    class Meta:
@@ -540,7 +527,6 @@ class CustomVirtualEnvMixin(models.Model):


class RelatedJobsMixin(object):
-
    """
    This method is intended to be overwritten.
    Called by get_active_jobs()
@@ -575,6 +561,7 @@ class WebhookTemplateMixin(models.Model):
    SERVICES = [
        ('github', "GitHub"),
        ('gitlab', "GitLab"),
+        ('bitbucket_dc', "BitBucket DataCenter"),
    ]

    webhook_service = models.CharField(max_length=16, choices=SERVICES, blank=True, help_text=_('Service that webhook requests will be accepted from'))
@@ -635,6 +622,7 @@ class WebhookMixin(models.Model):
        service_header = {
            'github': ('Authorization', 'token {}'),
            'gitlab': ('PRIVATE-TOKEN', '{}'),
+            'bitbucket_dc': ('Authorization', 'Bearer {}'),
        }
        service_statuses = {
            'github': {
@@ -652,6 +640,14 @@ class WebhookMixin(models.Model):
                'error': 'failed',  # GitLab doesn't have an 'error' status distinct from 'failed' :(
                'canceled': 'canceled',
            },
+            'bitbucket_dc': {
+                'pending': 'INPROGRESS',  # Bitbucket DC doesn't have any other statuses distinct from INPROGRESS, SUCCESSFUL, FAILED :(
+                'running': 'INPROGRESS',
+                'successful': 'SUCCESSFUL',
+                'failed': 'FAILED',
+                'error': 'FAILED',
+                'canceled': 'FAILED',
+            },
        }

        statuses = service_statuses[self.webhook_service]
@@ -660,11 +656,18 @@
            return
        try:
            license_type = get_licenser().validate().get('license_type')
-            data = {
-                'state': statuses[status],
-                'context': 'ansible/awx' if license_type == 'open' else 'ansible/tower',
-                'target_url': self.get_ui_url(),
-            }
+            if self.webhook_service == 'bitbucket_dc':
+                data = {
+                    'state': statuses[status],
+                    'key': 'ansible/awx' if license_type == 'open' else 'ansible/tower',
+                    'url': self.get_ui_url(),
+                }
+            else:
+                data = {
+                    'state': statuses[status],
+                    'context': 'ansible/awx' if license_type == 'open' else 'ansible/tower',
+                    'target_url': self.get_ui_url(),
+                }
            k, v = service_header[self.webhook_service]
            headers = {k: v.format(self.webhook_credential.get_input('token')), 'Content-Type': 'application/json'}
            response = requests.post(status_api, data=json.dumps(data), headers=headers, timeout=30)
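For reference, Bitbucket Data Center's build-status API takes `key`/`url` where GitHub's commit-status API takes `context`/`target_url`, so a successful job would produce roughly this request (the URL and token are illustrative):

```python
import json

import requests

# illustrative build-status endpoint; the real one is derived from the webhook payload
status_api = 'https://bitbucket.example.com/rest/api/latest/projects/PRJ/repos/demo/commits/abc123/builds'
data = {'state': 'SUCCESSFUL', 'key': 'ansible/awx', 'url': 'https://awx.example.com/#/jobs/42'}
headers = {'Authorization': 'Bearer <token>', 'Content-Type': 'application/json'}
requests.post(status_api, data=json.dumps(data), headers=headers, timeout=30)
```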
@@ -15,9 +15,11 @@ from django.utils.encoding import smart_str, force_str
from jinja2 import sandbox, ChainableUndefined
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
from awx.api.versioning import reverse
-from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel, prevent_search
+from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel
from awx.main.utils import encrypt_field, decrypt_field, set_environ
from awx.main.notifications.email_backend import CustomEmailBackend
from awx.main.notifications.slack_backend import SlackBackend
@@ -15,12 +15,10 @@ from django.utils.translation import gettext_lazy as _

# AWX
from awx.api.versioning import reverse
-from django.contrib.auth.models import User  # noqa

__all__ = [
    'Role',
    'batch_role_ancestor_rebuilding',
-    'get_roles_on_resource',
    'ROLE_SINGLETON_SYSTEM_ADMINISTRATOR',
    'ROLE_SINGLETON_SYSTEM_AUDITOR',
    'role_summary_fields_generator',
@@ -170,16 +168,10 @@ class Role(models.Model):
        return reverse('api:role_detail', kwargs={'pk': self.pk}, request=request)

    def __contains__(self, accessor):
-        if type(accessor) == User:
+        if accessor._meta.model_name == 'user':
            return self.ancestors.filter(members=accessor).exists()
-        elif accessor.__class__.__name__ == 'Team':
-            return self.ancestors.filter(pk=accessor.member_role.id).exists()
        elif type(accessor) == Role:
            return self.ancestors.filter(pk=accessor.pk).exists()
        else:
-            accessor_type = ContentType.objects.get_for_model(accessor)
-            roles = Role.objects.filter(content_type__pk=accessor_type.id, object_id=accessor.id)
-            return self.ancestors.filter(pk__in=roles).exists()
+            raise RuntimeError(f'Role evaluations only valid for users, received {accessor}')

    @property
    def name(self):
@@ -460,31 +452,6 @@ class RoleAncestorEntry(models.Model):
    object_id = models.PositiveIntegerField(null=False)


-def get_roles_on_resource(resource, accessor):
-    """
-    Returns a string list of the roles an accessor has for a given resource.
-    An accessor can be either a User, Role, or an arbitrary resource that
-    contains one or more Roles associated with it.
-    """
-
-    if type(accessor) == User:
-        roles = accessor.roles.all()
-    elif type(accessor) == Role:
-        roles = [accessor]
-    else:
-        accessor_type = ContentType.objects.get_for_model(accessor)
-        roles = Role.objects.filter(content_type__pk=accessor_type.id, object_id=accessor.id)
-
-    return [
-        role_field
-        for role_field in RoleAncestorEntry.objects.filter(
-            ancestor__in=roles, content_type_id=ContentType.objects.get_for_model(resource).id, object_id=resource.id
-        )
-        .values_list('role_field', flat=True)
-        .distinct()
-    ]
-
-
def role_summary_fields_generator(content_object, role_field):
    global role_descriptions
    global role_names
@@ -30,8 +30,10 @@ from rest_framework.exceptions import ParseError
# Django-Polymorphic
from polymorphic.models import PolymorphicModel

+from ansible_base.lib.utils.models import prevent_search, get_type_for_model
+
# AWX
-from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel, prevent_search
+from awx.main.models.base import CommonModelNameNotUnique, PasswordFieldsModel, NotificationFieldsModel
from awx.main.dispatch import get_task_queuename
from awx.main.dispatch.control import Control as ControlDispatcher
from awx.main.registrar import activity_stream_registrar
@@ -42,7 +44,6 @@ from awx.main.utils.common import (
    _inventory_updates,
    copy_model_by_class,
    copy_m2m_relationships,
-    get_type_for_model,
    parse_yaml_or_json,
    getattr_dne,
    ScheduleDependencyManager,
@@ -23,9 +23,11 @@ from crum import get_current_user
from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError

+from ansible_base.lib.utils.models import prevent_search
+
# AWX
from awx.api.versioning import reverse
-from awx.main.models import prevent_search, accepts_json, UnifiedJobTemplate, UnifiedJob
+from awx.main.models import accepts_json, UnifiedJobTemplate, UnifiedJob
from awx.main.models.notifications import NotificationTemplate, JobNotificationMixin
from awx.main.models.base import CreatedModifiedModel, VarsDictProperty
from awx.main.models.rbac import ROLE_SINGLETON_SYSTEM_ADMINISTRATOR, ROLE_SINGLETON_SYSTEM_AUDITOR
@@ -39,11 +39,15 @@ class TwilioBackend(AWXBaseEmailBackend, CustomNotificationBase):
            logger.error(smart_str(_("Exception connecting to Twilio: {}").format(e)))

        for m in messages:
-            try:
-                connection.messages.create(to=m.to, from_=m.from_email, body=m.subject)
-                sent_messages += 1
-            except Exception as e:
-                logger.error(smart_str(_("Exception sending messages: {}").format(e)))
-                if not self.fail_silently:
-                    raise
+            failed = False
+            for dest in m.to:
+                try:
+                    logger.debug(smart_str(_("FROM: {} / TO: {}").format(m.from_email, dest)))
+                    connection.messages.create(to=dest, from_=m.from_email, body=m.subject)
+                    sent_messages += 1
+                except Exception as e:
+                    logger.error(smart_str(_("Exception sending messages: {}").format(e)))
+                    failed = True
+            if not self.fail_silently and failed:
+                raise
        return sent_messages
@@ -17,6 +17,8 @@ from django.utils.timezone import now as tz_now
from django.conf import settings
from django.contrib.contenttypes.models import ContentType

+from ansible_base.lib.utils.models import get_type_for_model
+
# AWX
from awx.main.dispatch.reaper import reap_job
from awx.main.models import (
@@ -34,7 +36,6 @@ from awx.main.models import (
from awx.main.scheduler.dag_workflow import WorkflowDAG
from awx.main.utils.pglock import advisory_lock
from awx.main.utils import (
-    get_type_for_model,
    ScheduleTaskManager,
    ScheduleWorkflowManager,
)
@@ -2,12 +2,12 @@ from unittest import mock
import pytest
import json

+from ansible_base.lib.utils.models import get_type_for_model
+
from awx.api.versioning import reverse
from awx.main.models.jobs import JobTemplate, Job
from awx.main.models.activity_stream import ActivityStream
from awx.main.access import JobTemplateAccess
-from awx.main.utils.common import get_type_for_model


@pytest.fixture
@@ -309,3 +309,139 @@ def test_bulk_job_set_all_prompt(job_template, organization, inventory, project,
    assert node[0].limit == 'kansas'
    assert node[0].skip_tags == 'foobar'
    assert node[0].job_tags == 'untagged'
+
+
+@pytest.mark.django_db
+@pytest.mark.parametrize('num_hosts, num_queries', [(1, 70), (10, 150), (25, 250)])
+def test_bulk_host_delete_num_queries(organization, inventory, post, get, user, num_hosts, num_queries, django_assert_max_num_queries):
+    '''
+    If I am a...
+        org admin
+        inventory admin at org level
+        admin of a particular inventory
+        superuser
+
+    Bulk Host delete should take under a certain number of queries
+    '''
+    users_list = setup_admin_users_list(organization, inventory, user)
+    for u in users_list:
+        hosts = [{'name': str(uuid4())} for i in range(num_hosts)]
+        with django_assert_max_num_queries(num_queries):
+            bulk_host_create_response = post(reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': hosts}, u, expect=201).data
+            assert len(bulk_host_create_response['hosts']) == len(hosts), f"unexpected number of hosts created for user {u}"
+            hosts_ids_created = get_inventory_hosts(get, inventory.id, u)
+            bulk_host_delete_response = post(reverse('api:bulk_host_delete'), {'hosts': hosts_ids_created}, u, expect=201).data
+            assert len(bulk_host_delete_response['hosts'].keys()) == len(hosts), f"unexpected number of hosts deleted for user {u}"
+
+
+@pytest.mark.django_db
+def test_bulk_host_delete_rbac(organization, inventory, post, get, user):
+    '''
+    If I am a...
+        org admin
+        inventory admin at org level
+        admin of a particular inventory
+    ... I can bulk delete hosts
+
+    Everyone else cannot
+    '''
+    admin_users_list = setup_admin_users_list(organization, inventory, user)
+    users_list = setup_none_admin_uses_list(organization, inventory, user)
+
+    for indx, u in enumerate(admin_users_list):
+        bulk_host_create_response = post(
+            reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': [{'name': f'foobar-{indx}'}]}, u, expect=201
+        ).data
+        assert len(bulk_host_create_response['hosts']) == 1, f"unexpected number of hosts created for user {u}"
+        assert Host.objects.filter(inventory__id=inventory.id)[0].name == f'foobar-{indx}'
+        hosts_ids_created = get_inventory_hosts(get, inventory.id, u)
+        bulk_host_delete_response = post(reverse('api:bulk_host_delete'), {'hosts': hosts_ids_created}, u, expect=201).data
+        assert len(bulk_host_delete_response['hosts'].keys()) == 1, f"unexpected number of hosts deleted by user {u}"
+
+    for indx, create_u in enumerate(admin_users_list):
+        bulk_host_create_response = post(
+            reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': [{'name': f'foobar2-{indx}'}]}, create_u, expect=201
+        ).data
+        print(bulk_host_create_response)
+        assert bulk_host_create_response['hosts'][0]['name'] == f'foobar2-{indx}'
+        hosts_ids_created = get_inventory_hosts(get, inventory.id, create_u)
+        print(f"Try to delete {hosts_ids_created}")
+        for delete_u in users_list:
+            bulk_host_delete_response = post(reverse('api:bulk_host_delete'), {'hosts': hosts_ids_created}, delete_u, expect=403).data
+            assert "Lack permissions to delete hosts from this inventory." in bulk_host_delete_response['inventories'].values()
+
+
+@pytest.mark.django_db
+def test_bulk_host_delete_from_multiple_inv(organization, inventory, post, get, user):
+    '''
+    If I am inventory admin at org level
+
+    Bulk Host delete should be enabled only on my inventory
+    '''
+    num_hosts = 10
+    inventory.organization = organization
+
+    # Create second inventory
+    inv2 = organization.inventories.create(name="second-test-inv")
+    inv2.organization = organization
+    admin2_user = user('inventory2_admin', False)
+    inv2.admin_role.members.add(admin2_user)
+
+    admin_user = user('inventory_admin', False)
+    inventory.admin_role.members.add(admin_user)
+
+    organization.member_role.members.add(admin_user)
+    organization.member_role.members.add(admin2_user)
+
+    hosts = [{'name': str(uuid4())} for i in range(num_hosts)]
+    hosts2 = [{'name': str(uuid4())} for i in range(num_hosts)]
+
+    # create hosts in each of the inventories
+    bulk_host_create_response = post(reverse('api:bulk_host_create'), {'inventory': inventory.id, 'hosts': hosts}, admin_user, expect=201).data
+    assert len(bulk_host_create_response['hosts']) == len(hosts), f"unexpected number of hosts created for user {admin_user}"
+
+    bulk_host_create_response2 = post(reverse('api:bulk_host_create'), {'inventory': inv2.id, 'hosts': hosts2}, admin2_user, expect=201).data
+    assert len(bulk_host_create_response2['hosts']) == len(hosts), f"unexpected number of hosts created for user {admin2_user}"
+
+    # get all hosts ids - from both inventories
+    hosts_ids_created = get_inventory_hosts(get, inventory.id, admin_user)
+    hosts_ids_created += get_inventory_hosts(get, inv2.id, admin2_user)
+
+    expected_error = "Lack permissions to delete hosts from this inventory."
+    # try to delete ALL hosts with the admin user of each inventory in turn
+    for inv_name, invadmin in zip([inv2.name, inventory.name], [admin_user, admin2_user]):
+        bulk_host_delete_response = post(reverse('api:bulk_host_delete'), {'hosts': hosts_ids_created}, invadmin, expect=403).data
+        result_message = bulk_host_delete_response['inventories'][inv_name]
+        assert result_message == expected_error, f"deleted hosts without permission by user {invadmin}"
+
+
+def setup_admin_users_list(organization, inventory, user):
+    inventory.organization = organization
+    inventory_admin = user('inventory_admin', False)
+    org_admin = user('org_admin', False)
+    org_inv_admin = user('org_admin', False)
+    superuser = user('admin', True)
+    for u in [org_admin, org_inv_admin, inventory_admin]:
+        organization.member_role.members.add(u)
+    organization.admin_role.members.add(org_admin)
+    organization.inventory_admin_role.members.add(org_inv_admin)
+    inventory.admin_role.members.add(inventory_admin)
+    return [inventory_admin, org_inv_admin, superuser, org_admin]
+
+
+def setup_none_admin_uses_list(organization, inventory, user):
+    inventory.organization = organization
+    auditor = user('auditor', False)
+    member = user('member', False)
+    use_inv_member = user('member', False)
+    for u in [auditor, member, use_inv_member]:
+        organization.member_role.members.add(u)
+    inventory.use_role.members.add(use_inv_member)
+    organization.auditor_role.members.add(auditor)
+    return [auditor, member, use_inv_member]
+
+
+def get_inventory_hosts(get, inv_id, use_user):
+    data = get(reverse('api:inventory_hosts_list', kwargs={'pk': inv_id}), use_user, expect=200).data
+    results = [host['id'] for host in data['results']]
+    return results
@@ -81,6 +81,7 @@ def test_default_cred_types():
    'aws_secretsmanager_credential',
    'azure_kv',
    'azure_rm',
+    'bitbucket_dc_token',
    'centrify_vault_kv',
    'conjur',
    'controller',
@@ -40,6 +40,33 @@ def test_hashivault_kubernetes_auth():
    assert res == expected_res


+def test_hashivault_client_cert_auth_explicit_role():
+    kwargs = {
+        'client_cert_role': 'test-cert-1',
+    }
+    expected_res = {
+        'name': 'test-cert-1',
+    }
+    res = hashivault.client_cert_auth(**kwargs)
+    assert res == expected_res
+
+
+def test_hashivault_client_cert_auth_no_role():
+    kwargs = {}
+    expected_res = {
+        'name': None,
+    }
+    res = hashivault.client_cert_auth(**kwargs)
+    assert res == expected_res
+
+
+def test_hashivault_userpass_auth():
+    kwargs = {'username': 'the_username', 'password': 'the_password'}
+    expected_res = {'username': 'the_username', 'password': 'the_password'}
+    res = hashivault.userpass_auth(**kwargs)
+    assert res == expected_res
+
+
def test_hashivault_handle_auth_token():
    kwargs = {
        'token': 'the_token',
@@ -73,6 +100,22 @@ def test_hashivault_handle_auth_kubernetes():
    assert token == 'the_token'


+def test_hashivault_handle_auth_client_cert():
+    kwargs = {
+        'client_cert_public': "foo",
+        'client_cert_private': "bar",
+        'client_cert_role': 'test-cert-1',
+    }
+    auth_params = {
+        'name': 'test-cert-1',
+    }
+    with mock.patch.object(hashivault, 'method_auth') as method_mock:
+        method_mock.return_value = 'the_token'
+        token = hashivault.handle_auth(**kwargs)
+        method_mock.assert_called_with(**kwargs, auth_param=auth_params)
+        assert token == 'the_token'
+
+
def test_hashivault_handle_auth_not_enough_args():
    with pytest.raises(Exception):
        hashivault.handle_auth()
awx/main/tests/functional/test_migrations.py (new file, 44 lines)
@@ -0,0 +1,44 @@
import pytest

from django_test_migrations.plan import all_migrations, nodes_to_tuples

"""
Most tests that live in here can probably be deleted at some point. They are mainly
for a developer. When the AWX versions that users upgrade from fall out of support,
the migration tests can be deleted. This is also a good time to squash. Squashing
will likely mess with the tests that live here.

The smoke test should be kept in here. The smoke test ensures that our migrations
continue to work when sqlite is the backing database (vs. the default DB of postgres).
"""


@pytest.mark.django_db
class TestMigrationSmoke:
    def test_happy_path(self, migrator):
        """
        This smoke test runs all the migrations.

        Example of how to use django-test-migrations to invoke particular migration(s)
        while weaving in object creation and assertions.

        Note that this is more than just an example. It is a smoke test because it runs ALL
        the migrations. Our "normal" unit tests subvert the migrations running because it is slow.
        """
        migration_nodes = all_migrations('default')
        migration_tuples = nodes_to_tuples(migration_nodes)
        final_migration = migration_tuples[-1]

        migrator.apply_initial_migration(('main', None))
        # I just picked a newish migration at the time of writing this.
        # If someone from the future finds themselves here because they are squashing migrations,
        # it is fine to change the 0180_... below to some other newish migration.
        intermediate_state = migrator.apply_tested_migration(('main', '0180_add_hostmetric_fields'))

        Instance = intermediate_state.apps.get_model('main', 'Instance')
        # Create any old object in the database
        Instance.objects.create(hostname='foobar', node_type='control')

        final_state = migrator.apply_tested_migration(final_migration)
        Instance = final_state.apps.get_model('main', 'Instance')
        assert Instance.objects.filter(hostname='foobar').count() == 1
@@ -208,6 +208,6 @@ def test_auto_parenting():
@pytest.mark.django_db
def test_update_parents_keeps_teams(team, project):
    project.update_role.parents.add(team.member_role)
-    assert team.member_role in project.update_role  # test prep sanity check
+    assert list(Project.accessible_objects(team.member_role, 'update_role')) == [project]  # test prep sanity check
    update_role_parentage_for_instance(project)
-    assert team.member_role in project.update_role  # actual assertion
+    assert list(Project.accessible_objects(team.member_role, 'update_role')) == [project]  # actual assertion
@@ -92,7 +92,7 @@ def test_team_accessible_by(team, user, project):
    u = user('team_member', False)

    team.member_role.children.add(project.use_role)
    assert team in project.read_role
    assert list(Project.accessible_objects(team.member_role, 'read_role')) == [project]
    assert u not in project.read_role

    team.member_role.members.add(u)
@@ -104,7 +104,7 @@ def test_team_accessible_objects(team, user, project):
    u = user('team_member', False)

    team.member_role.children.add(project.use_role)
    assert len(Project.accessible_objects(team, 'read_role')) == 1
    assert len(Project.accessible_objects(team.member_role, 'read_role')) == 1
    assert not Project.accessible_objects(u, 'read_role')

    team.member_role.members.add(u)

@@ -122,25 +122,6 @@ def test_team_org_resource_role(ext_auth, organization, rando, org_admin, team):
    ] == [True for i in range(2)]


@pytest.mark.django_db
def test_user_accessible_objects(user, organization):
    """
    We cannot directly use accessible_objects for the User model because
    both editing and read permissions are subject to complex business logic
    """
    admin = user('admin', False)
    u = user('john', False)
    access = UserAccess(admin)
    assert access.get_queryset().count() == 1  # can only see himself

    organization.member_role.members.add(u)
    organization.member_role.members.add(admin)
    assert access.get_queryset().count() == 2

    organization.member_role.members.remove(u)
    assert access.get_queryset().count() == 1


@pytest.mark.django_db
def test_org_admin_create_sys_auditor(org_admin):
    access = UserAccess(org_admin)

@@ -3,15 +3,13 @@
import pytest

# Django
from django.core.exceptions import FieldDoesNotExist
from rest_framework.exceptions import PermissionDenied

from rest_framework.exceptions import PermissionDenied, ParseError
from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend

from awx.api.filters import FieldLookupBackend, OrderByBackend, get_field_from_path
from awx.main.models import (
    AdHocCommand,
    ActivityStream,
    Credential,
    Job,
    JobTemplate,
    SystemJob,
@@ -20,88 +18,11 @@ from awx.main.models import (
    WorkflowJob,
    WorkflowJobTemplate,
    WorkflowJobOptions,
    InventorySource,
    JobEvent,
)
from awx.main.models.oauth import OAuth2Application
from awx.main.models.jobs import JobOptions


def test_related():
    field_lookup = FieldLookupBackend()
    lookup = '__'.join(['inventory', 'organization', 'pk'])
    field, new_lookup = field_lookup.get_field_from_lookup(InventorySource, lookup)
    print(field)
    print(new_lookup)


def test_invalid_filter_key():
    field_lookup = FieldLookupBackend()
    # FieldDoesNotExist is caught and converted to ParseError by filter_queryset
    with pytest.raises(FieldDoesNotExist) as excinfo:
        field_lookup.value_to_python(JobEvent, 'event_data.task_action', 'foo')
    assert 'has no field named' in str(excinfo)


def test_invalid_field_hop():
    with pytest.raises(ParseError) as excinfo:
        get_field_from_path(Credential, 'organization__description__user')
    assert 'No related model for' in str(excinfo)


def test_invalid_order_by_key():
    field_order_by = OrderByBackend()
    with pytest.raises(ParseError) as excinfo:
        [f for f in field_order_by._validate_ordering_fields(JobEvent, ('event_data.task_action',))]
    assert 'has no field named' in str(excinfo)


@pytest.mark.parametrize(u"empty_value", [u'', ''])
def test_empty_in(empty_value):
    field_lookup = FieldLookupBackend()
    with pytest.raises(ValueError) as excinfo:
        field_lookup.value_to_python(JobTemplate, 'project__name__in', empty_value)
    assert 'empty value for __in' in str(excinfo.value)


@pytest.mark.parametrize(u"valid_value", [u'foo', u'foo,'])
def test_valid_in(valid_value):
    field_lookup = FieldLookupBackend()
    value, new_lookup, _ = field_lookup.value_to_python(JobTemplate, 'project__name__in', valid_value)
    assert 'foo' in value


def test_invalid_field():
    invalid_field = u"ヽヾ"
    field_lookup = FieldLookupBackend()
    with pytest.raises(ValueError) as excinfo:
        field_lookup.value_to_python(WorkflowJobTemplate, invalid_field, 'foo')
    assert 'is not an allowed field name. Must be ascii encodable.' in str(excinfo.value)


def test_valid_iexact():
    field_lookup = FieldLookupBackend()
    value, new_lookup, _ = field_lookup.value_to_python(JobTemplate, 'project__name__iexact', 'foo')
    assert 'foo' in value


def test_invalid_iexact():
    field_lookup = FieldLookupBackend()
    with pytest.raises(ValueError) as excinfo:
        field_lookup.value_to_python(Job, 'id__iexact', '1')
    assert 'is not a text field and cannot be filtered by case-insensitive search' in str(excinfo.value)


@pytest.mark.parametrize('lookup_suffix', ['', 'contains', 'startswith', 'in'])
@pytest.mark.parametrize('password_field', Credential.PASSWORD_FIELDS)
def test_filter_on_password_field(password_field, lookup_suffix):
    field_lookup = FieldLookupBackend()
    lookup = '__'.join(filter(None, [password_field, lookup_suffix]))
    with pytest.raises(PermissionDenied) as excinfo:
        field, new_lookup = field_lookup.get_field_from_lookup(Credential, lookup)
    assert 'not allowed' in str(excinfo.value)


@pytest.mark.parametrize(
    'model, query',
    [
@@ -128,10 +49,3 @@ def test_filter_sensitive_fields_and_relations(model, query):
    with pytest.raises(PermissionDenied) as excinfo:
        field, new_lookup = field_lookup.get_field_from_lookup(model, query)
    assert 'not allowed' in str(excinfo.value)


def test_looping_filters_prohibited():
    field_lookup = FieldLookupBackend()
    with pytest.raises(ParseError) as loop_exc:
        field_lookup.get_field_from_lookup(Job, 'job_events__job__job_events')
    assert 'job_events' in str(loop_exc.value)

@@ -12,6 +12,8 @@ from unittest import mock

from rest_framework.exceptions import ParseError

from ansible_base.lib.utils.models import get_type_for_model

from awx.main.utils import common
from awx.api.validators import HostnameRegexValidator
@@ -106,7 +108,7 @@ TEST_MODELS = [
# Cases relied on for scheduler dependent jobs list
@pytest.mark.parametrize('model,name', TEST_MODELS)
def test_get_type_for_model(model, name):
    assert common.get_type_for_model(model) == name
    assert get_type_for_model(model) == name


def test_get_model_for_invalid_type():

@@ -68,7 +68,9 @@ class mockHost:

@mock.patch('awx.main.utils.filters.get_model', return_value=mockHost())
class TestSmartFilterQueryFromString:
    @mock.patch('awx.api.filters.get_fields_from_path', lambda model, path: ([model], path))  # disable field filtering, because a__b isn't a real Host field
    @mock.patch(
        'ansible_base.rest_filters.rest_framework.field_lookup_backend.get_fields_from_path', lambda model, path: ([model], path)
    )  # disable field filtering, because a__b isn't a real Host field
    @pytest.mark.parametrize(
        "filter_string,q_expected",
        [
@@ -52,12 +52,10 @@ __all__ = [
    'get_awx_http_client_headers',
    'get_awx_version',
    'update_scm_url',
    'get_type_for_model',
    'get_model_for_type',
    'copy_model_by_class',
    'copy_m2m_relationships',
    'prefetch_page_capabilities',
    'to_python_boolean',
    'datetime_hook',
    'ignore_inventory_computed_fields',
    'ignore_inventory_group_removal',
@@ -110,18 +108,6 @@ def get_object_or_400(klass, *args, **kwargs):
        raise ParseError(*e.args)


def to_python_boolean(value, allow_none=False):
    value = str(value)
    if value.lower() in ('true', '1', 't'):
        return True
    elif value.lower() in ('false', '0', 'f'):
        return False
    elif allow_none and value.lower() in ('none', 'null'):
        return None
    else:
        raise ValueError(_(u'Unable to convert "%s" to boolean') % value)

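A minimal sketch of the removed helper's behavior, derived directly from the branches above (illustration only, not part of the diff):

    # to_python_boolean maps common string spellings onto Python values:
    assert to_python_boolean('True') is True              # 'true', '1', 't' -> True
    assert to_python_boolean(0) is False                  # str(0) == '0' -> False
    assert to_python_boolean('null', allow_none=True) is None
    # any other value raises ValueError, e.g. to_python_boolean('maybe')
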
def datetime_hook(d):
    new_d = {}
    for key, value in d.items():
@@ -569,14 +555,6 @@ def copy_m2m_relationships(obj1, obj2, fields, kwargs=None):
                dest_field.add(*list(src_field_value.all().values_list('id', flat=True)))


def get_type_for_model(model):
    """
    Return type name for a given model class.
    """
    opts = model._meta.concrete_model._meta
    return camelcase_to_underscore(opts.object_name)

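A quick illustration of the helper above, assuming camelcase_to_underscore behaves as its name suggests:

    # e.g. for the JobTemplate model:
    #   opts.object_name == 'JobTemplate'
    #   camelcase_to_underscore('JobTemplate') -> 'job_template'
    # so get_type_for_model(JobTemplate) == 'job_template'
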
def get_model_for_type(type_name):
    """
    Return model class for a given type name.
@@ -1,27 +1,10 @@
# Copyright (c) 2017 Ansible by Red Hat
# All Rights Reserved.

from itertools import chain

from awx.settings.application_name import set_application_name
from django.conf import settings


def get_all_field_names(model):
    # Implements compatibility with _meta.get_all_field_names
    # See: https://docs.djangoproject.com/en/1.11/ref/models/meta/#migrating-from-the-old-api
    return list(
        set(
            chain.from_iterable(
                (field.name, field.attname) if hasattr(field, 'attname') else (field.name,)
                for field in model._meta.get_fields()
                # For complete backwards compatibility, you may want to exclude
                # GenericForeignKey from the results.
                if not (field.many_to_one and field.related_model is None)
            )
        )
    )


def set_connection_name(function):
    set_application_name(settings.DATABASES, settings.CLUSTER_HOST_ID, function=function)

@@ -161,7 +161,7 @@ class SmartFilter(object):
        else:
            # detect loops and restrict access to sensitive fields
            # this import is intentional here to avoid a circular import
            from awx.api.filters import FieldLookupBackend
            from ansible_base.rest_filters.rest_framework.field_lookup_backend import FieldLookupBackend

            FieldLookupBackend().get_field_from_lookup(Host, k)
            kwargs[k] = v

@@ -199,6 +199,8 @@ class Licenser(object):
                    license['support_level'] = attr.get('value')
                elif attr.get('name') == 'usage':
                    license['usage'] = attr.get('value')
                elif attr.get('name') == 'ph_product_name' and attr.get('value') == 'RHEL Developer':
                    license['license_type'] = 'developer'

        if not license:
            logger.error("No valid subscriptions found in manifest")
@@ -322,7 +324,9 @@ class Licenser(object):
    def generate_license_options_from_entitlements(self, json):
        from dateutil.parser import parse

        ValidSub = collections.namedtuple('ValidSub', 'sku name support_level end_date trial quantity pool_id satellite subscription_id account_number usage')
        ValidSub = collections.namedtuple(
            'ValidSub', 'sku name support_level end_date trial developer_license quantity pool_id satellite subscription_id account_number usage'
        )
        valid_subs = []
        for sub in json:
            satellite = sub.get('satellite')
@@ -350,6 +354,7 @@ class Licenser(object):

            sku = sub['productId']
            trial = sku.startswith('S')  # i.e., SER/SVC
            developer_license = False
            support_level = ''
            usage = ''
            pool_id = sub['id']
@@ -364,9 +369,24 @@ class Licenser(object):
                    support_level = attr.get('value')
                elif attr.get('name') == 'usage':
                    usage = attr.get('value')
                elif attr.get('name') == 'ph_product_name' and attr.get('value') == 'RHEL Developer':
                    developer_license = True

            valid_subs.append(
                ValidSub(sku, sub['productName'], support_level, end_date, trial, quantity, pool_id, satellite, subscription_id, account_number, usage)
                ValidSub(
                    sku,
                    sub['productName'],
                    support_level,
                    end_date,
                    trial,
                    developer_license,
                    quantity,
                    pool_id,
                    satellite,
                    subscription_id,
                    account_number,
                    usage,
                )
            )

        if valid_subs:
@@ -381,6 +401,8 @@ class Licenser(object):
            if sub.trial:
                license._attrs['trial'] = True
                license._attrs['license_type'] = 'trial'
            if sub.developer_license:
                license._attrs['license_type'] = 'developer'
            license._attrs['instance_count'] = min(MAX_INSTANCES, license._attrs['instance_count'])
            human_instances = license._attrs['instance_count']
            if human_instances == MAX_INSTANCES:

@@ -227,7 +227,8 @@ class WebSocketRelayManager(object):
            continue
        try:
            if not notif.payload or notif.channel != "web_ws_heartbeat":
                return
                logger.warning(f"Unexpected channel or missing payload. {notif.channel}, {notif.payload}")
                continue

            try:
                payload = json.loads(notif.payload)
@@ -235,13 +236,15 @@ class WebSocketRelayManager(object):
                logmsg = "Failed to decode message from pg_notify channel `web_ws_heartbeat`"
                if logger.isEnabledFor(logging.DEBUG):
                    logmsg = "{} {}".format(logmsg, payload)
                logger.warning(logmsg)
                return
                logger.warning(logmsg)
                continue

            # Skip if the message comes from the same host we are running on
            # In this case, we'll be sharing a redis, no need to relay.
            if payload.get("hostname") == self.local_hostname:
                return
            hostname = payload.get("hostname")
            logger.debug(f"Received a heartbeat request for {hostname}. Skipping as we use redis for local host.")
            continue

            action = payload.get("action")

@@ -250,7 +253,7 @@ class WebSocketRelayManager(object):
            ip = payload.get("ip") or hostname  # fall back to hostname if ip isn't supplied
            if ip is None:
                logger.warning(f"Received invalid {action} ws_heartbeat, missing hostname and ip: {payload}")
                return
                continue
            logger.debug(f"Web host {hostname} ({ip}) {action} heartbeat received.")

            if action == "online":
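Judging from the payload.get() lookups above, a relayed web_ws_heartbeat message presumably carries at least these keys (a hedged illustration, not an exact wire format):

    # hypothetical pg_notify payload for a web heartbeat:
    payload = {
        "hostname": "awx-web-1",  # example value; messages from the local host are skipped
        "ip": "10.0.0.5",         # optional; the relay falls back to hostname when absent
        "action": "online",       # or "offline"
    }
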
@@ -11,6 +11,7 @@ from datetime import timedelta

# python-ldap
import ldap
from split_settings.tools import include


DEBUG = True
@@ -36,6 +37,18 @@ DATABASES = {
    }
}

# Special database overrides for dispatcher connections listening to pg_notify
LISTENER_DATABASES = {
    'default': {
        'OPTIONS': {
            'keepalives': 1,
            'keepalives_idle': 5,
            'keepalives_interval': 5,
            'keepalives_count': 5,
        },
    }
}

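For context, these OPTIONS are standard libpq keepalive parameters; a minimal sketch of an equivalent raw psycopg2 connection, with placeholder connection details:

    import psycopg2

    conn = psycopg2.connect(
        dbname='awx', host='localhost', user='awx',  # placeholder credentials
        keepalives=1,            # enable TCP keepalives
        keepalives_idle=5,       # seconds of idle time before the first probe
        keepalives_interval=5,   # seconds between probes
        keepalives_count=5,      # probes lost before the connection is dropped
    )
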
# Whether or not the deployment is a K8S-based deployment
# In K8S-based deployments, instances have zero capacity - all playbook
# automation is intended to flow through defined Container Groups that
@@ -131,6 +144,9 @@ BULK_JOB_MAX_LAUNCH = 100
# Maximum number of hosts that can be created in 1 bulk host create
BULK_HOST_MAX_CREATE = 100

# Maximum number of hosts that can be deleted in 1 bulk host delete
BULK_HOST_MAX_DELETE = 250

SITE_ID = 1

# Make this unique, and don't share it with anybody.
@@ -336,6 +352,7 @@ INSTALLED_APPS = [
    'awx.ui',
    'awx.sso',
    'solo',
    'ansible_base.rest_filters',
]

INTERNAL_IPS = ('127.0.0.1',)
@@ -350,12 +367,6 @@ REST_FRAMEWORK = {
        'awx.api.authentication.LoggedBasicAuthentication',
    ),
    'DEFAULT_PERMISSION_CLASSES': ('awx.api.permissions.ModelAccessPermission',),
    'DEFAULT_FILTER_BACKENDS': (
        'awx.api.filters.TypeFilterBackend',
        'awx.api.filters.FieldLookupBackend',
        'rest_framework.filters.SearchFilter',
        'awx.api.filters.OrderByBackend',
    ),
    'DEFAULT_PARSER_CLASSES': ('awx.api.parsers.JSONParser',),
    'DEFAULT_RENDERER_CLASSES': ('awx.api.renderers.DefaultJSONRenderer', 'awx.api.renderers.BrowsableAPIRenderer'),
    'DEFAULT_METADATA_CLASS': 'awx.api.metadata.Metadata',
@@ -1063,3 +1074,13 @@ CLEANUP_HOST_METRICS_HARD_THRESHOLD = 36 # months
# Host metric summary monthly task - last time of run
HOST_METRIC_SUMMARY_TASK_LAST_TS = None
HOST_METRIC_SUMMARY_TASK_INTERVAL = 7  # days


# django-ansible-base
ANSIBLE_BASE_TEAM_MODEL = 'main.Team'
ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization'

from ansible_base.lib import dynamic_config  # noqa: E402

settings_file = os.path.join(os.path.dirname(dynamic_config.__file__), 'dynamic_settings.py')
include(settings_file)

@@ -68,7 +68,6 @@ class LDAPSettings(BaseLDAPSettings):


class LDAPBackend(BaseLDAPBackend):

    """
    Custom LDAP backend for AWX.
    """
@@ -1341,7 +1341,6 @@ register(
    'SOCIAL_AUTH_SAML_SP_PUBLIC_CERT',
    field_class=fields.CharField,
    allow_blank=True,
    required=True,
    validators=[validate_certificate],
    label=_('SAML Service Provider Public Certificate'),
    help_text=_('Create a keypair to use as a service provider (SP) and include the certificate content here.'),
@@ -1353,7 +1352,6 @@ register(
    'SOCIAL_AUTH_SAML_SP_PRIVATE_KEY',
    field_class=fields.CharField,
    allow_blank=True,
    required=True,
    validators=[validate_private_key],
    label=_('SAML Service Provider Private Key'),
    help_text=_('Create a keypair to use as a service provider (SP) and include the private key content here.'),
@@ -1365,7 +1363,6 @@ register(
register(
    'SOCIAL_AUTH_SAML_ORG_INFO',
    field_class=SAMLOrgInfoField,
    required=True,
    label=_('SAML Service Provider Organization Info'),
    help_text=_('Provide the URL, display name, and the name of your app. Refer to the documentation for example syntax.'),
    category=_('SAML'),
@@ -1379,7 +1376,6 @@ register(
    'SOCIAL_AUTH_SAML_TECHNICAL_CONTACT',
    field_class=SAMLContactField,
    allow_blank=True,
    required=True,
    label=_('SAML Service Provider Technical Contact'),
    help_text=_('Provide the name and email address of the technical contact for your service provider. Refer to the documentation for example syntax.'),
    category=_('SAML'),
@@ -1391,7 +1387,6 @@ register(
    'SOCIAL_AUTH_SAML_SUPPORT_CONTACT',
    field_class=SAMLContactField,
    allow_blank=True,
    required=True,
    label=_('SAML Service Provider Support Contact'),
    help_text=_('Provide the name and email address of the support contact for your service provider. Refer to the documentation for example syntax.'),
    category=_('SAML'),

@@ -38,7 +38,7 @@ class CompleteView(BaseRedirectView):
        response = super(CompleteView, self).dispatch(request, *args, **kwargs)
        if self.request.user and self.request.user.is_authenticated:
            logger.info(smart_str(u"User {} logged in".format(self.request.user.username)))
            response.set_cookie('userLoggedIn', 'true')
            response.set_cookie('userLoggedIn', 'true', secure=getattr(settings, 'SESSION_COOKIE_SECURE', False))
            response.setdefault('X-API-Session-Cookie-Name', getattr(settings, 'SESSION_COOKIE_NAME', 'awx_sessionid'))
        return response

@@ -20,6 +20,7 @@ import UnsupportedScheduleForm from './UnsupportedScheduleForm';
import parseRuleObj, { UnsupportedRRuleError } from './parseRuleObj';
import buildRuleObj from './buildRuleObj';
import buildRuleSet from './buildRuleSet';
import mergeArraysByCredentialType from './mergeArraysByCredentialType';

const NUM_DAYS_PER_FREQUENCY = {
  week: 7,
@@ -350,6 +351,12 @@ function ScheduleForm({
      startDate: currentDate,
      startTime: time,
      timezone: schedule.timezone || now.zoneName,
      credentials: mergeArraysByCredentialType(
        resourceDefaultCredentials,
        credentials
      ),
      labels: originalLabels.current,
      instance_groups: originalInstanceGroups.current,
    };

    if (hasDaysToKeepField) {
@@ -0,0 +1,18 @@
export default function mergeArraysByCredentialType(
  defaultCredentials = [],
  overrides = []
) {
  const mergedArray = [...defaultCredentials];

  overrides.forEach((override) => {
    const index = mergedArray.findIndex(
      (defaultCred) => defaultCred.credential_type === override.credential_type
    );
    if (index !== -1) {
      mergedArray.splice(index, 1);
    }
    mergedArray.push(override);
  });

  return mergedArray;
}
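The merge rule above: keep every default credential unless an override with the same credential_type exists, in which case the override replaces it and is appended. The same semantics sketched in Python for clarity (hypothetical data, not AWX code):

    def merge_by_credential_type(defaults, overrides):
        merged = list(defaults)
        for override in overrides:
            # drop any default sharing this override's credential_type
            merged = [c for c in merged if c['credential_type'] != override['credential_type']]
            merged.append(override)
        return merged

    defaults = [{'id': 1, 'credential_type': 2}]
    overrides = [{'id': 9, 'credential_type': 2}]
    assert merge_by_credential_type(defaults, overrides) == [{'id': 9, 'credential_type': 2}]
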
@@ -119,7 +119,7 @@ function LoggingEdit() {
            ...logging.LOG_AGGREGATOR_ENABLED,
            help_text: (
              <>
                {logging.LOG_AGGREGATOR_ENABLED.help_text}
                {logging.LOG_AGGREGATOR_ENABLED?.help_text}
                {!formik.values.LOG_AGGREGATOR_ENABLED &&
                  (!formik.values.LOG_AGGREGATOR_HOST ||
                    !formik.values.LOG_AGGREGATOR_TYPE) && (
@@ -1,6 +1,5 @@
import React, { useCallback, useEffect, useState } from 'react';
import styled from 'styled-components';

import { t } from '@lingui/macro';
import {
  Card,
@@ -47,7 +46,7 @@ function SubscriptionUsageChart() {
  const [periodSelection, setPeriodSelection] = useState('year');
  const userProfile = useUserProfile();

  const calculateDateRange = () => {
  const calculateDateRange = useCallback(() => {
    const today = new Date();
    let date = '';
    switch (periodSelection) {
@@ -77,7 +76,7 @@ function SubscriptionUsageChart() {
        break;
    }
    return date;
  };
  }, [periodSelection]);

  const {
    isLoading,
@@ -89,7 +88,7 @@ function SubscriptionUsageChart() {
        calculateDateRange()
      );
      return data.data.results;
    }, [periodSelection]),
    }, [calculateDateRange]),
    []
  );

@@ -112,6 +112,12 @@ function WebhookSubForm({ templateType }) {
      label: t`GitLab`,
      isDisabled: false,
    },
    {
      value: 'bitbucket_dc',
      key: 'bitbucket_dc',
      label: t`Bitbucket Data Center`,
      isDisabled: false,
    },
  ];

  if (error || webhookKeyError) {
@@ -8,6 +8,7 @@ action_groups:
    - application
    - bulk_job_launch
    - bulk_host_create
    - bulk_host_delete
    - controller_meta
    - credential_input_source
    - credential
65
awx_collection/plugins/modules/bulk_host_delete.py
Normal file
@@ -0,0 +1,65 @@
#!/usr/bin/python
# coding: utf-8 -*-

# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function

__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'}

DOCUMENTATION = '''
---
module: bulk_host_delete
author: "Avi Layani (@Avilir)"
short_description: Bulk host delete in Automation Platform Controller
description:
    - Single-request bulk host deletion in Automation Platform Controller.
    - Provides a way to delete many hosts at once from inventories in Controller.
options:
    hosts:
      description:
        - List of host IDs to delete from the inventory.
      required: True
      type: list
      elements: int
extends_documentation_fragment: awx.awx.auth
'''

EXAMPLES = '''
- name: Bulk host delete
  bulk_host_delete:
    hosts:
      - 1
      - 2
'''

from ..module_utils.controller_api import ControllerAPIModule


def main():
    # Any additional arguments that are not fields of the item can be added here
    argument_spec = dict(
        hosts=dict(required=True, type='list', elements='int'),
    )

    # Create a module for ourselves
    module = ControllerAPIModule(argument_spec=argument_spec)

    # Extract our parameters
    hosts = module.params.get('hosts')

    # Delete the hosts
    result = module.post_endpoint("bulk/host_delete", data={"hosts": hosts})

    if result['status_code'] != 201:
        module.fail_json(msg="Failed to delete hosts, see response for details", response=result)

    module.json_output['changed'] = True

    module.exit_json(**module.json_output)


if __name__ == '__main__':
    main()
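Behaviorally, the module reduces to a single POST against the bulk endpoint (the path appears in the extra_endpoints mapping later in this diff); a hedged sketch with placeholder host and token:

    import requests

    resp = requests.post(
        "https://awx.example.com/api/v2/bulk/host_delete/",  # placeholder controller URL
        json={"hosts": [1, 2]},
        headers={"Authorization": "Bearer <token>"},         # placeholder OAuth2 token
    )
    assert resp.status_code == 201  # the module treats any other status as a failure
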
@@ -58,6 +58,7 @@ options:
          Insights, Machine, Microsoft Azure Key Vault, Microsoft Azure Resource Manager, Network, OpenShift or Kubernetes API
          Bearer Token, OpenStack, Red Hat Ansible Automation Platform, Red Hat Satellite 6, Red Hat Virtualization, Source Control,
          Thycotic DevOps Secrets Vault, Thycotic Secret Server, Vault, VMware vCenter, or a custom credential type
      required: True
      type: str
    inputs:
      description:
@@ -214,7 +215,7 @@ def main():
        copy_from=dict(),
        description=dict(),
        organization=dict(),
        credential_type=dict(),
        credential_type=dict(required=True),
        inputs=dict(type='dict', no_log=True),
        update_secrets=dict(type='bool', default=True, no_log=False),
        user=dict(),

@@ -18,30 +18,55 @@ import pytest
from ansible.module_utils.six import raise_from

from awx.main.tests.functional.conftest import _request
from awx.main.models import Organization, Project, Inventory, JobTemplate, Credential, CredentialType, ExecutionEnvironment, UnifiedJob
from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh  # noqa: F401; pylint: disable=unused-variable
from awx.main.models import (
    Organization,
    Project,
    Inventory,
    JobTemplate,
    Credential,
    CredentialType,
    ExecutionEnvironment,
    UnifiedJob,
    WorkflowJobTemplate,
    NotificationTemplate,
    Schedule,
)

from django.db import transaction

try:
    import tower_cli  # noqa

    HAS_TOWER_CLI = True
except ImportError:
    HAS_TOWER_CLI = False

try:
    # Because awxkit will be a directory at the root of this makefile and we are using python3, import awxkit will work even if it's not installed.
    # However, awxkit will not contain api, which causes a stack failure down on line 170 when we try to mock it.
    # So here we are importing awxkit.api to prevent that. Then you only get an error on tests for awxkit functionality.
    import awxkit.api  # noqa

    HAS_AWX_KIT = True
except ImportError:
    HAS_AWX_KIT = False

HAS_TOWER_CLI = False
HAS_AWX_KIT = False
logger = logging.getLogger('awx.main.tests')


@pytest.fixture(autouse=True)
def awxkit_path_set(monkeypatch):
    """Monkey patch sys.path, insert awxkit source code so that
    the package does not need to be installed.
    """
    base_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, 'awxkit'))
    monkeypatch.syspath_prepend(base_folder)


@pytest.fixture(autouse=True)
def import_awxkit():
    global HAS_TOWER_CLI
    global HAS_AWX_KIT
    try:
        import tower_cli  # noqa
        HAS_TOWER_CLI = True
    except ImportError:
        HAS_TOWER_CLI = False

    try:
        import awxkit  # noqa
        HAS_AWX_KIT = True
    except ImportError:
        HAS_AWX_KIT = False


def sanitize_dict(din):
    """Sanitize Django response data to purge it of internal types
    so it may be used to cast a requests response object
@@ -123,7 +148,7 @@ def run_module(request, collection_import):
    sanitize_dict(py_data)
    resp._content = bytes(json.dumps(django_response.data), encoding='utf8')
    resp.status_code = django_response.status_code
    resp.headers = {'X-API-Product-Name': 'AWX', 'X-API-Product-Version': '0.0.1-devel'}
    resp.headers = dict(django_response.headers)

    if request.config.getoption('verbose') > 0:
        logger.info('%s %s by %s, code:%s', method, '/api/' + url.split('/api/')[1], request_user.username, resp.status_code)
@@ -236,10 +261,8 @@ def job_template(project, inventory):


@pytest.fixture
def machine_credential(organization):
    ssh_type = CredentialType.defaults['ssh']()
    ssh_type.save()
    return Credential.objects.create(credential_type=ssh_type, name='machine-cred', inputs={'username': 'test_user', 'password': 'pas4word'})
def machine_credential(credentialtype_ssh, organization):  # noqa: F811
    return Credential.objects.create(credential_type=credentialtype_ssh, name='machine-cred', inputs={'username': 'test_user', 'password': 'pas4word'})


@pytest.fixture
@@ -253,9 +276,7 @@ def vault_credential(organization):
def kube_credential():
    ct = CredentialType.defaults['kubernetes_bearer_token']()
    ct.save()
    return Credential.objects.create(
        credential_type=ct, name='kube-cred', inputs={'host': 'my.cluster', 'bearer_token': 'my-token', 'verify_ssl': False}
    )
    return Credential.objects.create(credential_type=ct, name='kube-cred', inputs={'host': 'my.cluster', 'bearer_token': 'my-token', 'verify_ssl': False})


@pytest.fixture
@@ -288,3 +309,42 @@ def mock_has_unpartitioned_events():
    # We mock this out to circumvent the migration query.
    with mock.patch.object(UnifiedJob, 'has_unpartitioned_events', new=False) as _fixture:
        yield _fixture


@pytest.fixture
def workflow_job_template(organization, inventory):
    return WorkflowJobTemplate.objects.create(name='test-workflow_job_template', organization=organization, inventory=inventory)


@pytest.fixture
def notification_template(organization):
    return NotificationTemplate.objects.create(
        name='test-notification_template',
        organization=organization,
        notification_type="webhook",
        notification_configuration=dict(
            url="http://localhost",
            username="",
            password="",
            headers={
                "Test": "Header",
            },
        ),
    )


@pytest.fixture
def scm_credential(credentialtype_scm, organization):  # noqa: F811
    return Credential.objects.create(
        credential_type=credentialtype_scm, name='scm-cred', inputs={'username': 'optimus', 'password': 'prime'}, organization=organization
    )


@pytest.fixture
def rrule():
    return 'DTSTART:20151117T050000Z RRULE:FREQ=DAILY;INTERVAL=1;COUNT=1'

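The rrule fixture's string can be sanity-checked with python-dateutil (a sketch; rrulestr expects the DTSTART and RRULE parts on separate lines):

    from dateutil.rrule import rrulestr

    rule = rrulestr('DTSTART:20151117T050000Z\nRRULE:FREQ=DAILY;INTERVAL=1;COUNT=1')
    occurrences = list(rule)  # a single occurrence: 2015-11-17 05:00:00 UTC
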

@pytest.fixture
def schedule(job_template, rrule):
    return Schedule.objects.create(unified_job_template=job_template, name='test-sched', rrule=rrule)

@@ -45,3 +45,28 @@ def test_bulk_host_create(run_module, admin_user, inventory):
    resp_hosts = inventory.hosts.all().values_list('name', flat=True)
    for h in hosts:
        assert h['name'] in resp_hosts


@pytest.mark.django_db
def test_bulk_host_delete(run_module, admin_user, inventory):
    hosts = [dict(name="127.0.0.1"), dict(name="foo.dns.org")]
    result = run_module(
        'bulk_host_create',
        {
            'inventory': inventory.name,
            'hosts': hosts,
        },
        admin_user,
    )
    assert not result.get('failed', False), result.get('msg', result)
    assert result.get('changed'), result
    resp_hosts_ids = list(inventory.hosts.all().values_list('id', flat=True))
    result = run_module(
        'bulk_host_delete',
        {
            'hosts': resp_hosts_ids,
        },
        admin_user,
    )
    assert not result.get('failed', False), result.get('msg', result)
    assert result.get('changed'), result

@@ -50,6 +50,7 @@ no_endpoint_for_module = [
extra_endpoints = {
    'bulk_job_launch': '/api/v2/bulk/job_launch/',
    'bulk_host_create': '/api/v2/bulk/host_create/',
    'bulk_host_delete': '/api/v2/bulk/host_delete/',
}

# Global module parameters we can ignore
154
awx_collection/test/awx/test_export.py
Normal file
@@ -0,0 +1,154 @@
from __future__ import absolute_import, division, print_function

__metaclass__ = type

import pytest

from awx.main.models.execution_environments import ExecutionEnvironment
from awx.main.models.jobs import JobTemplate

from awx.main.tests.functional.conftest import user, system_auditor  # noqa: F401; pylint: disable=unused-import


ASSETS = set([
    "users",
    "organizations",
    "teams",
    "credential_types",
    "credentials",
    "notification_templates",
    "projects",
    "inventory",
    "inventory_sources",
    "job_templates",
    "workflow_job_templates",
    "execution_environments",
    "applications",
    "schedules",
])


@pytest.fixture
def job_template(project, inventory, organization, machine_credential):
    jt = JobTemplate.objects.create(name='test-jt', project=project, inventory=inventory, organization=organization, playbook='helloworld.yml')
    jt.credentials.add(machine_credential)
    jt.save()
    return jt


@pytest.fixture
def execution_environment(organization):
    return ExecutionEnvironment.objects.create(name="test-ee", description="test-ee", managed=False, organization=organization)


def find_by(result, name, key, value):
    for c in result[name]:
        if c[key] == value:
            return c
    values = [c.get(key, None) for c in result[name]]
    raise ValueError(f"Failed to find assets['{name}'][{key}] = '{value}' valid values are {values}")


@pytest.mark.django_db
def test_export(run_module, admin_user):
    """
    There should be nothing to export EXCEPT the admin user.
    """
    result = run_module('export', dict(all=True), admin_user)
    assert not result.get('failed', False), result.get('msg', result)
    assets = result['assets']

    assert set(result['assets'].keys()) == ASSETS

    u = find_by(assets, 'users', 'username', 'admin')
    assert u['is_superuser'] is True

    all_assets_except_users = {k: v for k, v in assets.items() if k != 'users'}

    for k, v in all_assets_except_users.items():
        assert v == [], f"Expected resource {k} to be empty. Instead it is {v}"


@pytest.mark.django_db
def test_export_simple(
    run_module,
    organization,
    project,
    inventory,
    job_template,
    scm_credential,
    machine_credential,
    workflow_job_template,
    execution_environment,
    notification_template,
    rrule,
    schedule,
    admin_user,
):
    """
    TODO: Ensure there aren't _more_ results in each resource than we expect
    """
    result = run_module('export', dict(all=True), admin_user)
    assert not result.get('failed', False), result.get('msg', result)
    assets = result['assets']

    u = find_by(assets, 'users', 'username', 'admin')
    assert u['is_superuser'] is True

    find_by(assets, 'organizations', 'name', 'Default')

    r = find_by(assets, 'credentials', 'name', 'scm-cred')
    assert r['credential_type']['kind'] == 'scm'
    assert r['credential_type']['name'] == 'Source Control'

    r = find_by(assets, 'credentials', 'name', 'machine-cred')
    assert r['credential_type']['kind'] == 'ssh'
    assert r['credential_type']['name'] == 'Machine'

    r = find_by(assets, 'job_templates', 'name', 'test-jt')
    assert r['natural_key']['organization']['name'] == 'Default'
    assert r['inventory']['name'] == 'test-inv'
    assert r['project']['name'] == 'test-proj'

    find_by(r['related'], 'credentials', 'name', 'machine-cred')

    r = find_by(assets, 'inventory', 'name', 'test-inv')
    assert r['organization']['name'] == 'Default'

    r = find_by(assets, 'projects', 'name', 'test-proj')
    assert r['organization']['name'] == 'Default'

    r = find_by(assets, 'workflow_job_templates', 'name', 'test-workflow_job_template')
    assert r['natural_key']['organization']['name'] == 'Default'
    assert r['inventory']['name'] == 'test-inv'

    r = find_by(assets, 'execution_environments', 'name', 'test-ee')
    assert r['organization']['name'] == 'Default'

    r = find_by(assets, 'schedules', 'name', 'test-sched')
    assert r['rrule'] == rrule

    r = find_by(assets, 'notification_templates', 'name', 'test-notification_template')
    assert r['organization']['name'] == 'Default'
    assert r['notification_configuration']['url'] == 'http://localhost'


@pytest.mark.django_db
def test_export_system_auditor(run_module, schedule, system_auditor):  # noqa: F811
    """
    This test illustrates the deficiency of export when run as a non-root user (i.e. system auditor).
    The OPTIONS endpoint does NOT return POST for a system auditor. This is bad for the export code
    because it relies on crawling the OPTIONS POST response to determine the fields to export.
    """
    result = run_module('export', dict(all=True), system_auditor)
    assert result.get('failed', False), result.get('msg', result)

    assert 'Failed to export assets substring not found' in result['msg'], (
        'If you found this error then you have probably fixed a feature! The export code attempts to ascertain the POST fields from the `description` field,'
        ' but both the API side and the client inference code are lacking.'
    )

    # r = result['assets']['schedules'][0]
    # assert r['natural_key']['name'] == 'test-sched'

    # assert 'rrule' not in r, 'If you found this error then you have probably fixed a feature! We WANT rrule to be found in the export schedule payload.'
@@ -0,0 +1,80 @@
---
- name: "Generate a random string for test"
  set_fact:
    test_id: "{{ lookup('password', '/dev/null chars=ascii_letters length=16') }}"
  when: "test_id is not defined"

- name: "Generate a unique name"
  set_fact:
    bulk_inv_name: "AWX-Collection-tests-bulk_host_create-{{ test_id }}"

- name: "Get our collection package"
  controller_meta:
  register: "controller_meta"

- name: "Generate the name of our plugin"
  set_fact:
    plugin_name: "{{ controller_meta.prefix }}.controller_api"

- name: "Create an inventory"
  inventory:
    name: "{{ bulk_inv_name }}"
    organization: "Default"
    state: "present"
  register: "inventory_result"

- name: "Bulk Host Create"
  bulk_host_create:
    hosts:
      - name: "123.456.789.123"
        description: "myhost1"
        variables:
          food: "carrot"
          color: "orange"
      - name: "example.dns.gg"
        description: "myhost2"
        enabled: "false"
    inventory: "{{ bulk_inv_name }}"
  register: "result"

- assert:
    that:
      - "result is not failed"

- name: "Get our collection package"
  controller_meta:
  register: "controller_meta"

- name: "Generate the name of our plugin"
  set_fact:
    plugin_name: "{{ controller_meta.prefix }}.controller_api"

- name: "Setting the inventory hosts endpoint"
  set_fact:
    endpoint: "inventories/{{ inventory_result.id }}/hosts/"

- name: "Get hosts information from inventory"
  set_fact:
    hosts_created: "{{ query(plugin_name, endpoint, return_objects=True) }}"
    host_id_list: []

- name: "Extract host IDs from hosts information"
  set_fact:
    host_id_list: "{{ host_id_list + [item.id] }}"
  loop: "{{ hosts_created }}"

- name: "Bulk Host Delete"
  bulk_host_delete:
    hosts: "{{ host_id_list }}"
  register: "result"

- assert:
    that:
      - "result is not failed"

# cleanup
- name: "Delete inventory"
  inventory:
    name: "{{ bulk_inv_name }}"
    organization: "Default"
    state: "absent"
@@ -60,7 +60,7 @@
      - result['job_info']['skip_tags'] == "skipbaz"
      - result['job_info']['limit'] == "localhost"
      - result['job_info']['job_tags'] == "Hello World"
      - result['job_info']['inventory'] == {{ inventory_id }}
      - result['job_info']['inventory'] == inventory_id | int
      - "result['job_info']['extra_vars'] == '{\"animal\": \"bear\", \"food\": \"carrot\"}'"

# cleanup

@@ -71,6 +71,19 @@
    that:
      - "result is changed"

- name: Delete a credential without credential_type
  credential:
    name: "{{ ssh_cred_name1 }}"
    organization: Default
    state: absent
  register: result
  ignore_errors: true

- assert:
    that:
      - "result is failed"


- name: Create an Org-specific credential with an ID with exists
  credential:
    name: "{{ ssh_cred_name1 }}"

@@ -16,7 +16,7 @@

- assert:
    that:
      - "{{ matching_jobs.count }} == 1"
      - matching_jobs.count == 1

- name: List failed jobs (which don't exist)
  job_list:
@@ -26,7 +26,7 @@

- assert:
    that:
      - "{{ successful_jobs.count }} == 0"
      - successful_jobs.count == 0

- name: Get ALL result pages!
  job_list:

@@ -99,7 +99,7 @@
    that:
      - wait_results is failed
      - 'wait_results.status == "canceled"'
      - "wait_results.msg == 'Job with id {{ job.id }} failed' or 'Job with id={{ job.id }} failed, error: Job failed.'"
      - "'Job with id ~ job.id failed' or 'Job with id= ~ job.id failed, error: Job failed.' is in wait_results.msg"

# workflow wait test
- name: Generate a random string for test

@@ -132,7 +132,7 @@

- name: Get the ID of the first user created and verify that it is correct
  assert:
    that: "{{ query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_ids=True)[0] }} == {{ user_creation_results['results'][0]['id'] }}"
    that: "query(plugin_name, 'users', query_params={ 'username' : user_creation_results['results'][0]['item'] }, return_ids=True)[0] == user_creation_results['results'][0]['id'] | string"

- name: Try to get an ID of someone who does not exist
  set_fact:

@@ -11,7 +11,7 @@

- name: Call ruleset with no rules
  set_fact:
    complex_rule: "{{ query(ruleset_plugin_name, '2022-04-30 10:30:45') }}"
    complex_rule: "{{ query(ruleset_plugin_name | string, '2022-04-30 10:30:45') }}"
  ignore_errors: True
  register: results

@@ -36,7 +36,7 @@
- assert:
    that:
      - result is failed
      - "'Unable to create schedule {{ sched1 }}' in result.msg"
      - "'Unable to create schedule '~ sched1 in result.msg"

- name: Create with options that the JT does not support
  schedule:
@@ -62,7 +62,7 @@
- assert:
    that:
      - result is failed
      - "'Unable to create schedule {{ sched1 }}' in result.msg"
      - "'Unable to create schedule '~ sched1 in result.msg"

- name: Build a real schedule
  schedule:

@@ -9,7 +9,7 @@

- name: Test too many params (failure from validation of terms)
  debug:
    msg: "{{ query(plugin_name, 'none', 'weekly', start_date='2020-4-16 03:45:07') }}"
    msg: "{{ query(plugin_name | string, 'none', 'weekly', start_date='2020-4-16 03:45:07') }}"
  ignore_errors: true
  register: result

@@ -57,7 +57,7 @@
- assert:
    that:
      - result is failed
      - "'Monitoring of Workflow Job - {{ wfjt_name1 }} aborted due to timeout' in result.msg"
      - "'Monitoring of Workflow Job - '~ wfjt_name1 ~ ' aborted due to timeout' in result.msg"

- name: Kick off a workflow and wait for it
  workflow_launch:

Some files were not shown because too many files have changed in this diff.