Mirror of https://github.com/ansible/awx.git, synced 2026-02-05 03:24:50 -03:30

Compare commits (1 commit): upgrade-sq... test-ansib...

| Author | SHA1 | Date |
|---|---|---|
| | faf604ca4b | |
@@ -19,8 +19,6 @@ exclude_also =
branch = True
omit =
    awx/main/migrations/*
    awx/settings/defaults.py
    awx/settings/*_defaults.py
source =
    .
source_pkgs =
25 .github/actions/awx_devel_image/action.yml vendored
@@ -11,7 +11,9 @@ inputs:
runs:
using: composite
steps:
- uses: ./.github/actions/setup-python
- name: Get python version from Makefile
shell: bash
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

- name: Set lower case owner name
shell: bash
@@ -24,9 +26,26 @@ runs:
run: |
echo "${{ inputs.github-token }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

- uses: ./.github/actions/setup-ssh-agent
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ inputs.private-github-key }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ inputs.private-github-key }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi

- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ inputs.private-github-key }}
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}

- name: Pre-pull latest devel image to warm cache
shell: bash
27 .github/actions/setup-python/action.yml vendored
@@ -1,27 +0,0 @@
name: 'Setup Python from Makefile'
description: 'Extract and set up Python version from Makefile'
inputs:
python-version:
description: 'Override Python version (optional)'
required: false
default: ''
working-directory:
description: 'Directory containing the Makefile'
required: false
default: '.'
runs:
using: composite
steps:
- name: Get python version from Makefile
shell: bash
run: |
if [ -n "${{ inputs.python-version }}" ]; then
echo "py_version=${{ inputs.python-version }}" >> $GITHUB_ENV
else
cd ${{ inputs.working-directory }}
echo "py_version=`make PYTHON_VERSION`" >> $GITHUB_ENV
fi
- name: Install python
uses: actions/setup-python@v5
with:
python-version: ${{ env.py_version }}
29 .github/actions/setup-ssh-agent/action.yml vendored
@@ -1,29 +0,0 @@
name: 'Setup SSH for GitHub'
description: 'Configure SSH for private repository access'
inputs:
ssh-private-key:
description: 'SSH private key for repository access'
required: false
default: ''
runs:
using: composite
steps:
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ inputs.ssh-private-key }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ inputs.ssh-private-key }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi

- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}
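Both branches of the `generate_key` step emit the key through `$GITHUB_OUTPUT` using the `NAME<<EOF ... EOF` delimiter form, which is how GitHub Actions accepts multi-line step outputs. A minimal Python sketch of the same mechanism is below; the helper name and the placeholder key text are illustrative, not part of the workflow.

```python
import os

def write_multiline_output(name: str, value: str) -> None:
    # Append a multi-line step output using the <<EOF delimiter syntax,
    # exactly as the shell steps above do. GITHUB_OUTPUT is set by the runner.
    with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as fh:
        fh.write(f"{name}<<EOF\n")
        fh.write(value.rstrip("\n") + "\n")
        fh.write("EOF\n")

# Illustrative only -- a placeholder value, not real key material:
# write_multiline_output("SSH_PRIVATE_KEY", "-----BEGIN OPENSSH PRIVATE KEY-----\n...\n-----END OPENSSH PRIVATE KEY-----")
```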
39 .github/workflows/ci.yml vendored
@@ -130,7 +130,7 @@ jobs:
with:
show-progress: false

- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.x'

@@ -161,10 +161,6 @@ jobs:
show-progress: false
path: awx

- uses: ./awx/.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

- name: Checkout awx-operator
uses: actions/checkout@v4
with:
@@ -172,14 +168,39 @@ jobs:
repository: ansible/awx-operator
path: awx-operator

- uses: ./awx/.github/actions/setup-python
- name: Get python version from Makefile
working-directory: awx
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
working-directory: awx
python-version: ${{ env.py_version }}

- name: Install playbook dependencies
run: |
python3 -m pip install docker

- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi

- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}

- name: Build AWX image
working-directory: awx
run: |
@@ -278,7 +299,7 @@ jobs:
with:
show-progress: false

- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.x'

@@ -354,7 +375,7 @@ jobs:
with:
show-progress: false

- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.x'
26 .github/workflows/devel_images.yml vendored
@@ -49,10 +49,14 @@ jobs:
run: |
echo "DEV_DOCKER_TAG_BASE=ghcr.io/${OWNER,,}" >> $GITHUB_ENV
echo "COMPOSE_TAG=${GITHUB_REF##*/}" >> $GITHUB_ENV
echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV
env:
OWNER: '${{ github.repository_owner }}'

- uses: ./.github/actions/setup-python
- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.py_version }}

- name: Log in to registry
run: |
@@ -69,9 +73,25 @@ jobs:
make ui
if: matrix.build-targets.image-name == 'awx'

- uses: ./.github/actions/setup-ssh-agent
- name: Generate placeholder SSH private key if SSH auth for private repos is not needed
id: generate_key
shell: bash
run: |
if [[ -z "${{ secrets.PRIVATE_GITHUB_KEY }}" ]]; then
ssh-keygen -t ed25519 -C "github-actions" -N "" -f ~/.ssh/id_ed25519
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
cat ~/.ssh/id_ed25519 >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
else
echo "SSH_PRIVATE_KEY<<EOF" >> $GITHUB_OUTPUT
echo "${{ secrets.PRIVATE_GITHUB_KEY }}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
fi

- name: Add private GitHub key to SSH agent
uses: webfactory/ssh-agent@v0.9.0
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}
ssh-private-key: ${{ steps.generate_key.outputs.SSH_PRIVATE_KEY }}

- name: Build and push AWX devel images
run: |
2 .github/workflows/docs.yml vendored
@@ -12,7 +12,7 @@ jobs:
with:
show-progress: false

- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.x'
4 .github/workflows/label_issue.yml vendored
@@ -34,11 +34,9 @@ jobs:
with:
show-progress: false

- uses: ./.github/actions/setup-python

- uses: actions/setup-python@v4
- name: Install python requests
run: pip install requests

- name: Check if user is a member of Ansible org
uses: jannekem/run-python-script-action@v1
id: check_user
2 .github/workflows/label_pr.yml vendored
@@ -33,7 +33,7 @@ jobs:
with:
show-progress: false

- uses: ./.github/actions/setup-python
- uses: actions/setup-python@v5
with:
python-version: '3.x'
8 .github/workflows/promote.yml vendored
@@ -36,7 +36,13 @@ jobs:
with:
show-progress: false

- uses: ./.github/actions/setup-python
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.py_version }}

- name: Install dependencies
run: |
9 .github/workflows/stage.yml vendored
@@ -64,9 +64,14 @@ jobs:
repository: ansible/awx-logos
path: awx-logos

- uses: ./awx/.github/actions/setup-python
- name: Get python version from Makefile
working-directory: awx
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
working-directory: awx
python-version: ${{ env.py_version }}

- name: Install playbook dependencies
run: |
15 .github/workflows/upload_schema.yml vendored
@@ -5,7 +5,6 @@ env:
LC_ALL: "C.UTF-8" # prevent ERROR: Ansible could not initialize the preferred locale: unsupported locale setting

on:
workflow_dispatch:
push:
branches:
- devel
@@ -23,16 +22,18 @@ jobs:
with:
show-progress: false

- uses: ./.github/actions/setup-python
- name: Get python version from Makefile
run: echo py_version=`make PYTHON_VERSION` >> $GITHUB_ENV

- name: Install python ${{ env.py_version }}
uses: actions/setup-python@v4
with:
python-version: ${{ env.py_version }}

- name: Log in to registry
run: |
echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin

- uses: ./.github/actions/setup-ssh-agent
with:
ssh-private-key: ${{ secrets.PRIVATE_GITHUB_KEY }}

- name: Pre-pull image to warm build cache
run: |
docker pull -q ghcr.io/${{ github.repository_owner }}/awx_devel:${GITHUB_REF##*/} || :
@@ -55,3 +56,5 @@ jobs:
ansible localhost -c local, -m command -a "{{ ansible_python_interpreter + ' -m pip install boto3'}}"
ansible localhost -c local -m aws_s3 \
-a "src=${{ github.workspace }}/schema.json bucket=awx-public-ci-files object=${GITHUB_REF##*/}/schema.json mode=put permission=public-read"
11 README.md
@@ -3,17 +3,6 @@

<img src="https://raw.githubusercontent.com/ansible/awx-logos/master/awx/ui/client/assets/logo-login.svg?sanitize=true" width=200 alt="AWX" />

> [!CAUTION]
> The last release of this repository was released on Jul 2, 2024.
> **Releases of this project are now paused during a large scale refactoring.**
> For more information, follow [the Forum](https://forum.ansible.com/) and - more specifically - see the various communications on the matter:
>
> * [Blog: Upcoming Changes to the AWX Project](https://www.ansible.com/blog/upcoming-changes-to-the-awx-project/)
> * [Streamlining AWX Releases](https://forum.ansible.com/t/streamlining-awx-releases/6894) Primary update
> * [Refactoring AWX into a Pluggable, Service-Oriented Architecture](https://forum.ansible.com/t/refactoring-awx-into-a-pluggable-service-oriented-architecture/7404)
> * [Upcoming changes to AWX Operator installation methods](https://forum.ansible.com/t/upcoming-changes-to-awx-operator-installation-methods/7598)
> * [AWX UI and credential types transitioning to the new pluggable architecture](https://forum.ansible.com/t/awx-ui-and-credential-types-transitioning-to-the-new-pluggable-architecture/8027)

AWX provides a web-based user interface, REST API, and task engine built on top of [Ansible](https://github.com/ansible/ansible). It is one of the upstream projects for [Red Hat Ansible Automation Platform](https://www.ansible.com/products/automation-platform).

To install AWX, please view the [Install guide](./INSTALL.md).
@@ -62,8 +62,7 @@ else:

def prepare_env():
# Update the default settings environment variable based on current mode.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings')
os.environ.setdefault('AWX_MODE', MODE)
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'awx.settings.%s' % MODE)
# Hide DeprecationWarnings when running in production. Need to first load
# settings to apply our filter after Django's own warnings filter.
from django.conf import settings

@@ -161,7 +161,7 @@ def get_view_description(view, html=False):

def get_default_schema():
if settings.DYNACONF.is_development_mode:
if settings.SETTINGS_MODULE == 'awx.settings.development':
from awx.api.swagger import schema_view

return schema_view
@@ -6,7 +6,6 @@ import copy
import json
import logging
import re
import yaml
from collections import Counter, OrderedDict
from datetime import timedelta
from uuid import uuid4
@@ -627,41 +626,15 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
|
||||
return exclusions
|
||||
|
||||
def validate(self, attrs):
|
||||
"""
|
||||
Apply serializer validation. Called by DRF.
|
||||
|
||||
Can be extended by subclasses. Or consider overwriting
|
||||
`validate_with_obj` in subclasses, which provides access to the model
|
||||
object and exception handling for field validation.
|
||||
|
||||
:param dict attrs: The names and values of the model form fields.
|
||||
:raise rest_framework.exceptions.ValidationError: If the validation
|
||||
fails.
|
||||
|
||||
The exception must contain a dict with the names of the form fields
|
||||
which failed validation as keys, and a list of error messages as
|
||||
values. This ensures that the error messages are rendered near the
|
||||
relevant fields.
|
||||
:return: The names and values from the model form fields, possibly
|
||||
modified by the validations.
|
||||
:rtype: dict
|
||||
"""
|
||||
attrs = super(BaseSerializer, self).validate(attrs)
|
||||
# Create/update a model instance and run its full_clean() method to
|
||||
# do any validation implemented on the model class.
|
||||
exclusions = self.get_validation_exclusions(self.instance)
|
||||
# Create a new model instance or take the existing one if it exists,
|
||||
# and update its attributes with the respective field values from
|
||||
# attrs.
|
||||
obj = self.instance or self.Meta.model()
|
||||
for k, v in attrs.items():
|
||||
if k not in exclusions and k != 'canonical_address_port':
|
||||
setattr(obj, k, v)
|
||||
try:
|
||||
# Run serializer validators which need the model object for
|
||||
# validation.
|
||||
self.validate_with_obj(attrs, obj)
|
||||
# Apply any validations implemented on the model class.
|
||||
# Create/update a model instance and run its full_clean() method to
|
||||
# do any validation implemented on the model class.
|
||||
exclusions = self.get_validation_exclusions(self.instance)
|
||||
obj = self.instance or self.Meta.model()
|
||||
for k, v in attrs.items():
|
||||
if k not in exclusions and k != 'canonical_address_port':
|
||||
setattr(obj, k, v)
|
||||
obj.full_clean(exclude=exclusions)
|
||||
# full_clean may modify values on the instance; copy those changes
|
||||
# back to attrs so they are saved.
|
||||
@@ -690,32 +663,6 @@ class BaseSerializer(serializers.ModelSerializer, metaclass=BaseSerializerMetacl
|
||||
raise ValidationError(d)
|
||||
return attrs
|
||||
|
||||
def validate_with_obj(self, attrs, obj):
|
||||
"""
|
||||
Overwrite this if you need the model instance for your validation.
|
||||
|
||||
:param dict attrs: The names and values of the model form fields.
|
||||
:param obj: An instance of the class's meta model.
|
||||
|
||||
If the serializer runs on a newly created object, obj contains only
|
||||
the attrs from its serializer. If the serializer runs because an
|
||||
object has been edited, obj is the existing model instance with all
|
||||
attributes and values available.
|
||||
:raise django.core.exceptions.ValidationError: Raise this if your
|
||||
validation fails.
|
||||
|
||||
To make the error appear at the respective form field, instantiate
|
||||
the Exception with a dict containing the field name as key and the
|
||||
error message as value.
|
||||
|
||||
Example: ``ValidationError({"password": "Not good enough!"})``
|
||||
|
||||
If the exception contains just a string, the message cannot be
|
||||
related to a field and is rendered at the top of the model form.
|
||||
:return: None
|
||||
"""
|
||||
return
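A minimal, hypothetical subclass illustrating the hook contract described in the docstring above; the serializer, model, and field names are made up for illustration.

```python
from django.core.exceptions import ValidationError as DjangoValidationError


class ExampleHostSerializer(BaseSerializer):
    class Meta:
        model = ExampleHost  # hypothetical model with a nullable `port` field
        fields = ('name', 'port')

    def validate_with_obj(self, attrs, obj):
        # obj already carries the merged attrs, so cross-field checks can look
        # at the whole instance rather than one field value at a time.
        if obj.port is not None and not (1 <= obj.port <= 65535):
            # Keying the message by field name renders the error next to `port`.
            raise DjangoValidationError({'port': 'Port must be between 1 and 65535.'})
```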
|
||||
|
||||
def reverse(self, *args, **kwargs):
|
||||
kwargs['request'] = self.context.get('request')
|
||||
return reverse(*args, **kwargs)
|
||||
@@ -1037,6 +984,7 @@ class UserSerializer(BaseSerializer):
|
||||
return ret
|
||||
|
||||
def validate_password(self, value):
|
||||
django_validate_password(value)
|
||||
if not self.instance and value in (None, ''):
|
||||
raise serializers.ValidationError(_('Password required for new User.'))
|
||||
|
||||
@@ -1059,50 +1007,6 @@ class UserSerializer(BaseSerializer):
|
||||
|
||||
return value
|
||||
|
||||
def validate_with_obj(self, attrs, obj):
|
||||
"""
|
||||
Validate the password with the Django password validators
|
||||
|
||||
To enable the Django password validators, configure
|
||||
`settings.AUTH_PASSWORD_VALIDATORS` as described in the [Django
|
||||
docs](https://docs.djangoproject.com/en/5.1/topics/auth/passwords/#enabling-password-validation)
|
||||
|
||||
:param dict attrs: The User form field names and their values as a dict.
|
||||
Example::
|
||||
|
||||
{
|
||||
'username': 'TestUsername', 'first_name': 'FirstName',
|
||||
'last_name': 'LastName', 'email': 'First.Last@my.org',
|
||||
'is_superuser': False, 'is_system_auditor': False,
|
||||
'password': 'secret123'
|
||||
}
|
||||
|
||||
:param obj: The User model instance.
|
||||
:raises django.core.exceptions.ValidationError: Raise this if at least
|
||||
one Django password validator fails.
|
||||
|
||||
The exception contains a dict ``{"password": <error-message>}``
|
||||
which indicates that the password field has failed validation, and
|
||||
the reason for failure.
|
||||
:return: None.
|
||||
"""
|
||||
# We must do this here instead of in `validate_password` because some
|
||||
# django password validators need access to other model instance fields,
|
||||
# e.g. ``username`` for the ``UserAttributeSimilarityValidator``.
|
||||
password = attrs.get("password")
|
||||
# Skip validation if no password has been entered. This may happen when
|
||||
# an existing User is edited.
|
||||
if password and password != '$encrypted$':
|
||||
# Apply validators from settings.AUTH_PASSWORD_VALIDATORS. This may
|
||||
# raise ValidationError.
|
||||
#
|
||||
# If the validation fails, re-raise the exception with adjusted
|
||||
# content to make the error appear near the password field.
|
||||
try:
|
||||
django_validate_password(password, user=obj)
|
||||
except DjangoValidationError as exc:
|
||||
raise DjangoValidationError({"password": exc.messages})
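For reference, the validators the docstring points at are enabled through `settings.AUTH_PASSWORD_VALIDATORS`. With a configuration like the sketch below (values illustrative, mirroring the test settings later in this diff), the `django_validate_password(password, user=obj)` call above rejects passwords that are too short, too common, numeric-only, or too similar to the username.

```python
# Illustrative settings snippet; the min_length value is an example.
AUTH_PASSWORD_VALIDATORS = [
    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {'min_length': 9}},
    {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
    {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
]
```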
|
||||
|
||||
def _update_password(self, obj, new_password):
|
||||
if new_password and new_password != '$encrypted$':
|
||||
obj.set_password(new_password)
|
||||
@@ -3448,17 +3352,11 @@ class JobRelaunchSerializer(BaseSerializer):
|
||||
choices=[('all', _('No change to job limit')), ('failed', _('All failed and unreachable hosts'))],
|
||||
write_only=True,
|
||||
)
|
||||
job_type = serializers.ChoiceField(
|
||||
required=False,
|
||||
allow_null=True,
|
||||
choices=NEW_JOB_TYPE_CHOICES,
|
||||
write_only=True,
|
||||
)
|
||||
credential_passwords = VerbatimField(required=True, write_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Job
|
||||
fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'job_type', 'credential_passwords')
|
||||
fields = ('passwords_needed_to_start', 'retry_counts', 'hosts', 'credential_passwords')
|
||||
|
||||
def validate_credential_passwords(self, value):
|
||||
pnts = self.instance.passwords_needed_to_start
|
||||
@@ -5917,34 +5815,6 @@ class InstanceGroupSerializer(BaseSerializer):
|
||||
raise serializers.ValidationError(_('Only Kubernetes credentials can be associated with an Instance Group'))
|
||||
return value
|
||||
|
||||
def validate_pod_spec_override(self, value):
|
||||
if not value:
|
||||
return value
|
||||
|
||||
# value should be empty for non-container groups
|
||||
if self.instance and not self.instance.is_container_group:
|
||||
raise serializers.ValidationError(_('pod_spec_override is only valid for container groups'))
|
||||
|
||||
pod_spec_override_json = None
|
||||
# detect whether the value is yaml or json; if yaml, convert to json
|
||||
try:
|
||||
# convert yaml to json
|
||||
pod_spec_override_json = yaml.safe_load(value)
|
||||
except yaml.YAMLError:
|
||||
try:
|
||||
pod_spec_override_json = json.loads(value)
|
||||
except json.JSONDecodeError:
|
||||
raise serializers.ValidationError(_('pod_spec_override must be valid yaml or json'))
|
||||
|
||||
# validate that the pod spec does not enable automountServiceAccountToken
|
||||
spec = pod_spec_override_json.get('spec', {})
|
||||
automount_service_account_token = spec.get('automountServiceAccountToken', False)
|
||||
|
||||
if automount_service_account_token:
|
||||
raise serializers.ValidationError(_('automountServiceAccountToken is not allowed for security reasons'))
|
||||
|
||||
return value
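For illustration, an override like the following parses cleanly as YAML but would be rejected by the check above because it re-enables `automountServiceAccountToken` (the image name is a placeholder):

```python
import yaml

pod_spec_override = """
spec:
  automountServiceAccountToken: true
  containers:
    - name: worker
      image: registry.example.com/awx-ee:latest
"""

parsed = yaml.safe_load(pod_spec_override)
# The serializer raises ValidationError when this evaluates to True:
assert parsed.get('spec', {}).get('automountServiceAccountToken', False)
```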
|
||||
|
||||
def validate(self, attrs):
|
||||
attrs = super(InstanceGroupSerializer, self).validate(attrs)
|
||||
|
||||
|
||||
@@ -3435,7 +3435,6 @@ class JobRelaunch(RetrieveAPIView):
|
||||
|
||||
copy_kwargs = {}
|
||||
retry_hosts = serializer.validated_data.get('hosts', None)
|
||||
job_type = serializer.validated_data.get('job_type', None)
|
||||
if retry_hosts and retry_hosts != 'all':
|
||||
if obj.status in ACTIVE_STATES:
|
||||
return Response(
|
||||
@@ -3456,8 +3455,6 @@ class JobRelaunch(RetrieveAPIView):
|
||||
)
|
||||
copy_kwargs['limit'] = ','.join(retry_host_list)
|
||||
|
||||
if job_type:
|
||||
copy_kwargs['job_type'] = job_type
|
||||
new_job = obj.copy_unified_job(**copy_kwargs)
|
||||
result = new_job.signal_start(**serializer.validated_data['credential_passwords'])
|
||||
if not result:
|
||||
|
||||
@@ -10,7 +10,7 @@ from awx.api.generics import APIView, Response
|
||||
from awx.api.permissions import AnalyticsPermission
|
||||
from awx.api.versioning import reverse
|
||||
from awx.main.utils import get_awx_version
|
||||
from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
|
||||
from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_ENDPOINT
|
||||
from rest_framework import status
|
||||
|
||||
from collections import OrderedDict
|
||||
@@ -205,7 +205,7 @@ class AnalyticsGenericView(APIView):
|
||||
try:
|
||||
rh_user = self._get_setting('REDHAT_USERNAME', None, ERROR_MISSING_USER)
|
||||
rh_password = self._get_setting('REDHAT_PASSWORD', None, ERROR_MISSING_PASSWORD)
|
||||
client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
|
||||
client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_ENDPOINT, ['api.console'])
|
||||
response = client.make_request(
|
||||
method,
|
||||
url,
|
||||
|
||||
@@ -2098,7 +2098,7 @@ class WorkflowJobAccess(BaseAccess):
|
||||
def filtered_queryset(self):
|
||||
return WorkflowJob.objects.filter(
|
||||
Q(unified_job_template__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
|
||||
| Q(organization__in=Organization.accessible_pk_qs(self.user, 'auditor_role'))
|
||||
| Q(organization__in=Organization.objects.filter(Q(admin_role__members=self.user)), is_bulk_job=True)
|
||||
)
|
||||
|
||||
def can_read(self, obj):
|
||||
@@ -2496,11 +2496,12 @@ class UnifiedJobAccess(BaseAccess):
|
||||
|
||||
def filtered_queryset(self):
|
||||
inv_pk_qs = Inventory._accessible_pk_qs(Inventory, self.user, 'read_role')
|
||||
org_auditor_qs = Organization.objects.filter(Q(admin_role__members=self.user) | Q(auditor_role__members=self.user))
|
||||
qs = self.model.objects.filter(
|
||||
Q(unified_job_template_id__in=UnifiedJobTemplate.accessible_pk_qs(self.user, 'read_role'))
|
||||
| Q(inventoryupdate__inventory_source__inventory__id__in=inv_pk_qs)
|
||||
| Q(adhoccommand__inventory__id__in=inv_pk_qs)
|
||||
| Q(organization__in=Organization.accessible_pk_qs(self.user, 'auditor_role'))
|
||||
| Q(organization__in=org_auditor_qs)
|
||||
)
|
||||
return qs
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ from ansible_base.lib.utils.db import advisory_lock
|
||||
from awx.main.models import Job
|
||||
from awx.main.access import access_registry
|
||||
from awx.main.utils import get_awx_http_client_headers, set_environ, datetime_hook
|
||||
from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_TOKEN_ENDPOINT
|
||||
from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_ENDPOINT
|
||||
|
||||
__all__ = ['register', 'gather', 'ship']
|
||||
|
||||
@@ -379,7 +379,7 @@ def ship(path):
|
||||
with set_environ(**settings.AWX_TASK_ENV):
|
||||
if rh_user and rh_password:
|
||||
try:
|
||||
client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_TOKEN_ENDPOINT, ['api.console'])
|
||||
client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_ENDPOINT, ['api.console'])
|
||||
response = client.make_request("POST", url, headers=s.headers, files=files, verify=settings.INSIGHTS_CERT_PATH, timeout=(31, 31))
|
||||
except requests.RequestException:
|
||||
logger.error("Automation Analytics API request failed, trying base auth method")
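Both hunks follow the same calling pattern: construct the client against the renamed `DEFAULT_OIDC_ENDPOINT` constant, attempt the token-backed request, and fall back on failure. A hedged sketch is below; the basic-auth fallback body is an assumption for illustration, since the actual fallback code is outside this hunk.

```python
import requests

from awx.main.utils.analytics_proxy import OIDCClient, DEFAULT_OIDC_ENDPOINT


def post_with_oidc_fallback(rh_user, rh_password, url, **kwargs):
    try:
        client = OIDCClient(rh_user, rh_password, DEFAULT_OIDC_ENDPOINT, ['api.console'])
        return client.make_request('POST', url, **kwargs)
    except requests.RequestException:
        # Assumed fallback for illustration only -- the real "base auth method"
        # referenced in the log message above is not shown in this diff.
        return requests.post(url, auth=(rh_user, rh_password), **kwargs)
```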
|
||||
|
||||
@@ -9,7 +9,6 @@ from prometheus_client.core import GaugeMetricFamily, HistogramMetricFamily
|
||||
from prometheus_client.registry import CollectorRegistry
|
||||
from django.conf import settings
|
||||
from django.http import HttpRequest
|
||||
import redis.exceptions
|
||||
from rest_framework.request import Request
|
||||
|
||||
from awx.main.consumers import emit_channel_notification
|
||||
@@ -291,12 +290,8 @@ class Metrics(MetricsNamespace):
|
||||
def send_metrics(self):
|
||||
# more than one thread could be calling this at the same time, so should
|
||||
# acquire redis lock before sending metrics
|
||||
try:
|
||||
lock = self.conn.lock(root_key + '-' + self._namespace + '_lock')
|
||||
if not lock.acquire(blocking=False):
|
||||
return
|
||||
except redis.exceptions.ConnectionError as exc:
|
||||
logger.warning(f'Connection error in send_metrics: {exc}')
|
||||
lock = self.conn.lock(root_key + '-' + self._namespace + '_lock')
|
||||
if not lock.acquire(blocking=False):
|
||||
return
|
||||
try:
|
||||
current_time = time.time()
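The lock handling above boils down to a standard non-blocking redis-py lock: only one worker ships metrics per interval, and losing the race is not an error. A minimal sketch, with an illustrative key name:

```python
import redis

conn = redis.Redis()
lock = conn.lock('subsystem_metrics_example_lock')

if lock.acquire(blocking=False):
    try:
        pass  # serialize and send the metrics here
    finally:
        lock.release()
else:
    # Another worker holds the lock; skip this cycle instead of blocking.
    pass
```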
|
||||
|
||||
@@ -88,10 +88,8 @@ class Scheduler:
|
||||
# internally times are all referenced relative to startup time, add grace period
|
||||
self.global_start = time.time() + 2.0
|
||||
|
||||
def get_and_mark_pending(self, reftime=None):
|
||||
if reftime is None:
|
||||
reftime = time.time() # mostly for tests
|
||||
relative_time = reftime - self.global_start
|
||||
def get_and_mark_pending(self):
|
||||
relative_time = time.time() - self.global_start
|
||||
to_run = []
|
||||
for job in self.jobs:
|
||||
if job.due_to_run(relative_time):
|
||||
@@ -100,10 +98,8 @@ class Scheduler:
|
||||
job.mark_run(relative_time)
|
||||
return to_run
|
||||
|
||||
def time_until_next_run(self, reftime=None):
|
||||
if reftime is None:
|
||||
reftime = time.time() # mostly for tests
|
||||
relative_time = reftime - self.global_start
|
||||
def time_until_next_run(self):
|
||||
relative_time = time.time() - self.global_start
|
||||
next_job = min(self.jobs, key=lambda j: j.next_run)
|
||||
delta = next_job.next_run - relative_time
|
||||
if delta <= 0.1:
|
||||
@@ -119,11 +115,10 @@ class Scheduler:
|
||||
def debug(self, *args, **kwargs):
|
||||
data = dict()
|
||||
data['title'] = 'Scheduler status'
|
||||
reftime = time.time()
|
||||
|
||||
now = datetime.fromtimestamp(reftime).strftime('%Y-%m-%d %H:%M:%S UTC')
|
||||
now = datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S UTC')
|
||||
start_time = datetime.fromtimestamp(self.global_start).strftime('%Y-%m-%d %H:%M:%S UTC')
|
||||
relative_time = reftime - self.global_start
|
||||
relative_time = time.time() - self.global_start
|
||||
data['started_time'] = start_time
|
||||
data['current_time'] = now
|
||||
data['current_time_relative'] = round(relative_time, 3)
|
||||
|
||||
@@ -15,7 +15,6 @@ from datetime import timedelta
|
||||
|
||||
from django import db
|
||||
from django.conf import settings
|
||||
import redis.exceptions
|
||||
|
||||
from ansible_base.lib.logging.runtime import log_excess_runtime
|
||||
|
||||
@@ -131,13 +130,10 @@ class AWXConsumerBase(object):
|
||||
@log_excess_runtime(logger, debug_cutoff=0.05, cutoff=0.2)
|
||||
def record_statistics(self):
|
||||
if time.time() - self.last_stats > 1: # buffer stat recording to once per second
|
||||
save_data = self.pool.debug()
|
||||
try:
|
||||
self.redis.set(f'awx_{self.name}_statistics', save_data)
|
||||
except redis.exceptions.ConnectionError as exc:
|
||||
logger.warning(f'Redis connection error saving {self.name} status data:\n{exc}\nmissed data:\n{save_data}')
|
||||
self.redis.set(f'awx_{self.name}_statistics', self.pool.debug())
|
||||
except Exception:
|
||||
logger.exception(f"Unknown redis error saving {self.name} status data:\nmissed data:\n{save_data}")
|
||||
logger.exception(f"encountered an error communicating with redis to store {self.name} statistics")
|
||||
self.last_stats = time.time()
|
||||
|
||||
def run(self, *args, **kwargs):
|
||||
@@ -193,10 +189,7 @@ class AWXConsumerPG(AWXConsumerBase):
|
||||
current_time = time.time()
|
||||
self.pool.produce_subsystem_metrics(self.subsystem_metrics)
|
||||
self.subsystem_metrics.set('dispatcher_availability', self.listen_cumulative_time / (current_time - self.last_metrics_gather))
|
||||
try:
|
||||
self.subsystem_metrics.pipe_execute()
|
||||
except redis.exceptions.ConnectionError as exc:
|
||||
logger.warning(f'Redis connection error saving dispatcher metrics, error:\n{exc}')
|
||||
self.subsystem_metrics.pipe_execute()
|
||||
self.listen_cumulative_time = 0.0
|
||||
self.last_metrics_gather = current_time
|
||||
|
||||
@@ -212,11 +205,7 @@ class AWXConsumerPG(AWXConsumerBase):
|
||||
except Exception as exc:
|
||||
logger.warning(f'Failed to save dispatcher statistics {exc}')
|
||||
|
||||
# Everything benchmarks to the same original time, so that skews due to
|
||||
# runtime of the actions, themselves, do not mess up scheduling expectations
|
||||
reftime = time.time()
|
||||
|
||||
for job in self.scheduler.get_and_mark_pending(reftime=reftime):
|
||||
for job in self.scheduler.get_and_mark_pending():
|
||||
if 'control' in job.data:
|
||||
try:
|
||||
job.data['control']()
|
||||
@@ -233,7 +222,7 @@ class AWXConsumerPG(AWXConsumerBase):
|
||||
|
||||
self.listen_start = time.time()
|
||||
|
||||
return self.scheduler.time_until_next_run(reftime=reftime)
|
||||
return self.scheduler.time_until_next_run()
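With the `reftime` parameter removed, each Scheduler call samples `time.time()` itself instead of receiving one shared reference timestamp from the caller, and the comment about benchmarking everything to the same original time goes away with it. The arithmetic itself is unchanged and reduces to float comparisons against `global_start`, as in this sketch (the 30-second job is illustrative):

```python
import time

global_start = time.time() + 2.0       # startup time plus grace period, as above
next_run = 30.0                         # illustrative job: due 30s after startup

relative_time = time.time() - global_start
due = next_run <= relative_time         # due_to_run()-style check
delta = next_run - relative_time        # time_until_next_run()-style result
```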
|
||||
|
||||
def run(self, *args, **kwargs):
|
||||
super(AWXConsumerPG, self).run(*args, **kwargs)
|
||||
|
||||
@@ -86,7 +86,6 @@ class CallbackBrokerWorker(BaseWorker):
|
||||
return os.getpid()
|
||||
|
||||
def read(self, queue):
|
||||
has_redis_error = False
|
||||
try:
|
||||
res = self.redis.blpop(self.queue_name, timeout=1)
|
||||
if res is None:
|
||||
@@ -96,21 +95,14 @@ class CallbackBrokerWorker(BaseWorker):
|
||||
self.subsystem_metrics.inc('callback_receiver_events_popped_redis', 1)
|
||||
self.subsystem_metrics.inc('callback_receiver_events_in_memory', 1)
|
||||
return json.loads(res[1])
|
||||
except redis.exceptions.ConnectionError as exc:
|
||||
# Low noise log, because very common and many workers will write this
|
||||
logger.error(f"redis connection error: {exc}")
|
||||
has_redis_error = True
|
||||
time.sleep(5)
|
||||
except redis.exceptions.RedisError:
|
||||
logger.exception("encountered an error communicating with redis")
|
||||
has_redis_error = True
|
||||
time.sleep(1)
|
||||
except (json.JSONDecodeError, KeyError):
|
||||
logger.exception("failed to decode JSON message from redis")
|
||||
finally:
|
||||
if not has_redis_error:
|
||||
self.record_statistics()
|
||||
self.record_read_metrics()
|
||||
self.record_statistics()
|
||||
self.record_read_metrics()
|
||||
|
||||
return {'event': 'FLUSH'}
|
||||
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
import redis
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
import redis.exceptions
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from awx.main.analytics.subsystem_metrics import CallbackReceiverMetricsServer
|
||||
|
||||
from awx.main.dispatch.control import Control
|
||||
from awx.main.dispatch.worker import AWXConsumerRedis, CallbackBrokerWorker
|
||||
|
||||
@@ -30,10 +27,7 @@ class Command(BaseCommand):
|
||||
return
|
||||
consumer = None
|
||||
|
||||
try:
|
||||
CallbackReceiverMetricsServer().start()
|
||||
except redis.exceptions.ConnectionError as exc:
|
||||
raise CommandError(f'Callback receiver could not connect to redis, error: {exc}')
|
||||
CallbackReceiverMetricsServer().start()
|
||||
|
||||
try:
|
||||
consumer = AWXConsumerRedis(
|
||||
|
||||
@@ -3,10 +3,8 @@
|
||||
import logging
|
||||
import yaml
|
||||
|
||||
import redis
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from awx.main.dispatch import get_task_queuename
|
||||
from awx.main.dispatch.control import Control
|
||||
@@ -65,10 +63,7 @@ class Command(BaseCommand):
|
||||
|
||||
consumer = None
|
||||
|
||||
try:
|
||||
DispatcherMetricsServer().start()
|
||||
except redis.exceptions.ConnectionError as exc:
|
||||
raise CommandError(f'Dispatcher could not connect to redis, error: {exc}')
|
||||
DispatcherMetricsServer().start()
|
||||
|
||||
try:
|
||||
queues = ['tower_broadcast_all', 'tower_settings_change', get_task_queuename()]
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
# Generated by Django 4.2.18 on 2025-03-17 16:10
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0196_indirect_managed_node_audit'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='inventory',
|
||||
name='opa_query_path',
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
default=None,
|
||||
help_text='The query path for the OPA policy to evaluate prior to job execution. The query path should be formatted as package/rule.',
|
||||
max_length=128,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='jobtemplate',
|
||||
name='opa_query_path',
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
default=None,
|
||||
help_text='The query path for the OPA policy to evaluate prior to job execution. The query path should be formatted as package/rule.',
|
||||
max_length=128,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='organization',
|
||||
name='opa_query_path',
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
default=None,
|
||||
help_text='The query path for the OPA policy to evaluate prior to job execution. The query path should be formatted as package/rule.',
|
||||
max_length=128,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -5,7 +5,7 @@ from django.db import migrations
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('main', '0197_add_opa_query_path'),
|
||||
('main', '0196_indirect_managed_node_audit'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
@@ -5,7 +5,7 @@ from django.db import migrations
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('main', '0198_delete_profile'),
|
||||
('main', '0197_delete_profile'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
@@ -6,7 +6,7 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0199_remove_sso_app_content'),
|
||||
('main', '0198_remove_sso_app_content'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
@@ -6,7 +6,7 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0200_alter_inventorysource_source_and_more'),
|
||||
('main', '0199_alter_inventorysource_source_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
@@ -8,7 +8,7 @@ from awx.main.migrations._create_system_jobs import delete_clear_tokens_sjt
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('main', '0201_alter_oauth2application_unique_together_and_more'),
|
||||
('main', '0200_alter_oauth2application_unique_together_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
@@ -550,10 +550,10 @@ class CredentialType(CommonModelNameNotUnique):
|
||||
# TODO: User "side-loaded" credential custom_injectors isn't supported
|
||||
ManagedCredentialType.registry[ns] = SimpleNamespace(namespace=ns, name=plugin.name, kind='external', inputs=plugin.inputs, backend=plugin.backend)
|
||||
|
||||
def inject_credential(self, credential, env, safe_env, args, private_data_dir, container_root=None):
|
||||
def inject_credential(self, credential, env, safe_env, args, private_data_dir):
|
||||
from awx_plugins.interfaces._temporary_private_inject_api import inject_credential
|
||||
|
||||
inject_credential(self, credential, env, safe_env, args, private_data_dir, container_root=container_root)
|
||||
inject_credential(self, credential, env, safe_env, args, private_data_dir)
|
||||
|
||||
|
||||
class CredentialTypeHelper:
|
||||
|
||||
@@ -565,6 +565,7 @@ class JobEvent(BasePlaybookEvent):
|
||||
summaries = dict()
|
||||
updated_hosts_list = list()
|
||||
for host in hostnames:
|
||||
updated_hosts_list.append(host.lower())
|
||||
host_id = host_map.get(host)
|
||||
if host_id not in existing_host_ids:
|
||||
host_id = None
|
||||
@@ -581,12 +582,6 @@ class JobEvent(BasePlaybookEvent):
|
||||
summary.failed = bool(summary.dark or summary.failures)
|
||||
summaries[(host_id, host)] = summary
|
||||
|
||||
# do not count dark / unreachable hosts as updated
|
||||
if not bool(summary.dark):
|
||||
updated_hosts_list.append(host.lower())
|
||||
else:
|
||||
logger.warning(f'host {host.lower()} is dark / unreachable, not marking it as updated')
|
||||
|
||||
JobHostSummary.objects.bulk_create(summaries.values())
|
||||
|
||||
# update the last_job_id and last_job_host_summary_id
|
||||
|
||||
@@ -43,7 +43,6 @@ from awx.main.models.mixins import (
|
||||
TaskManagerInventoryUpdateMixin,
|
||||
RelatedJobsMixin,
|
||||
CustomVirtualEnvMixin,
|
||||
OpaQueryPathMixin,
|
||||
)
|
||||
from awx.main.models.notifications import (
|
||||
NotificationTemplate,
|
||||
@@ -69,7 +68,7 @@ class InventoryConstructedInventoryMembership(models.Model):
|
||||
)
|
||||
|
||||
|
||||
class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin, OpaQueryPathMixin):
|
||||
class Inventory(CommonModelNameNotUnique, ResourceMixin, RelatedJobsMixin):
|
||||
"""
|
||||
an inventory source contains lists and hosts.
|
||||
"""
|
||||
|
||||
@@ -51,7 +51,6 @@ from awx.main.models.mixins import (
|
||||
RelatedJobsMixin,
|
||||
WebhookMixin,
|
||||
WebhookTemplateMixin,
|
||||
OpaQueryPathMixin,
|
||||
)
|
||||
from awx.main.constants import JOB_VARIABLE_PREFIXES
|
||||
|
||||
@@ -193,9 +192,7 @@ class JobOptions(BaseModel):
|
||||
return needed
|
||||
|
||||
|
||||
class JobTemplate(
|
||||
UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin, OpaQueryPathMixin
|
||||
):
|
||||
class JobTemplate(UnifiedJobTemplate, JobOptions, SurveyJobTemplateMixin, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, WebhookTemplateMixin):
|
||||
"""
|
||||
A job template is a reusable job definition for applying a project (with
|
||||
playbook) to an inventory source with a given credential.
|
||||
|
||||
@@ -42,7 +42,6 @@ __all__ = [
|
||||
'TaskManagerInventoryUpdateMixin',
|
||||
'ExecutionEnvironmentMixin',
|
||||
'CustomVirtualEnvMixin',
|
||||
'OpaQueryPathMixin',
|
||||
]
|
||||
|
||||
|
||||
@@ -693,16 +692,3 @@ class WebhookMixin(models.Model):
|
||||
logger.debug("Webhook status update sent.")
|
||||
else:
|
||||
logger.error("Posting webhook status failed, code: {}\n" "{}\nPayload sent: {}".format(response.status_code, response.text, json.dumps(data)))
|
||||
|
||||
|
||||
class OpaQueryPathMixin(models.Model):
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
opa_query_path = models.CharField(
|
||||
max_length=128,
|
||||
blank=True,
|
||||
null=True,
|
||||
default=None,
|
||||
help_text=_("The query path for the OPA policy to evaluate prior to job execution. The query path should be formatted as package/rule."),
|
||||
)
|
||||
|
||||
@@ -22,12 +22,12 @@ from awx.main.models.rbac import (
|
||||
ROLE_SINGLETON_SYSTEM_AUDITOR,
|
||||
)
|
||||
from awx.main.models.unified_jobs import UnifiedJob
|
||||
from awx.main.models.mixins import ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, OpaQueryPathMixin
|
||||
from awx.main.models.mixins import ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin
|
||||
|
||||
__all__ = ['Organization', 'Team', 'UserSessionMembership']
|
||||
|
||||
|
||||
class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin, OpaQueryPathMixin):
|
||||
class Organization(CommonModel, NotificationFieldsModel, ResourceMixin, CustomVirtualEnvMixin, RelatedJobsMixin):
|
||||
"""
|
||||
An organization is the basic unit of multi-tenancy divisions
|
||||
"""
|
||||
|
||||
@@ -53,8 +53,8 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
):
|
||||
super(GrafanaBackend, self).__init__(fail_silently=fail_silently)
|
||||
self.grafana_key = grafana_key
|
||||
self.dashboardId = int(dashboardId) if dashboardId is not None and panelId != "" else None
|
||||
self.panelId = int(panelId) if panelId is not None and panelId != "" else None
|
||||
self.dashboardId = int(dashboardId) if dashboardId is not None else None
|
||||
self.panelId = int(panelId) if panelId is not None else None
|
||||
self.annotation_tags = annotation_tags if annotation_tags is not None else []
|
||||
self.grafana_no_verify_ssl = grafana_no_verify_ssl
|
||||
self.isRegion = isRegion
|
||||
@@ -97,7 +97,6 @@ class GrafanaBackend(AWXBaseEmailBackend, CustomNotificationBase):
|
||||
r = requests.post(
|
||||
"{}/api/annotations".format(m.recipients()[0]), json=grafana_data, headers=grafana_headers, verify=(not self.grafana_no_verify_ssl)
|
||||
)
|
||||
|
||||
if r.status_code >= 400:
|
||||
logger.error(smart_str(_("Error sending notification grafana: {}").format(r.status_code)))
|
||||
if not self.fail_silently:
|
||||
|
||||
@@ -174,9 +174,6 @@ class PodManager(object):
|
||||
)
|
||||
pod_spec['spec']['containers'][0]['name'] = self.pod_name
|
||||
|
||||
# Prevent mounting of service account token in job pods in order to prevent job pods from accessing the k8s API via in cluster service account auth
|
||||
pod_spec['spec']['automountServiceAccountToken'] = False
|
||||
|
||||
return pod_spec
|
||||
|
||||
|
||||
|
||||
@@ -10,8 +10,6 @@ import time
|
||||
import sys
|
||||
import signal
|
||||
|
||||
import redis
|
||||
|
||||
# Django
|
||||
from django.db import transaction
|
||||
from django.utils.translation import gettext_lazy as _, gettext_noop
|
||||
@@ -122,8 +120,6 @@ class TaskBase:
|
||||
self.subsystem_metrics.pipe_execute()
|
||||
else:
|
||||
logger.debug(f"skipping recording {self.prefix} metrics, last recorded {time_last_recorded} seconds ago")
|
||||
except redis.exceptions.ConnectionError as exc:
|
||||
logger.warning(f"Redis connection error saving metrics for {self.prefix}, error: {exc}")
|
||||
except Exception:
|
||||
logger.exception(f"Error saving metrics for {self.prefix}")
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ import logging
|
||||
|
||||
# Django
|
||||
from django.conf import settings
|
||||
from django.db.models.query import QuerySet
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.timezone import now
|
||||
from django.db import OperationalError
|
||||
@@ -25,7 +26,6 @@ system_tracking_logger = logging.getLogger('awx.analytics.system_tracking')
|
||||
def start_fact_cache(hosts, destination, log_data, timeout=None, inventory_id=None):
|
||||
log_data['inventory_id'] = inventory_id
|
||||
log_data['written_ct'] = 0
|
||||
hosts_cached = list()
|
||||
try:
|
||||
os.makedirs(destination, mode=0o700)
|
||||
except FileExistsError:
|
||||
@@ -34,17 +34,17 @@ def start_fact_cache(hosts, destination, log_data, timeout=None, inventory_id=No
|
||||
if timeout is None:
|
||||
timeout = settings.ANSIBLE_FACT_CACHE_TIMEOUT
|
||||
|
||||
if isinstance(hosts, QuerySet):
|
||||
hosts = hosts.iterator()
|
||||
|
||||
last_filepath_written = None
|
||||
for host in hosts:
|
||||
hosts_cached.append(host)
|
||||
if not host.ansible_facts_modified or (timeout and host.ansible_facts_modified < now() - datetime.timedelta(seconds=timeout)):
|
||||
if (not host.ansible_facts_modified) or (timeout and host.ansible_facts_modified < now() - datetime.timedelta(seconds=timeout)):
|
||||
continue # facts are expired - do not write them
|
||||
|
||||
filepath = os.sep.join(map(str, [destination, host.name]))
|
||||
if not os.path.realpath(filepath).startswith(destination):
|
||||
system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
|
||||
continue
|
||||
|
||||
try:
|
||||
with codecs.open(filepath, 'w', encoding='utf-8') as f:
|
||||
os.chmod(f.name, 0o600)
|
||||
@@ -54,11 +54,10 @@ def start_fact_cache(hosts, destination, log_data, timeout=None, inventory_id=No
|
||||
except IOError:
|
||||
system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
|
||||
continue
|
||||
|
||||
# make note of the time we wrote the last file so we can check if any file changed later
|
||||
if last_filepath_written:
|
||||
return os.path.getmtime(last_filepath_written), hosts_cached
|
||||
|
||||
return None, hosts_cached
|
||||
return os.path.getmtime(last_filepath_written)
|
||||
return None
|
||||
|
||||
|
||||
def raw_update_hosts(host_list):
|
||||
@@ -89,14 +88,17 @@ def update_hosts(host_list, max_tries=5):
|
||||
msg='Inventory {inventory_id} host facts: updated {updated_ct}, cleared {cleared_ct}, unchanged {unmodified_ct}, took {delta:.3f} s',
|
||||
add_log_data=True,
|
||||
)
|
||||
def finish_fact_cache(hosts_cached, destination, facts_write_time, log_data, job_id=None, inventory_id=None):
|
||||
def finish_fact_cache(hosts, destination, facts_write_time, log_data, job_id=None, inventory_id=None):
|
||||
log_data['inventory_id'] = inventory_id
|
||||
log_data['updated_ct'] = 0
|
||||
log_data['unmodified_ct'] = 0
|
||||
log_data['cleared_ct'] = 0
|
||||
|
||||
if isinstance(hosts, QuerySet):
|
||||
hosts = hosts.iterator()
|
||||
|
||||
hosts_to_update = []
|
||||
for host in hosts_cached:
|
||||
for host in hosts:
|
||||
filepath = os.sep.join(map(str, [destination, host.name]))
|
||||
if not os.path.realpath(filepath).startswith(destination):
|
||||
system_tracking_logger.error('facts for host {} could not be cached'.format(smart_str(host.name)))
|
||||
@@ -128,7 +130,6 @@ def finish_fact_cache(hosts_cached, destination, facts_write_time, log_data, job
|
||||
log_data['unmodified_ct'] += 1
|
||||
else:
|
||||
# if the file goes missing, ansible removed it (likely via clear_facts)
|
||||
# if the file goes missing, but the host has not started facts, then we should not clear the facts
|
||||
host.ansible_facts = {}
|
||||
host.ansible_facts_modified = now()
|
||||
hosts_to_update.append(host)
|
||||
|
||||
@@ -45,35 +45,22 @@ def build_indirect_host_data(job: Job, job_event_queries: dict[str, dict[str, st
|
||||
facts_missing_logged = False
|
||||
unhashable_facts_logged = False
|
||||
|
||||
job_event_queries_fqcn = {}
|
||||
for query_k, query_v in job_event_queries.items():
|
||||
if len(parts := query_k.split('.')) != 3:
|
||||
logger.info(f"Skipping malformed query '{query_k}'. Expected to be of the form 'a.b.c'")
|
||||
continue
|
||||
if parts[2] != '*':
|
||||
continue
|
||||
job_event_queries_fqcn['.'.join(parts[0:2])] = query_v
|
||||
|
||||
for event in job.job_events.filter(event_data__isnull=False).iterator():
|
||||
if 'res' not in event.event_data:
|
||||
continue
|
||||
|
||||
if not (resolved_action := event.event_data.get('resolved_action', None)):
|
||||
if 'resolved_action' not in event.event_data or event.event_data['resolved_action'] not in job_event_queries.keys():
|
||||
continue
|
||||
|
||||
if len(resolved_action_parts := resolved_action.split('.')) != 3:
|
||||
logger.debug(f"Malformed invocation module name '{resolved_action}'. Expected to be of the form 'a.b.c'")
|
||||
continue
|
||||
resolved_action = event.event_data['resolved_action']
|
||||
|
||||
resolved_action_fqcn = '.'.join(resolved_action_parts[0:2])
|
||||
|
||||
# Match module invocation to collection queries
|
||||
# First match against fully qualified query names i.e. a.b.c
|
||||
# Then try and match against wildcard queries i.e. a.b.*
|
||||
if not (jq_str_for_event := job_event_queries.get(resolved_action, job_event_queries_fqcn.get(resolved_action_fqcn, {})).get('query')):
|
||||
# We expect a dict with a 'query' key for the resolved_action
|
||||
if 'query' not in job_event_queries[resolved_action]:
|
||||
continue
|
||||
|
||||
# Recall from cache, or process the jq expression, and loop over the jq results
|
||||
jq_str_for_event = job_event_queries[resolved_action]['query']
|
||||
|
||||
if jq_str_for_event not in compiled_jq_expressions:
|
||||
compiled_jq_expressions[resolved_action] = jq.compile(jq_str_for_event)
|
||||
compiled_jq = compiled_jq_expressions[resolved_action]
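The lookup above prefers an exact `namespace.collection.module` query and only then falls back to a wildcard `namespace.collection.*` entry. A small sketch of that matching, with placeholder query strings:

```python
job_event_queries = {
    'demo.collection.*': {'query': '.invocation.module_args.name'},
    'demo.collection.special': {'query': '.results[].item'},
}

# Pre-split wildcard queries down to their collection prefix ('a.b'), as above.
job_event_queries_fqcn = {
    '.'.join(key.split('.')[:2]): value
    for key, value in job_event_queries.items()
    if len(key.split('.')) == 3 and key.split('.')[2] == '*'
}

resolved_action = 'demo.collection.module'
fqcn = '.'.join(resolved_action.split('.')[:2])
jq_str = job_event_queries.get(resolved_action, job_event_queries_fqcn.get(fqcn, {})).get('query')
# jq_str == '.invocation.module_args.name', ready for jq.compile() as above
```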
|
||||
|
||||
@@ -522,13 +522,9 @@ class BaseTask(object):
|
||||
|
||||
credentials = self.build_credentials_list(self.instance)
|
||||
|
||||
container_root = None
|
||||
if settings.IS_K8S and isinstance(self.instance, ProjectUpdate):
|
||||
container_root = private_data_dir
|
||||
|
||||
for credential in credentials:
|
||||
if credential:
|
||||
credential.credential_type.inject_credential(credential, env, self.safe_cred_env, args, private_data_dir, container_root=container_root)
|
||||
credential.credential_type.inject_credential(credential, env, self.safe_cred_env, args, private_data_dir)
|
||||
|
||||
self.runner_callback.safe_env.update(self.safe_cred_env)
|
||||
|
||||
@@ -1091,7 +1087,7 @@ class RunJob(SourceControlMixin, BaseTask):
|
||||
# where ansible expects to find it
|
||||
if self.should_use_fact_cache():
|
||||
job.log_lifecycle("start_job_fact_cache")
|
||||
self.facts_write_time, self.hosts_with_facts_cached = start_fact_cache(
|
||||
self.facts_write_time = start_fact_cache(
|
||||
job.get_hosts_for_fact_cache(), os.path.join(private_data_dir, 'artifacts', str(job.id), 'fact_cache'), inventory_id=job.inventory_id
|
||||
)
|
||||
|
||||
@@ -1110,7 +1106,7 @@ class RunJob(SourceControlMixin, BaseTask):
|
||||
if self.should_use_fact_cache() and self.runner_callback.artifacts_processed:
|
||||
job.log_lifecycle("finish_job_fact_cache")
|
||||
finish_fact_cache(
|
||||
self.hosts_with_facts_cached,
|
||||
job.get_hosts_for_fact_cache(),
|
||||
os.path.join(private_data_dir, 'artifacts', str(job.id), 'fact_cache'),
|
||||
facts_write_time=self.facts_write_time,
|
||||
job_id=job.id,
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
---
|
||||
|
||||
- hosts: all
|
||||
gather_facts: false
|
||||
connection: local
|
||||
tasks:
|
||||
- meta: clear_facts
|
||||
@@ -1,17 +0,0 @@
|
||||
---
|
||||
|
||||
- hosts: all
|
||||
vars:
|
||||
extra_value: ""
|
||||
gather_facts: false
|
||||
connection: local
|
||||
tasks:
|
||||
- name: set a custom fact
|
||||
set_fact:
|
||||
foo: "bar{{ extra_value }}"
|
||||
bar:
|
||||
a:
|
||||
b:
|
||||
- "c"
|
||||
- "d"
|
||||
cacheable: true
|
||||
@@ -1,9 +0,0 @@
|
||||
---
|
||||
|
||||
- hosts: all
|
||||
gather_facts: false
|
||||
connection: local
|
||||
vars:
|
||||
msg: 'hello'
|
||||
tasks:
|
||||
- debug: var=msg
|
||||
@@ -210,39 +210,6 @@ def test_disallowed_http_update_methods(put, patch, post, inventory, project, ad
|
||||
patch(url=reverse('api:job_detail', kwargs={'pk': job.pk}), data={}, user=admin_user, expect=405)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
"job_type",
|
||||
[
|
||||
'run',
|
||||
'check',
|
||||
],
|
||||
)
|
||||
def test_job_relaunch_with_job_type(post, inventory, project, machine_credential, admin_user, job_type):
|
||||
# Create a job template
|
||||
jt = JobTemplate.objects.create(name='testjt', inventory=inventory, project=project)
|
||||
|
||||
# Set initial job type
|
||||
init_job_type = 'check' if job_type == 'run' else 'run'
|
||||
|
||||
# Create a job instance
|
||||
job = jt.create_unified_job(_eager_fields={'job_type': init_job_type})
|
||||
|
||||
# Perform the POST request
|
||||
url = reverse('api:job_relaunch', kwargs={'pk': job.pk})
|
||||
r = post(url=url, data={'job_type': job_type}, user=admin_user, expect=201)
|
||||
|
||||
# Assert that the response status code is 201 (Created)
|
||||
assert r.status_code == 201
|
||||
|
||||
# Retrieve the newly created job from the response
|
||||
new_job_id = r.data.get('id')
|
||||
new_job = Job.objects.get(id=new_job_id)
|
||||
|
||||
# Assert that the new job has the correct job type
|
||||
assert new_job.job_type == job_type
|
||||
|
||||
|
||||
class TestControllerNode:
|
||||
@pytest.fixture
|
||||
def project_update(self, project):
|
||||
|
||||
@@ -56,175 +56,6 @@ def test_user_create(post, admin):
|
||||
assert not response.data['is_system_auditor']
|
||||
|
||||
|
||||
# Disable local password checks to ensure that any ValidationError originates from the Django validators.
|
||||
@override_settings(
|
||||
LOCAL_PASSWORD_MIN_LENGTH=1,
|
||||
LOCAL_PASSWORD_MIN_DIGITS=0,
|
||||
LOCAL_PASSWORD_MIN_UPPER=0,
|
||||
LOCAL_PASSWORD_MIN_SPECIAL=0,
|
||||
)
|
||||
@pytest.mark.django_db
|
||||
def test_user_create_with_django_password_validation_basic(post, admin):
|
||||
"""Test if the Django password validators are applied correctly."""
|
||||
with override_settings(
|
||||
AUTH_PASSWORD_VALIDATORS=[
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
|
||||
'OPTIONS': {
|
||||
'min_length': 3,
|
||||
},
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
|
||||
},
|
||||
],
|
||||
):
|
||||
# This user should fail the UserAttrSimilarity, MinLength and CommonPassword validators.
|
||||
user_attrs = (
|
||||
{
|
||||
"password": "Password", # NOSONAR
|
||||
"username": "Password",
|
||||
"is_superuser": False,
|
||||
},
|
||||
)
|
||||
print(f"Create user with invalid password {user_attrs=}")
|
||||
response = post(reverse('api:user_list'), user_attrs, admin, middleware=SessionMiddleware(mock.Mock()))
|
||||
assert response.status_code == 400
|
||||
# This user should pass all Django validators.
|
||||
user_attrs = {
|
||||
"password": "r$TyKiOCb#ED", # NOSONAR
|
||||
"username": "TestUser",
|
||||
"is_superuser": False,
|
||||
}
|
||||
print(f"Create user with valid password {user_attrs=}")
|
||||
response = post(reverse('api:user_list'), user_attrs, admin, middleware=SessionMiddleware(mock.Mock()))
|
||||
assert response.status_code == 201
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"user_attrs,validators,expected_status_code",
|
||||
[
|
||||
# Test password similarity with username.
|
||||
(
|
||||
{"password": "TestUser1", "username": "TestUser1", "is_superuser": False}, # NOSONAR
|
||||
[
|
||||
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
|
||||
],
|
||||
400,
|
||||
),
|
||||
(
|
||||
{"password": "abc", "username": "TestUser1", "is_superuser": False}, # NOSONAR
|
||||
[
|
||||
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
|
||||
],
|
||||
201,
|
||||
),
|
||||
# Test password min length criterion.
|
||||
(
|
||||
{"password": "TooShort", "username": "TestUser1", "is_superuser": False}, # NOSONAR
|
||||
[
|
||||
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {'min_length': 9}},
|
||||
],
|
||||
400,
|
||||
),
|
||||
(
|
||||
{"password": "LongEnough", "username": "TestUser1", "is_superuser": False}, # NOSONAR
|
||||
[
|
||||
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {'min_length': 9}},
|
||||
],
|
||||
201,
|
||||
),
|
||||
# Test password is too common criterion.
|
||||
(
|
||||
{"password": "Password", "username": "TestUser1", "is_superuser": False}, # NOSONAR
|
||||
[
|
||||
{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
|
||||
],
|
||||
400,
|
||||
),
|
||||
(
|
||||
{"password": "aEArV$5Vkdw", "username": "TestUser1", "is_superuser": False}, # NOSONAR
|
||||
[
|
||||
{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
|
||||
],
|
||||
201,
|
||||
),
|
||||
# Test if password is only numeric.
|
||||
(
|
||||
{"password": "1234567890", "username": "TestUser1", "is_superuser": False}, # NOSONAR
|
||||
[
|
||||
{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
|
||||
],
|
||||
400,
|
||||
),
|
||||
(
|
||||
{"password": "abc4567890", "username": "TestUser1", "is_superuser": False}, # NOSONAR
|
||||
[
|
||||
{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
|
||||
],
|
||||
201,
|
||||
),
|
||||
],
|
||||
)
|
||||
# Disable local password checks to ensure that any ValidationError originates from the Django validators.
|
||||
@override_settings(
|
||||
LOCAL_PASSWORD_MIN_LENGTH=1,
|
||||
LOCAL_PASSWORD_MIN_DIGITS=0,
|
||||
LOCAL_PASSWORD_MIN_UPPER=0,
|
||||
LOCAL_PASSWORD_MIN_SPECIAL=0,
|
||||
)
|
||||
@pytest.mark.django_db
|
||||
def test_user_create_with_django_password_validation_ext(post, delete, admin, user_attrs, validators, expected_status_code):
|
||||
"""Test the functionality of the single Django password validators."""
|
||||
#
|
||||
default_parameters = {
|
||||
# Default values for input parameters which are None.
|
||||
"user_attrs": {
|
||||
"password": "r$TyKiOCb#ED", # NOSONAR
|
||||
"username": "DefaultUser",
|
||||
"is_superuser": False,
|
||||
},
|
||||
"validators": [
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
|
||||
'OPTIONS': {
|
||||
'min_length': 8,
|
||||
},
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
|
||||
},
|
||||
],
|
||||
}
|
||||
user_attrs = user_attrs if user_attrs is not None else default_parameters["user_attrs"]
|
||||
validators = validators if validators is not None else default_parameters["validators"]
|
||||
with override_settings(AUTH_PASSWORD_VALIDATORS=validators):
|
||||
response = post(reverse('api:user_list'), user_attrs, admin, middleware=SessionMiddleware(mock.Mock()))
|
||||
assert response.status_code == expected_status_code
|
||||
# Delete user if it was created succesfully.
|
||||
if response.status_code == 201:
|
||||
response = delete(reverse('api:user_detail', kwargs={'pk': response.data['id']}), admin, middleware=SessionMiddleware(mock.Mock()))
|
||||
assert response.status_code == 204
|
||||
else:
|
||||
# Catch the unexpected behavior that sometimes the user is written
|
||||
# into the database before the validation fails. This actually can
|
||||
# happen if UserSerializer.validate instantiates User(**attrs)!
|
||||
username = user_attrs['username']
|
||||
assert not User.objects.filter(username=username)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_fail_double_create_user(post, admin):
|
||||
response = post(reverse('api:user_list'), EXAMPLE_USER_DATA, admin, middleware=SessionMiddleware(mock.Mock()))
|
||||
@@ -251,10 +82,6 @@ def test_updating_own_password_refreshes_session(patch, admin):
|
||||
Updating your own password should refresh the session id.
|
||||
'''
|
||||
with mock.patch('awx.api.serializers.update_session_auth_hash') as update_session_auth_hash:
|
||||
# Attention: If the Django password validator `CommonPasswordValidator`
|
||||
# is active, this test case will fail because this validator raises on
|
||||
# password 'newpassword'. Consider changing the hard-coded password to
|
||||
# something uncommon.
|
||||
patch(reverse('api:user_detail', kwargs={'pk': admin.pk}), {'password': 'newpassword'}, admin, middleware=SessionMiddleware(mock.Mock()))
|
||||
assert update_session_auth_hash.called
|
||||
|
||||
|
||||
@@ -106,17 +106,6 @@ def test_compat_role_naming(setup_managed_roles, job_template, rando, alice):
|
||||
assert rd.created_by is None
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_organization_admin_has_audit(setup_managed_roles):
|
||||
"""This formalizes a behavior change from old to new RBAC system
|
||||
|
||||
Previously, the auditor_role did not list admin_role as a parent
|
||||
this made various queries hard to deal with, requiring adding 2 conditions
|
||||
The new system should explicitly list the auditor permission in org admin role"""
|
||||
rd = RoleDefinition.objects.get(name='Organization Admin')
|
||||
assert 'audit_organization' in rd.permissions.values_list('codename', flat=True)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_organization_level_permissions(organization, inventory, setup_managed_roles):
|
||||
u1 = User.objects.create(username='alice')
|
||||
|
||||
@@ -135,9 +135,8 @@ class TestEvents:
|
||||
|
||||
self._create_job_event(ok=dict((hostname, len(hostname)) for hostname in self.hostnames))
|
||||
|
||||
# Soft delete 6 of the 12 host metrics, every even host like "Host 2" or "Host 4"
|
||||
for host_name in self.hostnames[::2]:
|
||||
hm = HostMetric.objects.get(hostname=host_name.lower())
|
||||
# Soft delete 6 host metrics
|
||||
for hm in HostMetric.objects.filter(id__in=[1, 3, 5, 7, 9, 11]):
|
||||
hm.soft_delete()
|
||||
|
||||
assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=0) & Q(last_deleted__isnull=True))) == 6
|
||||
@@ -166,9 +165,7 @@ class TestEvents:
|
||||
skipped=dict((hostname, len(hostname)) for hostname in self.hostnames[10:12]),
|
||||
)
|
||||
assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=0) & Q(last_deleted__isnull=True))) == 6
|
||||
|
||||
# one of those 6 hosts is dark, so will not be counted
|
||||
assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=1) & Q(last_deleted__isnull=False))) == 5
|
||||
assert len(HostMetric.objects.filter(Q(deleted=False) & Q(deleted_counter=1) & Q(last_deleted__isnull=False))) == 6
|
||||
|
||||
def _generate_hosts(self, cnt, id_from=0):
|
||||
self.hostnames = [f'Host {i}' for i in range(id_from, id_from + cnt)]
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import pytest
|
||||
|
||||
from awx.main.access import (
|
||||
UnifiedJobAccess,
|
||||
WorkflowJobTemplateAccess,
|
||||
WorkflowJobTemplateNodeAccess,
|
||||
WorkflowJobAccess,
|
||||
@@ -246,30 +245,6 @@ class TestWorkflowJobAccess:
|
||||
inventory.use_role.members.add(rando)
|
||||
assert WorkflowJobAccess(rando).can_start(workflow_job)
|
||||
|
||||
@pytest.mark.parametrize('org_role', ['admin_role', 'auditor_role'])
|
||||
def test_workflow_job_org_audit_access(self, workflow_job_template, rando, org_role):
|
||||
assert workflow_job_template.organization # sanity
|
||||
workflow_job = workflow_job_template.create_unified_job()
|
||||
assert workflow_job.organization # sanity
|
||||
|
||||
assert not UnifiedJobAccess(rando).can_read(workflow_job)
|
||||
assert not WorkflowJobAccess(rando).can_read(workflow_job)
|
||||
assert workflow_job not in WorkflowJobAccess(rando).filtered_queryset()
|
||||
|
||||
org = workflow_job.organization
|
||||
role = getattr(org, org_role)
|
||||
role.members.add(rando)
|
||||
|
||||
assert UnifiedJobAccess(rando).can_read(workflow_job)
|
||||
assert WorkflowJobAccess(rando).can_read(workflow_job)
|
||||
assert workflow_job in WorkflowJobAccess(rando).filtered_queryset()
|
||||
|
||||
# Organization-level permissions should persist after deleting the WFJT
|
||||
workflow_job_template.delete()
|
||||
assert UnifiedJobAccess(rando).can_read(workflow_job)
|
||||
assert WorkflowJobAccess(rando).can_read(workflow_job)
|
||||
assert workflow_job in WorkflowJobAccess(rando).filtered_queryset()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestWFJTCopyAccess:
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import yaml
|
||||
from functools import reduce
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
@@ -21,46 +20,6 @@ from awx.main.models.indirect_managed_node_audit import IndirectManagedNodeAudit
|
||||
TEST_JQ = "{name: .name, canonical_facts: {host_name: .direct_host_name}, facts: {another_host_name: .direct_host_name}}"
|
||||
|
||||
|
||||
class Query(dict):
|
||||
def __init__(self, resolved_action: str, query_jq: dict):
|
||||
self._resolved_action = resolved_action.split('.')
|
||||
self._collection_ns, self._collection_name, self._module_name = self._resolved_action
|
||||
|
||||
super().__init__({self.resolve_key: {'query': query_jq}})
|
||||
|
||||
def get_fqcn(self):
|
||||
return f'{self._collection_ns}.{self._collection_name}'
|
||||
|
||||
@property
|
||||
def resolve_value(self):
|
||||
return self[self.resolve_key]
|
||||
|
||||
@property
|
||||
def resolve_key(self):
|
||||
return f'{self.get_fqcn()}.{self._module_name}'
|
||||
|
||||
def resolve(self, module_name=None):
|
||||
return {f'{self.get_fqcn()}.{module_name or self._module_name}': self.resolve_value}
|
||||
|
||||
def create_event_query(self, module_name=None):
|
||||
if (module_name := module_name or self._module_name) == '*':
|
||||
raise ValueError('Invalid module name *')
|
||||
return self.create_event_queries([module_name])
|
||||
|
||||
def create_event_queries(self, module_names):
|
||||
queries = {}
|
||||
for name in module_names:
|
||||
queries |= self.resolve(name)
|
||||
return EventQuery.objects.create(
|
||||
fqcn=self.get_fqcn(),
|
||||
collection_version='1.0.1',
|
||||
event_query=yaml.dump(queries, default_flow_style=False),
|
||||
)
|
||||
|
||||
def create_registered_event(self, job, module_name):
|
||||
job.job_events.create(event_data={'resolved_action': f'{self.get_fqcn()}.{module_name}', 'res': {'direct_host_name': 'foo_host', 'name': 'vm-foo'}})
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def bare_job(job_factory):
|
||||
job = job_factory()
|
||||
@@ -80,6 +39,11 @@ def job_with_counted_event(bare_job):
|
||||
return bare_job
|
||||
|
||||
|
||||
def create_event_query(fqcn='demo.query'):
|
||||
module_name = f'{fqcn}.example'
|
||||
return EventQuery.objects.create(fqcn=fqcn, collection_version='1.0.1', event_query=yaml.dump({module_name: {'query': TEST_JQ}}, default_flow_style=False))
|
||||
|
||||
|
||||
def create_audit_record(name, job, organization, created=now()):
|
||||
record = IndirectManagedNodeAudit.objects.create(name=name, job=job, organization=organization)
|
||||
record.created = created
|
||||
@@ -90,7 +54,7 @@ def create_audit_record(name, job, organization, created=now()):
|
||||
@pytest.fixture
|
||||
def event_query():
|
||||
"This is ordinarily created by the artifacts callback"
|
||||
return Query('demo.query.example', TEST_JQ).create_event_query()
|
||||
return create_event_query()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@@ -108,211 +72,105 @@ def new_audit_record(bare_job, organization):
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
'queries,expected_matches',
|
||||
(
|
||||
pytest.param(
|
||||
[],
|
||||
0,
|
||||
id='no_results',
|
||||
),
|
||||
pytest.param(
|
||||
[Query('demo.query.example', TEST_JQ)],
|
||||
1,
|
||||
id='fully_qualified',
|
||||
),
|
||||
pytest.param(
|
||||
[Query('demo.query.*', TEST_JQ)],
|
||||
1,
|
||||
id='wildcard',
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
Query('demo.query.*', TEST_JQ),
|
||||
Query('demo.query.example', TEST_JQ),
|
||||
],
|
||||
1,
|
||||
id='wildcard_and_fully_qualified',
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
Query('demo.query.*', TEST_JQ),
|
||||
Query('demo.query.example', {}),
|
||||
],
|
||||
0,
|
||||
id='wildcard_and_fully_qualified',
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
Query('demo.query.example', {}),
|
||||
Query('demo.query.*', TEST_JQ),
|
||||
],
|
||||
0,
|
||||
id='ordering_should_not_matter',
|
||||
),
|
||||
),
|
||||
)
|
||||
def test_build_indirect_host_data(job_with_counted_event, queries: Query, expected_matches: int):
|
||||
data = build_indirect_host_data(job_with_counted_event, {k: v for d in queries for k, v in d.items()})
|
||||
assert len(data) == expected_matches
|
||||
|
||||
|
||||
@mock.patch('awx.main.tasks.host_indirect.logger.debug')
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
'task_name',
|
||||
(
|
||||
pytest.param(
|
||||
'demo.query',
|
||||
id='no_results',
|
||||
),
|
||||
pytest.param(
|
||||
'demo',
|
||||
id='no_results',
|
||||
),
|
||||
pytest.param(
|
||||
'a.b.c.d',
|
||||
id='no_results',
|
||||
),
|
||||
),
|
||||
)
|
||||
def test_build_indirect_host_data_malformed_module_name(mock_logger_debug, bare_job, task_name: str):
|
||||
create_registered_event(bare_job, task_name)
|
||||
assert build_indirect_host_data(bare_job, Query('demo.query.example', TEST_JQ)) == []
|
||||
mock_logger_debug.assert_called_once_with(f"Malformed invocation module name '{task_name}'. Expected to be of the form 'a.b.c'")
|
||||
|
||||
|
||||
@mock.patch('awx.main.tasks.host_indirect.logger.info')
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
'query',
|
||||
(
|
||||
pytest.param(
|
||||
'demo.query',
|
||||
id='no_results',
|
||||
),
|
||||
pytest.param(
|
||||
'demo',
|
||||
id='no_results',
|
||||
),
|
||||
pytest.param(
|
||||
'a.b.c.d',
|
||||
id='no_results',
|
||||
),
|
||||
),
|
||||
)
|
||||
def test_build_indirect_host_data_malformed_query(mock_logger_info, job_with_counted_event, query: str):
|
||||
assert build_indirect_host_data(job_with_counted_event, {query: {'query': TEST_JQ}}) == []
|
||||
mock_logger_info.assert_called_once_with(f"Skiping malformed query '{query}'. Expected to be of the form 'a.b.c'")
|
||||
def test_build_with_no_results(bare_job):
|
||||
# never filled in events, should do nothing
|
||||
assert build_indirect_host_data(bare_job, {}) == []
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
'query',
|
||||
(
|
||||
pytest.param(
|
||||
Query('demo.query.example', TEST_JQ),
|
||||
id='fully_qualified',
|
||||
),
|
||||
pytest.param(
|
||||
Query('demo.query.*', TEST_JQ),
|
||||
id='wildcard',
|
||||
),
|
||||
),
|
||||
)
|
||||
def test_fetch_job_event_query(bare_job, query: Query):
|
||||
query.create_event_query(module_name='example')
|
||||
assert fetch_job_event_query(bare_job) == query.resolve('example')
|
||||
def test_collect_an_event(job_with_counted_event):
|
||||
records = build_indirect_host_data(job_with_counted_event, {'demo.query.example': {'query': TEST_JQ}})
|
||||
assert len(records) == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
'queries',
|
||||
(
|
||||
[
|
||||
Query('demo.query.example', TEST_JQ),
|
||||
Query('demo2.query.example', TEST_JQ),
|
||||
],
|
||||
[
|
||||
Query('demo.query.*', TEST_JQ),
|
||||
Query('demo2.query.example', TEST_JQ),
|
||||
],
|
||||
),
|
||||
)
|
||||
def test_fetch_multiple_job_event_query(bare_job, queries: list[Query]):
|
||||
for q in queries:
|
||||
q.create_event_query(module_name='example')
|
||||
assert fetch_job_event_query(bare_job) == reduce(lambda acc, q: acc | q.resolve('example'), queries, {})
|
||||
def test_fetch_job_event_query(bare_job, event_query):
|
||||
assert fetch_job_event_query(bare_job) == {'demo.query.example': {'query': TEST_JQ}}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
('state',),
|
||||
(
|
||||
pytest.param(
|
||||
[
|
||||
(
|
||||
Query('demo.query.example', TEST_JQ),
|
||||
['example'],
|
||||
),
|
||||
],
|
||||
id='fully_qualified',
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
(
|
||||
Query('demo.query.example', TEST_JQ),
|
||||
['example'] * 3,
|
||||
),
|
||||
],
|
||||
id='multiple_events_same_module_same_host',
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
(
|
||||
Query('demo.query.example', TEST_JQ),
|
||||
['example'],
|
||||
),
|
||||
(
|
||||
Query('demo2.query.example', TEST_JQ),
|
||||
['example'],
|
||||
),
|
||||
],
|
||||
id='multiple_modules',
|
||||
),
|
||||
pytest.param(
|
||||
[
|
||||
(
|
||||
Query('demo.query.*', TEST_JQ),
|
||||
['example', 'example2'],
|
||||
),
|
||||
],
|
||||
id='multiple_modules_same_collection',
|
||||
),
|
||||
),
|
||||
)
|
||||
def test_save_indirect_host_entries(bare_job, state):
|
||||
all_task_names = []
|
||||
for entry in state:
|
||||
query, module_names = entry
|
||||
all_task_names.extend([f'{query.get_fqcn()}.{module_name}' for module_name in module_names])
|
||||
query.create_event_queries(module_names)
|
||||
[query.create_registered_event(bare_job, n) for n in module_names]
|
||||
def test_fetch_multiple_job_event_query(bare_job):
|
||||
create_event_query(fqcn='demo.query')
|
||||
create_event_query(fqcn='demo2.query')
|
||||
assert fetch_job_event_query(bare_job) == {'demo.query.example': {'query': TEST_JQ}, 'demo2.query.example': {'query': TEST_JQ}}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_save_indirect_host_entries(job_with_counted_event, event_query):
|
||||
assert job_with_counted_event.event_queries_processed is False
|
||||
save_indirect_host_entries(job_with_counted_event.id)
|
||||
job_with_counted_event.refresh_from_db()
|
||||
assert job_with_counted_event.event_queries_processed is True
|
||||
assert IndirectManagedNodeAudit.objects.filter(job=job_with_counted_event).count() == 1
|
||||
host_audit = IndirectManagedNodeAudit.objects.filter(job=job_with_counted_event).first()
|
||||
assert host_audit.count == 1
|
||||
assert host_audit.canonical_facts == {'host_name': 'foo_host'}
|
||||
assert host_audit.facts == {'another_host_name': 'foo_host'}
|
||||
assert host_audit.organization == job_with_counted_event.organization
|
||||
assert host_audit.name == 'vm-foo'
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_multiple_events_same_module_same_host(bare_job, event_query):
|
||||
"This tests that the count field gives correct answers"
|
||||
create_registered_event(bare_job)
|
||||
create_registered_event(bare_job)
|
||||
create_registered_event(bare_job)
|
||||
|
||||
save_indirect_host_entries(bare_job.id)
|
||||
bare_job.refresh_from_db()
|
||||
|
||||
assert bare_job.event_queries_processed is True
|
||||
|
||||
assert IndirectManagedNodeAudit.objects.filter(job=bare_job).count() == 1
|
||||
host_audit = IndirectManagedNodeAudit.objects.filter(job=bare_job).first()
|
||||
|
||||
assert host_audit.count == len(all_task_names)
|
||||
assert host_audit.canonical_facts == {'host_name': 'foo_host'}
|
||||
assert host_audit.facts == {'another_host_name': 'foo_host'}
|
||||
assert host_audit.organization == bare_job.organization
|
||||
assert host_audit.name == 'vm-foo'
|
||||
assert set(host_audit.events) == set(all_task_names)
|
||||
assert host_audit.count == 3
|
||||
assert host_audit.events == ['demo.query.example']
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_multiple_registered_modules(bare_job):
|
||||
"This tests that the events will list multiple modules if more than 1 module from different collections is registered and used"
|
||||
create_registered_event(bare_job, task_name='demo.query.example')
|
||||
create_registered_event(bare_job, task_name='demo2.query.example')
|
||||
|
||||
# These take the place of using the event_query fixture
|
||||
create_event_query(fqcn='demo.query')
|
||||
create_event_query(fqcn='demo2.query')
|
||||
|
||||
save_indirect_host_entries(bare_job.id)
|
||||
|
||||
assert IndirectManagedNodeAudit.objects.filter(job=bare_job).count() == 1
|
||||
host_audit = IndirectManagedNodeAudit.objects.filter(job=bare_job).first()
|
||||
|
||||
assert host_audit.count == 2
|
||||
assert set(host_audit.events) == {'demo.query.example', 'demo2.query.example'}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_multiple_registered_modules_same_collection(bare_job):
|
||||
"This tests that the events will list multiple modules if more than 1 module in same collection is registered and used"
|
||||
create_registered_event(bare_job, task_name='demo.query.example')
|
||||
create_registered_event(bare_job, task_name='demo.query.example2')
|
||||
|
||||
# Takes place of event_query fixture, doing manually here
|
||||
EventQuery.objects.create(
|
||||
fqcn='demo.query',
|
||||
collection_version='1.0.1',
|
||||
event_query=yaml.dump(
|
||||
{
|
||||
'demo.query.example': {'query': TEST_JQ},
|
||||
'demo.query.example2': {'query': TEST_JQ},
|
||||
},
|
||||
default_flow_style=False,
|
||||
),
|
||||
)
|
||||
|
||||
save_indirect_host_entries(bare_job.id)
|
||||
|
||||
assert IndirectManagedNodeAudit.objects.filter(job=bare_job).count() == 1
|
||||
host_audit = IndirectManagedNodeAudit.objects.filter(job=bare_job).first()
|
||||
|
||||
assert host_audit.count == 2
|
||||
assert set(host_audit.events) == {'demo.query.example', 'demo.query.example2'}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
||||
@@ -129,7 +129,7 @@ def podman_image_generator():
|
||||
|
||||
@pytest.fixture
|
||||
def run_job_from_playbook(default_org, demo_inv, post, admin):
|
||||
def _rf(test_name, playbook, local_path=None, scm_url=None, jt_params=None):
|
||||
def _rf(test_name, playbook, local_path=None, scm_url=None):
|
||||
project_name = f'{test_name} project'
|
||||
jt_name = f'{test_name} JT: {playbook}'
|
||||
|
||||
@@ -166,13 +166,9 @@ def run_job_from_playbook(default_org, demo_inv, post, admin):
|
||||
assert proj.get_project_path()
|
||||
assert playbook in proj.playbooks
|
||||
|
||||
jt_data = {'name': jt_name, 'project': proj.id, 'playbook': playbook, 'inventory': demo_inv.id}
|
||||
if jt_params:
|
||||
jt_data.update(jt_params)
|
||||
|
||||
result = post(
|
||||
reverse('api:job_template_list'),
|
||||
jt_data,
|
||||
{'name': jt_name, 'project': proj.id, 'playbook': playbook, 'inventory': demo_inv.id},
|
||||
admin,
|
||||
expect=201,
|
||||
)
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
import pytest
|
||||
|
||||
from awx.main.tests.live.tests.conftest import wait_for_events
|
||||
|
||||
from awx.main.models import Job, Inventory
|
||||
|
||||
|
||||
def assert_facts_populated(name):
|
||||
job = Job.objects.filter(name__icontains=name).order_by('-created').first()
|
||||
assert job is not None
|
||||
wait_for_events(job)
|
||||
|
||||
inventory = job.inventory
|
||||
assert inventory.hosts.count() > 0 # sanity
|
||||
for host in inventory.hosts.all():
|
||||
assert host.ansible_facts
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def general_facts_test(live_tmp_folder, run_job_from_playbook):
|
||||
def _rf(slug, jt_params):
|
||||
jt_params['use_fact_cache'] = True
|
||||
standard_kwargs = dict(scm_url=f'file://{live_tmp_folder}/facts', jt_params=jt_params)
|
||||
|
||||
# GATHER FACTS
|
||||
name = f'test_gather_ansible_facts_{slug}'
|
||||
run_job_from_playbook(name, 'gather.yml', **standard_kwargs)
|
||||
assert_facts_populated(name)
|
||||
|
||||
# KEEP FACTS
|
||||
name = f'test_clear_ansible_facts_{slug}'
|
||||
run_job_from_playbook(name, 'no_op.yml', **standard_kwargs)
|
||||
assert_facts_populated(name)
|
||||
|
||||
# CLEAR FACTS
|
||||
name = f'test_clear_ansible_facts_{slug}'
|
||||
run_job_from_playbook(name, 'clear.yml', **standard_kwargs)
|
||||
job = Job.objects.filter(name__icontains=name).order_by('-created').first()
|
||||
|
||||
assert job is not None
|
||||
wait_for_events(job)
|
||||
inventory = job.inventory
|
||||
assert inventory.hosts.count() > 0 # sanity
|
||||
for host in inventory.hosts.all():
|
||||
assert not host.ansible_facts
|
||||
|
||||
return _rf
|
||||
|
||||
|
||||
def test_basic_ansible_facts(general_facts_test):
|
||||
general_facts_test('basic', {})
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sliced_inventory():
|
||||
inv, _ = Inventory.objects.get_or_create(name='inventory-to-slice')
|
||||
if not inv.hosts.exists():
|
||||
for i in range(10):
|
||||
inv.hosts.create(name=f'sliced_host_{i}')
|
||||
return inv
|
||||
|
||||
|
||||
def test_slicing_with_facts(general_facts_test, sliced_inventory):
|
||||
general_facts_test('sliced', {'job_slice_count': 3, 'inventory': sliced_inventory.id})
|
||||
@@ -50,14 +50,13 @@ def test_indirect_host_counting(live_tmp_folder, run_job_from_playbook):
|
||||
job.refresh_from_db()
|
||||
if job.event_queries_processed is False:
|
||||
save_indirect_host_entries.delay(job.id, wait_for_events=False)
|
||||
|
||||
# event_queries_processed only assures the task has started, it might take a minor amount of time to finish
|
||||
for _ in range(10):
|
||||
if IndirectManagedNodeAudit.objects.filter(job=job).exists():
|
||||
break
|
||||
time.sleep(0.2)
|
||||
else:
|
||||
raise RuntimeError(f'No IndirectManagedNodeAudit records ever populated for job_id={job.id}')
|
||||
# This will poll for the background task to finish
|
||||
for _ in range(10):
|
||||
if IndirectManagedNodeAudit.objects.filter(job=job).exists():
|
||||
break
|
||||
time.sleep(0.2)
|
||||
else:
|
||||
raise RuntimeError(f'No IndirectManagedNodeAudit records ever populated for job_id={job.id}')
|
||||
|
||||
assert IndirectManagedNodeAudit.objects.filter(job=job).count() == 1
|
||||
host_audit = IndirectManagedNodeAudit.objects.filter(job=job).first()
|
||||
|
||||
@@ -34,7 +34,7 @@ def hosts(ref_time):
|
||||
|
||||
def test_start_job_fact_cache(hosts, tmpdir):
|
||||
fact_cache = os.path.join(tmpdir, 'facts')
|
||||
last_modified, _ = start_fact_cache(hosts, fact_cache, timeout=0)
|
||||
last_modified = start_fact_cache(hosts, fact_cache, timeout=0)
|
||||
|
||||
for host in hosts:
|
||||
filepath = os.path.join(fact_cache, host.name)
|
||||
@@ -61,7 +61,7 @@ def test_fact_cache_with_invalid_path_traversal(tmpdir):
|
||||
def test_start_job_fact_cache_past_timeout(hosts, tmpdir):
|
||||
fact_cache = os.path.join(tmpdir, 'facts')
|
||||
# the hosts fixture was modified 5s ago, which is more than 2s
|
||||
last_modified, _ = start_fact_cache(hosts, fact_cache, timeout=2)
|
||||
last_modified = start_fact_cache(hosts, fact_cache, timeout=2)
|
||||
assert last_modified is None
|
||||
|
||||
for host in hosts:
|
||||
@@ -71,7 +71,7 @@ def test_start_job_fact_cache_past_timeout(hosts, tmpdir):
|
||||
def test_start_job_fact_cache_within_timeout(hosts, tmpdir):
|
||||
fact_cache = os.path.join(tmpdir, 'facts')
|
||||
# the hosts fixture was modified 5s ago, which is less than 7s
|
||||
last_modified, _ = start_fact_cache(hosts, fact_cache, timeout=7)
|
||||
last_modified = start_fact_cache(hosts, fact_cache, timeout=7)
|
||||
assert last_modified
|
||||
|
||||
for host in hosts:
|
||||
@@ -80,7 +80,7 @@ def test_start_job_fact_cache_within_timeout(hosts, tmpdir):
|
||||
|
||||
def test_finish_job_fact_cache_with_existing_data(hosts, mocker, tmpdir, ref_time):
|
||||
fact_cache = os.path.join(tmpdir, 'facts')
|
||||
last_modified, _ = start_fact_cache(hosts, fact_cache, timeout=0)
|
||||
last_modified = start_fact_cache(hosts, fact_cache, timeout=0)
|
||||
|
||||
bulk_update = mocker.patch('django.db.models.query.QuerySet.bulk_update')
|
||||
|
||||
@@ -108,7 +108,7 @@ def test_finish_job_fact_cache_with_existing_data(hosts, mocker, tmpdir, ref_tim
|
||||
|
||||
def test_finish_job_fact_cache_with_bad_data(hosts, mocker, tmpdir):
|
||||
fact_cache = os.path.join(tmpdir, 'facts')
|
||||
last_modified, _ = start_fact_cache(hosts, fact_cache, timeout=0)
|
||||
last_modified = start_fact_cache(hosts, fact_cache, timeout=0)
|
||||
|
||||
bulk_update = mocker.patch('django.db.models.query.QuerySet.bulk_update')
|
||||
|
||||
@@ -127,7 +127,7 @@ def test_finish_job_fact_cache_with_bad_data(hosts, mocker, tmpdir):
|
||||
|
||||
def test_finish_job_fact_cache_clear(hosts, mocker, ref_time, tmpdir):
|
||||
fact_cache = os.path.join(tmpdir, 'facts')
|
||||
last_modified, _ = start_fact_cache(hosts, fact_cache, timeout=0)
|
||||
last_modified = start_fact_cache(hosts, fact_cache, timeout=0)
|
||||
|
||||
bulk_update = mocker.patch('django.db.models.query.QuerySet.bulk_update')
|
||||
|
||||
|
||||
@@ -1,162 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import tempfile
|
||||
import shutil
|
||||
|
||||
import pytest
|
||||
from unittest import mock
|
||||
|
||||
from awx.main.models import (
|
||||
Inventory,
|
||||
Host,
|
||||
)
|
||||
|
||||
from django.utils.timezone import now
|
||||
from django.db.models.query import QuerySet
|
||||
|
||||
from awx.main.models import (
|
||||
Job,
|
||||
Organization,
|
||||
Project,
|
||||
)
|
||||
from awx.main.tasks import jobs
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def private_data_dir():
|
||||
private_data = tempfile.mkdtemp(prefix='awx_')
|
||||
for subfolder in ('inventory', 'env'):
|
||||
runner_subfolder = os.path.join(private_data, subfolder)
|
||||
os.makedirs(runner_subfolder, exist_ok=True)
|
||||
yield private_data
|
||||
shutil.rmtree(private_data, True)
|
||||
|
||||
|
||||
@mock.patch('awx.main.tasks.facts.update_hosts')
|
||||
@mock.patch('awx.main.tasks.facts.settings')
|
||||
@mock.patch('awx.main.tasks.jobs.create_partition', return_value=True)
|
||||
def test_pre_post_run_hook_facts(mock_create_partition, mock_facts_settings, update_hosts, private_data_dir, execution_environment):
|
||||
# creates inventory_object with two hosts
|
||||
inventory = Inventory(pk=1)
|
||||
mock_inventory = mock.MagicMock(spec=Inventory, wraps=inventory)
|
||||
mock_inventory._state = mock.MagicMock()
|
||||
qs_hosts = QuerySet()
|
||||
hosts = [
|
||||
Host(id=1, name='host1', ansible_facts={"a": 1, "b": 2}, ansible_facts_modified=now(), inventory=mock_inventory),
|
||||
Host(id=2, name='host2', ansible_facts={"a": 1, "b": 2}, ansible_facts_modified=now(), inventory=mock_inventory),
|
||||
]
|
||||
qs_hosts._result_cache = hosts
|
||||
qs_hosts.only = mock.MagicMock(return_value=hosts)
|
||||
mock_inventory.hosts = qs_hosts
|
||||
assert mock_inventory.hosts.count() == 2
|
||||
|
||||
# creates job object with fact_cache enabled
|
||||
org = Organization(pk=1)
|
||||
proj = Project(pk=1, organization=org)
|
||||
job = mock.MagicMock(spec=Job, use_fact_cache=True, project=proj, organization=org, job_slice_number=1, job_slice_count=1)
|
||||
job.inventory = mock_inventory
|
||||
job.execution_environment = execution_environment
|
||||
job.get_hosts_for_fact_cache = Job.get_hosts_for_fact_cache.__get__(job) # to run original method
|
||||
job.job_env.get = mock.MagicMock(return_value=private_data_dir)
|
||||
|
||||
# creates the task object with job object as instance
|
||||
mock_facts_settings.ANSIBLE_FACT_CACHE_TIMEOUT = False # defines timeout to false
|
||||
task = jobs.RunJob()
|
||||
task.instance = job
|
||||
task.update_model = mock.Mock(return_value=job)
|
||||
task.model.objects.get = mock.Mock(return_value=job)
|
||||
|
||||
# run pre_run_hook
|
||||
task.facts_write_time = task.pre_run_hook(job, private_data_dir)
|
||||
|
||||
# updates inventory with one more host
|
||||
hosts.append(Host(id=3, name='host3', ansible_facts={"added": True}, ansible_facts_modified=now(), inventory=mock_inventory))
|
||||
assert mock_inventory.hosts.count() == 3
|
||||
|
||||
# run post_run_hook
|
||||
task.runner_callback.artifacts_processed = mock.MagicMock(return_value=True)
|
||||
|
||||
task.post_run_hook(job, "success")
|
||||
assert mock_inventory.hosts[2].ansible_facts == {"added": True}
|
||||
|
||||
|
||||
@mock.patch('awx.main.tasks.facts.update_hosts')
|
||||
@mock.patch('awx.main.tasks.facts.settings')
|
||||
@mock.patch('awx.main.tasks.jobs.create_partition', return_value=True)
|
||||
def test_pre_post_run_hook_facts_deleted_sliced(mock_create_partition, mock_facts_settings, update_hosts, private_data_dir, execution_environment):
|
||||
# creates inventory_object with two hosts
|
||||
inventory = Inventory(pk=1)
|
||||
mock_inventory = mock.MagicMock(spec=Inventory, wraps=inventory)
|
||||
mock_inventory._state = mock.MagicMock()
|
||||
qs_hosts = QuerySet()
|
||||
hosts = [Host(id=num, name=f'host{num}', ansible_facts={"a": 1, "b": 2}, ansible_facts_modified=now(), inventory=mock_inventory) for num in range(999)]
|
||||
|
||||
qs_hosts._result_cache = hosts
|
||||
qs_hosts.only = mock.MagicMock(return_value=hosts)
|
||||
mock_inventory.hosts = qs_hosts
|
||||
assert mock_inventory.hosts.count() == 999
|
||||
|
||||
# creates job object with fact_cache enabled
|
||||
org = Organization(pk=1)
|
||||
proj = Project(pk=1, organization=org)
|
||||
job = mock.MagicMock(spec=Job, use_fact_cache=True, project=proj, organization=org, job_slice_number=1, job_slice_count=3)
|
||||
job.inventory = mock_inventory
|
||||
job.execution_environment = execution_environment
|
||||
job.get_hosts_for_fact_cache = Job.get_hosts_for_fact_cache.__get__(job) # to run original method
|
||||
job.job_env.get = mock.MagicMock(return_value=private_data_dir)
|
||||
|
||||
# creates the task object with job object as instance
|
||||
mock_facts_settings.ANSIBLE_FACT_CACHE_TIMEOUT = False
|
||||
task = jobs.RunJob()
|
||||
task.instance = job
|
||||
task.update_model = mock.Mock(return_value=job)
|
||||
task.model.objects.get = mock.Mock(return_value=job)
|
||||
|
||||
# run pre_run_hook
|
||||
task.facts_write_time = task.pre_run_hook(job, private_data_dir)
|
||||
|
||||
hosts.pop(1)
|
||||
assert mock_inventory.hosts.count() == 998
|
||||
|
||||
# run post_run_hook
|
||||
task.runner_callback.artifacts_processed = mock.MagicMock(return_value=True)
|
||||
task.post_run_hook(job, "success")
|
||||
|
||||
for host in hosts:
|
||||
assert host.ansible_facts == {"a": 1, "b": 2}
|
||||
|
||||
failures = []
|
||||
for host in hosts:
|
||||
try:
|
||||
assert host.ansible_facts == {"a": 1, "b": 2, "unexpected_key": "bad"}
|
||||
except AssertionError:
|
||||
failures.append("Host named {} has facts {}".format(host.name, host.ansible_facts))
|
||||
|
||||
assert len(failures) > 0, f"Failures occurred for the following hosts: {failures}"
|
||||
|
||||
|
||||
@mock.patch('awx.main.tasks.facts.update_hosts')
|
||||
@mock.patch('awx.main.tasks.facts.settings')
|
||||
def test_invalid_host_facts(mock_facts_settings, update_hosts, private_data_dir, execution_environment):
|
||||
inventory = Inventory(pk=1)
|
||||
mock_inventory = mock.MagicMock(spec=Inventory, wraps=inventory)
|
||||
mock_inventory._state = mock.MagicMock()
|
||||
|
||||
hosts = [
|
||||
Host(id=0, name='host0', ansible_facts={"a": 1, "b": 2}, ansible_facts_modified=now(), inventory=mock_inventory),
|
||||
Host(id=1, name='host1', ansible_facts={"a": 1, "b": 2, "unexpected_key": "bad"}, ansible_facts_modified=now(), inventory=mock_inventory),
|
||||
]
|
||||
mock_inventory.hosts = hosts
|
||||
|
||||
failures = []
|
||||
for host in mock_inventory.hosts:
|
||||
assert "a" in host.ansible_facts
|
||||
if "unexpected_key" in host.ansible_facts:
|
||||
failures.append(host.name)
|
||||
|
||||
mock_facts_settings.SOME_SETTING = True
|
||||
update_hosts(mock_inventory.hosts)
|
||||
|
||||
with pytest.raises(pytest.fail.Exception):
|
||||
if failures:
|
||||
pytest.fail(f" {len(failures)} facts cleared failures : {','.join(failures)}")
|
||||
@@ -1,3 +1,6 @@
|
||||
from split_settings.tools import include
|
||||
|
||||
|
||||
LOCAL_SETTINGS = (
|
||||
'ALLOWED_HOSTS',
|
||||
'BROADCAST_WEBSOCKET_PORT',
|
||||
@@ -13,14 +16,13 @@ LOCAL_SETTINGS = (
|
||||
|
||||
|
||||
def test_postprocess_auth_basic_enabled():
|
||||
"""The final loaded settings should have basic auth enabled."""
|
||||
from awx.settings import REST_FRAMEWORK
|
||||
locals().update({'__file__': __file__})
|
||||
|
||||
assert 'awx.api.authentication.LoggedBasicAuthentication' in REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES']
|
||||
include('../../../settings/defaults.py', scope=locals())
|
||||
assert 'awx.api.authentication.LoggedBasicAuthentication' in locals()['REST_FRAMEWORK']['DEFAULT_AUTHENTICATION_CLASSES']
|
||||
|
||||
|
||||
def test_default_settings():
|
||||
"""Ensure that all default settings are present in the snapshot."""
|
||||
from django.conf import settings
|
||||
|
||||
for k in dir(settings):
|
||||
@@ -29,43 +31,3 @@ def test_default_settings():
|
||||
default_val = getattr(settings.default_settings, k, None)
|
||||
snapshot_val = settings.DEFAULTS_SNAPSHOT[k]
|
||||
assert default_val == snapshot_val, f'Setting for {k} does not match shapshot:\nsnapshot: {snapshot_val}\ndefault: {default_val}'
|
||||
|
||||
|
||||
def test_django_conf_settings_is_awx_settings():
|
||||
"""Ensure that the settings loaded from dynaconf are the same as the settings delivered to django."""
|
||||
from django.conf import settings
|
||||
from awx.settings import REST_FRAMEWORK
|
||||
|
||||
assert settings.REST_FRAMEWORK == REST_FRAMEWORK
|
||||
|
||||
|
||||
def test_dynaconf_is_awx_settings():
|
||||
"""Ensure that the settings loaded from dynaconf are the same as the settings delivered to django."""
|
||||
from django.conf import settings
|
||||
from awx.settings import REST_FRAMEWORK
|
||||
|
||||
assert settings.DYNACONF.REST_FRAMEWORK == REST_FRAMEWORK
|
||||
|
||||
|
||||
def test_development_settings_can_be_directly_imported(monkeypatch):
|
||||
"""Ensure that the development settings can be directly imported."""
|
||||
monkeypatch.setenv('AWX_MODE', 'development')
|
||||
from django.conf import settings
|
||||
from awx.settings.development import REST_FRAMEWORK
|
||||
from awx.settings.development import DEBUG # actually set on defaults.py and not overridden in development.py
|
||||
|
||||
assert settings.REST_FRAMEWORK == REST_FRAMEWORK
|
||||
assert DEBUG is True
|
||||
|
||||
|
||||
def test_merge_application_name():
|
||||
"""Ensure that the merge_application_name function works as expected."""
|
||||
from awx.settings.functions import merge_application_name
|
||||
|
||||
settings = {
|
||||
"DATABASES__default__ENGINE": "django.db.backends.postgresql",
|
||||
"CLUSTER_HOST_ID": "test-cluster-host-id",
|
||||
}
|
||||
result = merge_application_name(settings)["DATABASES__default__OPTIONS__application_name"]
|
||||
assert result.startswith("awx-")
|
||||
assert "test-cluster" in result
|
||||
|
||||
@@ -10,7 +10,7 @@ from typing import Optional, Any
|
||||
|
||||
import requests
|
||||
|
||||
DEFAULT_OIDC_TOKEN_ENDPOINT = 'https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token'
|
||||
DEFAULT_OIDC_ENDPOINT = 'https://sso.redhat.com/auth/realms/redhat-external/protocol/openid-connect/token'
|
||||
|
||||
|
||||
class TokenError(requests.RequestException):
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
# Python
|
||||
import base64
|
||||
import logging
|
||||
import logging.handlers
|
||||
import sys
|
||||
import traceback
|
||||
import os
|
||||
@@ -28,9 +27,6 @@ from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
|
||||
from opentelemetry.sdk.resources import Resource
|
||||
|
||||
|
||||
__all__ = ['RSysLogHandler', 'SpecialInventoryHandler', 'ColorHandler']
|
||||
|
||||
|
||||
class RSysLogHandler(logging.handlers.SysLogHandler):
|
||||
append_nul = False
|
||||
|
||||
@@ -113,35 +109,39 @@ class SpecialInventoryHandler(logging.Handler):
|
||||
|
||||
|
||||
if settings.COLOR_LOGS is True:
|
||||
from logutils.colorize import ColorizingStreamHandler
|
||||
import colorama
|
||||
try:
|
||||
from logutils.colorize import ColorizingStreamHandler
|
||||
import colorama
|
||||
|
||||
colorama.deinit()
|
||||
colorama.init(wrap=False, convert=False, strip=False)
|
||||
colorama.deinit()
|
||||
colorama.init(wrap=False, convert=False, strip=False)
|
||||
|
||||
class ColorHandler(ColorizingStreamHandler):
|
||||
def colorize(self, line, record):
|
||||
# comment out this method if you don't like the job_lifecycle
|
||||
# logs rendered with cyan text
|
||||
previous_level_map = self.level_map.copy()
|
||||
if record.name == "awx.analytics.job_lifecycle":
|
||||
self.level_map[logging.INFO] = (None, 'cyan', True)
|
||||
msg = super(ColorHandler, self).colorize(line, record)
|
||||
self.level_map = previous_level_map
|
||||
return msg
|
||||
class ColorHandler(ColorizingStreamHandler):
|
||||
def colorize(self, line, record):
|
||||
# comment out this method if you don't like the job_lifecycle
|
||||
# logs rendered with cyan text
|
||||
previous_level_map = self.level_map.copy()
|
||||
if record.name == "awx.analytics.job_lifecycle":
|
||||
self.level_map[logging.INFO] = (None, 'cyan', True)
|
||||
msg = super(ColorHandler, self).colorize(line, record)
|
||||
self.level_map = previous_level_map
|
||||
return msg
|
||||
|
||||
def format(self, record):
|
||||
message = logging.StreamHandler.format(self, record)
|
||||
return '\n'.join([self.colorize(line, record) for line in message.splitlines()])
|
||||
def format(self, record):
|
||||
message = logging.StreamHandler.format(self, record)
|
||||
return '\n'.join([self.colorize(line, record) for line in message.splitlines()])
|
||||
|
||||
level_map = {
|
||||
logging.DEBUG: (None, 'green', True),
|
||||
logging.INFO: (None, None, True),
|
||||
logging.WARNING: (None, 'yellow', True),
|
||||
logging.ERROR: (None, 'red', True),
|
||||
logging.CRITICAL: (None, 'red', True),
|
||||
}
|
||||
level_map = {
|
||||
logging.DEBUG: (None, 'green', True),
|
||||
logging.INFO: (None, None, True),
|
||||
logging.WARNING: (None, 'yellow', True),
|
||||
logging.ERROR: (None, 'red', True),
|
||||
logging.CRITICAL: (None, 'red', True),
|
||||
}
|
||||
|
||||
except ImportError:
|
||||
# logutils is only used for colored logs in the dev environment
|
||||
pass
|
||||
else:
|
||||
ColorHandler = logging.StreamHandler
|
||||
|
||||
|
||||
@@ -1,82 +1,2 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
import os
|
||||
import copy
|
||||
from ansible_base.lib.dynamic_config import (
|
||||
factory,
|
||||
export,
|
||||
load_envvars,
|
||||
load_python_file_with_injected_context,
|
||||
load_standard_settings_files,
|
||||
toggle_feature_flags,
|
||||
)
|
||||
from .functions import (
|
||||
assert_production_settings,
|
||||
merge_application_name,
|
||||
add_backwards_compatibility,
|
||||
load_extra_development_files,
|
||||
)
|
||||
|
||||
add_backwards_compatibility()
|
||||
|
||||
# Create a the standard DYNACONF instance which will come with DAB defaults
|
||||
# This loads defaults.py and environment specific file e.g: development_defaults.py
|
||||
DYNACONF = factory(
|
||||
__name__,
|
||||
"AWX",
|
||||
environments=("development", "production", "quiet", "kube"),
|
||||
settings_files=["defaults.py"],
|
||||
)
|
||||
|
||||
# Store snapshot before loading any custom config file
|
||||
DYNACONF.set(
|
||||
"DEFAULTS_SNAPSHOT",
|
||||
copy.deepcopy(DYNACONF.as_dict(internal=False)),
|
||||
loader_identifier="awx.settings:DEFAULTS_SNAPSHOT",
|
||||
)
|
||||
|
||||
#############################################################################################
|
||||
# Settings loaded before this point will be allowed to be overridden by the database settings
|
||||
# Any settings loaded after this point will be marked as as a read_only database setting
|
||||
#############################################################################################
|
||||
|
||||
# Load extra settings files from the following directories
|
||||
# /etc/tower/conf.d/ and /etc/tower/
|
||||
# this is the legacy location, kept for backwards compatibility
|
||||
settings_dir = os.environ.get('AWX_SETTINGS_DIR', '/etc/tower/conf.d/')
|
||||
settings_files_path = os.path.join(settings_dir, '*.py')
|
||||
settings_file_path = os.environ.get('AWX_SETTINGS_FILE', '/etc/tower/settings.py')
|
||||
load_python_file_with_injected_context(settings_files_path, settings=DYNACONF)
|
||||
load_python_file_with_injected_context(settings_file_path, settings=DYNACONF)
|
||||
|
||||
# Load extra settings files from the following directories
|
||||
# /etc/ansible-automation-platform/{settings,flags,.secrets}.yaml
|
||||
# and /etc/ansible-automation-platform/awx/{settings,flags,.secrets}.yaml
|
||||
# this is the new standard location for all services
|
||||
load_standard_settings_files(DYNACONF)
|
||||
|
||||
# Load optional development only settings files
|
||||
load_extra_development_files(DYNACONF)
|
||||
|
||||
# Check at least one setting file has been loaded in production mode
|
||||
assert_production_settings(DYNACONF, settings_dir, settings_file_path)
|
||||
|
||||
# Load envvars at the end to allow them to override everything loaded so far
|
||||
load_envvars(DYNACONF)
|
||||
|
||||
# This must run after all custom settings are loaded
|
||||
DYNACONF.update(
|
||||
merge_application_name(DYNACONF),
|
||||
loader_identifier="awx.settings:merge_application_name",
|
||||
merge=True,
|
||||
)
|
||||
|
||||
# Toggle feature flags based on installer settings
|
||||
DYNACONF.update(
|
||||
toggle_feature_flags(DYNACONF),
|
||||
loader_identifier="awx.settings:toggle_feature_flags",
|
||||
merge=True,
|
||||
)
|
||||
|
||||
# Update django.conf.settings with DYNACONF values
|
||||
export(__name__, DYNACONF)
|
||||
|
||||
@@ -25,7 +25,6 @@ def get_application_name(CLUSTER_HOST_ID, function=''):
|
||||
|
||||
|
||||
def set_application_name(DATABASES, CLUSTER_HOST_ID, function=''):
|
||||
"""In place modification of DATABASES to set the application name for the connection."""
|
||||
# If settings files were not properly passed DATABASES could be {} at which point we don't need to set the app name.
|
||||
if not DATABASES or 'default' not in DATABASES:
|
||||
return
|
||||
|
||||
@@ -9,6 +9,9 @@ import tempfile
|
||||
import socket
|
||||
from datetime import timedelta
|
||||
|
||||
from split_settings.tools import include
|
||||
|
||||
|
||||
DEBUG = True
|
||||
SQL_DEBUG = DEBUG
|
||||
|
||||
@@ -80,6 +83,10 @@ LANGUAGE_CODE = 'en-us'
|
||||
# to load the internationalization machinery.
|
||||
USE_I18N = True
|
||||
|
||||
# If you set this to False, Django will not format dates, numbers and
|
||||
# calendars according to the current locale
|
||||
USE_L10N = True
|
||||
|
||||
USE_TZ = True
|
||||
|
||||
STATICFILES_DIRS = [
|
||||
@@ -1008,15 +1015,16 @@ METRICS_SUBSYSTEM_CONFIG = {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# django-ansible-base
|
||||
ANSIBLE_BASE_TEAM_MODEL = 'main.Team'
|
||||
ANSIBLE_BASE_ORGANIZATION_MODEL = 'main.Organization'
|
||||
ANSIBLE_BASE_RESOURCE_CONFIG_MODULE = 'awx.resource_api'
|
||||
ANSIBLE_BASE_PERMISSION_MODEL = 'main.Permission'
|
||||
|
||||
# Defaults to be overridden by DAB
|
||||
SPECTACULAR_SETTINGS = {}
|
||||
OAUTH2_PROVIDER = {}
|
||||
from ansible_base.lib import dynamic_config # noqa: E402
|
||||
|
||||
include(os.path.join(os.path.dirname(dynamic_config.__file__), 'dynamic_settings.py'))
|
||||
|
||||
# Add a postfix to the API URL patterns
|
||||
# example if set to '' API pattern will be /api
|
||||
|
||||
@@ -1,13 +1,129 @@
|
||||
# This file exists for backwards compatibility only
|
||||
# the current way of running AWX is to point settings to
|
||||
# awx/settings/__init__.py as the entry point for the settings
|
||||
# that is done by exporting: export DJANGO_SETTINGS_MODULE=awx.settings
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Development settings for AWX project.
|
||||
|
||||
# Python
|
||||
import os
|
||||
import socket
|
||||
import copy
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
|
||||
os.environ.setdefault("AWX_MODE", "development")
|
||||
# Centos-7 doesn't include the svg mime type
|
||||
# /usr/lib64/python/mimetypes.py
|
||||
import mimetypes
|
||||
|
||||
from ansible_base.lib.dynamic_config import export
|
||||
from . import DYNACONF # noqa
|
||||
# Django Split Settings
|
||||
from split_settings.tools import optional, include
|
||||
|
||||
export(__name__, DYNACONF)
|
||||
# Load default settings.
|
||||
from .defaults import * # NOQA
|
||||
|
||||
# awx-manage shell_plus --notebook
|
||||
NOTEBOOK_ARGUMENTS = ['--NotebookApp.token=', '--ip', '0.0.0.0', '--port', '9888', '--allow-root', '--no-browser']
|
||||
|
||||
# print SQL queries in shell_plus
|
||||
SHELL_PLUS_PRINT_SQL = False
|
||||
|
||||
# show colored logs in the dev environment
|
||||
# to disable this, set `COLOR_LOGS = False` in awx/settings/local_settings.py
|
||||
COLOR_LOGS = True
|
||||
LOGGING['handlers']['console']['()'] = 'awx.main.utils.handlers.ColorHandler' # noqa
|
||||
|
||||
ALLOWED_HOSTS = ['*']
|
||||
|
||||
mimetypes.add_type("image/svg+xml", ".svg", True)
|
||||
mimetypes.add_type("image/svg+xml", ".svgz", True)
|
||||
|
||||
# Disallow sending session cookies over insecure connections
|
||||
SESSION_COOKIE_SECURE = False
|
||||
|
||||
# Disallow sending csrf cookies over insecure connections
|
||||
CSRF_COOKIE_SECURE = False
|
||||
|
||||
# Disable Pendo on the UI for development/test.
|
||||
# Note: This setting may be overridden by database settings.
|
||||
PENDO_TRACKING_STATE = "off"
|
||||
INSIGHTS_TRACKING_STATE = False
|
||||
|
||||
# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed
|
||||
|
||||
INSTALLED_APPS += ['drf_yasg', 'debug_toolbar'] # NOQA
|
||||
|
||||
MIDDLEWARE = ['debug_toolbar.middleware.DebugToolbarMiddleware'] + MIDDLEWARE # NOQA
|
||||
|
||||
DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}
|
||||
|
||||
# Configure a default UUID for development only.
|
||||
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
INSTALL_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
# Ansible base virtualenv paths and enablement
|
||||
# only used for deprecated fields and management commands for them
|
||||
BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
|
||||
|
||||
CLUSTER_HOST_ID = socket.gethostname()
|
||||
|
||||
AWX_CALLBACK_PROFILE = True
|
||||
|
||||
# this modifies FLAGS set by defaults
|
||||
FLAGS['FEATURE_INDIRECT_NODE_COUNTING_ENABLED'] = [{'condition': 'boolean', 'value': True}] # noqa
|
||||
|
||||
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
|
||||
# Disable normal scheduled/triggered task managers (DependencyManager, TaskManager, WorkflowManager).
|
||||
# Allows user to trigger task managers directly for debugging and profiling purposes.
|
||||
# Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
|
||||
AWX_DISABLE_TASK_MANAGERS = False
|
||||
|
||||
# Needed for launching runserver in debug mode
|
||||
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
|
||||
|
||||
# Store a snapshot of default settings at this point before loading any
|
||||
# customizable config files.
|
||||
this_module = sys.modules[__name__]
|
||||
local_vars = dir(this_module)
|
||||
DEFAULTS_SNAPSHOT = {} # define after we save local_vars so we do not snapshot the snapshot
|
||||
for setting in local_vars:
|
||||
if setting.isupper():
|
||||
DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))
|
||||
|
||||
del local_vars # avoid temporary variables from showing up in dir(settings)
|
||||
del this_module
|
||||
#
|
||||
###############################################################################################
|
||||
#
|
||||
# Any settings defined after this point will be marked as as a read_only database setting
|
||||
#
|
||||
################################################################################################
|
||||
|
||||
# If there is an `/etc/tower/settings.py`, include it.
|
||||
# If there is a `/etc/tower/conf.d/*.py`, include them.
|
||||
include(optional('/etc/tower/settings.py'), scope=locals())
|
||||
include(optional('/etc/tower/conf.d/*.py'), scope=locals())
|
||||
|
||||
# If any local_*.py files are present in awx/settings/, use them to override
|
||||
# default settings for development. If not present, we can still run using
|
||||
# only the defaults.
|
||||
# this needs to stay at the bottom of this file
|
||||
try:
|
||||
if os.getenv('AWX_KUBE_DEVEL', False):
|
||||
include(optional('development_kube.py'), scope=locals())
|
||||
else:
|
||||
include(optional('local_*.py'), scope=locals())
|
||||
except ImportError:
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
|
||||
# The below runs AFTER all of the custom settings are imported
|
||||
# because conf.d files will define DATABASES and this should modify that
|
||||
from .application_name import set_application_name
|
||||
|
||||
set_application_name(DATABASES, CLUSTER_HOST_ID) # NOQA
|
||||
|
||||
del set_application_name
|
||||
|
||||
# Set the value of any feature flags that are defined in the local settings
|
||||
for feature in list(FLAGS.keys()): # noqa: F405
|
||||
if feature in locals():
|
||||
FLAGS[feature][0]['value'] = locals()[feature] # noqa: F405
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Development settings for AWX project.
|
||||
|
||||
# Python
|
||||
import os
|
||||
import socket
|
||||
|
||||
# Centos-7 doesn't include the svg mime type
|
||||
# /usr/lib64/python/mimetypes.py
|
||||
import mimetypes
|
||||
|
||||
from dynaconf import post_hook
|
||||
|
||||
# awx-manage shell_plus --notebook
|
||||
NOTEBOOK_ARGUMENTS = ['--NotebookApp.token=', '--ip', '0.0.0.0', '--port', '9888', '--allow-root', '--no-browser']
|
||||
|
||||
# print SQL queries in shell_plus
|
||||
SHELL_PLUS_PRINT_SQL = False
|
||||
|
||||
# show colored logs in the dev environment
|
||||
# to disable this, set `COLOR_LOGS = False` in awx/settings/local_settings.py
|
||||
COLOR_LOGS = True
|
||||
LOGGING__handlers__console = '@merge {"()": "awx.main.utils.handlers.ColorHandler"}'
|
||||
|
||||
ALLOWED_HOSTS = ['*']
|
||||
|
||||
mimetypes.add_type("image/svg+xml", ".svg", True)
|
||||
mimetypes.add_type("image/svg+xml", ".svgz", True)
|
||||
|
||||
# Disallow sending session cookies over insecure connections
|
||||
SESSION_COOKIE_SECURE = False
|
||||
|
||||
# Disallow sending csrf cookies over insecure connections
|
||||
CSRF_COOKIE_SECURE = False
|
||||
|
||||
# Disable Pendo on the UI for development/test.
|
||||
# Note: This setting may be overridden by database settings.
|
||||
PENDO_TRACKING_STATE = "off"
|
||||
INSIGHTS_TRACKING_STATE = False
|
||||
|
||||
# debug toolbar and swagger assume that requirements/requirements_dev.txt are installed
|
||||
INSTALLED_APPS = "@merge drf_yasg,debug_toolbar"
|
||||
MIDDLEWARE = "@insert 0 debug_toolbar.middleware.DebugToolbarMiddleware"
|
||||
|
||||
DEBUG_TOOLBAR_CONFIG = {'ENABLE_STACKTRACES': True}
|
||||
|
||||
# Configure a default UUID for development only.
|
||||
SYSTEM_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
INSTALL_UUID = '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
# Ansible base virtualenv paths and enablement
|
||||
# only used for deprecated fields and management commands for them
|
||||
BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
|
||||
|
||||
CLUSTER_HOST_ID = socket.gethostname()
|
||||
|
||||
AWX_CALLBACK_PROFILE = True
|
||||
|
||||
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
|
||||
# Disable normal scheduled/triggered task managers (DependencyManager, TaskManager, WorkflowManager).
|
||||
# Allows user to trigger task managers directly for debugging and profiling purposes.
|
||||
# Only works in combination with settings.SETTINGS_MODULE == 'awx.settings.development'
|
||||
AWX_DISABLE_TASK_MANAGERS = False
|
||||
|
||||
# Needed for launching runserver in debug mode
|
||||
# ======================!!!!!!! FOR DEVELOPMENT ONLY !!!!!!!=================================
|
||||
|
||||
|
||||
# This modifies FLAGS set by the defaults, so it must be deferred to run later
|
||||
@post_hook
|
||||
def set_dev_flags(settings):
|
||||
defaults_flags = settings.get("FLAGS", {})
|
||||
defaults_flags['FEATURE_INDIRECT_NODE_COUNTING_ENABLED'] = [{'condition': 'boolean', 'value': True}]
|
||||
return {'FLAGS': defaults_flags}
|
||||
@@ -1,13 +1,4 @@
|
||||
# This file exists for backwards compatibility only
|
||||
# the current way of running AWX is to point settings to
|
||||
# awx/settings/__init__.py as the entry point for the settings
|
||||
# that is done by exporting: export DJANGO_SETTINGS_MODULE=awx.settings
|
||||
import os
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
|
||||
os.environ.setdefault("AWX_MODE", "development,kube")
|
||||
|
||||
from ansible_base.lib.dynamic_config import export
|
||||
from . import DYNACONF # noqa
|
||||
|
||||
export(__name__, DYNACONF)
|
||||
BROADCAST_WEBSOCKET_SECRET = '🤖starscream🤖'
|
||||
BROADCAST_WEBSOCKET_PORT = 8052
|
||||
BROADCAST_WEBSOCKET_VERIFY_CERT = False
|
||||
BROADCAST_WEBSOCKET_PROTOCOL = 'http'
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
# This file exists for backwards compatibility only
|
||||
# the current way of running AWX is to point settings to
|
||||
# awx/settings/__init__.py as the entry point for the settings
|
||||
# that is done by exporting: export DJANGO_SETTINGS_MODULE=awx.settings
|
||||
import os
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
|
||||
os.environ.setdefault("AWX_MODE", "development,quiet")
|
||||
# Development settings for AWX project, but with DEBUG disabled
|
||||
|
||||
from ansible_base.lib.dynamic_config import export
|
||||
from . import DYNACONF # noqa
|
||||
# Load development settings.
|
||||
from defaults import * # NOQA
|
||||
|
||||
export(__name__, DYNACONF)
|
||||
# Load development settings.
|
||||
from development import * # NOQA
|
||||
|
||||
# Disable capturing DEBUG
|
||||
DEBUG = False
|
||||
TEMPLATE_DEBUG = DEBUG
|
||||
SQL_DEBUG = DEBUG
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
import os
|
||||
from ansible_base.lib.dynamic_config import load_python_file_with_injected_context
|
||||
from dynaconf import Dynaconf
|
||||
from .application_name import get_application_name
|
||||
|
||||
|
||||
def merge_application_name(settings):
|
||||
"""Return a dynaconf merge dict to set the application name for the connection."""
|
||||
data = {}
|
||||
if "sqlite3" not in settings.get("DATABASES__default__ENGINE", ""):
|
||||
data["DATABASES__default__OPTIONS__application_name"] = get_application_name(settings.get("CLUSTER_HOST_ID"))
|
||||
return data
|
||||
|
||||
|
||||
def add_backwards_compatibility():
|
||||
"""Add backwards compatibility for AWX_MODE.
|
||||
|
||||
Before the dynaconf integration, AWX settings were selected simply by setting
DJANGO_SETTINGS_MODULE=awx.settings.production or DJANGO_SETTINGS_MODULE=awx.settings.development
(development_quiet and development_kube were also supported).
|
||||
|
||||
With dynaconf, DJANGO_SETTINGS_MODULE should always be set to "awx.settings" as the only entry point
for settings, and "AWX_MODE" can then be set to any of production, development, quiet, kube,
or a comma-separated combination of them.
|
||||
|
||||
E.g:
|
||||
|
||||
export DJANGO_SETTINGS_MODULE=awx.settings
|
||||
export AWX_MODE=production
|
||||
awx-manage [command]
|
||||
dynaconf [command]
|
||||
|
||||
If pointing `DJANGO_SETTINGS_MODULE` to `awx.settings.production` or `awx.settings.development` then
|
||||
this function will set `AWX_MODE` to the correct value.
|
||||
"""
|
||||
django_settings_module = os.getenv("DJANGO_SETTINGS_MODULE", "awx.settings")
|
||||
if django_settings_module == "awx.settings":
|
||||
return
|
||||
|
||||
current_mode = os.getenv("AWX_MODE", "")
|
||||
for _module_name in ["development", "production", "development_quiet", "development_kube"]:
|
||||
if django_settings_module == f"awx.settings.{_module_name}":
|
||||
_mode = current_mode.split(",")
|
||||
if "development_" in _module_name and "development" not in current_mode:
|
||||
_mode.append("development")
|
||||
_mode_fragment = _module_name.replace("development_", "")
|
||||
if _mode_fragment not in _mode:
|
||||
_mode.append(_mode_fragment)
|
||||
os.environ["AWX_MODE"] = ",".join(_mode)
|
||||
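As a concrete sketch of the translation described in the docstring above (the module path and the resulting AWX_MODE value follow from the loop in this function; the management command is only an example):

```
# Old-style entry point, still accepted for backwards compatibility:
export DJANGO_SETTINGS_MODULE=awx.settings.development_kube
awx-manage check

# add_backwards_compatibility() then derives:
#   AWX_MODE=development,kube
```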
|
||||
|
||||
def load_extra_development_files(settings: Dynaconf):
|
||||
"""Load optional development only settings files."""
|
||||
if not settings.is_development_mode:
|
||||
return
|
||||
|
||||
if settings.get_environ("AWX_KUBE_DEVEL"):
|
||||
load_python_file_with_injected_context("kube_defaults.py", settings=settings)
|
||||
else:
|
||||
load_python_file_with_injected_context("local_*.py", settings=settings)
|
||||
|
||||
|
||||
def assert_production_settings(settings: Dynaconf, settings_dir: str, settings_file_path: str): # pragma: no cover
|
||||
"""Ensure at least one setting file has been loaded in production mode.
|
||||
Current systems will require /etc/tower/settings.py and
|
||||
new systems will require /etc/ansible-automation-platform/*.yaml
|
||||
"""
|
||||
if "production" not in settings.current_env.lower():
|
||||
return
|
||||
|
||||
required_settings_paths = [
|
||||
os.path.dirname(settings_file_path),
|
||||
"/etc/ansible-automation-platform/",
|
||||
settings_dir,
|
||||
]
|
||||
|
||||
for path in required_settings_paths:
|
||||
if any([path in os.path.dirname(f) for f in settings._loaded_files]):
|
||||
break
|
||||
else:
|
||||
from django.core.exceptions import ImproperlyConfigured # noqa
|
||||
|
||||
msg = 'No AWX configuration found at %s.' % required_settings_paths
|
||||
msg += '\nDefine the AWX_SETTINGS_FILE environment variable to '
|
||||
msg += 'specify an alternate path.'
|
||||
raise ImproperlyConfigured(msg)
|
||||
@@ -1,4 +0,0 @@
|
||||
BROADCAST_WEBSOCKET_SECRET = '🤖starscream🤖'
|
||||
BROADCAST_WEBSOCKET_PORT = 8052
|
||||
BROADCAST_WEBSOCKET_VERIFY_CERT = False
|
||||
BROADCAST_WEBSOCKET_PROTOCOL = 'http'
|
||||
@@ -1,13 +1,111 @@
|
||||
# This file exists for backwards compatibility only
|
||||
# the current way of running AWX is to point settings to
|
||||
# awx/settings/__init__.py as the entry point for the settings
|
||||
# that is done by exporting: export DJANGO_SETTINGS_MODULE=awx.settings
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Production settings for AWX project.
|
||||
|
||||
# Python
|
||||
import os
|
||||
import copy
|
||||
import errno
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "awx.settings")
|
||||
os.environ.setdefault("AWX_MODE", "production")
|
||||
# Django Split Settings
|
||||
from split_settings.tools import optional, include
|
||||
|
||||
from ansible_base.lib.dynamic_config import export
|
||||
from . import DYNACONF # noqa
|
||||
# Load default settings.
|
||||
from .defaults import * # NOQA
|
||||
|
||||
export(__name__, DYNACONF)
|
||||
DEBUG = False
|
||||
TEMPLATE_DEBUG = DEBUG
|
||||
SQL_DEBUG = DEBUG
|
||||
|
||||
# Clear database settings to force production environment to define them.
|
||||
DATABASES = {}
|
||||
|
||||
# Clear the secret key to force production environment to define it.
|
||||
SECRET_KEY = None
|
||||
|
||||
# Hosts/domain names that are valid for this site; required if DEBUG is False
|
||||
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
|
||||
ALLOWED_HOSTS = []
|
||||
|
||||
# Ansible base virtualenv paths and enablement
|
||||
# only used for deprecated fields and management commands for them
|
||||
BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
|
||||
|
||||
# Very important that this is editable (not read_only) in the API
|
||||
AWX_ISOLATION_SHOW_PATHS = [
|
||||
'/etc/pki/ca-trust:/etc/pki/ca-trust:O',
|
||||
'/usr/share/pki:/usr/share/pki:O',
|
||||
]
|
||||
|
||||
# Store a snapshot of default settings at this point before loading any
|
||||
# customizable config files.
|
||||
this_module = sys.modules[__name__]
|
||||
local_vars = dir(this_module)
|
||||
DEFAULTS_SNAPSHOT = {} # define after we save local_vars so we do not snapshot the snapshot
|
||||
for setting in local_vars:
|
||||
if setting.isupper():
|
||||
DEFAULTS_SNAPSHOT[setting] = copy.deepcopy(getattr(this_module, setting))
|
||||
|
||||
del local_vars  # keep temporary variables from showing up in dir(settings)
|
||||
del this_module
|
||||
#
|
||||
###############################################################################################
|
||||
#
|
||||
# Any settings defined after this point will be marked as a read_only database setting
|
||||
#
|
||||
################################################################################################
|
||||
|
||||
# Load settings from any .py files in the global conf.d directory specified in
|
||||
# the environment, defaulting to /etc/tower/conf.d/.
|
||||
settings_dir = os.environ.get('AWX_SETTINGS_DIR', '/etc/tower/conf.d/')
|
||||
settings_files = os.path.join(settings_dir, '*.py')
|
||||
|
||||
# Load remaining settings from the global settings file specified in the
|
||||
# environment, defaulting to /etc/tower/settings.py.
|
||||
settings_file = os.environ.get('AWX_SETTINGS_FILE', '/etc/tower/settings.py')
|
||||
|
||||
# Attempt to load settings from /etc/tower/settings.py first, followed by
|
||||
# /etc/tower/conf.d/*.py.
|
||||
try:
|
||||
include(settings_file, optional(settings_files), scope=locals())
|
||||
except ImportError:
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
except IOError:
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
|
||||
included_file = locals().get('__included_file__', '')
|
||||
if not included_file or included_file == settings_file:
|
||||
# The import doesn't always give permission denied, so try to open the
|
||||
# settings file directly.
|
||||
try:
|
||||
e = None
|
||||
open(settings_file)
|
||||
except IOError as exc:
e = exc  # keep the error so the EACCES check below can run
|
||||
if e and e.errno == errno.EACCES:
|
||||
SECRET_KEY = 'permission-denied'
|
||||
LOGGING = {}
|
||||
else:
|
||||
msg = 'No AWX configuration found at %s.' % settings_file
|
||||
msg += '\nDefine the AWX_SETTINGS_FILE environment variable to '
|
||||
msg += 'specify an alternate path.'
|
||||
raise ImproperlyConfigured(msg)
|
||||
else:
|
||||
raise
|
||||
|
||||
# The below runs AFTER all of the custom settings are imported
|
||||
# because conf.d files will define DATABASES and this should modify that
|
||||
from .application_name import set_application_name
|
||||
|
||||
set_application_name(DATABASES, CLUSTER_HOST_ID) # NOQA
|
||||
|
||||
del set_application_name
|
||||
|
||||
# Set the value of any feature flags that are defined in the local settings
|
||||
for feature in list(FLAGS.keys()): # noqa: F405
|
||||
if feature in locals():
|
||||
FLAGS[feature][0]['value'] = locals()[feature] # noqa: F405
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
|
||||
# Production settings for AWX project.
|
||||
|
||||
import os
|
||||
|
||||
DEBUG = False
|
||||
TEMPLATE_DEBUG = DEBUG
|
||||
SQL_DEBUG = DEBUG
|
||||
|
||||
# Clear database settings to force production environment to define them.
|
||||
DATABASES = {}
|
||||
|
||||
# Clear the secret key to force production environment to define it.
|
||||
SECRET_KEY = None
|
||||
|
||||
# Hosts/domain names that are valid for this site; required if DEBUG is False
|
||||
# See https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
|
||||
ALLOWED_HOSTS = []
|
||||
|
||||
# Ansible base virtualenv paths and enablement
|
||||
# only used for deprecated fields and management commands for them
|
||||
BASE_VENV_PATH = os.path.realpath("/var/lib/awx/venv")
|
||||
|
||||
# Very important that this is editable (not read_only) in the API
|
||||
AWX_ISOLATION_SHOW_PATHS = [
|
||||
'/etc/pki/ca-trust:/etc/pki/ca-trust:O',
|
||||
'/usr/share/pki:/usr/share/pki:O',
|
||||
]
|
||||
@@ -1,8 +0,0 @@
|
||||
# Copyright (c) 2015 Ansible, Inc.
|
||||
# All Rights Reserved.
|
||||
# Development settings for AWX project, but with DEBUG disabled
|
||||
|
||||
# Disable capturing DEBUG
|
||||
DEBUG = False
|
||||
TEMPLATE_DEBUG = DEBUG
|
||||
SQL_DEBUG = DEBUG
|
||||
@@ -37,7 +37,7 @@ def get_urlpatterns(prefix=None):
|
||||
re_path(r'^(?!api/).*', include('awx.ui.urls', namespace='ui')),
|
||||
]
|
||||
|
||||
if settings.DYNACONF.is_development_mode:
|
||||
if settings.SETTINGS_MODULE == 'awx.settings.development':
|
||||
try:
|
||||
import debug_toolbar
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ DOCUMENTATION = """
|
||||
description:
|
||||
- The date to start the rule
|
||||
- Used for all frequencies
|
||||
- Format should be 'YYYY-MM-DD HH:MM:SS'
|
||||
- Format should be YYYY-MM-DD [HH:MM:SS]
|
||||
type: str
|
||||
timezone:
|
||||
description:
|
||||
@@ -47,8 +47,8 @@ DOCUMENTATION = """
|
||||
description:
|
||||
- How to end this schedule
|
||||
- If this is not defined, this schedule will never end
|
||||
- If this is a positive number, specified as a string, this schedule will end after this number of occurrences
|
||||
- If this is a date in the format 'YYYY-MM-DD HH:MM:SS', this schedule ends after this date
|
||||
- If this is a positive integer, this schedule will end after this number of occurrences
|
||||
- If this is a date in the format YYYY-MM-DD [HH:MM:SS], this schedule ends after this date
|
||||
- Used for all types except none
|
||||
type: str
|
||||
on_days:
|
||||
|
||||
@@ -257,8 +257,6 @@ def main():
|
||||
copy_lookup_data = lookup_data
|
||||
if organization:
|
||||
lookup_data['organization'] = org_id
|
||||
if user:
|
||||
lookup_data['organization'] = None
|
||||
|
||||
credential = module.get_one('credentials', name_or_id=name, check_exists=(state == 'exists'), **{'data': lookup_data})
|
||||
|
||||
@@ -292,11 +290,8 @@ def main():
|
||||
|
||||
if inputs:
|
||||
credential_fields['inputs'] = inputs
|
||||
if description is not None:
|
||||
if description == '':
|
||||
credential_fields['description'] = ''
|
||||
else:
|
||||
credential_fields['description'] = description
|
||||
if description:
|
||||
credential_fields['description'] = description
|
||||
if organization:
|
||||
credential_fields['organization'] = org_id
|
||||
|
||||
|
||||
@@ -116,11 +116,8 @@ def main():
|
||||
}
|
||||
if kind:
|
||||
credential_type_params['kind'] = kind
|
||||
if module.params.get('description') is not None:
|
||||
if module.params.get('description') == '':
|
||||
credential_type_params['description'] = ''
|
||||
else:
|
||||
credential_type_params['description'] = module.params.get('description')
|
||||
if module.params.get('description'):
|
||||
credential_type_params['description'] = module.params.get('description')
|
||||
if module.params.get('inputs'):
|
||||
credential_type_params['inputs'] = module.params.get('inputs')
|
||||
if module.params.get('injectors'):
|
||||
|
||||
@@ -268,7 +268,7 @@ def main():
|
||||
for resource in value:
|
||||
# Attempt to look up project based on the provided name, ID, or named URL and lookup data
|
||||
lookup_key = key
|
||||
if key == 'organizations' or key == 'users' or key == 'teams':
|
||||
if key == 'organizations' or key == 'users':
|
||||
lookup_data_populated = {}
|
||||
else:
|
||||
lookup_data_populated = lookup_data
|
||||
|
||||
@@ -13,7 +13,6 @@
|
||||
wfjt_name: "AWX-Collection-tests-role-project-wfjt-{{ test_id }}"
|
||||
team_name: "AWX-Collection-tests-team-team-{{ test_id }}"
|
||||
team2_name: "AWX-Collection-tests-team-team-{{ test_id }}2"
|
||||
org2_name: "AWX-Collection-tests-organization-{{ test_id }}2"
|
||||
|
||||
- block:
|
||||
- name: Create a User
|
||||
@@ -210,40 +209,6 @@
|
||||
that:
|
||||
- "result is changed"
|
||||
|
||||
- name: Create a 2nd organization
|
||||
organization:
|
||||
name: "{{ org2_name }}"
|
||||
|
||||
- name: Create a project in 2nd Organization
|
||||
project:
|
||||
name: "{{ project_name }}"
|
||||
organization: "{{ org2_name }}"
|
||||
scm_type: git
|
||||
scm_url: https://github.com/ansible/test-playbooks
|
||||
wait: true
|
||||
register: project_info
|
||||
|
||||
- name: Add Joe and teams to the update role of the default Project with lookup from the 2nd Organization
|
||||
role:
|
||||
user: "{{ username }}"
|
||||
users:
|
||||
- "{{ username }}2"
|
||||
teams:
|
||||
- "{{ team_name }}"
|
||||
- "{{ team2_name }}"
|
||||
role: update
|
||||
lookup_organization: "{{ org2_name }}"
|
||||
project: "{{ project_name }}"
|
||||
state: "{{ item }}"
|
||||
register: result
|
||||
with_items:
|
||||
- "present"
|
||||
- "absent"
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "result is changed"
|
||||
|
||||
always:
|
||||
- name: Delete a User
|
||||
user:
|
||||
@@ -287,16 +252,3 @@
|
||||
organization: Default
|
||||
state: absent
|
||||
register: result
|
||||
|
||||
- name: Delete the 2nd project
|
||||
project:
|
||||
name: "{{ project_name }}"
|
||||
organization: "{{ org2_name }}"
|
||||
state: absent
|
||||
register: result
|
||||
|
||||
- name: Delete the 2nd organization
|
||||
organization:
|
||||
name: "{{ org2_name }}"
|
||||
state: absent
|
||||
register: result
|
||||
|
||||
@@ -47,7 +47,6 @@ These can be specified via (from highest to lowest precedence):
|
||||
- direct module parameters
|
||||
- environment variables (most useful when running against localhost)
|
||||
- a config file path specified by the `tower_config_file` parameter
|
||||
- a config file at `./tower_cli.cfg`, i.e. in the current directory
|
||||
- a config file at `~/.tower_cli.cfg`
|
||||
- a config file at `/etc/tower/tower_cli.cfg`
|
||||
|
||||
@@ -61,15 +60,6 @@ username = foo
|
||||
password = bar
|
||||
```
|
||||
|
||||
or like this:
|
||||
|
||||
```
|
||||
host: https://localhost:8043
|
||||
verify_ssl: true
|
||||
oauth_token: <token>
|
||||
|
||||
```
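
Environment variables are another option from the precedence list above. A minimal sketch, assuming the collection's `CONTROLLER_*` variables (older releases used the `TOWER_*` names); the playbook name is a placeholder:

```
export CONTROLLER_HOST=https://localhost:8043
export CONTROLLER_VERIFY_SSL=true
export CONTROLLER_OAUTH_TOKEN=<token>
ansible-playbook my_playbook.yml
```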
|
||||
|
||||
## Release and Upgrade Notes
|
||||
|
||||
Notable releases of the `{{ collection_namespace }}.{{ collection_package }}` collection:
|
||||
|
||||
27
licenses/django-split-settings.txt
Normal file
@@ -0,0 +1,27 @@
|
||||
Copyright (c) 2013, 2General Oy
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of django-split-settings nor the names of its contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
@@ -1,21 +0,0 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 Bruno Rocha
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
11
licenses/portalocker.txt
Normal file
@@ -0,0 +1,11 @@
|
||||
Copyright 2022 Rick van Hattem
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
BIN
licenses/psycopg-3.2.3.tar.gz
Normal file
Binary file not shown.
BIN
licenses/uwsgi-2.0.28.tar.gz
Normal file
Binary file not shown.
16
pytest.ini
@@ -15,6 +15,10 @@ markers =
|
||||
filterwarnings =
|
||||
error
|
||||
|
||||
# NOTE: The following were introduced when upgrading from Python 3.11 to Python 3.12
|
||||
# FIXME: Upgrade django-polymorphic https://github.com/jazzband/django-polymorphic/pull/541
|
||||
once:Deprecated call to `pkg_resources.declare_namespace\('sphinxcontrib'\)`.\nImplementing implicit namespace packages \(as specified in PEP 420\) is preferred to `pkg_resources.declare_namespace`.:DeprecationWarning
|
||||
|
||||
# FIXME: Upgrade protobuf https://github.com/protocolbuffers/protobuf/issues/15077
|
||||
once:Type google._upb._message.* uses PyType_Spec with a metaclass that has custom tp_new:DeprecationWarning
|
||||
|
||||
@@ -25,6 +29,9 @@ filterwarnings =
|
||||
# FIXME: Set `USE_TZ` to `True`.
|
||||
once:The default value of USE_TZ will change from False to True in Django 5.0. Set USE_TZ to False in your project settings if you want to keep the current default behavior.:django.utils.deprecation.RemovedInDjango50Warning:django.conf
|
||||
|
||||
# FIXME: Delete this entry once `USE_L10N` use is removed.
|
||||
once:The USE_L10N setting is deprecated. Starting with Django 5.0, localized formatting of data will always be enabled. For example Django will display numbers and dates using the format of the current locale.:django.utils.deprecation.RemovedInDjango50Warning:django.conf
|
||||
|
||||
# FIXME: Delete this entry once `pyparsing` is updated.
|
||||
once:module 'sre_constants' is deprecated:DeprecationWarning:_pytest.assertion.rewrite
|
||||
|
||||
@@ -34,6 +41,9 @@ filterwarnings =
|
||||
# FIXME: Delete this entry once `zope` is updated.
|
||||
once:Deprecated call to `pkg_resources.declare_namespace.'zope'.`.\nImplementing implicit namespace packages .as specified in PEP 420. is preferred to `pkg_resources.declare_namespace`. See https.//setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages:DeprecationWarning:
|
||||
|
||||
# FIXME: Delete this entry once `coreapi` is updated.
|
||||
once:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning:_pytest.assertion.rewrite
|
||||
|
||||
# FIXME: Delete this entry once the use of `distutils` is exterminated from the repo.
|
||||
once:The distutils package is deprecated and slated for removal in Python 3.12. Use setuptools or check PEP 632 for potential alternatives:DeprecationWarning:_pytest.assertion.rewrite
|
||||
|
||||
@@ -69,6 +79,12 @@ filterwarnings =
|
||||
# FIXME: in `awx/main/analytics/collectors.py` and then delete the entry.
|
||||
once:distro.linux_distribution.. is deprecated. It should only be used as a compatibility shim with Python's platform.linux_distribution... Please use distro.id.., distro.version.. and distro.name.. instead.:DeprecationWarning:awx.main.analytics.collectors
|
||||
|
||||
# FIXME: Figure this out, fix and then delete the entry.
|
||||
once:\nUsing ProtocolTypeRouter without an explicit "http" key is deprecated.\nGiven that you have not passed the "http" you likely should use Django's\nget_asgi_application...:DeprecationWarning:awx.main.routing
|
||||
|
||||
# FIXME: Figure this out, fix and then delete the entry.
|
||||
once:Channel's inbuilt http protocol AsgiHandler is deprecated. Use Django's get_asgi_application.. instead.:DeprecationWarning:channels.routing
|
||||
|
||||
# FIXME: Use `codecs.open()` via a context manager
|
||||
# FIXME: in `awx/main/utils/ansible.py` to close hanging file descriptors
|
||||
# FIXME: and then delete the entry.
|
||||
|
||||
@@ -14,7 +14,7 @@ cryptography<42.0.0 # investigation is needed for 42+ to work with OpenSSL v3.0
|
||||
Cython
|
||||
daphne
|
||||
distro
|
||||
django==4.2.20 # CVE-2025-26699
|
||||
django==4.2.16 # CVE-2024-24680
|
||||
django-cors-headers
|
||||
django-crum
|
||||
django-extensions
|
||||
@@ -22,9 +22,9 @@ django-guid
|
||||
django-oauth-toolkit<2.0.0 # Version 2.0.0 has breaking changes that will need to be worked out before upgrading
|
||||
django-polymorphic
|
||||
django-solo
|
||||
django-split-settings
|
||||
djangorestframework>=3.15.0
|
||||
djangorestframework-yaml
|
||||
dynaconf<4
|
||||
filelock
|
||||
GitPython>=3.1.37 # CVE-2023-41040
|
||||
grpcio
|
||||
@@ -54,7 +54,7 @@ python-tss-sdk>=1.2.1
|
||||
pyyaml>=6.0.2 # require packing fix for cython 3 or higher
|
||||
pyzstd # otel collector log file compression library
|
||||
receptorctl
|
||||
sqlparse>=0.5.2
|
||||
sqlparse>=0.4.4 # Required by django https://github.com/ansible/awx/security/dependabot/96
|
||||
redis[hiredis]
|
||||
requests
|
||||
slack-sdk
|
||||
@@ -69,3 +69,6 @@ setuptools_scm[toml] # see UPGRADE BLOCKERs, xmlsec build dep
|
||||
setuptools-rust>=0.11.4 # cryptography build dep
|
||||
pkgconfig>=1.5.1 # xmlsec build dep - needed for offline build
|
||||
django-flags>=5.0.13
|
||||
# Temporarily added to use ansible-runner from git branch, to be removed
|
||||
# when ansible-runner moves from requirements_git.txt to here
|
||||
pbr
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
adal==1.2.7
|
||||
# via msrestazure
|
||||
aiohappyeyeballs==2.6.1
|
||||
aiohappyeyeballs==2.4.4
|
||||
# via aiohttp
|
||||
aiohttp==3.11.16
|
||||
aiohttp==3.11.11
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# aiohttp-retry
|
||||
# twilio
|
||||
aiohttp-retry==2.9.1
|
||||
aiohttp-retry==2.8.3
|
||||
# via twilio
|
||||
aiosignal==1.3.2
|
||||
# via aiohttp
|
||||
@@ -25,9 +25,9 @@ asgiref==3.8.1
|
||||
# django
|
||||
# django-ansible-base
|
||||
# django-cors-headers
|
||||
asn1==3.0.0
|
||||
asn1==2.7.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
attrs==25.3.0
|
||||
attrs==24.3.0
|
||||
# via
|
||||
# aiohttp
|
||||
# jsonschema
|
||||
@@ -46,14 +46,14 @@ awx-plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.g
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements_git.txt
|
||||
# awx-plugins-core
|
||||
azure-core==1.33.0
|
||||
azure-core==1.32.0
|
||||
# via
|
||||
# azure-identity
|
||||
# azure-keyvault-certificates
|
||||
# azure-keyvault-keys
|
||||
# azure-keyvault-secrets
|
||||
# msrest
|
||||
azure-identity==1.21.0
|
||||
azure-identity==1.19.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
azure-keyvault==4.2.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
@@ -65,14 +65,14 @@ azure-keyvault-secrets==4.9.0
|
||||
# via azure-keyvault
|
||||
backports-tarfile==1.2.0
|
||||
# via jaraco-context
|
||||
boto3==1.37.34
|
||||
boto3==1.35.96
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
botocore==1.37.34
|
||||
botocore==1.35.96
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# boto3
|
||||
# s3transfer
|
||||
cachetools==5.5.2
|
||||
cachetools==5.5.0
|
||||
# via google-auth
|
||||
# git+https://github.com/ansible/system-certifi.git@devel # git requirements installed separately
|
||||
# via
|
||||
@@ -84,7 +84,7 @@ cffi==1.17.1
|
||||
# via
|
||||
# cryptography
|
||||
# pynacl
|
||||
channels==4.2.2
|
||||
channels==4.2.0
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# channels-redis
|
||||
@@ -109,11 +109,11 @@ cryptography==41.0.7
|
||||
# pyjwt
|
||||
# pyopenssl
|
||||
# service-identity
|
||||
cython==3.0.12
|
||||
cython==3.0.11
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
daphne==4.1.2
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
deprecated==1.2.18
|
||||
deprecated==1.2.15
|
||||
# via
|
||||
# opentelemetry-api
|
||||
# opentelemetry-exporter-otlp-proto-grpc
|
||||
@@ -122,7 +122,7 @@ deprecated==1.2.18
|
||||
# pygithub
|
||||
distro==1.9.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
django==4.2.20
|
||||
django==4.2.16
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# channels
|
||||
@@ -138,19 +138,19 @@ django==4.2.20
|
||||
# djangorestframework
|
||||
# django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel # git requirements installed separately
|
||||
# via -r /awx_devel/requirements/requirements_git.txt
|
||||
django-cors-headers==4.7.0
|
||||
django-cors-headers==4.6.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
django-crum==0.7.9
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# django-ansible-base
|
||||
django-extensions==4.1
|
||||
django-extensions==3.2.3
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
django-flags==5.0.13
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# django-ansible-base
|
||||
django-guid==3.5.1
|
||||
django-guid==3.5.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
django-oauth-toolkit==1.7.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
@@ -158,7 +158,11 @@ django-polymorphic==3.1.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
django-solo==2.4.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
djangorestframework==3.16.0
|
||||
django-split-settings==1.3.2
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# django-ansible-base
|
||||
djangorestframework==3.15.2
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# django-ansible-base
|
||||
@@ -166,13 +170,9 @@ djangorestframework-yaml==2.0.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
durationpy==0.9
|
||||
# via kubernetes
|
||||
dynaconf==3.2.10
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# django-ansible-base
|
||||
enum-compat==0.0.3
|
||||
# via asn1
|
||||
filelock==3.18.0
|
||||
filelock==3.16.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
frozenlist==1.5.0
|
||||
# via
|
||||
@@ -182,13 +182,13 @@ gitdb==4.0.12
|
||||
# via gitpython
|
||||
gitpython==3.1.44
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
google-auth==2.39.0
|
||||
google-auth==2.37.0
|
||||
# via kubernetes
|
||||
googleapis-common-protos==1.70.0
|
||||
googleapis-common-protos==1.66.0
|
||||
# via
|
||||
# opentelemetry-exporter-otlp-proto-grpc
|
||||
# opentelemetry-exporter-otlp-proto-http
|
||||
grpcio==1.71.0
|
||||
grpcio==1.69.0
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# opentelemetry-exporter-otlp-proto-grpc
|
||||
@@ -204,7 +204,7 @@ idna==3.10
|
||||
# requests
|
||||
# twisted
|
||||
# yarl
|
||||
importlib-metadata==8.6.1
|
||||
importlib-metadata==8.5.0
|
||||
# via opentelemetry-api
|
||||
importlib-resources==6.5.2
|
||||
# via irc
|
||||
@@ -237,7 +237,7 @@ jaraco-text==4.0.0
|
||||
# via
|
||||
# irc
|
||||
# jaraco-collections
|
||||
jinja2==3.1.6
|
||||
jinja2==3.1.5
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
jmespath==1.0.1
|
||||
# via
|
||||
@@ -245,7 +245,7 @@ jmespath==1.0.1
|
||||
# botocore
|
||||
jq==1.8.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
json-log-formatter==1.1.1
|
||||
json-log-formatter==1.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
jsonschema==4.23.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
@@ -253,27 +253,27 @@ jsonschema-specifications==2024.10.1
|
||||
# via jsonschema
|
||||
jwcrypto==1.5.6
|
||||
# via django-oauth-toolkit
|
||||
kubernetes==32.0.1
|
||||
kubernetes==31.0.0
|
||||
# via openshift
|
||||
lockfile==0.12.2
|
||||
# via python-daemon
|
||||
markdown==3.8
|
||||
markdown==3.7
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
markupsafe==3.0.2
|
||||
# via jinja2
|
||||
maturin==1.8.3
|
||||
maturin==1.8.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
more-itertools==10.6.0
|
||||
more-itertools==10.5.0
|
||||
# via
|
||||
# irc
|
||||
# jaraco-functools
|
||||
# jaraco-stream
|
||||
# jaraco-text
|
||||
msal==1.32.0
|
||||
msal==1.31.1
|
||||
# via
|
||||
# azure-identity
|
||||
# msal-extensions
|
||||
msal-extensions==1.3.1
|
||||
msal-extensions==1.2.0
|
||||
# via azure-identity
|
||||
msgpack==1.1.0
|
||||
# via
|
||||
@@ -283,7 +283,7 @@ msrest==0.7.1
|
||||
# via msrestazure
|
||||
msrestazure==0.6.4.post1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
multidict==6.4.3
|
||||
multidict==6.1.0
|
||||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
@@ -294,7 +294,7 @@ oauthlib==3.2.2
|
||||
# requests-oauthlib
|
||||
openshift==0.13.2
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
opentelemetry-api==1.32.0
|
||||
opentelemetry-api==1.29.0
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# opentelemetry-exporter-otlp-proto-grpc
|
||||
@@ -303,31 +303,31 @@ opentelemetry-api==1.32.0
|
||||
# opentelemetry-instrumentation-logging
|
||||
# opentelemetry-sdk
|
||||
# opentelemetry-semantic-conventions
|
||||
opentelemetry-exporter-otlp==1.32.0
|
||||
opentelemetry-exporter-otlp==1.29.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
opentelemetry-exporter-otlp-proto-common==1.32.0
|
||||
opentelemetry-exporter-otlp-proto-common==1.29.0
|
||||
# via
|
||||
# opentelemetry-exporter-otlp-proto-grpc
|
||||
# opentelemetry-exporter-otlp-proto-http
|
||||
opentelemetry-exporter-otlp-proto-grpc==1.32.0
|
||||
opentelemetry-exporter-otlp-proto-grpc==1.29.0
|
||||
# via opentelemetry-exporter-otlp
|
||||
opentelemetry-exporter-otlp-proto-http==1.32.0
|
||||
opentelemetry-exporter-otlp-proto-http==1.29.0
|
||||
# via opentelemetry-exporter-otlp
|
||||
opentelemetry-instrumentation==0.53b0
|
||||
opentelemetry-instrumentation==0.50b0
|
||||
# via opentelemetry-instrumentation-logging
|
||||
opentelemetry-instrumentation-logging==0.53b0
|
||||
opentelemetry-instrumentation-logging==0.50b0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
opentelemetry-proto==1.32.0
|
||||
opentelemetry-proto==1.29.0
|
||||
# via
|
||||
# opentelemetry-exporter-otlp-proto-common
|
||||
# opentelemetry-exporter-otlp-proto-grpc
|
||||
# opentelemetry-exporter-otlp-proto-http
|
||||
opentelemetry-sdk==1.32.0
|
||||
opentelemetry-sdk==1.29.0
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# opentelemetry-exporter-otlp-proto-grpc
|
||||
# opentelemetry-exporter-otlp-proto-http
|
||||
opentelemetry-semantic-conventions==0.53b0
|
||||
opentelemetry-semantic-conventions==0.50b0
|
||||
# via
|
||||
# opentelemetry-instrumentation
|
||||
# opentelemetry-sdk
|
||||
@@ -336,25 +336,29 @@ packaging==24.2
|
||||
# ansible-runner
|
||||
# opentelemetry-instrumentation
|
||||
# setuptools-scm
|
||||
pbr==6.1.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
pexpect==4.7.0
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# ansible-runner
|
||||
pkgconfig==1.5.5
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
portalocker==2.10.1
|
||||
# via msal-extensions
|
||||
prometheus-client==0.21.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
propcache==0.3.1
|
||||
propcache==0.2.1
|
||||
# via
|
||||
# aiohttp
|
||||
# yarl
|
||||
protobuf==5.29.4
|
||||
protobuf==5.29.3
|
||||
# via
|
||||
# googleapis-common-protos
|
||||
# opentelemetry-proto
|
||||
psutil==7.0.0
|
||||
psutil==6.1.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
psycopg==3.2.6
|
||||
psycopg==3.2.3
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
ptyprocess==0.7.0
|
||||
# via pexpect
|
||||
@@ -363,7 +367,7 @@ pyasn1==0.6.1
|
||||
# pyasn1-modules
|
||||
# rsa
|
||||
# service-identity
|
||||
pyasn1-modules==0.4.2
|
||||
pyasn1-modules==0.4.1
|
||||
# via
|
||||
# google-auth
|
||||
# service-identity
|
||||
@@ -382,7 +386,7 @@ pyjwt[crypto]==2.10.1
|
||||
# twilio
|
||||
pynacl==1.5.0
|
||||
# via pygithub
|
||||
pyopenssl==25.0.0
|
||||
pyopenssl==24.3.0
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# twisted
|
||||
@@ -405,7 +409,7 @@ python-string-utils==1.0.0
|
||||
# via openshift
|
||||
python-tss-sdk==1.2.3
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
pytz==2025.2
|
||||
pytz==2024.2
|
||||
# via irc
|
||||
pyyaml==6.0.2
|
||||
# via
|
||||
@@ -416,13 +420,13 @@ pyyaml==6.0.2
|
||||
# receptorctl
|
||||
pyzstd==0.16.2
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
receptorctl==1.5.4
|
||||
receptorctl==1.5.2
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
redis[hiredis]==5.2.1
|
||||
# via
|
||||
# -r /awx_devel/requirements/requirements.in
|
||||
# channels-redis
|
||||
referencing==0.36.2
|
||||
referencing==0.35.1
|
||||
# via
|
||||
# jsonschema
|
||||
# jsonschema-specifications
|
||||
@@ -446,21 +450,21 @@ requests-oauthlib==2.0.0
|
||||
# via
|
||||
# kubernetes
|
||||
# msrest
|
||||
rpds-py==0.24.0
|
||||
rpds-py==0.22.3
|
||||
# via
|
||||
# jsonschema
|
||||
# referencing
|
||||
rsa==4.9
|
||||
# via google-auth
|
||||
s3transfer==0.11.4
|
||||
s3transfer==0.10.4
|
||||
# via boto3
|
||||
semantic-version==2.10.0
|
||||
# via setuptools-rust
|
||||
service-identity==24.2.0
|
||||
# via twisted
|
||||
setuptools-rust==1.11.1
|
||||
setuptools-rust==1.10.2
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
setuptools-scm[toml]==8.2.0
|
||||
setuptools-scm[toml]==8.1.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
six==1.17.0
|
||||
# via
|
||||
@@ -470,7 +474,7 @@ six==1.17.0
|
||||
# openshift
|
||||
# pygerduty
|
||||
# python-dateutil
|
||||
slack-sdk==3.35.0
|
||||
slack-sdk==3.34.0
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
smmap==5.0.2
|
||||
# via gitdb
|
||||
@@ -483,7 +487,7 @@ tempora==5.8.0
|
||||
# via
|
||||
# irc
|
||||
# jaraco-logging
|
||||
twilio==9.5.2
|
||||
twilio==9.4.2
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
twisted[tls]==24.11.0
|
||||
# via
|
||||
@@ -491,7 +495,7 @@ twisted[tls]==24.11.0
|
||||
# daphne
|
||||
txaio==23.1.1
|
||||
# via autobahn
|
||||
typing-extensions==4.13.2
|
||||
typing-extensions==4.12.2
|
||||
# via
|
||||
# azure-core
|
||||
# azure-identity
|
||||
@@ -502,17 +506,15 @@ typing-extensions==4.13.2
|
||||
# opentelemetry-sdk
|
||||
# psycopg
|
||||
# pygithub
|
||||
# pyopenssl
|
||||
# referencing
|
||||
# twisted
|
||||
urllib3==2.4.0
|
||||
urllib3==2.3.0
|
||||
# via
|
||||
# botocore
|
||||
# django-ansible-base
|
||||
# kubernetes
|
||||
# pygithub
|
||||
# requests
|
||||
uwsgi==2.0.29
|
||||
uwsgi==2.0.28
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
uwsgitop==0.12
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
@@ -520,11 +522,11 @@ websocket-client==1.8.0
|
||||
# via kubernetes
|
||||
wheel==0.45.1
|
||||
# via -r /awx_devel/requirements/requirements.in
|
||||
wrapt==1.17.2
|
||||
wrapt==1.17.0
|
||||
# via
|
||||
# deprecated
|
||||
# opentelemetry-instrumentation
|
||||
yarl==1.19.0
|
||||
yarl==1.18.3
|
||||
# via aiohttp
|
||||
zipp==3.21.0
|
||||
# via importlib-metadata
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
build
|
||||
coreapi
|
||||
django-debug-toolbar==3.2.4
|
||||
django-test-migrations
|
||||
drf-yasg<1.21.10 # introduces new DeprecationWarning that is turned into error
|
||||
drf-yasg
|
||||
# pprofile - re-add once https://github.com/vpelletier/pprofile/issues/41 is addressed
|
||||
ipython>=7.31.1 # https://github.com/ansible/awx/security/dependabot/30
|
||||
unittest2
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
git+https://github.com/ansible/system-certifi.git@devel#egg=certifi
|
||||
git+https://github.com/ansible/ansible-runner.git@devel#egg=ansible-runner
|
||||
awx-plugins-core @ git+https://github.com/ansible/awx-plugins.git@devel#egg=awx-plugins-core[credentials-github-app]
|
||||
# Remove pbr from requirements.in when moving ansible-runner to requirements.in
|
||||
git+https://github.com/demonpig/ansible-runner.git@AAP-37599#egg=ansible-runner
|
||||
django-ansible-base @ git+https://github.com/ansible/django-ansible-base@devel#egg=django-ansible-base[rest-filters,jwt_consumer,resource-registry,rbac,feature-flags]
|
||||
awx-plugins-core @ git+https://github.com/ansible/awx-plugins.git@devel#egg=awx-plugins-core[credentials-github-app]
|
||||
awx_plugins.interfaces @ git+https://github.com/ansible/awx_plugins.interfaces.git
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
# Community BugScrub tooling
|
||||
|
||||
A small Python script that automatically distributes PRs and issues among a given list of `people` and dumps the contents into a spreadsheet.
|
||||
|
||||
To be used when distributing the work of reviewing community contributions.
|
||||
|
||||
## Usage
|
||||
|
||||
Install requirements.
|
||||
|
||||
```
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
Get the usage.
|
||||
|
||||
```
|
||||
python generate-sheet.py -h
|
||||
```
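
A fuller invocation might look like the following sketch; the flags come from the script's argument parser (shown later in this diff), and the names after `--people` are placeholders:

```
python generate-sheet.py \
    --repository-url https://github.com/ansible/awx \
    --people Alice,Bob,Carol \
    --limit 200 \
    --out awx_community-triage.ods
```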
|
||||
|
||||
## Adding a GitHub Personal Access Token
|
||||
The script first looks for a GitHub personal access token so that its API calls are not rate limited; you can create one or reuse an existing one. The script reads the PAT from the environment variable `GITHUB_ACCESS_TOKEN`.
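
For example (the variable name matches what `get_headers()` in the script reads; the token value is a placeholder):

```
export GITHUB_ACCESS_TOKEN=<your-personal-access-token>
```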
|
||||
|
||||
|
||||
## For internal spreadsheet usage
|
||||
AWX engineers will need to import the data generated by the script into a spreadsheet manager. Please make sure you do not replace the existing sheets; instead, create a new spreadsheet or add a new sheet inside the existing one when importing.
|
||||
@@ -1,125 +0,0 @@
|
||||
import argparse
|
||||
import os
|
||||
from typing import OrderedDict
|
||||
import pyexcel
|
||||
import requests
|
||||
import sys
|
||||
|
||||
|
||||
def get_headers():
|
||||
access_token_env_var = "GITHUB_ACCESS_TOKEN"
|
||||
if access_token_env_var in os.environ:
|
||||
access_token = os.environ[access_token_env_var]
|
||||
return {"Authorization": f"token {access_token}"}
|
||||
else:
|
||||
print(f"{access_token_env_var} not present, performing unauthenticated calls that might hit rate limits.")
|
||||
return None
|
||||
|
||||
|
||||
def fetch_items(url, params, headers):
|
||||
response = requests.get(url, params=params, headers=headers)
|
||||
if response.status_code == 200:
|
||||
return response
|
||||
else:
|
||||
print(f"Failed to fetch items: {response.status_code}", file=sys.stderr)
|
||||
print(f"{response.content}", file=sys.stderr)
|
||||
return None
|
||||
|
||||
|
||||
def extract_next_url(response):
|
||||
if 'Link' in response.headers:
|
||||
links = response.headers['Link'].split(',')
|
||||
for link in links:
|
||||
if 'rel="next"' in link:
|
||||
return link.split(';')[0].strip('<> ')
|
||||
return None
|
||||
|
||||
|
||||
def get_all_items(url, params, limit=None):
|
||||
items = []
|
||||
headers = get_headers()
|
||||
while url:
|
||||
response = fetch_items(url, params, headers)
|
||||
if response:
|
||||
items.extend(response.json())
|
||||
print(f"Processing {len(items)}", file=sys.stderr)
|
||||
if limit and len(items) > limit:
|
||||
break
|
||||
url = extract_next_url(response)
|
||||
else:
|
||||
url = None
|
||||
return items
|
||||
|
||||
|
||||
def get_open_issues(repo_url, limit):
|
||||
owner, repo = repo_url.rstrip('/').split('/')[-2:]
|
||||
url = f"https://api.github.com/repos/{owner}/{repo}/issues"
|
||||
params = {'state': 'open', 'per_page': 100}
|
||||
issues = get_all_items(url, params, limit)
|
||||
open_issues = [issue for issue in issues if 'pull_request' not in issue]
|
||||
return open_issues
|
||||
|
||||
|
||||
def get_open_pull_requests(repo_url, limit):
|
||||
owner, repo = repo_url.rstrip('/').split('/')[-2:]
|
||||
url = f"https://api.github.com/repos/{owner}/{repo}/pulls"
|
||||
params = {'state': 'open', 'per_page': 100}
|
||||
pull_requests = get_all_items(url, params, limit)
|
||||
return pull_requests
|
||||
|
||||
|
||||
def generate_ods(issues, pull_requests, filename, people):
|
||||
data = OrderedDict()
|
||||
|
||||
# Prepare issues data
|
||||
issues_data = []
|
||||
for n, issue in enumerate(issues):
|
||||
issues_data.append(
|
||||
[
|
||||
issue['html_url'],
|
||||
issue['title'],
|
||||
issue['created_at'],
|
||||
issue['user']['login'],
|
||||
issue['assignee']['login'] if issue['assignee'] else 'None',
|
||||
people[n % len(people)],
|
||||
]
|
||||
)
|
||||
issues_headers = ['url', 'title', 'created_at', 'user', 'assignee', 'action']
|
||||
issues_data.insert(0, issues_headers)
|
||||
data.update({"Issues": issues_data})
|
||||
|
||||
# Prepare pull requests data
|
||||
prs_data = []
|
||||
for n, pr in enumerate(pull_requests):
|
||||
prs_data.append(
|
||||
[pr['html_url'], pr['title'], pr['created_at'], pr['user']['login'], pr['assignee']['login'] if pr['assignee'] else 'None', people[n % len(people)]]
|
||||
)
|
||||
prs_headers = ['url', 'title', 'created_at', 'user', 'assignee', 'action']
|
||||
prs_data.insert(0, prs_headers)
|
||||
data.update({"Pull Requests": prs_data})
|
||||
|
||||
# Save to ODS file
|
||||
pyexcel.save_book_as(bookdict=data, dest_file_name=filename)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--limit", type=int, help="minimum number of issues/PRs to pull [Pulls all by default]", default=None)
|
||||
parser.add_argument("--out", type=str, help="output file name [awx_community-triage.ods]", default="awx_community-triage.ods")
|
||||
parser.add_argument("--repository-url", type=str, help="repository url [https://github.com/ansible/awx]", default="https://github.com/ansible/awx")
|
||||
parser.add_argument("--people", type=str, help="comma separated list of names to distribute the issues/PRs among [Alice,Bob]", default="Alice,Bob")
|
||||
args = parser.parse_args()
|
||||
limit = args.limit
|
||||
output_file_name = args.out
|
||||
repo_url = args.repository_url
|
||||
people = str(args.people).split(",")
|
||||
open_issues = get_open_issues(repo_url, limit)
|
||||
open_pull_requests = get_open_pull_requests(repo_url, limit)
|
||||
print(f"Open issues: {len(open_issues)}")
|
||||
print(f"Open Pull Requests: {len(open_pull_requests)}")
|
||||
generate_ods(open_issues, open_pull_requests, output_file_name, people)
|
||||
print(f"Generated {output_file_name} with open issues and pull requests.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,3 +0,0 @@
|
||||
requests
|
||||
pyexcel
|
||||
pyexcel-ods3
|
||||